context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using System; using System.Data; using System.Data.SqlClient; using Csla; using Csla.Data;

namespace ParentLoad.Business.ERCLevel
{
    /// <summary>
    /// B10Level11111 (editable child object).<br/>
    /// This is a generated base class of <see cref="B10Level11111"/> business object.
    /// </summary>
    /// <remarks>
    /// This class contains one child collection:<br/>
    /// - <see cref="B11Level111111Objects"/> of type <see cref="B11Level111111Coll"/> (1:M relation to <see cref="B12Level111111"/>)<br/>
    /// This class is an item of <see cref="B09Level11111Coll"/> collection.
    /// </remarks>
    [Serializable]
    public partial class B10Level11111 : BusinessBase<B10Level11111>
    {

        #region Static Fields

        // Source of temporary IDs for new objects: Child_Create decrements it, so every
        // unsaved instance carries a unique negative ID until the database assigns a real one.
        private static int _lastID;

        #endregion

        #region State Fields

        // Parent row ID captured from the data reader during Fetch; local bookkeeping only
        // (not undoable, not serialized, never written back to the database).
        // NOTE(review): the name looks like a generator typo for "parentID1" (the reader
        // column is "NarentID1") — confirm against the database schema.
        [NotUndoable]
        [NonSerialized]
        internal int narentID1 = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Level_1_1_1_1_1_ID"/> property.
        /// </summary>
        public static readonly PropertyInfo<int> Level_1_1_1_1_1_IDProperty = RegisterProperty<int>(p => p.Level_1_1_1_1_1_ID, "Level_1_1_1_1_1 ID");
        /// <summary>
        /// Gets the Level_1_1_1_1_1 ID.
        /// </summary>
        /// <value>The Level_1_1_1_1_1 ID.</value>
        public int Level_1_1_1_1_1_ID
        {
            get { return GetProperty(Level_1_1_1_1_1_IDProperty); }
        }

        /// <summary>
        /// Maintains metadata about <see cref="Level_1_1_1_1_1_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Level_1_1_1_1_1_NameProperty = RegisterProperty<string>(p => p.Level_1_1_1_1_1_Name, "Level_1_1_1_1_1 Name");
        /// <summary>
        /// Gets or sets the Level_1_1_1_1_1 Name.
        /// </summary>
        /// <value>The Level_1_1_1_1_1 Name.</value>
        public string Level_1_1_1_1_1_Name
        {
            get { return GetProperty(Level_1_1_1_1_1_NameProperty); }
            set { SetProperty(Level_1_1_1_1_1_NameProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="B11Level111111SingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<B11Level111111Child> B11Level111111SingleObjectProperty = RegisterProperty<B11Level111111Child>(p => p.B11Level111111SingleObject, "B11 Level111111 Single Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the B11 Level111111 Single Object ("parent load" child property).
        /// </summary>
        /// <value>The B11 Level111111 Single Object.</value>
        public B11Level111111Child B11Level111111SingleObject
        {
            get { return GetProperty(B11Level111111SingleObjectProperty); }
            // LoadProperty (not SetProperty) so assigning the child does not mark this object dirty.
            private set { LoadProperty(B11Level111111SingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="B11Level111111ASingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<B11Level111111ReChild> B11Level111111ASingleObjectProperty = RegisterProperty<B11Level111111ReChild>(p => p.B11Level111111ASingleObject, "B11 Level111111 ASingle Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the B11 Level111111 ASingle Object ("parent load" child property).
        /// </summary>
        /// <value>The B11 Level111111 ASingle Object.</value>
        public B11Level111111ReChild B11Level111111ASingleObject
        {
            get { return GetProperty(B11Level111111ASingleObjectProperty); }
            private set { LoadProperty(B11Level111111ASingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="B11Level111111Objects"/> property.
        /// </summary>
        public static readonly PropertyInfo<B11Level111111Coll> B11Level111111ObjectsProperty = RegisterProperty<B11Level111111Coll>(p => p.B11Level111111Objects, "B11 Level111111 Objects", RelationshipTypes.Child);
        /// <summary>
        /// Gets the B11 Level111111 Objects ("parent load" child property).
        /// </summary>
        /// <value>The B11 Level111111 Objects.</value>
        public B11Level111111Coll B11Level111111Objects
        {
            get { return GetProperty(B11Level111111ObjectsProperty); }
            private set { LoadProperty(B11Level111111ObjectsProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="B10Level11111"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="B10Level11111"/> object.</returns>
        internal static B10Level11111 NewB10Level11111()
        {
            return DataPortal.CreateChild<B10Level11111>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="B10Level11111"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="B10Level11111"/> object.</returns>
        internal static B10Level11111 GetB10Level11111(SafeDataReader dr)
        {
            B10Level11111 obj = new B10Level11111();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(dr);
            // Start with an empty child collection; in the "parent load" pattern the
            // outer loader that reads the whole hierarchy fills in the children afterwards.
            obj.LoadProperty(B11Level111111ObjectsProperty, B11Level111111Coll.NewB11Level111111Coll());
            // Mark as unchanged so the object is not re-inserted on the next save.
            obj.MarkOld();
            return obj;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="B10Level11111"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        private B10Level11111()
        {
            // Prevent direct creation
            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="B10Level11111"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // Unique negative temporary ID; replaced by the database-assigned ID on insert.
            LoadProperty(Level_1_1_1_1_1_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
            LoadProperty(B11Level111111SingleObjectProperty, DataPortal.CreateChild<B11Level111111Child>());
            LoadProperty(B11Level111111ASingleObjectProperty, DataPortal.CreateChild<B11Level111111ReChild>());
            LoadProperty(B11Level111111ObjectsProperty, DataPortal.CreateChild<B11Level111111Coll>());
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="B10Level11111"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(Level_1_1_1_1_1_IDProperty, dr.GetInt32("Level_1_1_1_1_1_ID"));
            LoadProperty(Level_1_1_1_1_1_NameProperty, dr.GetString("Level_1_1_1_1_1_Name"));
            narentID1 = dr.GetInt32("NarentID1");
            // Hook runs while the reader is still open so a partial class can read extra columns.
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Loads child <see cref="B11Level111111Child"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(B11Level111111Child child)
        {
            LoadProperty(B11Level111111SingleObjectProperty, child);
        }

        /// <summary>
        /// Loads child <see cref="B11Level111111ReChild"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(B11Level111111ReChild child)
        {
            LoadProperty(B11Level111111ASingleObjectProperty, child);
        }

        /// <summary>
        /// Inserts a new <see cref="B10Level11111"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(B08Level1111 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("AddB10Level11111", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_ID", parent.Level_1_1_1_1_ID).DbType = DbType.Int32;
                    // Output parameter: the stored procedure returns the database-assigned ID.
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_ID", ReadProperty(Level_1_1_1_1_1_IDProperty)).Direction = ParameterDirection.Output;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_Name", ReadProperty(Level_1_1_1_1_1_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnInsertPre(args);
                    cmd.ExecuteNonQuery();
                    OnInsertPost(args);
                    // Replace the temporary (negative) ID with the value assigned by the database.
                    LoadProperty(Level_1_1_1_1_1_IDProperty, (int) cmd.Parameters["@Level_1_1_1_1_1_ID"].Value);
                }
                // Cascade the save to child objects inside the same transaction scope.
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="B10Level11111"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("UpdateB10Level11111", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_ID", ReadProperty(Level_1_1_1_1_1_IDProperty)).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_Name", ReadProperty(Level_1_1_1_1_1_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnUpdatePre(args);
                    cmd.ExecuteNonQuery();
                    OnUpdatePost(args);
                }
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="B10Level11111"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                // flushes all pending data operations (children are saved/deleted before the parent row is removed)
                FieldManager.UpdateChildren(this);
                using (var cmd = new SqlCommand("DeleteB10Level11111", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_1_1_ID", ReadProperty(Level_1_1_1_1_1_IDProperty)).DbType = DbType.Int32;
                    var args = new DataPortalHookArgs(cmd);
                    OnDeletePre(args);
                    cmd.ExecuteNonQuery();
                    OnDeletePost(args);
                }
            }
            // removes all previous references to children
            LoadProperty(B11Level111111SingleObjectProperty, DataPortal.CreateChild<B11Level111111Child>());
            LoadProperty(B11Level111111ASingleObjectProperty, DataPortal.CreateChild<B11Level111111ReChild>());
            LoadProperty(B11Level111111ObjectsProperty, DataPortal.CreateChild<B11Level111111Coll>());
        }

        #endregion

        #region Pseudo Events

        // Extension points implemented (optionally) by the hand-written half of this partial class.

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
// *********************************************************************** // Copyright (c) 2012-2015 Charlie Poole, Rob Prouse // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // *********************************************************************** using System; using System.Text.RegularExpressions; using System.Xml; #if NETSTANDARD1_4 using System.Xml.Linq; #endif namespace NUnit.Framework.Interfaces { /// <summary> /// TNode represents a single node in the XML representation /// of a Test or TestResult. It replaces System.Xml.XmlNode and /// System.Xml.Linq.XElement, providing a minimal set of methods /// for operating on the XML in a platform-independent manner. 
/// </summary>
public class TNode
{
    #region Constructors

    /// <summary>
    /// Constructs a new instance of TNode
    /// </summary>
    /// <param name="name">The name of the node</param>
    public TNode(string name)
    {
        Name = name;
        Attributes = new AttributeDictionary();
        ChildNodes = new NodeList();
    }

    /// <summary>
    /// Constructs a new instance of TNode with a value
    /// </summary>
    /// <param name="name">The name of the node</param>
    /// <param name="value">The text content of the node</param>
    public TNode(string name, string value) : this(name, value, false) { }

    /// <summary>
    /// Constructs a new instance of TNode with a value
    /// </summary>
    /// <param name="name">The name of the node</param>
    /// <param name="value">The text content of the node</param>
    /// <param name="valueIsCDATA">Flag indicating whether to use CDATA when writing the text</param>
    public TNode(string name, string value, bool valueIsCDATA) : this(name)
    {
        Value = value;
        ValueIsCDATA = valueIsCDATA;
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets the name of the node
    /// </summary>
    public string Name { get; }

    /// <summary>
    /// Gets the value of the node
    /// </summary>
    public string Value { get; set; }

    /// <summary>
    /// Gets a flag indicating whether the value should be output using CDATA.
    /// </summary>
    public bool ValueIsCDATA { get; }

    /// <summary>
    /// Gets the dictionary of attributes
    /// </summary>
    public AttributeDictionary Attributes { get; }

    /// <summary>
    /// Gets a list of child nodes
    /// </summary>
    public NodeList ChildNodes { get; }

    /// <summary>
    /// Gets the first ChildNode, or null when there are no children.
    /// </summary>
    public TNode FirstChild
    {
        get { return ChildNodes.Count == 0 ? null : ChildNodes[0]; }
    }

    /// <summary>
    /// Gets the XML representation of this node.
    /// </summary>
    public string OuterXml
    {
        get
        {
            var stringWriter = new System.IO.StringWriter();
            var settings = new XmlWriterSettings();
            // Fragment conformance: a TNode is not required to be a complete document.
            settings.ConformanceLevel = ConformanceLevel.Fragment;

            using (XmlWriter xmlWriter = XmlWriter.Create(stringWriter, settings))
            {
                WriteTo(xmlWriter);
            }

            return stringWriter.ToString();
        }
    }

    #endregion

    #region Static Methods

    /// <summary>
    /// Create a TNode from its XML text representation
    /// </summary>
    /// <param name="xmlText">The XML text to be parsed</param>
    /// <returns>A TNode</returns>
    public static TNode FromXml(string xmlText)
    {
#if NETSTANDARD1_4
        return FromXml(XElement.Parse(xmlText));
#else
        var doc = new XmlDocument();
        doc.LoadXml(xmlText);
        return FromXml(doc.FirstChild);
#endif
    }

    #endregion

    #region Instance Methods

    /// <summary>
    /// Adds a new element as a child of the current node and returns it.
    /// </summary>
    /// <param name="name">The element name.</param>
    /// <returns>The newly created child element</returns>
    public TNode AddElement(string name)
    {
        TNode childResult = new TNode(name);
        ChildNodes.Add(childResult);
        return childResult;
    }

    /// <summary>
    /// Adds a new element with a value as a child of the current node and returns it.
    /// The value is escaped so that characters illegal in XML survive serialization.
    /// </summary>
    /// <param name="name">The element name</param>
    /// <param name="value">The text content of the new element</param>
    /// <returns>The newly created child element</returns>
    public TNode AddElement(string name, string value)
    {
        TNode childResult = new TNode(name, EscapeInvalidXmlCharacters(value));
        ChildNodes.Add(childResult);
        return childResult;
    }

    /// <summary>
    /// Adds a new element with a value as a child of the current node and returns it.
    /// The value will be output using a CDATA section.
    /// </summary>
    /// <param name="name">The element name</param>
    /// <param name="value">The text content of the new element</param>
    /// <returns>The newly created child element</returns>
    public TNode AddElementWithCDATA(string name, string value)
    {
        TNode childResult = new TNode(name, EscapeInvalidXmlCharacters(value), true);
        ChildNodes.Add(childResult);
        return childResult;
    }

    /// <summary>
    /// Adds an attribute with a specified name and value to the XmlNode.
    /// </summary>
    /// <param name="name">The name of the attribute.</param>
    /// <param name="value">The value of the attribute.</param>
    public void AddAttribute(string name, string value)
    {
        Attributes.Add(name, EscapeInvalidXmlCharacters(value));
    }

    /// <summary>
    /// Finds a single descendant of this node matching an XPath
    /// specification. The format of the specification is
    /// limited to what is needed by NUnit and its tests.
    /// </summary>
    /// <param name="xpath">XPath-like selector (see <see cref="SelectNodes"/>)</param>
    /// <returns>The first matching node, or null when nothing matches</returns>
    public TNode SelectSingleNode(string xpath)
    {
        NodeList nodes = SelectNodes(xpath);

        return nodes.Count > 0
            ? nodes[0] as TNode
            : null;
    }

    /// <summary>
    /// Finds all descendants of this node matching an XPath
    /// specification. The format of the specification is
    /// limited to what is needed by NUnit and its tests.
    /// </summary>
    public NodeList SelectNodes(string xpath)
    {
        // Seed the search with this node; ApplySelection walks its children.
        NodeList nodeList = new NodeList();
        nodeList.Add(this);

        return ApplySelection(nodeList, xpath);
    }

    /// <summary>
    /// Writes the XML representation of the node to an XmlWriter
    /// </summary>
    /// <param name="writer">The writer that receives the element</param>
    public void WriteTo(XmlWriter writer)
    {
        writer.WriteStartElement(Name);

        foreach (string name in Attributes.Keys)
            writer.WriteAttributeString(name, Attributes[name]);

        if (Value != null)
            if (ValueIsCDATA)
                WriteCDataTo(writer);
            else
                writer.WriteString(Value);

        foreach (TNode node in ChildNodes)
            node.WriteTo(writer);

        writer.WriteEndElement();
    }

    #endregion

    #region Helper Methods

#if NETSTANDARD1_4
    // Recursively converts a LINQ-to-XML element into the TNode representation.
    private static TNode FromXml(XElement xElement)
    {
        TNode tNode = new TNode(xElement.Name.ToString(), xElement.Value);

        foreach (var attr in xElement.Attributes())
            tNode.AddAttribute(attr.Name.ToString(), attr.Value);

        foreach (var child in xElement.Elements())
            tNode.ChildNodes.Add(FromXml(child));

        return tNode;
    }
#else
    // Recursively converts a System.Xml node into the TNode representation,
    // keeping only element children (text/comment nodes are skipped).
    private static TNode FromXml(XmlNode xmlNode)
    {
        TNode tNode = new TNode(xmlNode.Name, xmlNode.InnerText);

        foreach (XmlAttribute attr in xmlNode.Attributes)
            tNode.AddAttribute(attr.Name, attr.Value);

        foreach (XmlNode child in xmlNode.ChildNodes)
            if (child.NodeType == XmlNodeType.Element)
                tNode.ChildNodes.Add(FromXml(child));

        return tNode;
    }
#endif

    // Evaluates one path segment (up to the next '/') against the children of every
    // node in nodeList, then recurses on the remainder of the path.
    private static NodeList ApplySelection(NodeList nodeList, string xpath)
    {
        Guard.ArgumentNotNullOrEmpty(xpath, nameof(xpath));
        if (xpath[0] == '/')
            throw new ArgumentException("XPath expressions starting with '/' are not supported", nameof(xpath));
        if (xpath.IndexOf("//") >= 0)
            throw new ArgumentException("XPath expressions with '//' are not supported", nameof(xpath));

        string head = xpath;
        string tail = null;

        int slash = xpath.IndexOf('/');
        if (slash >= 0)
        {
            head = xpath.Substring(0, slash);
            tail = xpath.Substring(slash + 1);
        }

        NodeList resultNodes = new NodeList();
        NodeFilter filter = new NodeFilter(head);

        foreach(TNode node in nodeList)
            foreach (TNode childNode in node.ChildNodes)
                if (filter.Pass(childNode))
                    resultNodes.Add(childNode);

        return tail != null
            ? ApplySelection(resultNodes, tail)
            : resultNodes;
    }

    // Matches characters that are illegal in XML 1.0 plus unpaired surrogate halves.
    private static readonly Regex InvalidXmlCharactersRegex = new Regex("[^\u0009\u000a\u000d\u0020-\ufffd]|([\ud800-\udbff](?![\udc00-\udfff]))|((?<![\ud800-\udbff])[\udc00-\udfff])", RegexOptions.Compiled);

    // Replaces each illegal XML character with a visible "\uXXXX" escape sequence.
    private static string EscapeInvalidXmlCharacters(string str)
    {
        if (str == null) return null;

        // Based on the XML spec http://www.w3.org/TR/xml/#charsets
        // For detailed explanation of the regex see http://mnaoumov.wordpress.com/2014/06/15/escaping-invalid-xml-unicode-characters/
        return InvalidXmlCharactersRegex.Replace(str, match => CharToUnicodeSequence(match.Value[0]));
    }

    // Emits the literal six-character text "\uXXXX" (not the character itself).
    private static string CharToUnicodeSequence(char symbol)
    {
        return string.Format("\\u{0}", ((int)symbol).ToString("x4"));
    }

    // Writes Value as one or more CDATA sections. A CDATA section cannot contain
    // the terminator "]]>", so the text is split after each "]]" and the following
    // ">" starts the next section.
    private void WriteCDataTo(XmlWriter writer)
    {
        int start = 0;
        string text = Value;

        while (true)
        {
            int illegal = text.IndexOf("]]>", start);
            if (illegal < 0)
                break;

            // Include the "]]" in this section; the ">" goes into the next one.
            writer.WriteCData(text.Substring(start, illegal - start + 2));
            start = illegal + 2;

            if (start >= text.Length)
                return;
        }

        if (start > 0)
            writer.WriteCData(text.Substring(start));
        else
            writer.WriteCData(text);
    }

    #endregion

    #region Nested NodeFilter class

    // Parses and applies one path segment of the form "name" or "name[@attr=value]".
    class NodeFilter
    {
        private readonly string _nodeName;
        private readonly string _propName;
        private readonly string _propValue;

        public NodeFilter(string xpath)
        {
            _nodeName = xpath;

            int lbrack = xpath.IndexOf('[');
            if (lbrack >= 0)
            {
                if (!xpath.EndsWith("]"))
                    throw new ArgumentException("Invalid property expression", nameof(xpath));

                _nodeName = xpath.Substring(0, lbrack);
                // Text between the brackets, e.g. "@attr=value".
                string filter = xpath.Substring(lbrack+1, xpath.Length - lbrack - 2);

                int equals = filter.IndexOf('=');
                if (equals < 0 || filter[0] != '@')
                    throw new ArgumentException("Invalid property expression", nameof(xpath));

                _propName = filter.Substring(1, equals - 1).Trim();
                // Surrounding spaces and quote characters are stripped from the value.
                _propValue = filter.Substring(equals + 1).Trim(new char[] { ' ', '"', '\'' });
            }
        }

        // True when the node's name matches and (if an attribute filter was
        // given) the attribute value matches as well.
        public bool Pass(TNode node)
        {
            if (node.Name != _nodeName)
                return false;

            if (_propName == null)
                return true;

            return node.Attributes[_propName] == _propValue;
        }
    }

    #endregion
}

/// <summary>
/// Class used to represent a list of XmlResults
/// </summary>
public class NodeList : System.Collections.Generic.List<TNode> { }

/// <summary>
/// Class used to represent the attributes of a node
/// </summary>
public class AttributeDictionary : System.Collections.Generic.Dictionary<string, string>
{
    /// <summary>
    /// Gets or sets the value associated with the specified key.
    /// Overridden to return null if attribute is not found.
    /// </summary>
    /// <param name="key">The key.</param>
    /// <returns>Value of the attribute or null</returns>
    public new string this[string key]
    {
        get
        {
            string value;

            if (TryGetValue(key, out value))
                return value;

            return null;
        }
    }
}
}
using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Orleans.Runtime; namespace Orleans.Streams { [Serializable] internal class PubSubGrainState { public HashSet<PubSubPublisherState> Producers { get; set; } = new HashSet<PubSubPublisherState>(); public HashSet<PubSubSubscriptionState> Consumers { get; set; } = new HashSet<PubSubSubscriptionState>(); } [Providers.StorageProvider(ProviderName = "PubSubStore")] internal class PubSubRendezvousGrain : Grain<PubSubGrainState>, IPubSubRendezvousGrain { private Logger logger; private const bool DEBUG_PUB_SUB = false; private static readonly CounterStatistic counterProducersAdded; private static readonly CounterStatistic counterProducersRemoved; private static readonly CounterStatistic counterProducersTotal; private static readonly CounterStatistic counterConsumersAdded; private static readonly CounterStatistic counterConsumersRemoved; private static readonly CounterStatistic counterConsumersTotal; static PubSubRendezvousGrain() { counterProducersAdded = CounterStatistic.FindOrCreate(StatisticNames.STREAMS_PUBSUB_PRODUCERS_ADDED); counterProducersRemoved = CounterStatistic.FindOrCreate(StatisticNames.STREAMS_PUBSUB_PRODUCERS_REMOVED); counterProducersTotal = CounterStatistic.FindOrCreate(StatisticNames.STREAMS_PUBSUB_PRODUCERS_TOTAL); counterConsumersAdded = CounterStatistic.FindOrCreate(StatisticNames.STREAMS_PUBSUB_CONSUMERS_ADDED); counterConsumersRemoved = CounterStatistic.FindOrCreate(StatisticNames.STREAMS_PUBSUB_CONSUMERS_REMOVED); counterConsumersTotal = CounterStatistic.FindOrCreate(StatisticNames.STREAMS_PUBSUB_CONSUMERS_TOTAL); } public override async Task OnActivateAsync() { logger = GetLogger(GetType().Name + "-" + RuntimeIdentity + "-" + IdentityString); LogPubSubCounts("OnActivateAsync"); int numRemoved = RemoveDeadProducers(); if (numRemoved > 0) { if (State.Producers.Count > 0 || State.Consumers.Count > 0) await WriteStateAsync(); else await 
ClearStateAsync(); //State contains no producers or consumers, remove it from storage } logger.Verbose("OnActivateAsync-Done"); } public override Task OnDeactivateAsync() { LogPubSubCounts("OnDeactivateAsync"); return TaskDone.Done; } private int RemoveDeadProducers() { // Remove only those we know for sure are Dead. int numRemoved = 0; if (State.Producers != null && State.Producers.Count > 0) numRemoved = State.Producers.RemoveWhere(producerState => IsDeadProducer(producerState.Producer)); if (numRemoved > 0) { LogPubSubCounts("RemoveDeadProducers: removed {0} outdated producers", numRemoved); } return numRemoved; } /// accept and notify only Active producers. private static bool IsActiveProducer(IStreamProducerExtension producer) { var grainRef = producer as GrainReference; if (grainRef !=null && grainRef.GrainId.IsSystemTarget && grainRef.IsInitializedSystemTarget) return RuntimeClient.Current.GetSiloStatus(grainRef.SystemTargetSilo) == SiloStatus.Active; return true; } private static bool IsDeadProducer(IStreamProducerExtension producer) { var grainRef = producer as GrainReference; if (grainRef != null && grainRef.GrainId.IsSystemTarget && grainRef.IsInitializedSystemTarget) return RuntimeClient.Current.GetSiloStatus(grainRef.SystemTargetSilo) == SiloStatus.Dead; return false; } public async Task<ISet<PubSubSubscriptionState>> RegisterProducer(StreamId streamId, IStreamProducerExtension streamProducer) { counterProducersAdded.Increment(); if (!IsActiveProducer(streamProducer)) throw new ArgumentException($"Trying to register non active IStreamProducerExtension: {streamProducer}", "streamProducer"); try { int producersRemoved = RemoveDeadProducers(); var publisherState = new PubSubPublisherState(streamId, streamProducer); State.Producers.Add(publisherState); LogPubSubCounts("RegisterProducer {0}", streamProducer); await WriteStateAsync(); counterProducersTotal.DecrementBy(producersRemoved); counterProducersTotal.Increment(); } catch (Exception exc) { 
                // Tail of RegisterProducer's failure path (the method signature is above this chunk):
                // any storage failure is treated as corrupted state and the grain is recycled.
                logger.Error(ErrorCode.Stream_RegisterProducerFailed, $"Failed to register a stream producer. Stream: {streamId}, Producer: {streamProducer}", exc);
                // Corrupted state, deactivate grain.
                DeactivateOnIdle();
                throw;
            }

            // Hand only the healthy (non-faulted) subscriptions back to the new producer.
            return State.Consumers.Where(c => !c.IsFaulted).ToSet();
        }

        /// <summary>
        /// Removes the given producer from this rendezvous point and persists the change
        /// (clearing storage entirely when no producers or consumers remain).
        /// </summary>
        public async Task UnregisterProducer(StreamId streamId, IStreamProducerExtension streamProducer)
        {
            counterProducersRemoved.Increment();
            try
            {
                int numRemoved = State.Producers.RemoveWhere(s => s.Equals(streamId, streamProducer));
                LogPubSubCounts("UnregisterProducer {0} NumRemoved={1}", streamProducer, numRemoved);

                if (numRemoved > 0)
                {
                    Task updateStorageTask = State.Producers.Count == 0 && State.Consumers.Count == 0
                        ? ClearStateAsync() //State contains no producers or consumers, remove it from storage
                        : WriteStateAsync();
                    await updateStorageTask;
                }
                counterProducersTotal.DecrementBy(numRemoved);
            }
            catch (Exception exc)
            {
                // NOTE(review): "Unegister" typo lives in the ErrorCode enum member itself;
                // fixing it is a project-wide rename, not something to patch in this file.
                logger.Error(ErrorCode.Stream_UnegisterProducerFailed, $"Failed to unregister a stream producer. Stream: {streamId}, Producer: {streamProducer}", exc);
                // Corrupted state, deactivate grain.
                DeactivateOnIdle();
                throw;
            }

            if (State.Producers.Count == 0 && State.Consumers.Count == 0)
            {
                DeactivateOnIdle(); // No producers or consumers left now, so flag ourselves to expedite Deactivation
            }
        }

        /// <summary>
        /// Registers (or re-activates) a consumer subscription, persists it, then notifies every
        /// active producer so they start delivering events to the new subscriber.
        /// Throws <see cref="FaultedSubscriptionException"/> if the subscription was faulted.
        /// </summary>
        public async Task RegisterConsumer(
            GuidId subscriptionId,
            StreamId streamId,
            IStreamConsumerExtension streamConsumer,
            IStreamFilterPredicateWrapper filter)
        {
            counterConsumersAdded.Increment();
            PubSubSubscriptionState pubSubState = State.Consumers.FirstOrDefault(s => s.Equals(subscriptionId));
            // A faulted subscription may not be re-registered; surface that to the caller.
            if (pubSubState != null && pubSubState.IsFaulted)
                throw new FaultedSubscriptionException(subscriptionId, streamId);
            try
            {
                if (pubSubState == null)
                {
                    pubSubState = new PubSubSubscriptionState(subscriptionId, streamId, streamConsumer);
                    State.Consumers.Add(pubSubState);
                }

                if (filter != null)
                    pubSubState.AddFilter(filter);

                LogPubSubCounts("RegisterConsumer {0}", streamConsumer);
                await WriteStateAsync();
                counterConsumersTotal.Increment();
            }
            catch (Exception exc)
            {
                logger.Error(ErrorCode.Stream_RegisterConsumerFailed, $"Failed to register a stream consumer. Stream: {streamId}, SubscriptionId {subscriptionId}, Consumer: {streamConsumer}", exc);
                // Corrupted state, deactivate grain.
                DeactivateOnIdle();
                throw;
            }

            int numProducers = State.Producers.Count;
            if (numProducers <= 0)
                return;

            // NOTE(review): guarded by IsVerbose but logged via Info — presumably an intentional
            // verbose-only informational trace; confirm against the project's logging conventions.
            if (logger.IsVerbose)
                logger.Info("Notifying {0} existing producer(s) about new consumer {1}. Producers={2}", numProducers, streamConsumer, Utils.EnumerableToString(State.Producers));

            // Notify producers about a new streamConsumer.
            var tasks = new List<Task>();
            var producers = State.Producers.ToList();
            int initialProducerCount = producers.Count;
            try
            {
                foreach (var producerState in producers)
                {
                    PubSubPublisherState producer = producerState; // Capture loop variable
                    if (!IsActiveProducer(producer.Producer))
                    {
                        // Producer is not active (could be stopping / shutting down) so skip
                        if (logger.IsVerbose) logger.Verbose("Producer {0} on stream {1} is not active - skipping.", producer, streamId);
                        continue;
                    }
                    tasks.Add(NotifyProducer(producer, subscriptionId, streamId, streamConsumer, filter));
                }

                Exception exception = null;
                try
                {
                    await Task.WhenAll(tasks);
                }
                catch (Exception exc)
                {
                    // Remember the failure but finish the bookkeeping below before rethrowing,
                    // so dead producers removed by NotifyProducer still get persisted.
                    exception = exc;
                }

                // if the number of producers has been changed, resave state.
                if (State.Producers.Count != initialProducerCount)
                {
                    await WriteStateAsync();
                    // NOTE(review): this decrements the *consumer* counter by the number of removed
                    // *producers* — looks like it should be counterProducersTotal; verify.
                    counterConsumersTotal.DecrementBy(initialProducerCount - State.Producers.Count);
                }

                if (exception != null)
                {
                    throw exception;
                }
            }
            catch (Exception exc)
            {
                logger.Error(ErrorCode.Stream_RegisterConsumerFailed, $"Failed to update producers while register a stream consumer. Stream: {streamId}, SubscriptionId {subscriptionId}, Consumer: {streamConsumer}", exc);
                // Corrupted state, deactivate grain.
                DeactivateOnIdle();
                throw;
            }
        }

        /// <summary>
        /// Tells one producer about a new subscriber. A producer whose extension is gone
        /// (grain extension not installed, or client no longer reachable) is permanently
        /// removed from in-memory state instead of failing the registration.
        /// </summary>
        private async Task NotifyProducer(PubSubPublisherState producer, GuidId subscriptionId, StreamId streamId,
            IStreamConsumerExtension streamConsumer, IStreamFilterPredicateWrapper filter)
        {
            try
            {
                await producer.Producer.AddSubscriber(subscriptionId, streamId, streamConsumer, filter);
            }
            catch (GrainExtensionNotInstalledException)
            {
                RemoveProducer(producer);
            }
            catch (ClientNotAvailableException)
            {
                RemoveProducer(producer);
            }
        }

        // Drops a dead producer from in-memory state only; persistence happens at the call sites.
        private void RemoveProducer(PubSubPublisherState producer)
        {
            logger.Warn(ErrorCode.Stream_ProducerIsDead, "Producer {0} on stream {1} is no longer active - permanently removing producer.", producer, producer.Stream);
            State.Producers.Remove(producer);
        }

        /// <summary>
        /// Removes a consumer subscription and notifies producers. Faulted subscriptions
        /// cannot be removed this way and throw <see cref="FaultedSubscriptionException"/>.
        /// </summary>
        public async Task UnregisterConsumer(GuidId subscriptionId, StreamId streamId)
        {
            counterConsumersRemoved.Increment();
            if (State.Consumers.Any(c => c.IsFaulted && c.Equals(subscriptionId)))
                throw new FaultedSubscriptionException(subscriptionId, streamId);
            try
            {
                int numRemoved = State.Consumers.RemoveWhere(c => c.Equals(subscriptionId));
                LogPubSubCounts("UnregisterSubscription {0} NumRemoved={1}", subscriptionId, numRemoved);

                if (await TryClearState())
                {
                    // If state was cleared expedite Deactivation
                    DeactivateOnIdle();
                }
                else
                {
                    if (numRemoved != 0)
                    {
                        await WriteStateAsync();
                    }
                    await NotifyProducersOfRemovedSubscription(subscriptionId, streamId);
                }
                counterConsumersTotal.DecrementBy(numRemoved);
            }
            catch (Exception exc)
            {
                logger.Error(ErrorCode.Stream_UnregisterConsumerFailed, $"Failed to unregister a stream consumer. Stream: {streamId}, SubscriptionId {subscriptionId}", exc);
                // Corrupted state, deactivate grain.
                DeactivateOnIdle();
                throw;
            }
        }

        // Number of registered producers (streamId unused: this grain is per-stream).
        public Task<int> ProducerCount(StreamId streamId)
        {
            return Task.FromResult(State.Producers.Count);
        }

        // Number of healthy consumers for the given stream.
        public Task<int> ConsumerCount(StreamId streamId)
        {
            return Task.FromResult(GetConsumersForStream(streamId).Length);
        }

        // Diagnostic snapshot of the healthy subscriptions for the given stream.
        public Task<PubSubSubscriptionState[]> DiagGetConsumers(StreamId streamId)
        {
            return Task.FromResult(GetConsumersForStream(streamId));
        }

        // Snapshot of non-faulted subscriptions belonging to one stream.
        private PubSubSubscriptionState[] GetConsumersForStream(StreamId streamId)
        {
            return State.Consumers.Where(c => !c.IsFaulted && c.Stream.Equals(streamId)).ToArray();
        }

        // Verbose/debug dump of the current producer/consumer counts; fmt is String.Format-style.
        private void LogPubSubCounts(string fmt, params object[] args)
        {
            if (logger.IsVerbose || DEBUG_PUB_SUB)
            {
                int numProducers = 0;
                int numConsumers = 0;
                if (State?.Producers != null) numProducers = State.Producers.Count;
                if (State?.Consumers != null) numConsumers = State.Consumers.Count;
                string when = args != null && args.Length != 0 ? String.Format(fmt, args) : fmt;
                logger.Info("{0}. Now have total of {1} producers and {2} consumers. All Consumers = {3}, All Producers = {4}",
                    when, numProducers, numConsumers, Utils.EnumerableToString(State.Consumers), Utils.EnumerableToString(State.Producers));
            }
        }

        // Check that what we have cached locally matches what is in the persistent table.
        // NOTE(review): captureProducers/captureConsumers alias the current State collections;
        // this only detects drift if ReadStateAsync replaces the State instance — confirm.
        public async Task Validate()
        {
            var captureProducers = State.Producers;
            var captureConsumers = State.Consumers;

            await ReadStateAsync();

            if (captureProducers.Count != State.Producers.Count)
            {
                throw new OrleansException(
                    $"State mismatch between PubSubRendezvousGrain and its persistent state. captureProducers.Count={captureProducers.Count}, State.Producers.Count={State.Producers.Count}");
            }

            if (captureProducers.Any(producer => !State.Producers.Contains(producer)))
            {
                throw new OrleansException(
                    $"State mismatch between PubSubRendezvousGrain and its persistent state. captureProducers={Utils.EnumerableToString(captureProducers)}, State.Producers={Utils.EnumerableToString(State.Producers)}");
            }

            if (captureConsumers.Count != State.Consumers.Count)
            {
                LogPubSubCounts("Validate: Consumer count mismatch");
                throw new OrleansException(
                    $"State mismatch between PubSubRendezvousGrain and its persistent state. captureConsumers.Count={captureConsumers.Count}, State.Consumers.Count={State.Consumers.Count}");
            }

            if (captureConsumers.Any(consumer => !State.Consumers.Contains(consumer)))
            {
                throw new OrleansException(
                    $"State mismatch between PubSubRendezvousGrain and its persistent state. captureConsumers={Utils.EnumerableToString(captureConsumers)}, State.Consumers={Utils.EnumerableToString(State.Consumers)}");
            }
        }

        // Returns the subscription ids of all healthy subscriptions held by the given consumer.
        public Task<List<GuidId>> GetAllSubscriptions(StreamId streamId, IStreamConsumerExtension streamConsumer)
        {
            List<GuidId> subscriptionIds = State.Consumers.Where(c => !c.IsFaulted && c.Consumer.Equals(streamConsumer))
                                                          .Select(c => c.SubscriptionId)
                                                          .ToList();
            return Task.FromResult(subscriptionIds);
        }

        /// <summary>
        /// Marks a subscription as faulted (it stays in state so re-registration can be rejected),
        /// persists the change, and tells producers to stop delivering to it.
        /// </summary>
        public async Task FaultSubscription(GuidId subscriptionId)
        {
            PubSubSubscriptionState pubSubState = State.Consumers.FirstOrDefault(s => s.Equals(subscriptionId));
            if (pubSubState == null)
            {
                return;
            }
            try
            {
                pubSubState.Fault();
                if (logger.IsVerbose) logger.Verbose("Setting subscription {0} to a faulted state.", subscriptionId.Guid);

                await WriteStateAsync();
                await NotifyProducersOfRemovedSubscription(pubSubState.SubscriptionId, pubSubState.Stream);
            }
            catch (Exception exc)
            {
                logger.Error(ErrorCode.Stream_SetSubscriptionToFaultedFailed, $"Failed to set subscription state to faulted. SubscriptionId {subscriptionId}", exc);
                // Corrupted state, deactivate grain.
                DeactivateOnIdle();
                throw;
            }
        }

        // Fan-out RemoveSubscriber to every currently-active producer, in parallel.
        private async Task NotifyProducersOfRemovedSubscription(GuidId subscriptionId, StreamId streamId)
        {
            int numProducers = State.Producers.Count;
            if (numProducers > 0)
            {
                if (logger.IsVerbose) logger.Verbose("Notifying {0} existing producers about unregistered consumer.", numProducers);

                // Notify producers about unregistered consumer.
                List<Task> tasks = State.Producers.Where(producerState => IsActiveProducer(producerState.Producer))
                    .Select(producerState => producerState.Producer.RemoveSubscriber(subscriptionId, streamId))
                    .ToList();
                await Task.WhenAll(tasks);
            }
        }

        /// <summary>
        /// Try clear state will only clear the state if there are no producers or consumers.
        /// </summary>
        /// <returns>True when storage was cleared; false when state still holds entries.</returns>
        private async Task<bool> TryClearState()
        {
            if (State.Producers.Count == 0 && State.Consumers.Count == 0) // + we already know that numProducers == 0 from previous if-clause
            {
                await ClearStateAsync(); //State contains no producers or consumers, remove it from storage
                return true;
            }
            return false;
        }
    }
}
//
// Encog(tm) Core v3.3 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using System.Collections.Generic;
using System.Text;
using Encog.Util;
using Encog.Util.Logging;

namespace Encog.ML.Factory.Parse
{
    /// <summary>
    /// This class is used to parse a Encog architecture string.
    /// </summary>
    public static class ArchitectureParse
    {
        /// <summary>
        /// Parse a single layer specification, e.g. "25", "25:B" (bias), "?" (default),
        /// or "NAME(p1=v1,p2=v2)".
        /// </summary>
        /// <param name="line">The line to parse.</param>
        /// <param name="defaultValue">Value substituted for "?"; negative means "?" is illegal here.</param>
        /// <returns>The parsed ArchitectureLayer.</returns>
        public static ArchitectureLayer ParseLayer(String line, int defaultValue)
        {
            var layer = new ArchitectureLayer();
            String check = line.Trim().ToUpper();

            // first check for bias (":B" suffix)
            if (check.EndsWith(":B"))
            {
                check = check.Substring(0, (check.Length - 2) - (0));
                layer.Bias = true;
            }

            // see if simple number
            try
            {
                layer.Count = Int32.Parse(check);
                if (layer.Count < 0)
                {
                    throw new EncogError("Count cannot be less than zero.");
                }
            }
            catch (FormatException f)
            {
                // Not a plain number — log and fall through to the "?"/name/function forms below.
                EncogLogging.Log(f);
            }

            // see if it is a default
            if ("?".Equals(check))
            {
                if (defaultValue < 0)
                {
                    throw new EncogError("Default (?) in an invalid location.");
                }
                layer.Count = defaultValue;
                layer.UsedDefault = true;
                return layer;
            }

            // single item, no function
            int startIndex = check.IndexOf('(');
            int endIndex = check.LastIndexOf(')');
            if (startIndex == -1)
            {
                // NOTE: for a plain number this also records the digits as the Name — long-standing behavior.
                layer.Name = check;
                return layer;
            }

            // function
            if (endIndex == -1)
            {
                throw new EncogError("Illegal parentheses.");
            }

            layer.Name = check.Substring(0, (startIndex) - (0)).Trim();
            String paramStr = check.Substring(startIndex + 1, (endIndex) - (startIndex + 1));
            IDictionary<String, String> paras = ParseParams(paramStr);
            EngineArray.PutAll(paras, layer.Params);
            return layer;
        }

        /// <summary>
        /// Parse all layers from a line of text, split on "->".
        /// A trailing "b" (bias marker) is stripped from each segment before it is stored.
        /// </summary>
        /// <param name="line">The line of text.</param>
        /// <returns>A list of the parsed layer strings.</returns>
        public static IList<String> ParseLayers(String line)
        {
            IList<String> result = new List<String>();

            int bs = 0;
            bool done = false;

            do
            {
                String part;
                int index = line.IndexOf("->", bs);

                if (index != -1)
                {
                    part = line.Substring(bs, (index) - (bs)).Trim();
                    bs = index + 2;
                }
                else
                {
                    // Last segment: take the remainder of the line.
                    part = line.Substring(bs).Trim();
                    done = true;
                }

                bool bias = part.EndsWith("b");
                if (bias)
                {
                    part = part.Substring(0, (part.Length - 1) - (0));
                }

                result.Add(part);
            } while (!done);

            return result;
        }

        /// <summary>
        /// Parse a name (a run of identifier characters, leading whitespace skipped).
        /// </summary>
        /// <param name="parser">The parser to use.</param>
        /// <returns>The name.</returns>
        private static String ParseName(SimpleParser parser)
        {
            var result = new StringBuilder();
            parser.EatWhiteSpace();
            while (parser.IsIdentifier())
            {
                result.Append(parser.ReadChar());
            }
            return result.ToString();
        }

        /// <summary>
        /// Parse a comma-separated "NAME=value" parameter list. Keys are stored upper-cased.
        /// </summary>
        /// <param name="line">The line to parse.</param>
        /// <returns>The parsed values.</returns>
        public static IDictionary<String, String> ParseParams(String line)
        {
            IDictionary<String, String> result = new Dictionary<String, String>();

            var parser = new SimpleParser(line);

            while (!parser.EOL())
            {
                String name = ParseName(parser)
                    .ToUpper();

                parser.EatWhiteSpace();
                if (!parser.LookAhead("=", false))
                {
                    throw new EncogError("Missing equals(=) operator.");
                }
                parser.Advance();

                String v = ParseValue(parser);

                // name was already upper-cased above; the second ToUpper() is redundant but harmless.
                result[name.ToUpper()] = v;

                if (!parser.ParseThroughComma())
                {
                    break;
                }
            }

            return result;
        }

        /// <summary>
        /// Parse a value: either a bare token ended by whitespace/comma, or a double-quoted
        /// string in which "" is an escaped quote.
        /// </summary>
        /// <param name="parser">The parser to use.</param>
        /// <returns>The newly parsed value.</returns>
        private static String ParseValue(SimpleParser parser)
        {
            bool quoted = false;
            var str = new StringBuilder();

            parser.EatWhiteSpace();

            if (parser.Peek() == '\"')
            {
                quoted = true;
                parser.Advance();
            }

            while (!parser.EOL())
            {
                if (parser.Peek() == '\"')
                {
                    if (quoted)
                    {
                        // Inside a quoted value: "" is an escaped quote, a single " ends the value.
                        parser.Advance();
                        if (parser.Peek() == '\"')
                        {
                            str.Append(parser.ReadChar());
                        }
                        else
                        {
                            break;
                        }
                    }
                    else
                    {
                        str.Append(parser.ReadChar());
                    }
                }
                else if (!quoted && (parser.IsWhiteSpace() || (parser.Peek() == ',')))
                {
                    break;
                }
                else
                {
                    str.Append(parser.ReadChar());
                }
            }
            return str.ToString();
        }
    }
}
#region BSD License
/* 
Copyright (c) 2011, Clarius Consulting
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, 
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list 
  of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice, this 
  list of conditions and the following disclaimer in the documentation and/or other 
  materials provided with the distribution.

* Neither the name of Clarius Consulting nor the names of its contributors may be 
  used to endorse or promote products derived from this software without specific 
  prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY 
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT 
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 
DAMAGE.
*/
#endregion

namespace Clide.Diagnostics
{
    using System;
    using System.Collections.Concurrent;
    using System.Collections.Generic;
    using System.Configuration;
    using System.Diagnostics;
    using System.IO;
    using System.Linq;
    using System.Reflection;
    using System.Threading;
    using System.Threading.Tasks;
    using System.Xml.Linq;
    using System.Xml.XPath;

    /// <summary>
    /// Implements the common tracer interface using <see cref="TraceSource"/> instances.
    /// </summary>
    /// <remarks>
    /// All tracing is performed asynchronously transparently for faster speed.
    /// </remarks>
    /// <nuget id="Tracer.SystemDiagnostics" />
    partial class TracerManager : ITracerManager, IDisposable
    {
        /// <summary>
        /// Implicit default trace source name which can be used to setup
        /// global tracing and listeners.
        /// </summary>
        public const string DefaultSourceName = "*";

        // To handle concurrency for the async tracing. Each entry captures the caller's
        // ExecutionContext so the trace action runs with the originating context.
        private BlockingCollection<Tuple<ExecutionContext, Action>> traceQueue = new BlockingCollection<Tuple<ExecutionContext, Action>>();
        private CancellationTokenSource cancellation = new CancellationTokenSource();

        /// <summary>
        /// Initializes a new instance of the <see cref="TracerManager"/> class.
        /// </summary>
        public TracerManager()
        {
            // Note we have only one async task to perform all tracing. This
            // is an optimization, so that we don't consume too much resources
            // from the running app for this.
            // NOTE(review): TaskScheduler.Current captures whatever scheduler is ambient at
            // construction time; TaskScheduler.Default is the usual choice for a dedicated
            // long-running worker — confirm this is intentional.
            Task.Factory.StartNew(DoTrace, cancellation.Token, TaskCreationOptions.LongRunning, TaskScheduler.Current);

            InitializeConfiguredSources();
        }

        // Reads <system.diagnostics>/<sources>/<source name="..."> entries from the app config
        // and instantiates each named source up-front so its configuration is applied.
        private void InitializeConfiguredSources()
        {
            var configFile = AppDomain.CurrentDomain.SetupInformation.ConfigurationFile;
            if (!File.Exists(configFile))
                return;

            var sourceNames = from diagnostics in XDocument.Load(configFile).Root.Elements("system.diagnostics")
                              from sources in diagnostics.Elements("sources")
                              from source in sources.Elements("source")
                              select source.Attribute("name").Value;

            foreach (var sourceName in sourceNames)
            {
                // Cause eager initialization, which is needed for the trace source configuration
                // to be properly read.
                GetSource(sourceName);
            }
        }

        /// <summary>
        /// Gets a tracer instance with the specified name. The returned tracer aggregates the
        /// "*" source, each namespace segment of <paramref name="name"/>, and the name itself.
        /// </summary>
        public ITracer Get(string name)
        {
            return new AggregateTracer(this, name, CompositeFor(name)
                .Select(tracerName => new DiagnosticsTracer(
                    this.GetOrAdd(tracerName, sourceName => CreateSource(sourceName)))));
        }

        /// <summary>
        /// Gets the underlying <see cref="TraceSource"/> for the given name.
        /// </summary>
        public TraceSource GetSource(string name)
        {
            return this.GetOrAdd(name, sourceName => CreateSource(sourceName));
        }

        /// <summary>
        /// Adds a listener to the source with the given <paramref name="sourceName"/>.
        /// </summary>
        public void AddListener(string sourceName, TraceListener listener)
        {
            this.GetOrAdd(sourceName, name => CreateSource(name)).Listeners.Add(listener);
        }

        /// <summary>
        /// Removes a listener from the source with the given <paramref name="sourceName"/>.
        /// </summary>
        public void RemoveListener(string sourceName, TraceListener listener)
        {
            this.GetOrAdd(sourceName, name => CreateSource(name)).Listeners.Remove(listener);
        }

        /// <summary>
        /// Removes a listener (by name) from the source with the given <paramref name="sourceName"/>.
        /// </summary>
        public void RemoveListener(string sourceName, string listenerName)
        {
            this.GetOrAdd(sourceName, name => CreateSource(name)).Listeners.Remove(listenerName);
        }

        /// <summary>
        /// Sets the tracing level for the source with the given <paramref name="sourceName"/>
        /// </summary>
        public void SetTracingLevel(string sourceName, SourceLevels level)
        {
            this.GetOrAdd(sourceName, name => CreateSource(name)).Switch.Level = level;
        }

        /// <summary>
        /// Cleans up the manager, cancelling any pending tracing
        /// messages.
        /// </summary>
        public void Dispose()
        {
            // NOTE(review): disposing the queue while DoTrace may still be enumerating it can
            // raise ObjectDisposedException inside the worker; CompleteAdding() + drain would be
            // the gentler shutdown — confirm whether the abrupt teardown is intended.
            cancellation.Cancel();
            traceQueue.Dispose();
        }

        /// <summary>
        /// Enqueues the specified trace action to be executed by the trace
        /// async task, together with the caller's captured ExecutionContext.
        /// </summary>
        internal void Enqueue(Action traceAction)
        {
            traceQueue.Add(Tuple.Create(ExecutionContext.Capture(), traceAction));
        }

        // Creates a TraceSource and emits an initial informational record with its level.
        private TraceSource CreateSource(string name)
        {
            var source = new TraceSource(name);
            source.TraceInformation("Initialized with initial level {0}", source.Switch.Level);
            return source;
        }

        // Single consumer loop: drains the queue until cancelled, running each trace action
        // under the ExecutionContext captured at enqueue time.
        private void DoTrace()
        {
            foreach (var action in traceQueue.GetConsumingEnumerable())
            {
                if (cancellation.IsCancellationRequested)
                    break;

                // Tracing should never cause the app to fail.
                // Since this is async, it might if we don't catch.
                try
                {
                    ExecutionContext.Run(action.Item1, state => action.Item2(), null);
                }
                catch { }
            }
        }

        /// <summary>
        /// Gets the list of trace source names that are used to inherit trace source logging for the given <paramref name="name"/>.
        /// Order: "*", then each namespace prefix ("A", "A.B", ...), then the full name.
        /// </summary>
        private static IEnumerable<string> CompositeFor(string name)
        {
            if (name != DefaultSourceName)
                yield return DefaultSourceName;

            var indexOfGeneric = name.IndexOf('<');
            var indexOfLastDot = name.LastIndexOf('.');

            if (indexOfGeneric == -1 && indexOfLastDot == -1)
            {
                yield return name;
                yield break;
            }

            var parts = default(string[]);

            if (indexOfGeneric == -1)
                parts = name
                    .Substring(0, name.LastIndexOf('.'))
                    .Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);
            else
                // Generic type: only the portion before '<' contributes namespace segments.
                parts = name
                    .Substring(0, indexOfGeneric)
                    .Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);

            for (int i = 1; i <= parts.Length; i++)
            {
                yield return string.Join(".", parts, 0, i);
            }

            yield return name;
        }

        /// <summary>
        /// Gets an AppDomain-cached trace source of the given name, or creates it.
        /// This means that even if multiple libraries are using their own
        /// trace manager instance, they will all still share the same
        /// underlying sources.
        /// </summary>
        private TraceSource GetOrAdd(string sourceName, Func<string, TraceSource> factory)
        {
            // NOTE(review): GetData<T>()/SetData(obj) are not the BCL AppDomain signatures —
            // presumably project extension methods that key the slot by type; confirm.
            var cachedSources = AppDomain.CurrentDomain.GetData<ConcurrentDictionary<string, TraceSource>>();
            if (cachedSources == null)
            {
                // This lock guarantees that throughout the current
                // app domain, only a single root trace source is
                // created ever.
                lock (AppDomain.CurrentDomain)
                {
                    cachedSources = AppDomain.CurrentDomain.GetData<ConcurrentDictionary<string, TraceSource>>();
                    if (cachedSources == null)
                    {
                        cachedSources = new ConcurrentDictionary<string, TraceSource>();
                        AppDomain.CurrentDomain.SetData(cachedSources);
                    }
                }
            }

            return cachedSources.GetOrAdd(sourceName, factory);
        }

        /// <summary>
        /// Logs to multiple tracers simulateously. Used for the
        /// source "inheritance"
        /// </summary>
        private class AggregateTracer : ITracer
        {
            private TracerManager manager;
            private List<DiagnosticsTracer> tracers;
            private string name;

            public AggregateTracer(TracerManager manager, string name, IEnumerable<DiagnosticsTracer> tracers)
            {
                this.manager = manager;
                this.name = name;
                this.tracers = tracers.ToList();
            }

            /// <summary>
            /// Traces the specified message with the given <see cref="TraceEventType"/>.
            /// </summary>
            public void Trace(TraceEventType type, object message)
            {
                manager.Enqueue(() => tracers.AsParallel().ForAll(tracer => tracer.Trace(name, type, message)));
            }

            /// <summary>
            /// Traces the specified formatted message with the given <see cref="TraceEventType"/>.
            /// </summary>
            public void Trace(TraceEventType type, string format, params object[] args)
            {
                manager.Enqueue(() => tracers.AsParallel().ForAll(tracer => tracer.Trace(name, type, format, args)));
            }

            /// <summary>
            /// Traces an exception with the specified message and <see cref="TraceEventType"/>.
            /// </summary>
            public void Trace(TraceEventType type, Exception exception, object message)
            {
                manager.Enqueue(() => tracers.AsParallel().ForAll(tracer => tracer.Trace(name, type, exception, message)));
            }

            /// <summary>
            /// Traces an exception with the specified formatted message and <see cref="TraceEventType"/>.
            /// </summary>
            public void Trace(TraceEventType type, Exception exception, string format, params object[] args)
            {
                manager.Enqueue(() => tracers.AsParallel().ForAll(tracer => tracer.Trace(name, type, exception, format, args)));
            }

            public override string ToString()
            {
                return "Aggregate for " + this.name;
            }
        }

        partial class DiagnosticsTracer
        {
            private TraceSource source;

            public DiagnosticsTracer(TraceSource source)
            {
                this.source = source;
            }

            public void Trace(string sourceName, TraceEventType type, object message)
            {
                // Because we know there is a single tracer thread executing these,
                // we know it's safe to replace the name without locking.
                using (new SourceNameReplacer(source, sourceName))
                {
                    // Add support for Xml-based Service Trace Viewer-compatible
                    // activity tracing.
                    var data = message as XPathNavigator;
                    // Transfers with a Guid payload should instead trace a transfer
                    // with that as the related Guid.
                    var guid = message as Guid?;

                    if (data != null)
                        source.TraceData(type, 0, data);
                    else if (guid != null && type == TraceEventType.Transfer)
                        source.TraceTransfer(0, "", guid.Value);
                    else
                        source.TraceEvent(type, 0, message.ToString());
                }
            }

            public void Trace(string sourceName, TraceEventType type, string format, params object[] args)
            {
                // Because we know there is a single tracer thread executing these,
                // we know it's safe to replace the name without locking.
                using (new SourceNameReplacer(source, sourceName))
                {
                    source.TraceEvent(type, 0, format, args);
                }
            }

            public void Trace(string sourceName, TraceEventType type, Exception exception, object message)
            {
                // Because we know there is a single tracer thread executing these,
                // we know it's safe to replace the name without locking.
                using (new SourceNameReplacer(source, sourceName))
                {
                    source.TraceEvent(type, 0, message.ToString() + Environment.NewLine + exception);
                }
            }

            public void Trace(string sourceName, TraceEventType type, Exception exception, string format, params object[] args)
            {
                // Because we know there is a single tracer thread executing these,
                // we know it's safe to replace the name without locking.
                using (new SourceNameReplacer(source, sourceName))
                {
                    source.TraceEvent(type, 0, string.Format(format, args) + Environment.NewLine + exception);
                }
            }

            /// <summary>
            /// The TraceSource instance name matches the name of each of the "segments"
            /// we built the aggregate source from. This means that when we trace, we issue
            /// multiple trace statements, one for each. If a listener is added to (say) "*"
            /// source name, all traces done through it will appear as coming from the source
            /// "*", rather than (say) "Foo.Bar" which might be the actual source class.
            /// This diminishes the usefulness of hierarchical loggers significantly, since
            /// it now means that you need to add listeners too all trace sources you're
            /// interested in receiving messages from, and all its "children" potentially,
            /// some of them which might not have been created even yet. This is not feasible.
            /// Instead, since we issue the trace call to each trace source (which is what
            /// enables the configurability of all those sources in the app.config file),
            /// we need to fix the source name right before tracing, so that a configured
            /// listener at "*" still receives as the source name the original (aggregate) one,
            /// and not "*". This requires some private reflection, and a lock to guarantee
            /// proper logging, but this decreases its performance. However, since we log
            /// asynchronously, it's fine.
            /// </summary>
            private class SourceNameReplacer : IDisposable
            {
                // Private reflection needed here in order to make the inherited source names still
                // log as if the original source name was the one logging, so as not to lose the
                // originating class name.
                private static readonly FieldInfo sourceNameField = typeof(TraceSource).GetField("sourceName", BindingFlags.Instance | BindingFlags.NonPublic);

                private TraceSource source;
                private string originalName;

                public SourceNameReplacer(TraceSource source, string sourceName)
                {
                    this.source = source;
                    this.originalName = source.Name;
                    // Transient change of the source name while the trace call
                    // is issued. Multi-threading might still cause messages to come
                    // out with wrong source names :(
                    sourceNameField.SetValue(source, sourceName);
                }

                public void Dispose()
                {
                    // Restore the real name once the trace call has been issued.
                    sourceNameField.SetValue(source, originalName);
                }
            }
        }
    }
}
// Copyright (c) DotSpatial Team. All rights reserved. // Licensed under the MIT license. See License.txt file in the project root for full license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Drawing; using System.Drawing.Imaging; using System.IO; using System.Linq; using System.Runtime.InteropServices; using DotSpatial.Data; using DotSpatial.NTSExtension; namespace DotSpatial.Symbology { /// <summary> /// DesktopRasterExt contains extension methods for rasters. /// </summary> public static class DesktopRasterExt { #region Methods /// <summary> /// Create Hillshade of values ranging from 0 to 1, or -1 for no-data regions. /// This should be a little faster since we are accessing the Data field directly instead of working /// through a value parameter. /// </summary> /// <param name="raster">The raster to create the hillshade from.</param> /// <param name="shadedRelief">An implementation of IShadedRelief describing how the hillshade should be created.</param> /// <param name="progressHandler">An implementation of IProgressHandler for progress messages</param> /// <returns>The resulting hill shade array.</returns> public static float[][] CreateHillShade(this IRaster raster, IShadedRelief shadedRelief, IProgressHandler progressHandler = null) { if (progressHandler == null) progressHandler = raster.ProgressHandler; var pm = new ProgressMeter(progressHandler, SymbologyMessageStrings.DesktopRasterExt_CreatingShadedRelief, raster.NumRows); Func<int, int, double> getValue; if (raster.DataType == typeof(int)) { var r = raster.ToRaster<int>(); getValue = (row, col) => r.Data[row][col]; } else if (raster.DataType == typeof(float)) { var r = raster.ToRaster<float>(); getValue = (row, col) => r.Data[row][col]; } else if (raster.DataType == typeof(short)) { var r = raster.ToRaster<short>(); getValue = (row, col) => r.Data[row][col]; } else if (raster.DataType == typeof(byte)) { var r = raster.ToRaster<byte>(); getValue = 
(row, col) => r.Data[row][col]; } else if (raster.DataType == typeof(double)) { var r = raster.ToRaster<double>(); getValue = (row, col) => r.Data[row][col]; } else { getValue = (row, col) => raster.Value[row, col]; } return CreateHillShadeT(raster, getValue, shadedRelief, pm); } /// <summary> /// Create Hillshade of values ranging from 0 to 1, or -1 for no-data regions. /// This should be a little faster since we are accessing the Data field directly instead of working /// through a value parameter. /// </summary> /// <typeparam name="T">Type of the raster.</typeparam> /// <param name="raster">The raster to create the hillshade from.</param> /// <param name="shadedRelief">An implementation of IShadedRelief describing how the hillshade should be created.</param> /// <param name="progressMeter">An implementation of IProgressHandler for progress messages</param> /// <returns>The resulting hill shade array.</returns> public static float[][] CreateHillShadeT<T>(this Raster<T> raster, IShadedRelief shadedRelief, ProgressMeter progressMeter) where T : IEquatable<T>, IComparable<T> { return CreateHillShadeT(raster, (row, col) => raster.Data[row][col], shadedRelief, progressMeter); } /// <summary> /// Creates a bitmap from this raster using the specified rasterSymbolizer. 
/// </summary>
/// <param name="raster">The raster to draw to a bitmap.</param>
/// <param name="rasterSymbolizer">The raster symbolizer to use for assigning colors.</param>
/// <param name="bitmap">This must be an Format32bbpArgb bitmap that has already been saved to a file so that it exists.</param>
/// <param name="progressHandler">The progress handler to use.</param>
/// <exception cref="ArgumentNullException">rasterSymbolizer cannot be null</exception>
public static void DrawToBitmap(this IRaster raster, IRasterSymbolizer rasterSymbolizer, Bitmap bitmap, IProgressHandler progressHandler = null)
{
    if (raster == null) throw new ArgumentNullException(nameof(raster));
    if (rasterSymbolizer == null) throw new ArgumentNullException(nameof(rasterSymbolizer));
    if (bitmap == null) throw new ArgumentNullException(nameof(bitmap));
    if (rasterSymbolizer.Scheme.Categories == null || rasterSymbolizer.Scheme.Categories.Count == 0) return;

    BitmapData bmpData;
    var rect = new Rectangle(0, 0, raster.NumColumns, raster.NumRows);
    try
    {
        bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    }
    catch (Exception)
    {
        // if they have not saved the bitmap yet, it can cause an exception
        var ms = new MemoryStream();
        bitmap.Save(ms, ImageFormat.Bmp);
        ms.Position = 0;
        bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    }

    var numRows = raster.NumRows;
    var numColumns = raster.NumColumns;

    // Prepare progress meter
    if (progressHandler == null) progressHandler = raster.ProgressHandler;
    var pm = new ProgressMeter(progressHandler, "Drawing to Bitmap", numRows);

    // For smaller rasters, report progress in coarser steps.
    if (numRows * numColumns < 100000) pm.StepPercent = 50;
    if (numRows * numColumns < 500000) pm.StepPercent = 10;
    if (numRows * numColumns < 1000000) pm.StepPercent = 5;

    // FIX: the original skipped UnlockBits when drawing threw, leaving the
    // bitmap permanently locked. Guarantee the unlock with try/finally.
    try
    {
        DrawToBitmap(raster, rasterSymbolizer, bmpData.Scan0, bmpData.Stride, pm);
    }
    finally
    {
        bitmap.UnlockBits(bmpData);
    }

    rasterSymbolizer.ColorSchemeHasUpdated = true;
}

/// <summary>
/// Creates a bitmap from this raster using the specified rasterSymbolizer.
/// </summary>
/// <param name="raster">The raster to draw to a bitmap.</param>
/// <param name="rasterSymbolizer">The raster symbolizer to use for assigning colors.</param>
/// <param name="rgbData">Byte values representing the ARGB image bytes.</param>
/// <param name="stride">The stride</param>
/// <param name="pm">The progress meter to use.</param>
public static void DrawToBitmap(this IRaster raster, IRasterSymbolizer rasterSymbolizer, byte[] rgbData, int stride, ProgressMeter pm)
{
    // Dispatch to the strongly-typed implementation for the known cell types;
    // anything else falls back to the boxed IRaster.Value accessor.
    if (raster.DataType == typeof(int))
    {
        DrawToBitmapT(raster.ToRaster<int>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(float))
    {
        DrawToBitmapT(raster.ToRaster<float>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(short))
    {
        DrawToBitmapT(raster.ToRaster<short>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(byte))
    {
        DrawToBitmapT(raster.ToRaster<byte>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(double))
    {
        DrawToBitmapT(raster.ToRaster<double>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else
    {
        DrawToBitmapT(raster, raster.NoDataValue, (row, col) => raster.Value[row, col], i => rgbData[i], (i, b) => rgbData[i] = b, rasterSymbolizer, stride, pm);
        if (rasterSymbolizer.IsSmoothed)
        {
            var mySmoother = new Smoother(stride, raster.NumColumns, raster.NumRows, rgbData, pm.ProgressHandler);
            mySmoother.Smooth();
        }
    }
}

/// <summary>
/// Creates a bitmap from this raster using the specified rasterSymbolizer.
/// </summary>
/// <param name="raster">The raster to draw to a bitmap</param>
/// <typeparam name="T">Type of the raster.</typeparam>
/// <param name="rasterSymbolizer">The raster symbolizer to use for assigning colors</param>
/// <param name="rgbData">Byte values representing the ARGB image bytes</param>
/// <param name="stride">The stride</param>
/// <param name="pm">The progress meter to use.</param>
/// <exception cref="ArgumentNullException">rasterSymbolizer cannot be null</exception>
public static void DrawToBitmapT<T>(Raster<T> raster, IRasterSymbolizer rasterSymbolizer, byte[] rgbData, int stride, ProgressMeter pm)
    where T : struct, IEquatable<T>, IComparable<T>
{
    DrawToBitmapT(raster, GetNoData(raster), (row, col) => raster.Data[row][col], i => rgbData[i], (i, b) => rgbData[i] = b, rasterSymbolizer, stride, pm);
    if (rasterSymbolizer.IsSmoothed)
    {
        var mySmoother = new Smoother(stride, raster.NumColumns, raster.NumRows, rgbData, pm.ProgressHandler);
        mySmoother.Smooth();
    }
}

/// <summary>
/// This will sample randomly from the raster, preventing duplicates.
/// If the sampleSize is larger than this raster, this returns all of the values from the raster.
/// If a "Sample" has been prefetched and stored in the Sample array, then this will return that.
/// </summary>
/// <param name="raster">The raster to obtain the values from.</param>
/// <param name="sampleSize">Number of values to get.</param>
/// <returns>List of random double values contained in the raster.</returns>
public static List<double> GetRandomValues(this IRaster raster, int sampleSize)
{
    if (raster.Sample != null) return raster.Sample.ToList();

    int numRows = raster.NumRows;
    int numCols = raster.NumColumns;
    List<double> result = new List<double>();
    double noData = raster.NoDataValue;

    // FIX: compute the cell count in long; numRows * numCols overflowed int for large rasters.
    long cellCount = (long)numRows * numCols;

    if (cellCount < sampleSize)
    {
        // Raster is smaller than the requested sample: return every non-noData value.
        for (int row = 0; row < numRows; row++)
        {
            for (int col = 0; col < numCols; col++)
            {
                double val = raster.Value[row, col];
                if (val != noData) result.Add(val);
            }
        }

        return result;
    }

    Random rnd = new Random(DateTime.Now.Millisecond);

    if (cellCount < (long)sampleSize * 5 && cellCount < int.MaxValue)
    {
        // When the raster is only just barely larger than the sample size,
        // we want to prevent lots of repeat guesses that fail (hit the same previously sampled values).
        // We create a copy of all the values and sample from this reservoir while removing sampled values.
        List<double> resi = new List<double>();
        for (int row = 0; row < numRows; row++)
        {
            for (int col = 0; col < numCols; col++)
            {
                double val = raster.Value[row, col];
                if (val != noData) resi.Add(val);
            }
        }

        // FIX: the original drew indices from rnd.Next(count) where count was the
        // full cell count (including noData cells), which could exceed resi.Count
        // and throw ArgumentOutOfRange/IndexOutOfRange. Bound the draw by the
        // remaining reservoir size instead.
        for (int i = 0; i < sampleSize; i++)
        {
            if (resi.Count == 0) break;
            int indx = rnd.Next(resi.Count);
            result.Add(resi[indx]);
            resi.RemoveAt(indx);
        }

        raster.Sample = result;
        return result;
    }

    // Use a HashSet here, because it has O(1) lookup for preventing duplicates
    HashSet<long> exclusiveResults = new HashSet<long>();
    int remaining = sampleSize;
    while (remaining > 0)
    {
        int row = rnd.Next(numRows);
        int col = rnd.Next(numCols);

        // FIX: widen before multiplying; (row * numCols) was computed in int and
        // could overflow before being assigned to the long index.
        long index = ((long)row * numCols) + col;
        if (exclusiveResults.Contains(index)) continue;
        exclusiveResults.Add(index);
        remaining--;
    }

    // Sorting is O(n ln(n)), but sorting once is better than using a SortedSet for previous lookups.
    List<long> sorted = exclusiveResults.ToList();
    sorted.Sort();

    // Sorted values are much faster to read than reading values in at random, since the file actually
    // is reading in a whole line at a time. If we can get more than one value from a line, then that
    // is better than getting one value, discarding the cache and then coming back later for the value
    // next to it.
    result = raster.GetValues(sorted);
    raster.Sample = result;
    return result;
}

/// <summary>
/// Obtains an set of unique values. If there are more than maxCount values, the process stops and overMaxCount is set to true.
/// </summary>
/// <param name="raster">the raster to obtain the unique values from.</param>
/// <param name="maxCount">An integer specifying the maximum number of values to add to the list of unique values</param>
/// <param name="overMaxCount">A boolean that will be true if the process was halted prematurely.</param>
/// <returns>A set of doubles representing the independant values.</returns>
public static ISet<double> GetUniqueValues(this IRaster raster, int maxCount, out bool overMaxCount)
{
    overMaxCount = false;
    var result = new HashSet<double>();
    var totalPossibleCount = int.MaxValue;

    // Optimization for integer types: once every representable value has been
    // seen there is no point scanning further.
    if (raster.DataType == typeof(byte) || raster.DataType == typeof(int) || raster.DataType == typeof(sbyte) || raster.DataType == typeof(uint) || raster.DataType == typeof(short) || raster.DataType == typeof(ushort))
    {
        // FIX: the original cast (Maximum - Minimum + 1) straight to int, which
        // overflows for wide-range int/uint rasters. Only apply the shortcut
        // when the span actually fits in an int.
        double span = raster.Maximum - raster.Minimum + 1;
        if (span > 0 && span < int.MaxValue) totalPossibleCount = (int)span;
    }

    // NumRows and NumColumns - virtual properties, so copy them local variables for faster access
    var numRows = raster.NumRows;
    var numCols = raster.NumColumns;
    var valueGrid = raster.Value;

    for (var row = 0; row < numRows; row++)
    {
        for (var col = 0; col < numCols; col++)
        {
            double val = valueGrid[row, col];
            if (result.Add(val))
            {
                if (result.Count > maxCount)
                {
                    overMaxCount = true;
                    return result;
                }

                if (result.Count == totalPossibleCount) return result;
            }
        }
    }

    return result;
}

/// <summary>
/// Creates a bitmap using only the colorscheme, even if a hillshade was specified.
/// </summary>
/// <param name="raster">The Raster containing values that need to be drawn to the bitmap as a color scheme.</param>
/// <param name="rasterSymbolizer">The raster symbolizer to use.</param>
/// <param name="bitmap">The bitmap to edit.
/// Ensure that this has been created and saved at least once.</param>
/// <param name="progressHandler">An IProgressHandler implementation to receive progress updates.</param>
/// <exception cref="ArgumentNullException">rasterSymbolizer cannot be null.</exception>
public static void PaintColorSchemeToBitmap(this IRaster raster, IRasterSymbolizer rasterSymbolizer, Bitmap bitmap, IProgressHandler progressHandler)
{
    // Route to the strongly-typed painter for the supported primitive cell
    // types; otherwise fall back to the boxed IRaster.Value accessor.
    if (raster.DataType == typeof(int))
    {
        PaintColorSchemeToBitmapT(raster.ToRaster<int>(), rasterSymbolizer, bitmap, progressHandler);
    }
    else if (raster.DataType == typeof(float))
    {
        PaintColorSchemeToBitmapT(raster.ToRaster<float>(), rasterSymbolizer, bitmap, progressHandler);
    }
    else if (raster.DataType == typeof(short))
    {
        PaintColorSchemeToBitmapT(raster.ToRaster<short>(), rasterSymbolizer, bitmap, progressHandler);
    }
    else if (raster.DataType == typeof(byte))
    {
        PaintColorSchemeToBitmapT(raster.ToRaster<byte>(), rasterSymbolizer, bitmap, progressHandler);
    }
    else if (raster.DataType == typeof(double))
    {
        PaintColorSchemeToBitmapT(raster.ToRaster<double>(), rasterSymbolizer, bitmap, progressHandler);
    }
    else
    {
        PaintColorSchemeToBitmapT(raster, raster.NoDataValue, (row, col) => raster.Value[row, col], rasterSymbolizer, bitmap, progressHandler);
    }
}

/// <summary>
/// Creates a bitmap using only the colorscheme, even if a hillshade was specified.
/// </summary>
/// <param name="raster">The Raster containing values that need to be drawn to the bitmap as a color scheme.</param>
/// <typeparam name="T">Type of the raster.</typeparam>
/// <param name="rasterSymbolizer">The raster symbolizer to use.</param>
/// <param name="bitmap">The bitmap to edit. Ensure that this has been created and saved at least once.</param>
/// <param name="progressHandler">An IProgressHandler implementation to receive progress updates.</param>
/// <exception cref="ArgumentNullException"><paramref name="rasterSymbolizer"/> cannot be null, <paramref name="raster"/> cannot be null, <paramref name="bitmap"/> cannot be null</exception>
public static void PaintColorSchemeToBitmapT<T>(this Raster<T> raster, IRasterSymbolizer rasterSymbolizer, Bitmap bitmap, IProgressHandler progressHandler)
    where T : struct, IEquatable<T>, IComparable<T>
{
    PaintColorSchemeToBitmapT(raster, GetNoData(raster), (row, col) => raster.Data[row][col], rasterSymbolizer, bitmap, progressHandler);
}

/// <summary>
/// Computes a per-cell shading intensity from the raster surface and the light
/// direction described by <paramref name="shadedRelief"/>. Cells touching
/// noData are marked with -1. Returns null when the raster is not in RAM.
/// </summary>
private static float[][] CreateHillShadeT<T>(this IRaster raster, Func<int, int, T> getValue, IShadedRelief shadedRelief, ProgressMeter progressMeter)
    where T : IEquatable<T>, IComparable<T>
{
    if (!raster.IsInRam) return null;

    int numCols = raster.NumColumns;
    int numRows = raster.NumRows;
    var noData = Convert.ToSingle(raster.NoDataValue);
    float extrusion = shadedRelief.Extrusion;
    float elevationFactor = shadedRelief.ElevationFactor;
    float lightIntensity = shadedRelief.LightIntensity;
    float ambientIntensity = shadedRelief.AmbientIntensity;
    FloatVector3 lightDirection = shadedRelief.GetLightDirection();

    // affine coefficients converted to float format
    float[] aff = new float[6];
    for (int i = 0; i < 6; i++)
    {
        aff[i] = Convert.ToSingle(raster.Bounds.AffineCoefficients[i]);
    }

    float[][] hillshade = new float[numRows][];
    if (progressMeter != null) progressMeter.BaseMessage = "Creating Shaded Relief";

    for (int row = 0; row < numRows; row++)
    {
        hillshade[row] = new float[numCols];
        for (int col = 0; col < numCols; col++)
        {
            // 3D position vectors of three points to create a triangle.
            FloatVector3 v1 = new FloatVector3(0f, 0f, 0f);
            FloatVector3 v2 = new FloatVector3(0f, 0f, 0f);
            FloatVector3 v3 = new FloatVector3(0f, 0f, 0f);

            float val = Convert.ToSingle(getValue(row, col));

            // Cannot compute polygon ... make the best guess)
            if (col >= numCols - 1 || row <= 0)
            {
                if (col >= numCols - 1 && row <= 0)
                {
                    v1.Z = val;
                    v2.Z = val;
                    v3.Z = val;
                }
                else if (col >= numCols - 1)
                {
                    v1.Z = Convert.ToSingle(getValue(row, col - 1));     // 3 - 2
                    v2.Z = Convert.ToSingle(getValue(row - 1, col));     // | /
                    v3.Z = Convert.ToSingle(getValue(row - 1, col - 1)); // 1 *
                }
                else if (row <= 0)
                {
                    v1.Z = Convert.ToSingle(getValue(row + 1, col));     // 3* 2
                    v2.Z = Convert.ToSingle(getValue(row, col + 1));     // | /
                    v3.Z = val;                                          // 1
                }
            }
            else
            {
                v1.Z = val;                                              // 3 - 2
                v2.Z = Convert.ToSingle(getValue(row - 1, col + 1));     // | /
                v3.Z = Convert.ToSingle(getValue(row - 1, col));         // 1*
            }

            // Test for no-data values and don't calculate hillshade in that case
            if (v1.Z == noData || v2.Z == noData || v3.Z == noData)
            {
                hillshade[row][col] = -1; // should never be negative otherwise.
                continue;
            }

            // Apply the Conversion Factor to put elevation into the same range as lat/lon
            v1.Z = v1.Z * elevationFactor * extrusion;
            v2.Z = v2.Z * elevationFactor * extrusion;
            v3.Z = v3.Z * elevationFactor * extrusion;

            // Complete the vectors using the latitude/longitude coordinates
            v1.X = aff[0] + (aff[1] * col) + (aff[2] * row);
            v1.Y = aff[3] + (aff[4] * col) + (aff[5] * row);
            v2.X = aff[0] + (aff[1] * (col + 1)) + (aff[2] * (row + 1));
            v2.Y = aff[3] + (aff[4] * (col + 1)) + (aff[5] * (row + 1));
            v3.X = aff[0] + (aff[1] * col) + (aff[2] * (row + 1));
            v3.Y = aff[3] + (aff[4] * col) + (aff[5] * (row + 1));

            // We need two direction vectors in order to obtain a cross product
            FloatVector3 dir2 = FloatVector3.Subtract(v2, v1); // points from 1 to 2
            FloatVector3 dir3 = FloatVector3.Subtract(v3, v1); // points from 1 to 3

            // right hand rule - cross direction should point into page... reflecting more if light direction is in the same direction
            FloatVector3 cross = FloatVector3.CrossProduct(dir3, dir2);

            // Normalizing this vector ensures that this vector is a pure direction and won't affect the intensity
            cross.Normalize();

            // Hillshade now has an "intensity" modifier that should be applied to the R, G and B values of the color found at each pixel.
            hillshade[row][col] = (FloatVector3.Dot(cross, lightDirection) * lightIntensity) + ambientIntensity;
        }

        progressMeter?.Next();
    }

    // Setting this indicates that a hillshade has been created more recently than characteristics have been changed.
    shadedRelief.HasChanged = false;
    return hillshade;
}

// Unmanaged-memory variant of the public byte[] overload: dispatches on cell
// type, writing pixels through Marshal into the locked bitmap buffer.
private static void DrawToBitmap(IRaster raster, IRasterSymbolizer rasterSymbolizer, IntPtr rgbData, int stride, ProgressMeter pm)
{
    if (raster.DataType == typeof(int))
    {
        DrawToBitmapT(raster.ToRaster<int>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(float))
    {
        DrawToBitmapT(raster.ToRaster<float>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(short))
    {
        DrawToBitmapT(raster.ToRaster<short>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(byte))
    {
        DrawToBitmapT(raster.ToRaster<byte>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else if (raster.DataType == typeof(double))
    {
        DrawToBitmapT(raster.ToRaster<double>(), rasterSymbolizer, rgbData, stride, pm);
    }
    else
    {
        DrawToBitmapT(raster, raster.NoDataValue, (row, col) => raster.Value[row, col], i => Marshal.ReadByte(rgbData, i), (i, b) => Marshal.WriteByte(rgbData, i, b), rasterSymbolizer, stride, pm);
        if (rasterSymbolizer.IsSmoothed)
        {
            var mySmoother = new Smoother(stride, raster.NumColumns, raster.NumRows, rgbData, pm.ProgressHandler);
            mySmoother.Smooth();
        }
    }
}

// Strongly-typed unmanaged-memory variant; reads cells straight from the
// typed Data jagged array.
private static void DrawToBitmapT<T>(Raster<T> raster, IRasterSymbolizer rasterSymbolizer, IntPtr rgbData, int stride, ProgressMeter pm)
    where T : struct, IEquatable<T>, IComparable<T>
{
    DrawToBitmapT(raster, GetNoData(raster), (row, col) => raster.Data[row][col], i => Marshal.ReadByte(rgbData, i), (i, b) => Marshal.WriteByte(rgbData, i, b), rasterSymbolizer, stride, pm);
    if (rasterSymbolizer.IsSmoothed)
    {
        var mySmoother = new Smoother(stride, raster.NumColumns, raster.NumRows, rgbData, pm.ProgressHandler);
        mySmoother.Smooth();
    }
}

// Core painter shared by all overloads. getByte/setByte abstract over
// managed byte[] versus unmanaged (Marshal) pixel buffers.
private static void DrawToBitmapT<T>(IRaster raster, T noData, Func<int, int, T> getValue, Func<int, byte> getByte, Action<int, byte> setByte, IRasterSymbolizer rasterSymbolizer, int stride, ProgressMeter pm)
    where T : struct, IEquatable<T>, IComparable<T>
{
    if (raster == null) throw new ArgumentNullException(nameof(raster));
    if (rasterSymbolizer == null) throw new ArgumentNullException(nameof(rasterSymbolizer));
    if (rasterSymbolizer.Scheme.Categories == null || rasterSymbolizer.Scheme.Categories.Count == 0) return;

    float[][] hillshade = null;
    if (rasterSymbolizer.ShadedRelief.IsUsed)
    {
        pm.BaseMessage = "Calculating Shaded Relief";
        hillshade = rasterSymbolizer.HillShade ?? raster.CreateHillShadeT(getValue, rasterSymbolizer.ShadedRelief, pm);
    }

    pm.BaseMessage = "Calculating Colors";
    var sets = GetColorSets<T>(rasterSymbolizer.Scheme.Categories);
    var noDataColor = Argb.FromColor(rasterSymbolizer.NoDataColor);

    for (int row = 0; row < raster.NumRows; row++)
    {
        for (int col = 0; col < raster.NumColumns; col++)
        {
            var cellValue = getValue(row, col);
            Argb argb;
            if (cellValue.Equals(noData))
            {
                argb = noDataColor;
            }
            else
            {
                // Usually values are not random, so check neighboring previous cells for same color
                int? copyFrom = null;
                if (col > 0 && cellValue.Equals(getValue(row, col - 1)))
                {
                    copyFrom = Offset(row, col - 1, stride);
                }

                if (copyFrom == null && row > 0 && cellValue.Equals(getValue(row - 1, col)))
                {
                    copyFrom = Offset(row - 1, col, stride);
                }

                argb = copyFrom != null
                           ? new Argb(getByte((int)copyFrom + 3), getByte((int)copyFrom + 2), getByte((int)copyFrom + 1), getByte((int)copyFrom))
                           : GetColor(sets, cellValue);
            }

            if (hillshade != null)
            {
                float shade = hillshade[row][col];
                if (shade == -1 || float.IsNaN(shade))
                {
                    argb = new Argb(argb.A, noDataColor.R, noDataColor.G, noDataColor.B);
                }
                else
                {
                    var red = (int)(argb.R * shade);
                    var green = (int)(argb.G * shade);
                    var blue = (int)(argb.B * shade);
                    argb = new Argb(argb.A, red, green, blue);
                }
            }

            // Pixels are stored BGRA in memory.
            var offset = Offset(row, col, stride);
            setByte(offset, argb.B);
            setByte(offset + 1, argb.G);
            setByte(offset + 2, argb.R);
            setByte(offset + 3, argb.A);
        }

        pm.Next();
    }
}

// Resolves the color for a value by walking the (reversed) category sets;
// continues on the next chunk of the file.
private static Argb GetColor<T>(IEnumerable<ColorSet<T>> sets, T value) where T : struct,
IComparable<T>
{
    foreach (var set in sets)
    {
        if (!set.Contains(value)) continue;

        // Flat (non-gradient) categories, or gradients without bounds, use the single color.
        if (!set.Gradient) return set.Color;
        if (set.Min == null || set.Max == null) return set.Color;

        double lowVal = Convert.ToDouble(set.Min.Value);
        double range = Math.Abs(Convert.ToDouble(set.Max.Value) - lowVal);
        double p = 0; // the portion of the range, where 0 is LowValue & 1 is HighValue
        double ht;
        double dVal = Convert.ToDouble(value);
        switch (set.GradientModel)
        {
            case GradientModel.Linear:
                p = (dVal - lowVal) / range;
                break;
            case GradientModel.Exponential:
                ht = dVal;
                if (ht < 1) ht = 1.0;
                if (range > 1) p = Math.Pow(ht - lowVal, 2) / Math.Pow(range, 2);
                else return set.Color;
                break;
            case GradientModel.Logarithmic:
                ht = dVal;
                if (ht < 1) ht = 1.0;
                if (range > 1.0 && ht - lowVal > 1.0) p = Math.Log(ht - lowVal) / Math.Log(range);
                else return set.Color;
                break;
        }

        // Interpolate each channel between the low and high colors.
        return new Argb(set.MinA + (int)(set.RangeA * p), set.MinR + (int)(set.RangeR * p), set.MinG + (int)(set.RangeG * p), set.MinB + (int)(set.RangeB * p));
    }

    return Argb.FromColor(Color.Transparent);
}

// Precomputes a ColorSet per category so per-pixel lookups avoid repeated
// conversions of the category's range and colors.
private static List<ColorSet<T>> GetColorSets<T>(IEnumerable<IColorCategory> categories) where T : struct, IComparable<T>
{
    var result = new List<ColorSet<T>>();
    foreach (var c in categories)
    {
        var cs = new ColorSet<T>();
        Color high = c.HighColor;
        Color low = c.LowColor;
        cs.Color = Argb.FromColor(low);
        if (high != low)
        {
            cs.GradientModel = c.GradientModel;
            cs.Gradient = true;
            cs.MinA = low.A;
            cs.MinR = low.R;
            cs.MinG = low.G;
            cs.MinB = low.B;
            cs.RangeA = high.A - cs.MinA;
            cs.RangeR = high.R - cs.MinR;
            cs.RangeG = high.G - cs.MinG;
            cs.RangeB = high.B - cs.MinB;
        }

        // Start from the full range of T, then clamp by the category's range.
        cs.Max = Global.MaximumValue<T>();
        var testMax = Convert.ToDouble(cs.Max);
        cs.Min = Global.MinimumValue<T>();
        var testMin = Convert.ToDouble(cs.Min);

        if (c.Range.Maximum != null && c.Range.Maximum < testMax)
        {
            if (c.Range.Maximum < testMin) cs.Max = cs.Min;
            else cs.Max = (T)Convert.ChangeType(c.Range.Maximum.Value, typeof(T));
        }

        if (c.Range.Minimum != null && c.Range.Minimum > testMin)
        {
            if (c.Range.Minimum > testMax) cs.Min = Global.MaximumValue<T>();
            else cs.Min = (T)Convert.ChangeType(c.Range.Minimum.Value, typeof(T));
        }

        cs.MinInclusive = c.Range.MinIsInclusive;
        cs.MaxInclusive = c.Range.MaxIsInclusive;
        result.Add(cs);
    }

    // The normal order uses "overwrite" behavior, so that each color is drawn
    // if it qualifies until all the ranges are tested, overwriting previous.
    // This can be mimicked by going through the sets in reverse and choosing
    // the first that qualifies. For lots of color ranges, opting out of
    // a large portion of the range testing should be faster.
    result.Reverse();
    return result;
}

// Converts the raster's NoDataValue to T; falls back to default(T) when the
// reported value does not fit the band type.
private static T GetNoData<T>(Raster<T> raster)
    where T : IEquatable<T>, IComparable<T>
{
    // Get nodata value.
    var noData = default(T);
    try
    {
        noData = (T)Convert.ChangeType(raster.NoDataValue, typeof(T));
    }
    catch (OverflowException)
    {
        // For whatever reason, GDAL occasionally is reporting noDataValues
        // That will not fit in the specified band type. Is this due to a
        // malformed GeoTiff file?
        // http://dotspatial.codeplex.com/workitem/343
        Trace.WriteLine("OverflowException while getting NoDataValue");
    }

    return noData;
}

// Byte offset of the BGRA pixel at (row, col) in a buffer with the given stride.
private static int Offset(int row, int col, int stride)
{
    return (row * stride) + (col * 4);
}

// Core color-scheme painter: locks the bitmap, maps every cell through the
// color sets (with the neighbor-copy optimization) and writes BGRA bytes
// directly into the locked buffer.
private static void PaintColorSchemeToBitmapT<T>(this IRaster raster, T noData, Func<int, int, T> getValue, IRasterSymbolizer rasterSymbolizer, Bitmap bitmap, IProgressHandler progressHandler)
    where T : struct, IEquatable<T>, IComparable<T>
{
    if (raster == null) throw new ArgumentNullException(nameof(raster));
    if (rasterSymbolizer == null) throw new ArgumentNullException(nameof(rasterSymbolizer));
    if (bitmap == null) throw new ArgumentNullException(nameof(bitmap));
    if (rasterSymbolizer.Scheme.Categories == null || rasterSymbolizer.Scheme.Categories.Count == 0) return;

    BitmapData bmpData;
    var numRows = raster.NumRows;
    var numColumns = raster.NumColumns;
    var rect = new Rectangle(0, 0, numColumns, numRows);
    try
    {
        bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    }
    catch
    {
        // Saving to a memory stream materializes the bitmap so LockBits can succeed.
        var ms = new MemoryStream();
        bitmap.Save(ms, ImageFormat.MemoryBmp);
        ms.Position = 0;
        bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    }

    // Prepare progress meter
    var pm = new ProgressMeter(progressHandler, SymbologyMessageStrings.DesktopRasterExt_PaintingColorScheme, numRows);
    if (numRows * numColumns < 100000) pm.StepPercent = 50;
    if (numRows * numColumns < 500000) pm.StepPercent = 10;
    if (numRows * numColumns < 1000000) pm.StepPercent = 5;

    var sets = GetColorSets<T>(rasterSymbolizer.Scheme.Categories);
    var noDataColor = Argb.FromColor(rasterSymbolizer.NoDataColor);
    var alpha = Argb.ByteRange(Convert.ToInt32(rasterSymbolizer.Opacity * 255));
    var ptr = bmpData.Scan0;

    for (var row = 0; row < numRows; row++)
    {
        for (var col = 0; col < numColumns; col++)
        {
            var val = getValue(row, col);
            Argb argb;
            if (val.Equals(noData))
            {
                argb = noDataColor;
            }
            else
            {
                // Usually values are not random, so check neighboring previous cells for same color
                int? copyFrom = null;
                if (col > 0 && val.Equals(getValue(row, col - 1)))
                {
                    copyFrom = Offset(row, col - 1, bmpData.Stride);
                }

                if (copyFrom == null && row > 0 && val.Equals(getValue(row - 1, col)))
                {
                    copyFrom = Offset(row - 1, col, bmpData.Stride);
                }

                if (copyFrom != null)
                {
                    argb = new Argb(Marshal.ReadByte(ptr, (int)copyFrom + 3), Marshal.ReadByte(ptr, (int)copyFrom + 2), Marshal.ReadByte(ptr, (int)copyFrom + 1), Marshal.ReadByte(ptr, (int)copyFrom));
                }
                else
                {
                    var color = GetColor(sets, val);
                    argb = new Argb(alpha, color.R, color.G, color.B);
                }
            }

            var offset = Offset(row, col, bmpData.Stride);
            Marshal.WriteByte(ptr, offset, argb.B);
            Marshal.WriteByte(ptr, offset + 1, argb.G);
            Marshal.WriteByte(ptr, offset + 2, argb.R);
            Marshal.WriteByte(ptr, offset + 3, argb.A);
        }

        pm.CurrentValue = row;
    }

    pm.Reset();

    if (rasterSymbolizer.IsSmoothed)
    {
        var mySmoother = new Smoother(bmpData.Stride, bmpData.Width, bmpData.Height, bmpData.Scan0, progressHandler);
        mySmoother.Smooth();
    }

    bitmap.UnlockBits(bmpData);
    rasterSymbolizer.ColorSchemeHasUpdated = true;
}

#endregion

#region Classes

// Precomputed, range-checked color bucket used by GetColor/GetColorSets.
private class ColorSet<T>
    where T : struct, IComparable<T>
{
    #region Properties

    public Argb Color { get; set; } // for non bivalue case.

    public bool Gradient { get; set; }

    public GradientModel GradientModel { get; set; }

    public T? Max { get; set; }

    public bool MaxInclusive { get; set; }

    public T? Min { get; set; }

    public int MinA { get; set; }

    public int MinB { get; set; }

    public int MinG { get; set; }

    public bool MinInclusive { get; set; }

    public int MinR { get; set; }

    public int RangeA { get; set; }

    public int RangeB { get; set; }

    public int RangeG { get; set; }

    public int RangeR { get; set; }

    #endregion

    #region Methods

    // True when value falls inside [Min, Max], honoring the inclusive flags;
    // a null bound means unbounded on that side.
    public bool Contains(T value)
    {
        // Checking for nulls
        if (Max == null && Min == null) return true;
        if (Min == null) return MaxInclusive ? value.CompareTo(Max.Value) <= 0 : value.CompareTo(Max.Value) < 0;
        if (Max == null) return MinInclusive ? value.CompareTo(Min.Value) >= 0 : value.CompareTo(Min.Value) > 0;

        // Normal checking
        double cMax = value.CompareTo(Max.Value);
        if (cMax > 0 || (!MaxInclusive && cMax == 0)) return false; // value bigger than max or max excluded
        double cMin = value.CompareTo(Min.Value);
        if (cMin < 0 || (cMin == 0 && !MinInclusive)) return false; // value smaller than min or min excluded
        return true;
    }

    #endregion
}

#endregion
}
}
#if (UNITY_WINRT || UNITY_WP_8_1) && !UNITY_EDITOR && !UNITY_WP8
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Globalization;
using System.Reflection;
using Newtonsoft.Json.Utilities;
using System.Collections;
using System.Linq;

namespace Newtonsoft.Json.Serialization
{
    /// <summary>
    /// Contract details for a <see cref="Type"/> used by the <see cref="JsonSerializer"/>.
    /// </summary>
    public class JsonArrayContract : JsonContainerContract
    {
        /// <summary>
        /// Gets the <see cref="Type"/> of the collection items.
        /// </summary>
        /// <value>The <see cref="Type"/> of the collection items.</value>
        public Type CollectionItemType { get; private set; }

        /// <summary>
        /// Gets a value indicating whether the collection type is a multidimensional array.
        /// </summary>
        /// <value><c>true</c> if the collection type is a multidimensional array; otherwise, <c>false</c>.</value>
        public bool IsMultidimensionalArray { get; private set; }

        // Whether CollectionItemType is Nullable<T>; set in the constructor, not read in this file.
        private readonly bool _isCollectionItemTypeNullableType;
        // The generic collection definition (e.g. List<T> or the found ICollection<T>) used to build wrappers.
        private readonly Type _genericCollectionDefinitionType;

        // Lazily-created CollectionWrapper<T> type and its constructor delegate (see CreateWrapper).
        private Type _genericWrapperType;
        private MethodCall<object, object> _genericWrapperCreator;
        // Lazily-created default constructor for the temporary deserialization list (see CreateTemporaryCollection).
        private Func<object> _genericTemporaryCollectionCreator;

        internal bool IsArray { get; private set; }
        internal bool ShouldCreateWrapper { get; private set; }
        internal bool CanDeserialize { get; private set; }
        internal MethodBase ParametrizedConstructor { get; private set; }

        /// <summary>
        /// Initializes a new instance of the <see cref="JsonArrayContract"/> class.
        /// </summary>
        /// <param name="underlyingType">The underlying type for the contract.</param>
        public JsonArrayContract(Type underlyingType)
            : base(underlyingType)
        {
            ContractType = JsonContractType.Array;

            IsArray = CreatedType.IsArray;

            bool canDeserialize;

            Type tempCollectionType;
            if (IsArray)
            {
                // Arrays deserialize via a temporary List<T> and are fixed size.
                CollectionItemType = ReflectionUtils.GetCollectionItemType(UnderlyingType);
                IsReadOnlyOrFixedSize = true;
                _genericCollectionDefinitionType = typeof(List<>).MakeGenericType(CollectionItemType);

                canDeserialize = true;
                IsMultidimensionalArray = (IsArray && UnderlyingType.GetArrayRank() > 1);
            }
            else if (typeof(IList).IsAssignableFrom(underlyingType))
            {
                // Non-generic IList implementors; prefer the generic item type when ICollection<T> is also implemented.
                if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(ICollection<>), out _genericCollectionDefinitionType))
                    CollectionItemType = _genericCollectionDefinitionType.GetGenericArguments()[0];
                else
                    CollectionItemType = ReflectionUtils.GetCollectionItemType(underlyingType);

                if (underlyingType == typeof(IList))
                    CreatedType = typeof(List<object>);

                if (CollectionItemType != null)
                    ParametrizedConstructor = CollectionUtils.ResolveEnumableCollectionConstructor(underlyingType, CollectionItemType);

                IsReadOnlyOrFixedSize = ReflectionUtils.InheritsGenericDefinition(underlyingType, typeof(ReadOnlyCollection<>));
                canDeserialize = true;
            }
            else if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(ICollection<>), out _genericCollectionDefinitionType))
            {
                CollectionItemType = _genericCollectionDefinitionType.GetGenericArguments()[0];

                // Interfaces get a concrete default created type.
                if (ReflectionUtils.IsGenericDefinition(underlyingType, typeof(ICollection<>))
                    || ReflectionUtils.IsGenericDefinition(underlyingType, typeof(IList<>)))
                    CreatedType = typeof(List<>).MakeGenericType(CollectionItemType);

                if (ReflectionUtils.IsGenericDefinition(underlyingType, typeof(ISet<>)))
                    CreatedType = typeof(HashSet<>).MakeGenericType(CollectionItemType);

                ParametrizedConstructor = CollectionUtils.ResolveEnumableCollectionConstructor(underlyingType, CollectionItemType);
                canDeserialize = true;
                ShouldCreateWrapper = true;
            }
            //#if !(NET40 || NET35 || NET20 || SILVERLIGHT || WINDOWS_PHONE || PORTABLE40)
            else if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(IReadOnlyCollection<>), out tempCollectionType))
            {
                // NOTE(review): item type is taken from underlyingType, not from the
                // found tempCollectionType interface — presumably assumes underlyingType
                // itself is generic here; confirm against upstream Json.NET.
                CollectionItemType = underlyingType.GetGenericArguments()[0];

                if (ReflectionUtils.IsGenericDefinition(underlyingType, typeof(IReadOnlyCollection<>))
                    || ReflectionUtils.IsGenericDefinition(underlyingType, typeof(IReadOnlyList<>)))
                    CreatedType = typeof(ReadOnlyCollection<>).MakeGenericType(CollectionItemType);

                _genericCollectionDefinitionType = typeof(List<>).MakeGenericType(CollectionItemType);
                ParametrizedConstructor = CollectionUtils.ResolveEnumableCollectionConstructor(CreatedType, CollectionItemType);
                IsReadOnlyOrFixedSize = true;
                canDeserialize = (ParametrizedConstructor != null);
            }
            //#endif
            else if (ReflectionUtils.ImplementsGenericDefinition(underlyingType, typeof(IEnumerable<>), out tempCollectionType))
            {
                CollectionItemType = tempCollectionType.GetGenericArguments()[0];

                if (ReflectionUtils.IsGenericDefinition(UnderlyingType, typeof(IEnumerable<>)))
                    CreatedType = typeof(List<>).MakeGenericType(CollectionItemType);

                ParametrizedConstructor = CollectionUtils.ResolveEnumableCollectionConstructor(underlyingType, CollectionItemType);

                if (underlyingType.IsGenericType() && underlyingType.GetGenericTypeDefinition() == typeof(IEnumerable<>))
                {
                    // Plain IEnumerable<T>: populate a List<T> directly.
                    _genericCollectionDefinitionType = tempCollectionType;

                    IsReadOnlyOrFixedSize = false;
                    ShouldCreateWrapper = false;
                    canDeserialize = true;
                }
                else
                {
                    // Concrete type that only exposes IEnumerable<T>: deserialize into a
                    // temporary List<T> and require a parametrized constructor to build it.
                    _genericCollectionDefinitionType = typeof(List<>).MakeGenericType(CollectionItemType);

                    IsReadOnlyOrFixedSize = true;
                    ShouldCreateWrapper = true;
                    canDeserialize = (ParametrizedConstructor != null);
                }
            }
            else
            {
                // types that implement IEnumerable and nothing else
                canDeserialize = false;
                ShouldCreateWrapper = true;
            }

            CanDeserialize = canDeserialize;

            if (CollectionItemType != null)
                _isCollectionItemTypeNullableType = ReflectionUtils.IsNullableType(CollectionItemType);

            //#if !(NET20 || NET35 || NET40 || PORTABLE40)
            // Immutable collections (ImmutableList<T> etc.) override the created type
            // and constructor resolved above.
            Type immutableCreatedType;
            MethodBase immutableParameterizedCreator;
            if (ImmutableCollectionsUtils.TryBuildImmutableForArrayContract(underlyingType, CollectionItemType, out immutableCreatedType, out immutableParameterizedCreator))
            {
                CreatedType = immutableCreatedType;
                ParametrizedConstructor = immutableParameterizedCreator;
                IsReadOnlyOrFixedSize = true;
                CanDeserialize = true;
            }
            //#endif
        }

        // Wraps a non-IList collection in a CollectionWrapper<T> so the serializer
        // can treat it as an IList; the wrapper creator delegate is cached.
        internal IWrappedCollection CreateWrapper(object list)
        {
            if (_genericWrapperCreator == null)
            {
                _genericWrapperType = typeof(CollectionWrapper<>).MakeGenericType(CollectionItemType);

                Type constructorArgument;

                if (ReflectionUtils.InheritsGenericDefinition(_genericCollectionDefinitionType, typeof(List<>))
                    || _genericCollectionDefinitionType.GetGenericTypeDefinition() == typeof(IEnumerable<>))
                    constructorArgument = typeof(ICollection<>).MakeGenericType(CollectionItemType);
                else
                    constructorArgument = _genericCollectionDefinitionType;

                ConstructorInfo genericWrapperConstructor = _genericWrapperType.GetConstructor(new[] { constructorArgument });
                _genericWrapperCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateMethodCall<object>(genericWrapperConstructor);
            }

            return (IWrappedCollection)_genericWrapperCreator(null, list);
        }

        // Creates the temporary List<T> that read-only/fixed-size collections are
        // deserialized into before the final collection is constructed.
        internal IList CreateTemporaryCollection()
        {
            if (_genericTemporaryCollectionCreator == null)
            {
                // multidimensional array will also have array instances in it
                Type collectionItemType = (IsMultidimensionalArray) ? typeof(object) : CollectionItemType;
                Type temporaryListType = typeof(List<>).MakeGenericType(collectionItemType);
                _genericTemporaryCollectionCreator = JsonTypeReflector.ReflectionDelegateFactory.CreateDefaultConstructor<object>(temporaryListType);
            }

            return (IList)_genericTemporaryCollectionCreator();
        }
    }
}
#endif
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitMind.Utils;
using GitMind.Utils.Git;
using GitMind.Utils.Git.Private;


namespace GitMind.GitModel.Private
{
	/// <summary>
	/// Builds the in-memory commit model: streams new commits from the git log into the
	/// repository cache and wires up parent/child links and branch names for all
	/// commits reachable from the branch tips.
	/// </summary>
	internal class CommitsService : ICommitsService
	{
		private readonly IGitLogService gitLogService;


		public CommitsService(IGitLogService gitLogService)
		{
			this.gitLogService = gitLogService;
		}


		/// <summary>
		/// Streams the git log and adds commits not yet in the repository cache.
		/// Cancels the log walk once a long run (&gt; 5000) of already-cached commits,
		/// whose parents are all cached too, has been seen.
		/// </summary>
		public async Task AddNewCommitsAsync(MRepository repository)
		{
			int addedCount = 0;
			CancellationTokenSource cts = new CancellationTokenSource();
			int seenCount = 0;

			void OnCommit(GitCommit commit)
			{
				CommitId commitId = new CommitId(commit.Sha);

				if (repository.GitCommits.TryGetValue(commitId, out _))
				{
					// Bug fix: the original lambda tested "commitId" (the commit itself)
					// instead of the parent id "p", so the "all parents cached" check was
					// vacuously true whenever the commit itself was cached.
					if (commit.ParentIds.All(p => repository.GitCommits.TryGetValue(p, out _)))
					{
						seenCount++;
						if (seenCount > 5000)
						{
							Log.Debug($"Commit {commitId} already cached");
							cts.Cancel();
						}
					}
					else
					{
						seenCount = 0;
					}
				}
				else
				{
					seenCount = 0;
					repository.GitCommits[commitId] = commit;
					addedCount++;
				}
			}

			R result = await gitLogService.GetLogAsync(OnCommit, cts.Token);
			if (result.IsFaulted)
			{
				Log.Warn($"Failed to add new commits, {result}");
			}

			Log.Debug($"Added {addedCount} to cache");
		}


		/// <summary>
		/// Walks all branch tips depth-first and materializes an MCommit for every
		/// reachable commit, deducing branch names from merge-commit subjects where possible.
		/// Appends a virtual "uncommitted" commit when the working folder has changes.
		/// </summary>
		public void AddBranchCommits(IReadOnlyList<GitBranch> branches, MRepository repository)
		{
			GitStatus status = repository.Status;
			Timing t = new Timing();

			IEnumerable<CommitSha> rootCommits = branches.Select(b => b.TipSha);
			if (branches.TryGetCurrent(out GitBranch current) && current.IsDetached)
			{
				// A detached HEAD is not in the branch list; include its tip as a root too
				rootCommits = rootCommits.Concat(new[] { current.TipSha });
			}

			if (!rootCommits.Any())
			{
				// Empty repository; use a single virtual "no commits" root
				AddVirtualEmptyCommit(repository);
				rootCommits = new[] { CommitSha.NoCommits };
			}

			rootCommits = rootCommits.ToList();
			t.Log("Root commit ids");

			// "added" records shas already pushed, so each commit is visited exactly once
			Dictionary<CommitSha, object> added = new Dictionary<CommitSha, object>();
			Dictionary<CommitId, BranchName> branchNameByCommitId = new Dictionary<CommitId, BranchName>();
			Dictionary<CommitId, BranchName> subjectBranchNameByCommitId = new Dictionary<CommitId, BranchName>();

			Stack<CommitSha> commitShas = new Stack<CommitSha>();
			rootCommits.ForEach(sha => commitShas.Push(sha));
			rootCommits.ForEach(sha => added[sha] = null);
			t.Log("Pushed roots on stack");

			while (commitShas.Any())
			{
				CommitSha commitSha = commitShas.Pop();
				CommitId commitId = new CommitId(commitSha.Sha);

				GitCommit gitCommit;
				if (!repository.GitCommits.TryGetValue(commitId, out gitCommit))
				{
					Log.Warn($"Unknown commit {commitSha}");
					continue;
				}

				if (IsMergeCommit(gitCommit))
				{
					// Merge subjects can reveal branch names of this commit and its parents
					TrySetBranchNameFromSubject(commitId, gitCommit, branchNameByCommitId, subjectBranchNameByCommitId);
				}

				MCommit commit = repository.Commit(commitId);
				if (!commit.IsSet)
				{
					if (commit.Id == CommitId.NoCommits)
					{
						commit.IsVirtual = true;
						commit.SetBranchName("master");
					}

					AddCommit(commit, gitCommit);

					// Push not-yet-visited parents onto the stack for the depth-first walk.
					// (The original lazily computed this behind an always-true null check.)
					IEnumerable<CommitSha> parentShas =
						gitCommit.ParentIds.Select(id => repository.GitCommits[id].Sha);
					AddParents(parentShas, commitShas, added);
				}

				BranchName branchName;
				if (branchNameByCommitId.TryGetValue(commitId, out branchName))
				{
					// Branch name set by a child commit (pull merge commit)
					commit.SetBranchName(branchName);
				}

				BranchName subjectBranchName;
				if (subjectBranchNameByCommitId.TryGetValue(commitId, out subjectBranchName))
				{
					// Subject branch name set by a child commit (merge commit)
					gitCommit.SetBranchNameFromSubject(subjectBranchName);
				}
			}

			if (!status.OK)
			{
				// Adding a virtual "uncommitted" commit since current working folder status has changes
				AddVirtualUncommitted(current, status, repository);
			}
		}


		// Marks the commit as set: pre-creates its parents and links child ids.
		private void AddCommit(MCommit commit, GitCommit gitCommit)
		{
			// Ticket extraction is currently disabled; kept as an empty string.
			string tickets = "";
			commit.Tickets = tickets;

			// Pre-create all parents
			commit.ParentIds.ForEach(pid => commit.Repository.Commit(pid));
			SetChildOfParents(commit);
			commit.IsSet = true;
		}


		// Creates the virtual "uncommitted" commit representing working-folder changes,
		// parented on the current branch tip (or the virtual empty-repo commit).
		private void AddVirtualUncommitted(GitBranch currentBranch, GitStatus status, MRepository repository)
		{
			MCommit commit = repository.Commit(CommitId.Uncommitted);
			repository.Uncommitted = commit;
			commit.IsVirtual = true;

			CommitId headCommitId = CommitId.NoCommits;
			if (currentBranch != null)
			{
				CommitId headId = new CommitId(currentBranch.TipSha.Sha);
				MCommit headCommit = repository.Commit(headId);
				headCommitId = headCommit.Id;
			}

			CopyToUncommittedCommit(currentBranch, repository, status, commit, headCommitId);

			SetChildOfParents(commit);
		}


		// Registers the virtual commit used as the sole root when the repository has no commits yet.
		private void AddVirtualEmptyCommit(MRepository repository)
		{
			CommitSha virtualSha = CommitSha.NoCommits;
			CommitId virtualId = new CommitId(virtualSha);

			GitCommit gitCommit = new GitCommit(
				virtualSha,
				"<Repository with no commits yet ...>",
				"<Repository with no commits yet ...>",
				"",
				DateTime.Now,
				DateTime.Now,
				new List<CommitId>());

			repository.GitCommits[virtualId] = gitCommit;
		}


		// Pushes parents that have not been visited yet onto the walk stack.
		private static void AddParents(
			IEnumerable<CommitSha> parents,
			Stack<CommitSha> commitShas,
			Dictionary<CommitSha, object> added)
		{
			parents.ForEach(parent =>
			{
				if (!added.ContainsKey(parent))
				{
					commitShas.Push(parent);
					added[parent] = null;
				}
			});
		}


		private static void TrySetBranchNameFromSubject(
			CommitId commitId,
			GitCommit gitCommit,
			IDictionary<CommitId, BranchName> branchNameByCommitId,
			IDictionary<CommitId, BranchName> subjectBranchNameByCommitId)
		{
			// Trying to parse source and target branch names from subject. They can be like
			// "Merge branch 'branch-name' of remote-repository-path"
			// This is considered a "pull merge", where branch-name is both source and target. These are
			// usually automatically created by tools and thus more trustworthy.
			// Other merge subjects are less trustworthy since they sometimes are manually edited
			// like:
			// "Merge source-branch"
			// which contains a source branch name, but sometimes they contain a target like
			// "Merge source-branch into target-branch"
			MergeBranchNames mergeNames = BranchNameParser.ParseBranchNamesFromSubject(gitCommit.Subject);

			if (IsPullMergeCommit(mergeNames))
			{
				// Pull merge subjects (source branch same as target) are trustworthy, so use branch name
				branchNameByCommitId[commitId] = mergeNames.SourceBranchName;
				branchNameByCommitId[gitCommit.ParentIds[0]] = mergeNames.SourceBranchName;
				branchNameByCommitId[gitCommit.ParentIds[1]] = mergeNames.SourceBranchName;

				// But also note the branch name from subjects
				subjectBranchNameByCommitId[commitId] = mergeNames.SourceBranchName;
				subjectBranchNameByCommitId[gitCommit.ParentIds[0]] = mergeNames.SourceBranchName;
				subjectBranchNameByCommitId[gitCommit.ParentIds[1]] = mergeNames.SourceBranchName;
			}
			else
			{
				// Normal merge subject (less trustworthy)
				if (mergeNames.TargetBranchName != null)
				{
					// There was a target branch name
					subjectBranchNameByCommitId[commitId] = mergeNames.TargetBranchName;
				}

				if (mergeNames.SourceBranchName != null)
				{
					// There was a source branch name (second parent is the merged-in branch)
					subjectBranchNameByCommitId[gitCommit.ParentIds[1]] = mergeNames.SourceBranchName;
				}
			}
		}


		// Adds the commit to each parent's child list, and to the first parent's
		// first-child list (used to distinguish the mainline from merged-in branches).
		private static void SetChildOfParents(MCommit commit)
		{
			bool isFirstParent = true;
			foreach (MCommit parent in commit.Parents)
			{
				IList<CommitId> childIds = parent.ChildIds;
				if (!childIds.Contains(commit.Id))
				{
					childIds.Add(commit.Id);
				}

				if (isFirstParent)
				{
					// Only first parent gets the commit as a "first child"
					isFirstParent = false;
					IList<CommitId> firstChildIds = parent.FirstChildIds;
					if (!firstChildIds.Contains(commit.Id))
					{
						firstChildIds.Add(commit.Id);
					}
				}
			}
		}


		// Fills in the virtual uncommitted commit from the current working-folder status.
		private static void CopyToUncommittedCommit(
			GitBranch currentBranch,
			MRepository repository,
			GitStatus status,
			MCommit commit,
			CommitId parentId)
		{
			int modifiedCount = status.AllChanges;
			int conflictCount = status.Conflicted;

			string subject = $"{modifiedCount} uncommitted changes in working folder";

			if (conflictCount > 0)
			{
				subject = $"{conflictCount} conflicts and {modifiedCount} changes, {ShortSubject(status)}";
				commit.HasConflicts = true;
			}
			else if (status.IsMerging)
			{
				subject = $"{modifiedCount} changes, {ShortSubject(status)}";
				commit.IsMerging = true;
			}

			GitCommit gitCommit = new GitCommit(
				CommitSha.Uncommitted,
				subject,
				subject,
				"",
				DateTime.Now,
				DateTime.Now,
				new List<CommitId> { parentId });

			repository.GitCommits[CommitId.Uncommitted] = gitCommit;

			commit.SetBranchName(currentBranch?.Name ?? "master");
			commit.Tickets = "";
			commit.BranchId = null;
		}


		// First line of the merge message, if any.
		private static string ShortSubject(GitStatus status)
		{
			string subject = status.MergeMessage?.Trim() ?? "";
			string firstLine = subject.Split("\n".ToCharArray())[0];
			return firstLine;
		}


		private static bool IsMergeCommit(GitCommit gitCommit)
		{
			return gitCommit.ParentIds.Count > 1;
		}


		// A "pull merge" has identical source and target branch names in its subject.
		private static bool IsPullMergeCommit(MergeBranchNames branchNames)
		{
			return branchNames.SourceBranchName != null
				&& branchNames.SourceBranchName == branchNames.TargetBranchName;
		}
	}
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using Ecosim.SceneData;
using Ecosim;

/// <summary>
/// Renders small preview icons for tile types by building a 32x32 miniature terrain,
/// populating it with the tile's trees/objects/details, and photographing it with a
/// dedicated render camera. Tiles are queued via RenderTile and processed by a coroutine.
/// </summary>
public class RenderTileIcons : MonoBehaviour
{
	// Per-prototype detail density map for the miniature terrain.
	private class DetailMap
	{
		public int[,] map;
		public DetailPrototype proto;
		public bool isEmpty;
	}

	// Camera placement for a render: horizontal/vertical angle, distance and vertical offset.
	public struct RenderSettings
	{
		public RenderSettings(float angleH, float angleV, float distance, float offsetV)
		{
			this.angleH = angleH;
			this.angleV = angleV;
			this.distance = distance;
			this.offsetV = offsetV;
			emptySurroundings = true;
		}

		public float angleH;
		public float angleV;
		public float distance;
		public float offsetV;
		// When true, surrounding tiles use the vegetation type's base tile instead of the tile itself.
		public bool emptySurroundings;
	}

	// Singleton instance, set in Awake, cleared in OnDestroy.
	public static RenderTileIcons self;
	public Texture2D placeholderIcon;
	public Camera renderCamera;
	public UnityEngine.TerrainData originalTerrainData;
	UnityEngine.TerrainData data;
	public const int TERRAIN_SIZE = 32;
	public const float TERRAIN_SCALE = 20f;
	RenderSettings defaultSettings = new RenderSettings(60f, 30f, 30f, 4f);
	private DetailMap[] detailMaps;
	// Scene objects spawned for the current render; destroyed on the next Generate.
	List<GameObject> objects = new List<GameObject>();

	// used to check if the tile layout has changed or not.
	private long renderId = 1;
	RenderTileIconStencil[] stencils;

	/// <summary>
	/// Adds tile to icon generation queue, returns placeholder icon to show till the icon is generated.
	/// Returns null if no instance exists yet.
	/// </summary>
	public static Texture2D RenderTile(TileType tile)
	{
		if (self == null)
			return null;
		QueueEntry entry = new QueueEntry();
		entry.tile = tile;
		if (self.tail != null) {
			self.tail.next = entry;
		}
		if (self.head == null) {
			self.head = entry;
		}
		self.tail = entry;
		return self.placeholderIcon;
	}

	// Singly-linked FIFO queue node for pending tile renders.
	class QueueEntry
	{
		public TileType tile;
		public QueueEntry next;
	}

	QueueEntry head;
	QueueEntry tail;

	void Awake()
	{
		self = this;
	}

	void OnDestroy()
	{
		self = null;
	}

	// Allocates one empty density map per detail prototype of the given terrain data.
	public void SetupDetailMap(UnityEngine.TerrainData data)
	{
		DetailPrototype[] details = data.detailPrototypes;
		detailMaps = new DetailMap[details.Length];
		for (int i = 0; i < details.Length; i++) {
			DetailMap d = new DetailMap();
			d.map = new int[TERRAIN_SIZE, TERRAIN_SIZE];
			d.proto = details[i];
			detailMaps[i] = d;
		}
	}

	// Builds the miniature terrain (prototypes copied from originalTerrainData) and
	// starts the thumbnail-processing coroutine.
	void Start()
	{
		// UnityEngine.TerrainData originalTerrainData = GameObject.Find ("Terrain").GetComponent<Terrain>().terrainData;
		// Terrain terrain = gameObject.AddComponent<Terrain>();
		data = new UnityEngine.TerrainData();
		data.baseMapResolution = TERRAIN_SIZE;
		data.alphamapResolution = TERRAIN_SIZE;
		data.SetDetailResolution(TERRAIN_SIZE, 64);
		data.heightmapResolution = TERRAIN_SIZE;
		data.size = new Vector3(TERRAIN_SIZE * TERRAIN_SCALE, 100f, TERRAIN_SIZE * TERRAIN_SCALE);
		// terrain.terrainData = data;
		data.splatPrototypes = originalTerrainData.splatPrototypes;
		data.treePrototypes = originalTerrainData.treePrototypes;
		data.detailPrototypes = originalTerrainData.detailPrototypes;
		GameObject terrainGO = Terrain.CreateTerrainGameObject(data);
		terrainGO.transform.parent = transform;
		terrainGO.transform.localPosition = Vector3.zero;
		terrainGO.transform.localRotation = Quaternion.identity;
		terrainGO.transform.localScale = Vector3.one;
		// Rendered on the GUI layer so only the icon camera sees it
		terrainGO.layer = Layers.L_GUI;
		SetupDetailMap(data);
		// TerrainCollider collider = gameObject.AddComponent<TerrainCollider>();
		// collider.terrainData = data;
		StartCoroutine(COHandleThumbnails());
		stencils = new RenderTileIconStencil[EcoTerrainElements.self.decals.Length];
	}

	// Coroutine: drains the queue, rendering one 64x64 icon per ~0.1s; idles at 0.5s polls.
	IEnumerator COHandleThumbnails()
	{
		yield return new WaitForSeconds(1f);
		while (true) {
			if (head != null) {
				TileType tt = head.tile;
				Texture2D icon = new Texture2D(64, 64, TextureFormat.RGB24, false, false);
				yield return new WaitForEndOfFrame ();
				Render(defaultSettings, ref icon, tt);
				tt.SetIcon(icon);
				head = head.next;
				if (head == null)
					tail = null;
				yield return new WaitForSeconds(0.1f);
			}
			else {
				yield return new WaitForSeconds(0.5f);
			}
		}
	}

	// Convenience overloads: render with a fresh id (0L forces regeneration).
	public long Render(RenderSettings settings, ref Texture2D resultTex, TileType tile, Mesh mesh, Material material, GameObject road)
	{
		return ReRender(settings, 0L, ref resultTex, tile, mesh, material, road);
	}

	public long Render(RenderSettings settings, ref Texture2D resultTex, TileType tile, Mesh mesh, Material material)
	{
		return ReRender(settings, 0L, ref resultTex, tile, mesh, material, null);
	}

	public long Render(RenderSettings settings, ref Texture2D resultTex, TileType tile)
	{
		return ReRender(settings, 0L, ref resultTex, tile, null, null, null);
	}

	public long ReRender(RenderSettings settings, long currentId, ref Texture2D resultTex, TileType tile)
	{
		return ReRender(settings, currentId, ref resultTex, tile, null, null, null);
	}

	// NOTE(review): this overload forwards 0L instead of currentId, so it always
	// regenerates the terrain — looks unintentional compared to its siblings; confirm.
	public long ReRender(RenderSettings settings, long currentId, ref Texture2D resultTex, TileType tile, Mesh mesh, Material material)
	{
		return ReRender(settings, 0L, ref resultTex, tile, mesh, material, null);
	}

	/// <summary>
	/// Regenerates the miniature scene if currentId is stale (skips Generate when the
	/// caller's id matches renderId), positions the camera per settings, and renders
	/// into resultTex (recreated as 128x128 RGB24 if needed). Returns the new renderId.
	/// </summary>
	public long ReRender(RenderSettings settings, long currentId, ref Texture2D resultTex, TileType tile, Mesh mesh, Material material, GameObject road)
	{
		if (currentId != renderId) {
			TileType surroundings = (settings.emptySurroundings)?(tile.vegetationType.tiles[0]):tile;
			// first generate the terrain...
			Generate(tile, surroundings);
			if (mesh != null) {
				// we have an object to show on terrain (something like a building)
				GameObject go = new GameObject("extra");
				go.transform.parent = transform;
				go.transform.localPosition = new Vector3 ((TERRAIN_SIZE / 2) * TERRAIN_SCALE, 0f, (TERRAIN_SIZE / 2) * TERRAIN_SCALE);
				go.transform.localRotation = Quaternion.identity;
				go.transform.localScale = Vector3.one;
				go.AddComponent<MeshFilter>().sharedMesh = mesh;
				go.AddComponent<MeshRenderer>().sharedMaterial = material;
				objects.Add(go);
			}
			if (road != null) {
				// Instantiate a road prefab crossing the centre of the miniature terrain
				GameObject go = (GameObject) Instantiate (road);
				go.transform.parent = transform;
				go.transform.localPosition = new Vector3 ((TERRAIN_SIZE / 2) * TERRAIN_SCALE, 0f, (TERRAIN_SIZE / 2) * TERRAIN_SCALE);
				go.transform.localRotation = Quaternion.identity;
				go.transform.localScale = Vector3.one;
				RoadInstance ri = go.GetComponent <RoadInstance> ();
				Roads.Road rdata = new Roads.Road ();
				rdata.points = new List<Vector3> ();
				rdata.points.Add (new Vector3 (-(TERRAIN_SIZE / 2) * TERRAIN_SCALE, 0f, 0f));
				rdata.points.Add (new Vector3 (0, 0, 0));
				rdata.points.Add (new Vector3 ((TERRAIN_SIZE / 2) * TERRAIN_SCALE, 0f, (TERRAIN_SIZE / 2) * TERRAIN_SCALE));
				ri.Setup (rdata);
				objects.Add(go);
			}
			renderId++;
		}

		// Position the camera on a sphere around the terrain centre
		float offset = (0.5f + TERRAIN_SCALE) * TERRAIN_SIZE / 2;
		Transform cameraT = renderCamera.transform;
		float distH = Mathf.Cos(Mathf.Deg2Rad * settings.angleV) * settings.distance;
		float distV = Mathf.Sin(Mathf.Deg2Rad * settings.angleV) * settings.distance;
		cameraT.localPosition = new Vector3(offset + distH * Mathf.Cos(Mathf.Deg2Rad * settings.angleH),
			distV + settings.offsetV,
			offset + distH * Mathf.Sin(Mathf.Deg2Rad * settings.angleH) );
		Vector3 centreP = transform.position + new Vector3(offset, 0f, offset);
		cameraT.LookAt(centreP + new Vector3(0f, settings.offsetV, 0f), Vector3.up);

		if ((resultTex == null) || (resultTex.format != TextureFormat.RGB24)) {
			if (resultTex != null) {
				Destroy(resultTex);
			}
			resultTex = new Texture2D(128, 128, TextureFormat.RGB24, false);
		}

		// Render into a temporary RenderTexture, then read the pixels back
		RenderTexture rt = RenderTexture.GetTemporary(resultTex.width, resultTex.height, 24, RenderTextureFormat.ARGB32);
		rt.useMipMap = false;
		rt.wrapMode = TextureWrapMode.Clamp;
		renderCamera.targetTexture = rt;
		renderCamera.Render();
		RenderTexture.active = rt;
		resultTex.ReadPixels(new Rect(0, 0, resultTex.width, resultTex.height), 0, 0, false);
		RenderTexture.active = null;
		RenderTexture.ReleaseTemporary(rt);
		resultTex.Apply();
		// NOTE(review): unloading assets and forcing GC on every render is expensive;
		// presumably deliberate to bound memory between renders — confirm before changing.
		Resources.UnloadUnusedAssets();
		System.GC.Collect();
		return renderId;
	}

	// Rebuilds the miniature terrain contents: centre gets the "centre" tile, all
	// other cells get "surroundings". Fills splats, trees, objects, details and decals.
	void Generate(TileType centre, TileType surroundings)
	{
		int centrePos = TERRAIN_SIZE / 2;
		// first remove old objects from previous generates...
		foreach (GameObject obj in objects) {
			if (obj)
				DestroyImmediate(obj);
		}
		objects.Clear();
		// remove old stencils
		for (int i = 0; i < stencils.Length; i++) {
			if (stencils[i] != null) {
				RenderTileIconStencil.DestroyStencil(stencils[i]);
				stencils[i] = null;
			}
		}
		// reset all non-empty detail maps
		foreach (DetailMap d in detailMaps) {
			if (!d.isEmpty) {
				d.isEmpty = true;
				d.map = new int[TERRAIN_SIZE, TERRAIN_SIZE];
			}
		}
		// alpha map (terrain ground colours)
		float[,,] alpha = new float[TERRAIN_SIZE, TERRAIN_SIZE, 4];
		List<TreeInstance> treeList = new List<TreeInstance>();
		GameObject[] prefabs = EcoTerrainElements.self.tileObjects;
		int randomSeed = 1;
		bool noRandom = false;
		for (int y = 0; y < TERRAIN_SIZE; y++) {
			for (int x = 0; x < TERRAIN_SIZE; x++) {
				// Deterministic per-cell RNG so repeated renders are identical
				System.Random rnd = new System.Random((randomSeed + x + 3333 * y) | x | y);
				TileType tile = ((x == centrePos) && (y == centrePos))?centre:surroundings;
				// first handle terrain colour
				alpha[y, x, 0] = tile.splat0;
				alpha[y, x, 1] = tile.splat1;
				alpha[y, x, 2] = tile.splat2;
				alpha[y, x, 3] = 1f - tile.splat0 - tile.splat1 - tile.splat2;
				// place trees
				foreach (TileType.TreeData treeData in tile.trees) {
					TreeInstance ti = new TreeInstance();
					float tx = treeData.x;
					float ty = treeData.y;
					float rad = treeData.r;
					if ((rad > 0f) && (!noRandom)) {
						// Jitter position within radius r around the tree anchor
						float angle = (float) rnd.NextDouble() * Mathf.PI * 2;
						rad = rad * (float) rnd.NextDouble();
						tx += Mathf.Sin(angle) * rad;
						ty += Mathf.Cos(angle) * rad;
					}
					// ti.position = TreePos(ref heightMap, x + Mathf.Clamp(tx, 0f, 0.999f), y + Mathf.Clamp(ty, 0f, 0.999f));
					tx = x + Mathf.Clamp(tx, 0f, 0.999f);
					ty = y + Mathf.Clamp(ty, 0f, 0.999f);
					ti.position = new Vector3(tx / TERRAIN_SIZE, 0f, ty / TERRAIN_SIZE);
					ti.prototypeIndex = treeData.prototypeIndex;
					if (noRandom) {
						// Use mid-range size and the "to" colour when randomness is disabled
						ti.heightScale = 0.5f * (treeData.minHeight + treeData.maxHeight);
						ti.widthScale = ti.heightScale * 0.5f * (treeData.minWidthVariance + treeData.maxWidthVariance);
						Color c = treeData.colorTo;
						ti.color = c;
					}
					else {
						ti.heightScale = RndUtil.RndRange(ref rnd, treeData.minHeight, treeData.maxHeight);
						ti.widthScale = ti.heightScale * RndUtil.RndRange(ref rnd, treeData.minWidthVariance, treeData.maxWidthVariance);
						Color c = RndUtil.RndRange(ref rnd, treeData.colorFrom, treeData.colorTo);
						ti.color = c;
					}
					ti.lightmapColor = Color.white;
					treeList.Add(ti);
				}
				// objects
				foreach (TileType.ObjectData objData in tile.objects) {
					GameObject go = (GameObject) GameObject.Instantiate(prefabs[objData.index]);
					go.layer = Layers.L_GUI;
					Transform t = go.transform;
					t.parent = transform;
					float tx = objData.x;
					float ty = objData.y;
					float rad = objData.r;
					t.localRotation = Quaternion.Euler(0f, objData.angle, 0f);
					if ((rad > 0f) && (!noRandom)) {
						// Same jitter scheme as for trees
						float angle = (float) rnd.NextDouble() * Mathf.PI * 2;
						rad = rad * (float) rnd.NextDouble();
						tx += Mathf.Sin(angle) * rad;
						ty += Mathf.Cos(angle) * rad;
					}
					tx = x + Mathf.Clamp(tx, 0f, 0.999f);
					ty = y + Mathf.Clamp(ty, 0f, 0.999f);
					t.localPosition = new Vector3(tx * TERRAIN_SCALE, 0f, ty * TERRAIN_SCALE);
					if (noRandom) {
						float heightScale = 0.5f * (objData.minHeight + objData.maxHeight);
						float widthScale = heightScale * 0.5f * (objData.minWidthVariance + objData.maxWidthVariance);
						t.localScale = new Vector3(widthScale, heightScale, widthScale);
					}
					else {
						float heightScale = RndUtil.RndRange(ref rnd, objData.minHeight, objData.maxHeight);
						float widthScale = heightScale * RndUtil.RndRange(ref rnd, objData.minWidthVariance, objData.maxWidthVariance);
						t.localScale = new Vector3(widthScale, heightScale, widthScale);
					}
					objects.Add(go);
				}
				// detail
				for (int i = 0; i < tile.detailCounts.Length; i++) {
					int dc = tile.detailCounts[i];
					if (dc > 0) {
						DetailMap dMap = detailMaps[i];
						if (dMap.isEmpty) {
							dMap.isEmpty = false;
						}
						dMap.map[y, x] = dc;
					}
				}
				// decals: lazily create one stencil per decal id and register the cell
				for (int i = 0; i < tile.decals.Length; i++) {
					int id = tile.decals[i];
					RenderTileIconStencil stencil = stencils[id];
					if (stencil == null) {
						stencil = RenderTileIconStencil.CreateStencil(transform, id);
						stencils[id] = stencil;
					}
					stencil.AddTile(x, y);
				}
			}
		}
		data.SetAlphamaps(0, 0, alpha);
		data.treeInstances = treeList.ToArray();
		// Only non-empty detail layers are pushed to the terrain data
		List<DetailPrototype> detailPrototypes = new List<DetailPrototype>();
		foreach (DetailMap d in detailMaps) {
			if (!d.isEmpty) {
				detailPrototypes.Add(d.proto);
			}
		}
		// NOTE(review): Start() uses resolutionPerPatch 64 but this uses 32 — confirm intended.
		data.SetDetailResolution(TERRAIN_SIZE, 32);
		data.detailPrototypes = detailPrototypes.ToArray();
		int j = 0;
		foreach (DetailMap d in detailMaps) {
			if (!d.isEmpty) {
				data.SetDetailLayer(0, 0, j++, d.map);
			}
		}
		for (int i = 0; i < stencils.Length; i++) {
			if (stencils[i] != null) {
				stencils[i].GenerateMesh();
			}
		}
	}
}
using System;
using SubSonic.Schema;
using SubSonic.DataProviders;
using System.Data;

namespace Solution.DataAccess.DataModel
{
    /// <summary>
    /// Table: MeetingRoomApply
    /// Primary Key: Id
    /// Schema definition for meeting-room application records.
    /// </summary>
    public class MeetingRoomApplyStructs : DatabaseTable
    {
        public MeetingRoomApplyStructs(IDataProvider provider)
            : base("MeetingRoomApply", provider)
        {
            ClassName = "MeetingRoomApply";
            SchemaName = "dbo";

            // Identity primary key.
            AddColumn("Id", DbType.Int64, false, 0, true, true);

            // Application identification.
            AddColumn("Code", DbType.AnsiString, true, 16, false, false);
            AddColumn("Name", DbType.String, true, 100, false, false);

            // Meeting room being applied for.
            AddColumn("MeetingRoom_Code", DbType.AnsiString, false, 16, false, false);
            AddColumn("MeetingRoom_Name", DbType.String, true, 100, false, false);

            // Scheduling.
            AddColumn("ApplyDate", DbType.DateTime, false, 0, false, false);
            AddColumn("StartTime", DbType.DateTime, false, 0, false, false);
            AddColumn("EndTime", DbType.DateTime, false, 0, false, false);

            // Applicant and department.
            AddColumn("Employee_EmpId", DbType.AnsiString, false, 16, false, false);
            AddColumn("Employee_Name", DbType.String, true, 100, false, false);
            AddColumn("DepartId", DbType.AnsiString, true, 16, false, false);
            AddColumn("DepartName", DbType.String, true, 100, false, false);

            // Flags and notes. ("IsVaild" spelling is kept — it must match the database column.)
            AddColumn("IsVideo", DbType.Byte, false, 0, false, false);
            AddColumn("Remark", DbType.String, true, 500, false, false);
            AddColumn("IsVaild", DbType.Byte, true, 0, false, false);
        }

        // Registers one column definition. The CLR property name always mirrors the
        // column name, and no column in this table is a foreign key.
        private void AddColumn(string name, DbType dataType, bool isNullable, int maxLength, bool isPrimaryKey, bool autoIncrement)
        {
            Columns.Add(new DatabaseColumn(name, this)
            {
                IsPrimaryKey = isPrimaryKey,
                DataType = dataType,
                IsNullable = isNullable,
                AutoIncrement = autoIncrement,
                IsForeignKey = false,
                MaxLength = maxLength,
                PropertyName = name
            });
        }

        public IColumn Id { get { return GetColumn("Id"); } }

        public IColumn Code { get { return GetColumn("Code"); } }

        public IColumn Name { get { return GetColumn("Name"); } }

        public IColumn MeetingRoom_Code { get { return GetColumn("MeetingRoom_Code"); } }

        public IColumn MeetingRoom_Name { get { return GetColumn("MeetingRoom_Name"); } }

        public IColumn ApplyDate { get { return GetColumn("ApplyDate"); } }

        public IColumn StartTime { get { return GetColumn("StartTime"); } }

        public IColumn EndTime { get { return GetColumn("EndTime"); } }

        public IColumn Employee_EmpId { get { return GetColumn("Employee_EmpId"); } }

        public IColumn Employee_Name { get { return GetColumn("Employee_Name"); } }

        public IColumn DepartId { get { return GetColumn("DepartId"); } }

        public IColumn DepartName { get { return GetColumn("DepartName"); } }

        public IColumn IsVideo { get { return GetColumn("IsVideo"); } }

        public IColumn Remark { get { return GetColumn("Remark"); } }

        public IColumn IsVaild { get { return GetColumn("IsVaild"); } }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ // // UnionQueryOperator.cs // // =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- using System.Collections.Generic; using System.Diagnostics; using System.Threading; namespace System.Linq.Parallel { /// <summary> /// Operator that yields the union of two data sources. /// </summary> /// <typeparam name="TInputOutput"></typeparam> internal sealed class UnionQueryOperator<TInputOutput> : BinaryQueryOperator<TInputOutput, TInputOutput, TInputOutput> { private readonly IEqualityComparer<TInputOutput> _comparer; // An equality comparer. //--------------------------------------------------------------------------------------- // Constructs a new union operator. // internal UnionQueryOperator(ParallelQuery<TInputOutput> left, ParallelQuery<TInputOutput> right, IEqualityComparer<TInputOutput> comparer) : base(left, right) { Debug.Assert(left != null && right != null, "child data sources cannot be null"); _comparer = comparer; _outputOrdered = LeftChild.OutputOrdered || RightChild.OutputOrdered; } //--------------------------------------------------------------------------------------- // Just opens the current operator, including opening the child and wrapping it with // partitions as needed. // internal override QueryResults<TInputOutput> Open( QuerySettings settings, bool preferStriping) { // We just open our child operators, left and then right. Do not propagate the preferStriping value, but // instead explicitly set it to false. Regardless of whether the parent prefers striping or range // partitioning, the output will be hash-partitioned. 
QueryResults<TInputOutput> leftChildResults = LeftChild.Open(settings, false); QueryResults<TInputOutput> rightChildResults = RightChild.Open(settings, false); return new BinaryQueryOperatorResults(leftChildResults, rightChildResults, this, settings, false); } public override void WrapPartitionedStream<TLeftKey, TRightKey>( PartitionedStream<TInputOutput, TLeftKey> leftStream, PartitionedStream<TInputOutput, TRightKey> rightStream, IPartitionedStreamRecipient<TInputOutput> outputRecipient, bool preferStriping, QuerySettings settings) { Debug.Assert(leftStream.PartitionCount == rightStream.PartitionCount); int partitionCount = leftStream.PartitionCount; // Wrap both child streams with hash repartition if (LeftChild.OutputOrdered) { PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftHashStream = ExchangeUtilities.HashRepartitionOrdered<TInputOutput, NoKeyMemoizationRequired, TLeftKey>( leftStream, null, null, _comparer, settings.CancellationState.MergedCancellationToken); WrapPartitionedStreamFixedLeftType<TLeftKey, TRightKey>( leftHashStream, rightStream, outputRecipient, partitionCount, settings.CancellationState.MergedCancellationToken); } else { PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, int> leftHashStream = ExchangeUtilities.HashRepartition<TInputOutput, NoKeyMemoizationRequired, TLeftKey>( leftStream, null, null, _comparer, settings.CancellationState.MergedCancellationToken); WrapPartitionedStreamFixedLeftType<int, TRightKey>( leftHashStream, rightStream, outputRecipient, partitionCount, settings.CancellationState.MergedCancellationToken); } } //--------------------------------------------------------------------------------------- // A helper method that allows WrapPartitionedStream to fix the TLeftKey type parameter. 
// private void WrapPartitionedStreamFixedLeftType<TLeftKey, TRightKey>( PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftHashStream, PartitionedStream<TInputOutput, TRightKey> rightStream, IPartitionedStreamRecipient<TInputOutput> outputRecipient, int partitionCount, CancellationToken cancellationToken) { if (RightChild.OutputOrdered) { PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> rightHashStream = ExchangeUtilities.HashRepartitionOrdered<TInputOutput, NoKeyMemoizationRequired, TRightKey>( rightStream, null, null, _comparer, cancellationToken); WrapPartitionedStreamFixedBothTypes<TLeftKey, TRightKey>( leftHashStream, rightHashStream, outputRecipient, partitionCount, cancellationToken); } else { PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, int> rightHashStream = ExchangeUtilities.HashRepartition<TInputOutput, NoKeyMemoizationRequired, TRightKey>( rightStream, null, null, _comparer, cancellationToken); WrapPartitionedStreamFixedBothTypes<TLeftKey, int>( leftHashStream, rightHashStream, outputRecipient, partitionCount, cancellationToken); } } //--------------------------------------------------------------------------------------- // A helper method that allows WrapPartitionedStreamHelper to fix the TRightKey type parameter. 
// private void WrapPartitionedStreamFixedBothTypes<TLeftKey, TRightKey>( PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftHashStream, PartitionedStream<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> rightHashStream, IPartitionedStreamRecipient<TInputOutput> outputRecipient, int partitionCount, CancellationToken cancellationToken) { if (LeftChild.OutputOrdered || RightChild.OutputOrdered) { IComparer<ConcatKey<TLeftKey, TRightKey>> compoundKeyComparer = ConcatKey<TLeftKey, TRightKey>.MakeComparer(leftHashStream.KeyComparer, rightHashStream.KeyComparer); PartitionedStream<TInputOutput, ConcatKey<TLeftKey, TRightKey>> outputStream = new PartitionedStream<TInputOutput, ConcatKey<TLeftKey, TRightKey>>(partitionCount, compoundKeyComparer, OrdinalIndexState.Shuffled); for (int i = 0; i < partitionCount; i++) { outputStream[i] = new OrderedUnionQueryOperatorEnumerator<TLeftKey, TRightKey>( leftHashStream[i], rightHashStream[i], LeftChild.OutputOrdered, RightChild.OutputOrdered, _comparer, compoundKeyComparer, cancellationToken); } outputRecipient.Receive(outputStream); } else { PartitionedStream<TInputOutput, int> outputStream = new PartitionedStream<TInputOutput, int>(partitionCount, Util.GetDefaultComparer<int>(), OrdinalIndexState.Shuffled); for (int i = 0; i < partitionCount; i++) { outputStream[i] = new UnionQueryOperatorEnumerator<TLeftKey, TRightKey>( leftHashStream[i], rightHashStream[i], _comparer, cancellationToken); } outputRecipient.Receive(outputStream); } } //--------------------------------------------------------------------------------------- // Returns an enumerable that represents the query executing sequentially. 
//
internal override IEnumerable<TInputOutput> AsSequentialQuery(CancellationToken token)
{
    // Wrap both children so sequential enumeration still observes cancellation, then
    // defer to LINQ-to-Objects Union for the actual set semantics.
    IEnumerable<TInputOutput> wrappedLeftChild = CancellableEnumerable.Wrap(LeftChild.AsSequentialQuery(token), token);
    IEnumerable<TInputOutput> wrappedRightChild = CancellableEnumerable.Wrap(RightChild.AsSequentialQuery(token), token);
    return wrappedLeftChild.Union(wrappedRightChild, _comparer);
}

//---------------------------------------------------------------------------------------
// Whether this operator performs a premature merge that would not be performed in
// a similar sequential operation (i.e., in LINQ to Objects).
//

internal override bool LimitsParallelism
{
    get { return false; }
}

//---------------------------------------------------------------------------------------
// This enumerator performs the union operation incrementally. It does this by maintaining
// a history -- in the form of a set -- of all data already seen. It is careful not to
// return any duplicates.
//

private class UnionQueryOperatorEnumerator<TLeftKey, TRightKey> : QueryOperatorEnumerator<TInputOutput, int>
{
    private QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
    private QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> _rightSource; // Right data source.
    private Set<TInputOutput> _hashLookup; // The hash lookup, used to produce the union.
    private readonly CancellationToken _cancellationToken; // Token polled periodically during enumeration.
    private Shared<int> _outputLoopCount; // Poll counter for the right-source loop (survives across MoveNext calls).
    private readonly IEqualityComparer<TInputOutput> _comparer; // Element comparer used by the hash lookup.

    //---------------------------------------------------------------------------------------
    // Instantiates a new union operator.
    //

    internal UnionQueryOperatorEnumerator(
        QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
        QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> rightSource,
        IEqualityComparer<TInputOutput> comparer, CancellationToken cancellationToken)
    {
        Debug.Assert(leftSource != null);
        Debug.Assert(rightSource != null);

        _leftSource = leftSource;
        _rightSource = rightSource;
        _comparer = comparer;
        _cancellationToken = cancellationToken;
    }

    //---------------------------------------------------------------------------------------
    // Walks the two data sources, left and then right, to produce the union.
    //

    internal override bool MoveNext(ref TInputOutput currentElement, ref int currentKey)
    {
        // Lazily allocate the dedup state on the first call.
        if (_hashLookup == null)
        {
            _hashLookup = new Set<TInputOutput>(_comparer);
            _outputLoopCount = new Shared<int>(0);
        }

        Debug.Assert(_hashLookup != null);

        // Enumerate the left and then right data source. When each is done, we set the
        // field to null so we will skip it upon subsequent calls to MoveNext.
        if (_leftSource != null)
        {
            // Iterate over this set's elements until we find a unique element.
            TLeftKey keyUnused = default(TLeftKey);
            Pair<TInputOutput, NoKeyMemoizationRequired> currentLeftElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);

            int i = 0;
            while (_leftSource.MoveNext(ref currentLeftElement, ref keyUnused))
            {
                // Poll for cancellation only every POLL_INTERVAL iterations to keep the loop cheap.
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // We ensure we never return duplicates by tracking them in our set.
                if (_hashLookup.Add(currentLeftElement.First))
                {
#if DEBUG
                    // Unordered output: the key is meaningless, so poison it in debug builds.
                    currentKey = unchecked((int)0xdeadbeef);
#endif
                    currentElement = currentLeftElement.First;
                    return true;
                }
            }

            _leftSource.Dispose();
            _leftSource = null;
        }

        if (_rightSource != null)
        {
            // Iterate over this set's elements until we find a unique element.
            TRightKey keyUnused = default(TRightKey);
            Pair<TInputOutput, NoKeyMemoizationRequired> currentRightElement = default(Pair<TInputOutput, NoKeyMemoizationRequired>);

            while (_rightSource.MoveNext(ref currentRightElement, ref keyUnused))
            {
                // The shared counter preserves the poll cadence across re-entrant MoveNext calls.
                if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // We ensure we never return duplicates by tracking them in our set.
                if (_hashLookup.Add(currentRightElement.First))
                {
#if DEBUG
                    currentKey = unchecked((int)0xdeadbeef);
#endif
                    currentElement = currentRightElement.First;
                    return true;
                }
            }

            _rightSource.Dispose();
            _rightSource = null;
        }

        return false;
    }

    protected override void Dispose(bool disposing)
    {
        // Sources may already have been disposed and nulled out by MoveNext.
        if (_leftSource != null)
        {
            _leftSource.Dispose();
        }
        if (_rightSource != null)
        {
            _rightSource.Dispose();
        }
    }
}

//---------------------------------------------------------------------------------------
// This enumerator is used when one or both sides of the union are ordered. It buffers the
// whole union, keeping for each distinct element the smallest compound order key seen.
//

private class OrderedUnionQueryOperatorEnumerator<TLeftKey, TRightKey> :
    QueryOperatorEnumerator<TInputOutput, ConcatKey<TLeftKey, TRightKey>>
{
    private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> _leftSource; // Left data source.
    private readonly QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> _rightSource; // Right data source.
    private readonly IComparer<ConcatKey<TLeftKey, TRightKey>> _keyComparer; // Comparer for compound order keys.
    private IEnumerator<KeyValuePair<Wrapper<TInputOutput>, Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>>> _outputEnumerator; // Enumerator over the output of the union.
    private readonly bool _leftOrdered; // Whether the left data source is ordered.
    private readonly bool _rightOrdered; // Whether the right data source is ordered.
    private readonly IEqualityComparer<TInputOutput> _comparer; // Comparer for the elements.
    private readonly CancellationToken _cancellationToken; // Token polled periodically while buffering.

    //---------------------------------------------------------------------------------------
    // Instantiates a new union operator.
    //

    internal OrderedUnionQueryOperatorEnumerator(
        QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TLeftKey> leftSource,
        QueryOperatorEnumerator<Pair<TInputOutput, NoKeyMemoizationRequired>, TRightKey> rightSource,
        bool leftOrdered, bool rightOrdered, IEqualityComparer<TInputOutput> comparer,
        IComparer<ConcatKey<TLeftKey, TRightKey>> keyComparer, CancellationToken cancellationToken)
    {
        Debug.Assert(leftSource != null);
        Debug.Assert(rightSource != null);

        _leftSource = leftSource;
        _rightSource = rightSource;
        _keyComparer = keyComparer;
        _leftOrdered = leftOrdered;
        _rightOrdered = rightOrdered;
        _comparer = comparer;

        // Fall back to the default element comparer when none is supplied.
        if (_comparer == null)
        {
            _comparer = EqualityComparer<TInputOutput>.Default;
        }

        _cancellationToken = cancellationToken;
    }

    //---------------------------------------------------------------------------------------
    // Walks the two data sources, left and then right, to produce the union.
    //

    internal override bool MoveNext(ref TInputOutput currentElement, ref ConcatKey<TLeftKey, TRightKey> currentKey)
    {
        Debug.Assert(_leftSource != null);
        Debug.Assert(_rightSource != null);

        // On the first call, drain both sources into a dictionary keyed on the element,
        // keeping the entry with the smallest compound order key per distinct element.
        if (_outputEnumerator == null)
        {
            IEqualityComparer<Wrapper<TInputOutput>> wrapperComparer = new WrapperEqualityComparer<TInputOutput>(_comparer);
            Dictionary<Wrapper<TInputOutput>, Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>> union =
                new Dictionary<Wrapper<TInputOutput>, Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>>(wrapperComparer);

            Pair<TInputOutput, NoKeyMemoizationRequired> elem = default(Pair<TInputOutput, NoKeyMemoizationRequired>);
            TLeftKey leftKey = default(TLeftKey);

            int i = 0;
            while (_leftSource.MoveNext(ref elem, ref leftKey))
            {
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Unordered sides contribute a default key so their relative order is arbitrary.
                ConcatKey<TLeftKey, TRightKey> key =
                    ConcatKey<TLeftKey, TRightKey>.MakeLeft(_leftOrdered ? leftKey : default(TLeftKey));

                Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>> oldEntry;
                Wrapper<TInputOutput> wrappedElem = new Wrapper<TInputOutput>(elem.First);

                // Keep the occurrence with the smallest order key.
                if (!union.TryGetValue(wrappedElem, out oldEntry) || _keyComparer.Compare(key, oldEntry.Second) < 0)
                {
                    union[wrappedElem] = new Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>(elem.First, key);
                }
            }

            TRightKey rightKey = default(TRightKey);
            while (_rightSource.MoveNext(ref elem, ref rightKey))
            {
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                ConcatKey<TLeftKey, TRightKey> key =
                    ConcatKey<TLeftKey, TRightKey>.MakeRight(_rightOrdered ? rightKey : default(TRightKey));

                Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>> oldEntry;
                Wrapper<TInputOutput> wrappedElem = new Wrapper<TInputOutput>(elem.First);

                if (!union.TryGetValue(wrappedElem, out oldEntry) || _keyComparer.Compare(key, oldEntry.Second) < 0)
                {
                    union[wrappedElem] = new Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>>(elem.First, key);
                }
            }

            _outputEnumerator = union.GetEnumerator();
        }

        // Yield buffered results one at a time.
        if (_outputEnumerator.MoveNext())
        {
            Pair<TInputOutput, ConcatKey<TLeftKey, TRightKey>> current = _outputEnumerator.Current.Value;
            currentElement = current.First;
            currentKey = current.Second;
            return true;
        }

        return false;
    }

    protected override void Dispose(bool disposing)
    {
        Debug.Assert(_leftSource != null && _rightSource != null);
        _leftSource.Dispose();
        _rightSource.Dispose();
    }
} // OrderedUnionQueryOperatorEnumerator
} // enclosing operator class
} // namespace
//------------------------------------------------------------------------------
// <copyright file="SegmentedBag.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <disclaimer>
// THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
// PURPOSE.
// </disclaimer>
//------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.Linq;
using System.Text;
using System.Threading;
using System.Diagnostics;
using Microsoft.Research.Joins.Paper;

#if true
namespace Microsoft.Research.Joins.Paper.Bag {

  // Lifecycle states of a message slot within a segment.
  // NULLED -> PENDING (on add) -> CLAIMED (tentatively taken) -> CONSUMED or back to PENDING;
  // WOKEN marks a slot whose waiter has an async waker attached.
  enum Stat : int { NULLED = 0, PENDING = 1, CLAIMED = 2, CONSUMED = 3, WOKEN = 4 };

  // Non-generic base for one fixed-size segment of a lock-free segmented bag.
  internal abstract class Segment {
    internal const int SEGSIZEEXP = 5;    // log2 of segment capacity
    internal const int CACHELINEEXP = 3;  // log2 of cache-line size, in words
    internal const int SEGSIZE = 1 << SEGSIZEEXP;
    internal const int ALLOCSIZE = SEGSIZE;
    internal const int CACHELINE = 1 << CACHELINEEXP; // words

    // The channel this segment belongs to (opaque at this level).
    internal readonly object mChan;

    internal abstract Segment GetNext();

    /// <summary>
    /// Lower bound on index of first unconsumed item (if any).
    /// </summary>
    internal int mLow;

    /// <summary>
    /// Upper bound on index of last item.
    /// </summary>
    internal int mHigh;

    // Highest valid index in this segment; mHigh may overrun SEGSIZE during a racy add.
    internal int High { get { return Math.Min(mHigh, SEGSIZE - 1); } }

    internal bool IsFull() { return mHigh >= SEGSIZE - 1; }

    // Maps a logical index to a physical slot.
    protected int IdxOf(int i) {
      //Swap high and low parts of address to decrease cache locality...
      return ((i & (CACHELINE - 1)) << (SEGSIZEEXP - CACHELINEEXP)) | ((i & (~(CACHELINE - 1))) >> CACHELINEEXP);
    }

    abstract internal Signal GetWakeUpSignal(int i);
    abstract internal Stat GetStatus(int i);
    abstract internal void SetStatus(int i, Stat s);
    internal abstract R GetResult<R>(int i);
    internal abstract void SetResult<R>(int i, R result);
    internal abstract void SetException(int i, Exception exception);
    internal abstract Msg GetWaker(int i);
    internal abstract void SetWaker(int i, Msg waker);

    /// <summary>
    /// Attempt to move status from PENDING to newStatus.
    /// </summary>
    /// <returns>The previous status; move has succeeded only when the previous status was PENDING.</returns>
    abstract internal Stat TryMoveFromPending(int i, Stat newStatus);

    protected Segment(object chan) {
      mChan = chan;
    }
  }

  // Per-node storage abstraction; async channels store only a payload, sync channels
  // additionally carry a wake-up signal, a result slot, an exception slot and a waker.
  interface INodeFields<A,R> {
    void GetSignalForWakeUp();
    A payload { get; set; }
    // remaining properties are for synchronous channels only:
    Signal wakeUp { get; }
    R result { get; set;}
    Exception exception { get; set; }
    Msg asyncWaker { get; set; }
  }

  // Concrete segment: a fixed array of nodes, each a volatile status plus NodeFields.
  internal class Segment<A, R, NodeFields > : Segment where NodeFields : struct, INodeFields<A,R> {
    private struct Node {
      public volatile int status; // volatile: status publishes the fields write (release/acquire ordering)
      public NodeFields fields;
    }

    internal Segment<A,R,NodeFields> mNext; // next segment in the linked list (set by Grow)
    private Node[] mNodes;

    internal override Signal GetWakeUpSignal(int i) { return mNodes[IdxOf(i)].fields.wakeUp; }

    internal A GetPayload(int i) { return mNodes[IdxOf(i)].fields.payload; }

#warning "this is a GVM"
    internal override R1 GetResult<R1>(int i) {
      // Re-view this segment at the caller's result type; valid only for sync node fields.
      var This = (Segment<A,R1,SyncNodeFields<A,R1>>) (object) this;
      var node = (This.mNodes)[IdxOf(i)];
      if (node.fields.exception == null)
        return node.fields.result;
      else throw node.fields.exception;
    }

#warning "this is a GVM"
    internal override void SetResult<R1>(int i, R1 result) {
      var This = (Segment<A, R1,SyncNodeFields<A,R1>>)(object)this;
      (This.mNodes)[IdxOf(i)].fields.result = result;
    }

    internal override void SetException(int i, Exception exception) {
      (this.mNodes)[IdxOf(i)].fields.exception = exception;
    }

    internal override Msg GetWaker(int i) { return mNodes[IdxOf(i)].fields.asyncWaker; }

    internal override void SetWaker(int i, Msg waker) { mNodes[IdxOf(i)].fields.asyncWaker = waker; }

    internal override Stat GetStatus(int i) { return (Stat)mNodes[IdxOf(i)].status; }

    internal override void SetStatus(int i, Stat s) { mNodes[IdxOf(i)].status = (int)s; }

    internal override Stat TryMoveFromPending(int i, Stat newStat) {
      // before issuing CAS, check the status ---- ACTUALLY HURTS PERFORMANCE
#warning "TODO: investigate - does this optimization actually hurt performance or not?"
      var s = (Stat)mNodes[IdxOf(i)].status;
      if (s != Stat.PENDING) return s;
      return (Stat)Interlocked.CompareExchange(ref mNodes[IdxOf(i)].status, (int)newStat, (int)Stat.PENDING);
    }

    internal Segment(object chan) : base(chan) {
      mNodes = new Node[ALLOCSIZE];
      mLow = 0;
      mHigh = -1; // empty: first Interlocked.Increment yields index 0
    }

    /// <summary>
    /// Creates and links in a new tail segment.
    /// </summary>
    /// <returns>The new tail segment.</returns>
    internal Segment<A,R,NodeFields> Grow() {
      var seg = new Segment<A,R, NodeFields>(mChan);
      mNext = seg;
      return seg;
    }

    /// <summary>
    /// Attempt to add a new item to the segment; will start in PENDING status.
    /// </summary>
    /// <param name="payload">The item to add.</param>
    /// <param name="tail">The tail pointer for the Bag (updated if growing).</param>
    /// <param name="initialStatus">The initial status of the node.</param>
    /// <returns>A MessageRef, or default(MessageRef) if unsuccessful.</returns>
    internal Msg TryAdd(A payload, ref Segment<A,R, NodeFields> tail, Stat initialStatus) {
      if (mHigh >= SEGSIZE - 1) {
        return default(Msg); // another thread is growing the Bag; spin
      }
      var index = Interlocked.Increment(ref this.mHigh); // NOTE: mHigh can overrun SEGSIZE
      if (index < SEGSIZE) {
        int ixOf = IdxOf(index);
        mNodes[ixOf].fields.payload = payload;
#warning "Why do this for async channels?"
        // mNodes[ixOf].wakeUp = Signal.GetTLSignal();
        mNodes[ixOf].fields.GetSignalForWakeUp();
        // status is volatile, so ordering is guaranteed
        mNodes[ixOf].status = (int)initialStatus;
      }
      // time to grow?
      if (index == SEGSIZE - 1) tail = Grow();
      if (index < SEGSIZE)
        return new Msg(this, index);
      else
        return default(Msg); // another thread is growing the Bag, must retry
    }

    internal override Segment GetNext() {
      // Spin until the growing thread publishes mNext.
      var wait = new SpinWait();
      while (mNext == null) wait.SpinOnce();
      return mNext;
    }
  }

  // A (segment, index) reference to one message slot; a struct cursor over the bag.
  internal struct Msg {
    internal Segment mSeg;
    internal int mIdx;
    public static readonly Msg Null; // default: mSeg == null, i.e. IsNull

    public object Chan() { return /* (mSeg == null) ? null : */ mSeg.mChan; }

    public bool Is(Msg m) { return mSeg == m.mSeg && mIdx == m.mIdx; }

    public override string ToString () {
      if (mSeg != null) return mIdx.ToString();
      return "-";
    }

    internal Msg(Segment seg) {
      mSeg = seg;
      mIdx = seg.mLow;
      Stabilize();
    }

    internal Msg(Segment seg, int idx) {
      mSeg = seg;
      mIdx = idx;
      Stabilize();
    }

    // Advance past full segments; nulls mSeg when past the last valid item.
    private void Stabilize() {
      // while Grow in progress
      while (mIdx >= Segment.SEGSIZE && mSeg.mHigh >= Segment.SEGSIZE - 1) {
        mSeg = mSeg.GetNext();
        mIdx = mSeg.mLow;
      }
      if (mIdx > mSeg.High) mSeg = null;
    }

    internal bool MoveNext() {
      mIdx++;
      Stabilize();
      return !IsNull;
    }

    internal Msg Next(out bool success) {
      Msg ret = this;
      success = ret.MoveNext();
      return ret;
    }

    internal R GetResult<R>() { return mSeg.GetResult<R>(mIdx); }
    internal void SetResult<R>(R result) { mSeg.SetResult(mIdx, result); }
    internal void SetException(Exception exception) { mSeg.SetException(mIdx, exception); }

    internal void SetAsyncWaker(Msg waker) {
      mSeg.SetWaker(mIdx, waker);
      mSeg.SetStatus(mIdx, Stat.WOKEN);
    }

    internal bool HasAsyncWaker { get { return !(mSeg.GetWaker(mIdx).IsNull); } }

    internal Msg AsyncWaker {
      get { return mSeg.GetWaker(mIdx); }
      set { mSeg.SetWaker(mIdx, value); }
    }

    internal Signal Signal { get { return mSeg.GetWakeUpSignal(mIdx); } }

    internal Stat Status {
      get {
        Stat s = mSeg.GetStatus(mIdx);
        if (s == Stat.CONSUMED && mSeg.mLow == mIdx) mSeg.mLow = mIdx + 1; // catch up low water mark when possible.
        return s;
      }
      set {
        if (IsNull) return;
        mSeg.SetStatus(mIdx, value);
      }
    }

    internal bool IsConsumed {
      get {
        if (IsNull) return false;
        if (mSeg.GetStatus(mIdx) == Stat.CONSUMED) {
          if (mSeg.mLow == mIdx) mSeg.mLow = mIdx + 1; // catch up low water mark when possible.
          return true;
        }
        return false;
      }
    }

    internal bool IsWoken {
      get {
        if (IsNull) return false;
        return mSeg.GetStatus(mIdx) == Stat.WOKEN;
      }
    }

    internal bool IsNull { get { return mSeg == null; } }

    /// <summary>
    /// Move the status from CLAIMED to PENDING.
    /// </summary>
    internal void Rollback() {
      if (IsNull) return;
      mSeg.SetStatus(mIdx, Stat.PENDING);
      //mSeg = null;
    }

    /// <summary>
    /// Move the status from CLAIMED to CONSUMED.
    /// </summary>
    internal void Consume() {
      if (IsNull) return;
      if (mSeg.mLow == mIdx) mSeg.mLow = mIdx + 1;
      mSeg.SetStatus(mIdx, Stat.CONSUMED);
      //mSeg = null;
    }

    /// <summary>
    /// Attempt to move status from PENDING to newStatus.
    /// </summary>
    /// <returns>The previous status; move has succeeded only when the previous status was PENDING.</returns>
    internal Stat TryMoveFromPending(Stat newStatus) {
      var s = mSeg.TryMoveFromPending(mIdx, newStatus);
      if (s == Stat.CONSUMED && mSeg.mLow == mIdx) mSeg.mLow = mIdx + 1; // catch up low water mark when possible.
      return s;
    }

    // Attempt to claim this message (PENDING -> CLAIMED); a Null message trivially succeeds.
    internal bool TryClaim() {
      if (IsNull) return true;
      //while (!IsNull) {
      Stat s = TryMoveFromPending(Stat.CLAIMED);
      if (s == Stat.PENDING) return true;
      //  else if (s == Status.CLAIMED)
      //   retry = true;
      //  MoveNext();
      // }
      return false;
    }

#warning "this method is dead"
    internal bool TryClaimAnyPending(ref bool retry) {
      if (IsNull) return true;
      while (!IsNull) {
        Stat s = TryMoveFromPending(Stat.CLAIMED);
        if (s == Stat.PENDING) return true;
        else if (s == Stat.CLAIMED)
          retry = true;
        MoveNext();
      }
      return false;
    }

#warning "this method is dead"
    internal void Clear() {
      mSeg = null;
      mIdx = -1;
    }
  }

  // Non-generic bag base: exposes only the head cursor.
  internal abstract class Bag {
    internal abstract Msg Head();
  }

  // Node fields for asynchronous channels: payload only; all sync-only members are stubs.
  internal struct AsyncNodeFields<A> : INodeFields<A,Unit> {
    private A _payload;
    public A payload { get { return _payload; } set { _payload = value; ; } }
    public Signal wakeUp { get { return null; } set { ; } }
    public Unit result { get { return Unit.Null; } set { ; } }
    public Exception exception { get { return null; } set { ; } }
    public Msg asyncWaker { get { return Msg.Null; } set { ; } }
    public void GetSignalForWakeUp() {
      // do nothing
    }
  }

  // Node fields for synchronous channels: payload plus signal/result/exception/waker.
  internal struct SyncNodeFields<A,R> : INodeFields<A,R> {
    private A _payload;
    public A payload { get { return _payload; } set { _payload = value; } }
    private Signal _wakeUp;
    public Signal wakeUp { get { return _wakeUp ; } set { _wakeUp = value; } }
    private R _result;
    public R result { get { return _result; } set { _result = value ; } }
    private Exception _exception;
    public Exception exception { get { return _exception; } set { _exception = value; } }
    private Msg _asyncWaker;
    public Msg asyncWaker { get { return _asyncWaker; } set { _asyncWaker = value; } }
    public void GetSignalForWakeUp() {
      _wakeUp = Signal.GetTLSignal();
    }
  }

  // The bag proper: a linked list of segments with head/tail cursors.
  internal class Bag<A,R,NodeFields> : Bag where NodeFields :struct, INodeFields<A,R> {
    private Segment<A,R,NodeFields> mHead;
    private Segment<A,R,NodeFields> mTail;

    internal Bag(object chan) {
      mHead = mTail = new Segment<A,R,NodeFields>(chan);
    }

    internal A GetPayload(Msg msg) {
      var seg = (Segment<A, R, NodeFields>) msg.mSeg;
      return seg.GetPayload(msg.mIdx);
    }

    /// <summary>
    /// Add a message to the queue; will typically start in PENDING status.
    /// </summary>
    /// <param name="a">The payload</param>
    /// <param name="initialStatus">The intial status of the message (typically PENDING) </param>
    /// <returns>A reference to the enqueued message</returns>
    internal Msg Add(A a, Stat initialStatus) {
      var backoff = new Backoff();
      while (true) {
        var ret = mTail.TryAdd(a, ref mTail, initialStatus);
        if (!ret.IsNull) return ret;
        backoff.Once(); // another thread is growing; back off and retry
      }
    }

    // Advance mHead past fully-consumed segments.
    protected void CatchUpHead() {
      for (var head = mHead; head.mLow >= Segment.SEGSIZE && head.mNext != null; )
        head = (mHead = head.mNext);
    }

    internal override Msg Head() {
      CatchUpHead();
      return new Msg(mHead);
    }

    // Scan from the head for the first PENDING message; reports whether any CLAIMED
    // message was seen along the way (caller may need to retry later).
    internal Msg FindPendingMsg(out bool sawClaimed) {
      sawClaimed = false;
      Msg m = Head();
      while (!m.IsNull) {
        switch (m.Status) {
          case Stat.PENDING: {
            return m;
          }
          case Stat.CLAIMED:
            sawClaimed = true;
            break;
          default:
            break;
        }
        m.MoveNext();
      }
      return Msg.Null;
    }
  }
}
#endif
/* ==================================================================== */
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Drawing;

namespace Oranikle.Report.Engine
{
    /// <summary>EMF+ brush object types ([MS-EMFPLUS] BrushType enumeration).</summary>
    public enum EMFBrushType
    {
        SolidColor = 0x00000000,
        HatchFill = 0x00000001,
        TextureFill = 0x00000002,
        PathGradient = 0x00000003,
        LinearGradient = 0x00000004
    }

    /// <summary>
    /// Base class for EMF+ brush records; <see cref="ProcessBrush"/> dispatches on the
    /// serialized brush type and returns the matching subclass, which exposes the parsed
    /// GDI+ brush via <see cref="myBrush"/>.
    /// </summary>
    public class EMFBrush : EMFRecordObject
    {
        /// <summary>The GDI+ brush built from the record data (set by subclasses).</summary>
        public Brush myBrush;

        public EMFBrush()
        {
            ObjectType = EmfObjectType.brush;
        }

        /// <summary>Parses an EmfPlusBrush record payload into a concrete brush object.</summary>
        public static EMFBrush getEMFBrush(byte[] RecordData)
        {
            return ProcessBrush(RecordData);
        }

        /// <summary>
        /// Reads the brush header (version, type) and dispatches to the matching subclass
        /// constructor, which consumes the remainder of the stream.
        /// </summary>
        /// <param name="RecordData">Raw EmfPlusBrush object data.</param>
        /// <returns>The parsed brush, or null for an unrecognized brush type.</returns>
        /// <exception cref="NotSupportedException">Thrown for TextureFill and PathGradient brushes.</exception>
        public static EMFBrush ProcessBrush(byte[] RecordData)
        {
            // Put the data into a stream and use a binary reader to read the data.
            MemoryStream _ms = null;
            BinaryReader _br = null;
            try
            {
                _ms = new MemoryStream(RecordData);
                _br = new BinaryReader(_ms);

                UInt32 Version = _br.ReadUInt32(); // EmfPlusGraphicsVersion; read to advance, not otherwise used
                UInt32 BrushType = _br.ReadUInt32();

                switch (BrushType)
                {
                    case (UInt32)EMFBrushType.SolidColor:
                        return new EmfSolidBrush(_br);
                    case (UInt32)EMFBrushType.HatchFill:
                        return new EmfHatchFillBrush(_br);
                    case (UInt32)EMFBrushType.TextureFill:
                        throw new NotSupportedException("TextureFill brush Not Supported Yet!");
                    case (UInt32)EMFBrushType.PathGradient:
                        throw new NotSupportedException("PathGradient brush Not Supported Yet!");
                    case (UInt32)EMFBrushType.LinearGradient:
                        return new EmfLinearGradientBrush(_br);
                }
                return null;
            }
            finally
            {
                if (_br != null)
                    _br.Close();
                if (_ms != null)
                    _ms.Dispose();
            }
        }
    }

    /// <summary>
    /// Parses an EmfPlusLinearGradientBrushData payload into a
    /// <see cref="System.Drawing.Drawing2D.LinearGradientBrush"/>.
    /// </summary>
    public class EmfLinearGradientBrush : EMFBrush
    {
        public EmfLinearGradientBrush(BinaryReader _br)
        {
            UInt32 BrushFlags = _br.ReadUInt32();
            Int32 WrapMode = _br.ReadInt32();

            // Gradient rectangle.
            Single X = _br.ReadSingle();
            Single Y = _br.ReadSingle();
            Single Width = _br.ReadSingle();
            Single Height = _br.ReadSingle();
            RectangleF rf = new RectangleF(X, Y, Width, Height);

            // Colors are serialized as ARGB little-endian, i.e. B, G, R, A byte order.
            byte sA, sR, sG, sB;
            sB = _br.ReadByte(); sG = _br.ReadByte(); sR = _br.ReadByte(); sA = _br.ReadByte();
            byte eA, eR, eG, eB;
            eB = _br.ReadByte(); eG = _br.ReadByte(); eR = _br.ReadByte(); eA = _br.ReadByte();

            _br.ReadBytes(8); // reserved: duplicated start/end colors

            System.Drawing.Drawing2D.LinearGradientBrush tmpB =
                new System.Drawing.Drawing2D.LinearGradientBrush(
                    rf, Color.FromArgb(sA, sR, sG, sB), Color.FromArgb(eA, eR, eG, eB), 0f);
            tmpB.WrapMode = (System.Drawing.Drawing2D.WrapMode)WrapMode;

            // BUG FIX: every flag below was previously masked with 0x00000002 and compared
            // against a different constant, so (BrushFlags & 2) could never equal 0x4/0x8/
            // 0x10/0x80 -- all flags except BrushDataTransform were permanently false and
            // gamma correction was never applied. Mask each BrushData flag bit individually
            // per [MS-EMFPLUS] 2.1.2.1.
            bool BrushDataTransform = (BrushFlags & 0x00000002) != 0;
            bool BrushDataPresetColors = (BrushFlags & 0x00000004) != 0;
            bool BrushDataBlendFactorsH = (BrushFlags & 0x00000008) != 0;
            bool BrushDataBlendFactorsV = (BrushFlags & 0x00000010) != 0;
            bool BrushDataIsGammaCorrected = (BrushFlags & 0x00000080) != 0;

            tmpB.GammaCorrection = BrushDataIsGammaCorrected;

            if (BrushDataTransform)
            {
                _br.ReadBytes(24); // Transform matrix (ignored for now)
            }

            if (BrushDataPresetColors || BrushDataBlendFactorsH || BrushDataBlendFactorsV)
            {
                // There must be a blend pattern (preset colors, or horizontal/vertical
                // blend factors). Not parsed yet; the flags are decoded above so the
                // trailing data can be consumed here when support is added.
            }

            myBrush = tmpB;
        }
    }

    /// <summary>
    /// Parses an EmfPlusHatchBrushData payload into a
    /// <see cref="System.Drawing.Drawing2D.HatchBrush"/>.
    /// </summary>
    public class EmfHatchFillBrush : EMFBrush
    {
        public EmfHatchFillBrush(BinaryReader _br)
        {
            Int32 HatchStyle = _br.ReadInt32();

            // Foreground then background color, each serialized B, G, R, A.
            byte fA, fR, fG, fB;
            fB = _br.ReadByte(); fG = _br.ReadByte(); fR = _br.ReadByte(); fA = _br.ReadByte();
            byte bA, bR, bG, bB;
            bB = _br.ReadByte(); bG = _br.ReadByte(); bR = _br.ReadByte(); bA = _br.ReadByte();

            myBrush = new System.Drawing.Drawing2D.HatchBrush(
                (System.Drawing.Drawing2D.HatchStyle)HatchStyle,
                Color.FromArgb(fA, fR, fG, fB),
                Color.FromArgb(bA, bR, bG, bB));
        }
    }

    /// <summary>Parses an EmfPlusSolidBrushData payload into a <see cref="SolidBrush"/>.</summary>
    public class EmfSolidBrush : EMFBrush
    {
        /// <summary>Constructs a solid brush directly from ARGB components.</summary>
        public EmfSolidBrush(byte A, byte R, byte G, byte B)
        {
            myBrush = new SolidBrush(Color.FromArgb(A, R, G, B));
        }

        /// <summary>Reads the single ARGB color (serialized B, G, R, A) from the stream.</summary>
        public EmfSolidBrush(BinaryReader _br)
        {
            byte A, R, G, B;
            B = _br.ReadByte(); G = _br.ReadByte(); R = _br.ReadByte(); A = _br.ReadByte();
            myBrush = new SolidBrush(Color.FromArgb(A, R, G, B));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Reflection; using System.Diagnostics; using CultureInfo = System.Globalization.CultureInfo; namespace System { #if CORERT public sealed #else internal #endif partial class DefaultBinder : Binder { // This method is passed a set of methods and must choose the best // fit. The methods all have the same number of arguments and the object // array args. On exit, this method will choice the best fit method // and coerce the args to match that method. By match, we mean all primitive // arguments are exact matchs and all object arguments are exact or subclasses // of the target. If the target OR is an interface, the object must implement // that interface. There are a couple of exceptions // thrown when a method cannot be returned. If no method matchs the args and // ArgumentException is thrown. If multiple methods match the args then // an AmbiguousMatchException is thrown. // // The most specific match will be selected. // public sealed override MethodBase BindToMethod( BindingFlags bindingAttr, MethodBase[] match, ref object?[] args, ParameterModifier[]? modifiers, CultureInfo? cultureInfo, string[]? names, out object? state) { if (match == null || match.Length == 0) throw new ArgumentException(SR.Arg_EmptyArray, nameof(match)); MethodBase?[] candidates = (MethodBase[])match.Clone(); int i; int j; state = null; #region Map named parameters to candidate parameter positions // We are creating an paramOrder array to act as a mapping // between the order of the args and the actual order of the // parameters in the method. This order may differ because // named parameters (names) may change the order. If names // is not provided, then we assume the default mapping (0,1,...) 
int[][] paramOrder = new int[candidates.Length][]; for (i = 0; i < candidates.Length; i++) { ParameterInfo[] par = candidates[i]!.GetParametersNoCopy(); // args.Length + 1 takes into account the possibility of a last paramArray that can be omitted paramOrder[i] = new int[(par.Length > args.Length) ? par.Length : args.Length]; if (names == null) { // Default mapping for (j = 0; j < args.Length; j++) paramOrder[i][j] = j; } else { // Named parameters, reorder the mapping. If CreateParamOrder fails, it means that the method // doesn't have a name that matchs one of the named parameters so we don't consider it any further. if (!CreateParamOrder(paramOrder[i], par, names)) candidates[i] = null; } } #endregion Type[] paramArrayTypes = new Type[candidates.Length]; Type[] argTypes = new Type[args.Length]; #region Cache the type of the provided arguments // object that contain a null are treated as if they were typeless (but match either object // references or value classes). We mark this condition by placing a null in the argTypes array. for (i = 0; i < args.Length; i++) { if (args[i] != null) { argTypes[i] = args[i]!.GetType(); } } #endregion // Find the method that matches... int CurIdx = 0; bool defaultValueBinding = ((bindingAttr & BindingFlags.OptionalParamBinding) != 0); Type? paramArrayType; #region Filter methods by parameter count and type for (i = 0; i < candidates.Length; i++) { paramArrayType = null; // If we have named parameters then we may have a hole in the candidates array. if (candidates[i] == null) continue; // Validate the parameters. 
ParameterInfo[] par = candidates[i]!.GetParametersNoCopy(); // TODO-NULLABLE: Indexer nullability tracked (https://github.com/dotnet/roslyn/issues/34644) #region Match method by parameter count if (par.Length == 0) { #region No formal parameters if (args.Length != 0) { if ((candidates[i]!.CallingConvention & CallingConventions.VarArgs) == 0) // TODO-NULLABLE: Indexer nullability tracked (https://github.com/dotnet/roslyn/issues/34644) continue; } // This is a valid routine so we move it up the candidates list. paramOrder[CurIdx] = paramOrder[i]; candidates[CurIdx++] = candidates[i]; continue; #endregion } else if (par.Length > args.Length) { #region Shortage of provided parameters // If the number of parameters is greater than the number of args then // we are in the situation were we may be using default values. for (j = args.Length; j < par.Length - 1; j++) { if (par[j].DefaultValue == System.DBNull.Value) break; } if (j != par.Length - 1) continue; if (par[j].DefaultValue == System.DBNull.Value) { if (!par[j].ParameterType.IsArray) continue; if (!par[j].IsDefined(typeof(ParamArrayAttribute), true)) continue; paramArrayType = par[j].ParameterType.GetElementType(); } #endregion } else if (par.Length < args.Length) { #region Excess provided parameters // test for the ParamArray case int lastArgPos = par.Length - 1; if (!par[lastArgPos].ParameterType.IsArray) continue; if (!par[lastArgPos].IsDefined(typeof(ParamArrayAttribute), true)) continue; if (paramOrder[i][lastArgPos] != lastArgPos) continue; paramArrayType = par[lastArgPos].ParameterType.GetElementType(); #endregion } else { #region Test for paramArray, save paramArray type int lastArgPos = par.Length - 1; if (par[lastArgPos].ParameterType.IsArray && par[lastArgPos].IsDefined(typeof(ParamArrayAttribute), true) && paramOrder[i][lastArgPos] == lastArgPos) { if (!par[lastArgPos].ParameterType.IsAssignableFrom(argTypes[lastArgPos])) paramArrayType = par[lastArgPos].ParameterType.GetElementType(); } #endregion } 
#endregion Type pCls; int argsToCheck = (paramArrayType != null) ? par.Length - 1 : args.Length; #region Match method by parameter type for (j = 0; j < argsToCheck; j++) { #region Classic argument coersion checks // get the formal type pCls = par[j].ParameterType; if (pCls.IsByRef) pCls = pCls.GetElementType()!; // the type is the same if (pCls == argTypes[paramOrder[i][j]]) continue; // a default value is available if (defaultValueBinding && args[paramOrder[i][j]] == Type.Missing) continue; // the argument was null, so it matches with everything if (args[paramOrder[i][j]] == null) continue; // the type is Object, so it will match everything if (pCls == typeof(object)) continue; // now do a "classic" type check if (pCls.IsPrimitive) { if (argTypes[paramOrder[i][j]] == null || !CanChangePrimitive(args[paramOrder[i][j]]?.GetType(), pCls)) { break; } } else { if (argTypes[paramOrder[i][j]] == null) continue; if (!pCls.IsAssignableFrom(argTypes[paramOrder[i][j]])) { if (argTypes[paramOrder[i][j]].IsCOMObject) { if (pCls.IsInstanceOfType(args[paramOrder[i][j]])) continue; } break; } } #endregion } if (paramArrayType != null && j == par.Length - 1) { #region Check that excess arguments can be placed in the param array for (; j < args.Length; j++) { if (paramArrayType.IsPrimitive) { if (argTypes[j] == null || !CanChangePrimitive(args[j]?.GetType(), paramArrayType)) break; } else { if (argTypes[j] == null) continue; if (!paramArrayType.IsAssignableFrom(argTypes[j])) { if (argTypes[j].IsCOMObject) { if (paramArrayType.IsInstanceOfType(args[j])) continue; } break; } } } #endregion } #endregion if (j == args.Length) { #region This is a valid routine so we move it up the candidates list paramOrder[CurIdx] = paramOrder[i]; paramArrayTypes[CurIdx] = paramArrayType!; candidates[CurIdx++] = candidates[i]; #endregion } } #endregion // If we didn't find a method if (CurIdx == 0) throw new MissingMethodException(SR.MissingMember); if (CurIdx == 1) { #region Found only one method if 
(names != null) { state = new BinderState((int[])paramOrder[0].Clone(), args.Length, paramArrayTypes[0] != null); ReorderParams(paramOrder[0], args); } // If the parameters and the args are not the same length or there is a paramArray // then we need to create a argument array. ParameterInfo[] parms = candidates[0]!.GetParametersNoCopy(); if (parms.Length == args.Length) { if (paramArrayTypes[0] != null) { object[] objs = new object[parms.Length]; int lastPos = parms.Length - 1; Array.Copy(args, 0, objs, 0, lastPos); objs[lastPos] = Array.CreateInstance(paramArrayTypes[0], 1); ((Array)objs[lastPos]).SetValue(args[lastPos], 0); args = objs; } } else if (parms.Length > args.Length) { object?[] objs = new object[parms.Length]; for (i = 0; i < args.Length; i++) objs[i] = args[i]; for (; i < parms.Length - 1; i++) objs[i] = parms[i].DefaultValue; if (paramArrayTypes[0] != null) objs[i] = Array.CreateInstance(paramArrayTypes[0], 0); // create an empty array for the else objs[i] = parms[i].DefaultValue; args = objs; } else { if ((candidates[0]!.CallingConvention & CallingConventions.VarArgs) == 0) { object[] objs = new object[parms.Length]; int paramArrayPos = parms.Length - 1; Array.Copy(args, 0, objs, 0, paramArrayPos); objs[paramArrayPos] = Array.CreateInstance(paramArrayTypes[0], args.Length - paramArrayPos); Array.Copy(args, paramArrayPos, (System.Array)objs[paramArrayPos], 0, args.Length - paramArrayPos); args = objs; } } #endregion return candidates[0]!; } int currentMin = 0; bool ambig = false; for (i = 1; i < CurIdx; i++) { #region Walk all of the methods looking the most specific method to invoke int newMin = FindMostSpecificMethod(candidates[currentMin]!, paramOrder[currentMin], paramArrayTypes[currentMin], candidates[i]!, paramOrder[i], paramArrayTypes[i], argTypes, args); if (newMin == 0) { ambig = true; } else if (newMin == 2) { currentMin = i; ambig = false; } #endregion } if (ambig) throw new AmbiguousMatchException(SR.Arg_AmbiguousMatchException); // 
// Reorder (if needed)
if (names != null)
{
    state = new BinderState((int[])paramOrder[currentMin].Clone(), args.Length, paramArrayTypes[currentMin] != null);
    ReorderParams(paramOrder[currentMin], args);
}

// If the parameters and the args are not the same length or there is a paramArray
// then we need to create a argument array.
ParameterInfo[] parameters = candidates[currentMin]!.GetParametersNoCopy();
if (parameters.Length == args.Length)
{
    if (paramArrayTypes[currentMin] != null)
    {
        // Exactly one trailing argument goes into a 1-element params array.
        object[] objs = new object[parameters.Length];
        int lastPos = parameters.Length - 1;
        Array.Copy(args, 0, objs, 0, lastPos);
        objs[lastPos] = Array.CreateInstance(paramArrayTypes[currentMin], 1);
        ((Array)objs[lastPos]).SetValue(args[lastPos], 0);
        args = objs;
    }
}
else if (parameters.Length > args.Length)
{
    // Fewer args than parameters: fill the tail with default values,
    // and the last slot with either an empty params array or its default.
    object?[] objs = new object[parameters.Length];

    for (i = 0; i < args.Length; i++)
        objs[i] = args[i];

    for (; i < parameters.Length - 1; i++)
        objs[i] = parameters[i].DefaultValue;

    if (paramArrayTypes[currentMin] != null)
    {
        objs[i] = Array.CreateInstance(paramArrayTypes[currentMin], 0);
    }
    else
    {
        objs[i] = parameters[i].DefaultValue;
    }

    args = objs;
}
else
{
    // More args than parameters: pack the excess into the params array
    // (unless the method is varargs, which takes them as-is).
    if ((candidates[currentMin]!.CallingConvention & CallingConventions.VarArgs) == 0)
    {
        object[] objs = new object[parameters.Length];
        int paramArrayPos = parameters.Length - 1;
        Array.Copy(args, 0, objs, 0, paramArrayPos);
        objs[paramArrayPos] = Array.CreateInstance(paramArrayTypes[currentMin], args.Length - paramArrayPos);
        Array.Copy(args, paramArrayPos, (System.Array)objs[paramArrayPos], 0, args.Length - paramArrayPos);
        args = objs;
    }
}

return candidates[currentMin]!;
}

// Given a set of fields that match the base criteria, select a field.
// if value is null then we have no way to select a field
public sealed override FieldInfo BindToField(BindingFlags bindingAttr, FieldInfo[] match, object value, CultureInfo? cultureInfo)
{
    if (match == null)
    {
        throw new ArgumentNullException(nameof(match));
    }

    int i;
    // Find the field that matches...
    int CurIdx = 0;
    Type valueType;

    FieldInfo[] candidates = (FieldInfo[])match.Clone();

    // If we are a FieldSet, then use the value's type to disambiguate
    if ((bindingAttr & BindingFlags.SetField) != 0)
    {
        valueType = value.GetType();

        for (i = 0; i < candidates.Length; i++)
        {
            Type pCls = candidates[i].FieldType;
            if (pCls == valueType)
            {
                candidates[CurIdx++] = candidates[i];
                continue;
            }
            if (value == Empty.Value)
            {
                // the object passed in was null which would match any non primitive non value type
                if (pCls.IsClass)
                {
                    candidates[CurIdx++] = candidates[i];
                    continue;
                }
            }
            if (pCls == typeof(object))
            {
                candidates[CurIdx++] = candidates[i];
                continue;
            }
            if (pCls.IsPrimitive)
            {
                if (CanChangePrimitive(valueType, pCls))
                {
                    candidates[CurIdx++] = candidates[i];
                    continue;
                }
            }
            else
            {
                if (pCls.IsAssignableFrom(valueType))
                {
                    candidates[CurIdx++] = candidates[i];
                    continue;
                }
            }
        }

        if (CurIdx == 0)
            throw new MissingFieldException(SR.MissingField);
        if (CurIdx == 1)
            return candidates[0];
    }

    // Walk all of the fields looking for the most specific field to bind to.
    int currentMin = 0;
    bool ambig = false;
    for (i = 1; i < CurIdx; i++)
    {
        int newMin = FindMostSpecificField(candidates[currentMin], candidates[i]);
        if (newMin == 0)
            ambig = true;
        else
        {
            if (newMin == 2)
            {
                currentMin = i;
                ambig = false;
            }
        }
    }
    if (ambig)
        throw new AmbiguousMatchException(SR.Arg_AmbiguousMatchException);
    return candidates[currentMin];
}

// Given a set of methods that match the base criteria, select a method based
// upon an array of types. This method should return null if no method matches
// the criteria.
public sealed override MethodBase? SelectMethod(BindingFlags bindingAttr, MethodBase[] match, Type[] types, ParameterModifier[]? modifiers)
{
    int i;
    int j;

    Type[] realTypes = new Type[types.Length];
    for (i = 0; i < types.Length; i++)
    {
        realTypes[i] = types[i].UnderlyingSystemType;
        if (!(realTypes[i].IsRuntimeImplemented() || realTypes[i] is SignatureType))
            throw new ArgumentException(SR.Arg_MustBeType, nameof(types));
    }
    types = realTypes;

    // We don't automatically jump out on exact match.
    if (match == null || match.Length == 0)
        throw new ArgumentException(SR.Arg_EmptyArray, nameof(match));

    MethodBase[] candidates = (MethodBase[])match.Clone();

    // Find all the methods that can be described by the types parameter.
    // Remove all of them that cannot.
    int CurIdx = 0;
    for (i = 0; i < candidates.Length; i++)
    {
        ParameterInfo[] par = candidates[i].GetParametersNoCopy();
        if (par.Length != types.Length)
            continue;
        for (j = 0; j < types.Length; j++)
        {
            Type pCls = par[j].ParameterType;

            if (types[j].MatchesParameterTypeExactly(par[j]))
                continue;
            if (pCls == typeof(object))
                continue;

            Type? type = types[j];
            if (type is SignatureType signatureType)
            {
                // Generic method parameters must be resolved against the candidate first.
                if (!(candidates[i] is MethodInfo methodInfo))
                    break;
                type = signatureType.TryResolveAgainstGenericMethod(methodInfo);
                if (type == null)
                    break;
            }

            if (pCls.IsPrimitive)
            {
                if (!type.UnderlyingSystemType.IsRuntimeImplemented() ||
                    !CanChangePrimitive(type.UnderlyingSystemType, pCls.UnderlyingSystemType))
                    break;
            }
            else
            {
                if (!pCls.IsAssignableFrom(type))
                    break;
            }
        }
        if (j == types.Length)
            candidates[CurIdx++] = candidates[i];
    }
    if (CurIdx == 0)
        return null;
    if (CurIdx == 1)
        return candidates[0];

    // Walk all of the methods looking for the most specific method to invoke.
    int currentMin = 0;
    bool ambig = false;

    int[] paramOrder = new int[types.Length];
    for (i = 0; i < types.Length; i++)
        paramOrder[i] = i;
    for (i = 1; i < CurIdx; i++)
    {
        int newMin = FindMostSpecificMethod(candidates[currentMin], paramOrder, null, candidates[i], paramOrder, null, types, null);
        if (newMin == 0)
            ambig = true;
        else
        {
            if (newMin == 2)
            {
                // FIX: the original assigned currentMin = i twice in this branch;
                // the duplicate statement was redundant and has been removed.
                currentMin = i;
                ambig = false;
            }
        }
    }
    if (ambig)
        throw new AmbiguousMatchException(SR.Arg_AmbiguousMatchException);
    return candidates[currentMin];
}

// Given a set of properties that match the base criteria, select one.
public sealed override PropertyInfo? SelectProperty(BindingFlags bindingAttr, PropertyInfo[] match, Type? returnType,
    Type[]? indexes, ParameterModifier[]? modifiers)
{
    // Allow a null indexes array. But if it is not null, every element must be non-null as well.
    if (indexes != null)
    {
        foreach (Type index in indexes)
        {
            if (index == null)
                throw new ArgumentNullException(nameof(indexes));
        }
    }

    if (match == null || match.Length == 0)
        throw new ArgumentException(SR.Arg_EmptyArray, nameof(match));

    PropertyInfo[] candidates = (PropertyInfo[])match.Clone();

    int i, j = 0;

    // Find all the properties that can be described by the type indexes parameter.
    int CurIdx = 0;
    int indexesLength = (indexes != null) ? indexes.Length : 0;
    for (i = 0; i < candidates.Length; i++)
    {
        if (indexes != null)
        {
            ParameterInfo[] par = candidates[i].GetIndexParameters();
            if (par.Length != indexesLength)
                continue;

            for (j = 0; j < indexesLength; j++)
            {
                Type pCls = par[j].ParameterType;

                // If the classes exactly match continue
                if (pCls == indexes[j])
                    continue;
                if (pCls == typeof(object))
                    continue;

                if (pCls.IsPrimitive)
                {
                    if (!indexes[j].UnderlyingSystemType.IsRuntimeImplemented() ||
                        !CanChangePrimitive(indexes[j].UnderlyingSystemType, pCls.UnderlyingSystemType))
                        break;
                }
                else
                {
                    if (!pCls.IsAssignableFrom(indexes[j]))
                        break;
                }
            }
        }

        if (j == indexesLength)
        {
            // Also require the property type to be compatible with the requested return type.
            if (returnType != null)
            {
                if (candidates[i].PropertyType.IsPrimitive)
                {
                    if (!returnType.UnderlyingSystemType.IsRuntimeImplemented() ||
                        !CanChangePrimitive(returnType.UnderlyingSystemType, candidates[i].PropertyType.UnderlyingSystemType))
                        continue;
                }
                else
                {
                    if (!candidates[i].PropertyType.IsAssignableFrom(returnType))
                        continue;
                }
            }
            candidates[CurIdx++] = candidates[i];
        }
    }
    if (CurIdx == 0)
        return null;
    if (CurIdx == 1)
        return candidates[0];

    // Walk all of the properties looking for the most specific one: first by property
    // type, then by index parameters, then by declaring-type depth.
    int currentMin = 0;
    bool ambig = false;

    int[] paramOrder = new int[indexesLength];
    for (i = 0; i < indexesLength; i++)
        paramOrder[i] = i;
    for (i = 1; i < CurIdx; i++)
    {
        int newMin = FindMostSpecificType(candidates[currentMin].PropertyType, candidates[i].PropertyType, returnType);
        if (newMin == 0 && indexes != null)
            newMin = FindMostSpecific(candidates[currentMin].GetIndexParameters(), paramOrder, null,
                                      candidates[i].GetIndexParameters(), paramOrder, null, indexes, null);
        if (newMin == 0)
        {
            newMin = FindMostSpecificProperty(candidates[currentMin], candidates[i]);
            if (newMin == 0)
                ambig = true;
        }
        if (newMin == 2)
        {
            ambig = false;
            currentMin = i;
        }
    }
    if (ambig)
        throw new AmbiguousMatchException(SR.Arg_AmbiguousMatchException);
    return candidates[currentMin];
}

// ChangeType
// The default binder doesn't support any change type functionality.
// This is because the default is built into the low level invoke code.
public override object ChangeType(object value, Type type, CultureInfo? cultureInfo)
{
    throw new NotSupportedException(SR.NotSupported_ChangeType);
}

// Undoes the reordering/packing done by BindToMethod so the caller sees
// by-ref argument updates in the original positions.
public sealed override void ReorderArgumentArray(ref object?[] args, object state)
{
    BinderState binderState = (BinderState)state;
    ReorderParams(binderState._argsMap, args);
    if (binderState._isParamArray)
    {
        int paramArrayPos = args.Length - 1;
        if (args.Length == binderState._originalSize)
        {
            args[paramArrayPos] = ((object[])args[paramArrayPos]!)[0];
        }
        else
        {
            // must be args.Length < state.originalSize
            object[] newArgs = new object[args.Length];
            Array.Copy(args, 0, newArgs, 0, paramArrayPos);
            for (int i = paramArrayPos, j = 0; i < newArgs.Length; i++, j++)
            {
                newArgs[i] = ((object[])args[paramArrayPos]!)[j];
            }
            args = newArgs;
        }
    }
    else
    {
        if (args.Length > binderState._originalSize)
        {
            object[] newArgs = new object[binderState._originalSize];
            Array.Copy(args, 0, newArgs, 0, binderState._originalSize);
            args = newArgs;
        }
    }
}

// Return any exact bindings that may exist.
// (This method is not defined on the Binder and is used by RuntimeType.)
// Returns the single method whose parameter types match 'types' exactly, the most
// derived new-slot method if several match, or null if none do.
// NOTE(review): assumes callers pre-filter candidates by arity; if par.Length is
// non-zero but smaller than types.Length, par[j] would throw — confirm against callers.
public static MethodBase? ExactBinding(MethodBase[] match, Type[] types, ParameterModifier[]? modifiers)
{
    if (match == null)
        throw new ArgumentNullException(nameof(match));

    MethodBase[] aExactMatches = new MethodBase[match.Length];
    int cExactMatches = 0;

    for (int i = 0; i < match.Length; i++)
    {
        ParameterInfo[] par = match[i].GetParametersNoCopy();
        if (par.Length == 0)
        {
            continue;
        }
        int j;
        for (j = 0; j < types.Length; j++)
        {
            Type pCls = par[j].ParameterType;

            // If the classes exactly match continue
            if (!pCls.Equals(types[j]))
                break;
        }
        if (j < types.Length)
            continue;

        // Add the exact match to the array of exact matches.
        aExactMatches[cExactMatches] = match[i];
        cExactMatches++;
    }

    if (cExactMatches == 0)
        return null;

    if (cExactMatches == 1)
        return aExactMatches[0];

    return FindMostDerivedNewSlotMeth(aExactMatches, cExactMatches);
}

// Return any exact bindings that may exist. (This method is not defined on the
// Binder and is used by RuntimeType.)
// Exact property match on index parameter types and (optionally) the property type;
// throws if more than one property matches exactly.
public static PropertyInfo? ExactPropertyBinding(PropertyInfo[] match, Type? returnType, Type[]? types, ParameterModifier[]? modifiers)
{
    if (match == null)
        throw new ArgumentNullException(nameof(match));

    PropertyInfo? bestMatch = null;
    int typesLength = (types != null) ? types.Length : 0;
    for (int i = 0; i < match.Length; i++)
    {
        ParameterInfo[] par = match[i].GetIndexParameters();
        int j;
        for (j = 0; j < typesLength; j++)
        {
            Type pCls = par[j].ParameterType;

            // If the classes exactly match continue
            if (pCls != types![j])
                break;
        }
        if (j < typesLength)
            continue;
        if (returnType != null && returnType != match[i].PropertyType)
            continue;

        if (bestMatch != null)
            throw new AmbiguousMatchException(SR.Arg_AmbiguousMatchException);

        bestMatch = match[i];
    }

    return bestMatch;
}

// Compares two candidate parameter lists against the argument types.
// Returns 1 if p1 is more specific, 2 if p2 is, 0 if ambiguous/equal.
private static int FindMostSpecific(ParameterInfo[] p1, int[] paramOrder1, Type? paramArrayType1,
                                    ParameterInfo[] p2, int[] paramOrder2, Type? paramArrayType2,
                                    Type[] types, object?[]? args)
{
    // A method using params is always less specific than one not using params
    if (paramArrayType1 != null && paramArrayType2 == null)
        return 2;
    if (paramArrayType2 != null && paramArrayType1 == null)
        return 1;

    // now either p1 and p2 both use params or neither does.
    bool p1Less = false;
    bool p2Less = false;

    for (int i = 0; i < types.Length; i++)
    {
        // Arguments explicitly omitted via Type.Missing don't influence specificity.
        if (args != null && args[i] == Type.Missing)
            continue;

        Type c1, c2;

        // If a param array is present, then either
        //     the user re-ordered the parameters in which case
        //         the argument to the param array is either an array
        //             in which case the params is conceptually ignored and so paramArrayType1 == null
        //         or the argument to the param array is a single element
        //             in which case paramOrder[i] == p1.Length - 1 for that element
        //     or the user did not re-order the parameters in which case
        //         the paramOrder array could contain indexes larger than p.Length - 1 (see VSW 577286)
        //         so any index >= p.Length - 1 is being put in the param array

        if (paramArrayType1 != null && paramOrder1[i] >= p1.Length - 1)
            c1 = paramArrayType1;
        else
            c1 = p1[paramOrder1[i]].ParameterType;

        if (paramArrayType2 != null && paramOrder2[i] >= p2.Length - 1)
            c2 = paramArrayType2;
        else
            c2 = p2[paramOrder2[i]].ParameterType;

        if (c1 == c2)
            continue;

        switch (FindMostSpecificType(c1, c2, types[i]))
        {
            case 0: return 0;
            case 1: p1Less = true; break;
            case 2: p2Less = true; break;
        }
    }

    // Two way p1Less and p2Less can be equal. All the arguments are the
    // same they both equal false, otherwise there were things that both
    // were the most specific type on....
    if (p1Less == p2Less)
    {
        // if we cannot tell which is a better match based on parameter types (p1Less == p2Less),
        // let's see which one has the most matches without using the params array (the longer one wins).
        if (!p1Less && args != null)
        {
            if (p1.Length > p2.Length)
            {
                return 1;
            }
            else if (p2.Length > p1.Length)
            {
                return 2;
            }
        }

        return 0;
    }
    else
    {
        return p1Less ? 1 : 2;
    }
}

// Decides which of c1/c2 is the more specific match for target type t.
// Returns 1 for c1, 2 for c2, 0 if undecidable.
private static int FindMostSpecificType(Type c1, Type c2, Type? t)
{
    // If the two types are exact move on...
    if (c1 == c2)
        return 0;

    if (t is SignatureType signatureType)
    {
        if (signatureType.MatchesExactly(c1))
            return 1;

        if (signatureType.MatchesExactly(c2))
            return 2;
    }
    else
    {
        if (c1 == t)
            return 1;

        if (c2 == t)
            return 2;
    }

    bool c1FromC2;
    bool c2FromC1;

    if (c1.IsByRef || c2.IsByRef)
    {
        if (c1.IsByRef && c2.IsByRef)
        {
            c1 = c1.GetElementType()!;
            c2 = c2.GetElementType()!;
        }
        else if (c1.IsByRef)
        {
            // A by-value parameter beats a by-ref one of the same underlying type.
            if (c1.GetElementType() == c2)
                return 2;

            c1 = c1.GetElementType()!;
        }
        else // if (c2.IsByRef)
        {
            if (c2.GetElementType() == c1)
                return 1;

            c2 = c2.GetElementType()!;
        }
    }

    if (c1.IsPrimitive && c2.IsPrimitive)
    {
        c1FromC2 = CanChangePrimitive(c2, c1);
        c2FromC1 = CanChangePrimitive(c1, c2);
    }
    else
    {
        c1FromC2 = c1.IsAssignableFrom(c2);
        c2FromC1 = c2.IsAssignableFrom(c1);
    }

    // The narrower type (convertible TO the other but not FROM it) is more specific.
    if (c1FromC2 == c2FromC1)
        return 0;

    if (c1FromC2)
    {
        return 2;
    }
    else
    {
        return 1;
    }
}

// Parameter-based specificity first; identical signatures are broken by declaring-type depth.
private static int FindMostSpecificMethod(MethodBase m1, int[] paramOrder1, Type? paramArrayType1,
                                          MethodBase m2, int[] paramOrder2, Type? paramArrayType2,
                                          Type[] types, object?[]? args)
{
    // Find the most specific method based on the parameters.
    int res = FindMostSpecific(m1.GetParametersNoCopy(), paramOrder1, paramArrayType1,
                               m2.GetParametersNoCopy(), paramOrder2, paramArrayType2, types, args);

    // If the match was not ambiguous then return the result.
    if (res != 0)
        return res;

    // Check to see if the methods have the exact same name and signature.
    if (CompareMethodSig(m1, m2))
    {
        // Determine the depth of the declaring types for both methods.
        int hierarchyDepth1 = GetHierarchyDepth(m1.DeclaringType!);
        int hierarchyDepth2 = GetHierarchyDepth(m2.DeclaringType!);

        // The most derived method is the most specific one.
        if (hierarchyDepth1 == hierarchyDepth2)
        {
            return 0;
        }
        else if (hierarchyDepth1 < hierarchyDepth2)
        {
            return 2;
        }
        else
        {
            return 1;
        }
    }

    // The match is ambiguous.
    return 0;
}

// For same-named fields, the one declared on the more derived type wins.
private static int FindMostSpecificField(FieldInfo cur1, FieldInfo cur2)
{
    // Check to see if the fields have the same name.
    if (cur1.Name == cur2.Name)
    {
        int hierarchyDepth1 = GetHierarchyDepth(cur1.DeclaringType!);
        int hierarchyDepth2 = GetHierarchyDepth(cur2.DeclaringType!);

        if (hierarchyDepth1 == hierarchyDepth2)
        {
            Debug.Assert(cur1.IsStatic != cur2.IsStatic, "hierarchyDepth1 == hierarchyDepth2");
            return 0;
        }
        else if (hierarchyDepth1 < hierarchyDepth2)
            return 2;
        else
            return 1;
    }

    // The match is ambiguous.
    return 0;
}

// For same-named properties, the one declared on the more derived type wins.
private static int FindMostSpecificProperty(PropertyInfo cur1, PropertyInfo cur2)
{
    // Check to see if the properties have the same name.
    if (cur1.Name == cur2.Name)
    {
        int hierarchyDepth1 = GetHierarchyDepth(cur1.DeclaringType!);
        int hierarchyDepth2 = GetHierarchyDepth(cur2.DeclaringType!);

        if (hierarchyDepth1 == hierarchyDepth2)
        {
            return 0;
        }
        else if (hierarchyDepth1 < hierarchyDepth2)
            return 2;
        else
            return 1;
    }

    // The match is ambiguous.
    return 0;
}

// True when both methods have identical parameter type lists (names/return ignored).
public static bool CompareMethodSig(MethodBase m1, MethodBase m2)
{
    ParameterInfo[] params1 = m1.GetParametersNoCopy();
    ParameterInfo[] params2 = m2.GetParametersNoCopy();

    if (params1.Length != params2.Length)
        return false;

    int numParams = params1.Length;
    for (int i = 0; i < numParams; i++)
    {
        if (params1[i].ParameterType != params2[i].ParameterType)
            return false;
    }

    return true;
}

// Number of types in t's base-class chain, counting t itself.
private static int GetHierarchyDepth(Type t)
{
    int depth = 0;

    Type? currentType = t;
    do
    {
        depth++;
        currentType = currentType.BaseType;
    } while (currentType != null);

    return depth;
}

// Of cMatches identically-signed methods, picks the one declared deepest in the
// hierarchy; two at the same depth are ambiguous.
internal static MethodBase? FindMostDerivedNewSlotMeth(MethodBase[] match, int cMatches)
{
    int deepestHierarchy = 0;
    MethodBase? methWithDeepestHierarchy = null;

    for (int i = 0; i < cMatches; i++)
    {
        // Calculate the depth of the hierarchy of the declaring type of the
        // current method.
        int currentHierarchyDepth = GetHierarchyDepth(match[i].DeclaringType!);

        // The two methods have the same name, signature, and hierarchy depth.
        // This can only happen if at least one is vararg or generic.
        if (currentHierarchyDepth == deepestHierarchy)
        {
            throw new AmbiguousMatchException(SR.Arg_AmbiguousMatchException);
        }

        // Check to see if this method is on the most derived class.
        if (currentHierarchyDepth > deepestHierarchy)
        {
            deepestHierarchy = currentHierarchyDepth;
            methWithDeepestHierarchy = match[i];
        }
    }

    return methWithDeepestHierarchy;
}

// This method will sort the vars array into the mapping order stored
// in the paramOrder array.
private static void ReorderParams(int[] paramOrder, object?[] vars)
{
    object?[] varsCopy = new object[vars.Length];
    for (int i = 0; i < vars.Length; i++)
        varsCopy[i] = vars[i];

    for (int i = 0; i < vars.Length; i++)
        vars[i] = varsCopy[paramOrder[i]];
}

// This method will create the mapping between the Parameters and the underlying
// data based upon the names array. The names array is stored in the same order
// as the values and maps to the parameters of the method. We store the mapping
// from the parameters to the names in the paramOrder array. All parameters that
// don't have matching names are then stored in the array in order.
// Returns false when a supplied name matches no parameter.
private static bool CreateParamOrder(int[] paramOrder, ParameterInfo[] pars, string[] names)
{
    bool[] used = new bool[pars.Length];

    // Mark which parameters have not been found in the names list
    for (int i = 0; i < pars.Length; i++)
        paramOrder[i] = -1;

    // Find the parameters with names.
    for (int i = 0; i < names.Length; i++)
    {
        int j;
        for (j = 0; j < pars.Length; j++)
        {
            if (names[i].Equals(pars[j].Name))
            {
                paramOrder[j] = i;
                used[i] = true;
                break;
            }
        }

        // This is an error condition. The name was not found. This
        // method must not match what we sent.
        if (j == pars.Length)
            return false;
    }

    // Now we fill in the holes with the parameters that are unused.
    int pos = 0;
    for (int i = 0; i < pars.Length; i++)
    {
        if (paramOrder[i] == -1)
        {
            for (; pos < pars.Length; pos++)
            {
                if (!used[pos])
                {
                    paramOrder[i] = pos;
                    pos++;
                    break;
                }
            }
        }
    }

    return true;
}

// CanChangePrimitive
// This will determine if the source can be converted to the target type
// via a widening primitive conversion (table lookup by TypeCode).
internal static bool CanChangePrimitive(Type? source, Type? target)
{
    // IntPtr/UIntPtr have TypeCode.Object, so the table can't express their identity
    // conversion; handle it explicitly.
    if ((source == typeof(IntPtr) && target == typeof(IntPtr)) ||
        (source == typeof(UIntPtr) && target == typeof(UIntPtr)))
        return true;

    Primitives widerCodes = s_primitiveConversions[(int)(Type.GetTypeCode(source))];
    Primitives targetCode = (Primitives)(1 << (int)(Type.GetTypeCode(target)));

    return (widerCodes & targetCode) != 0;
}

// Indexed by the source's TypeCode; each entry is the bit-set of TypeCodes the
// source may widen to (including itself).
private static readonly Primitives[] s_primitiveConversions =
{
    /* Empty    */  0, // not primitive
    /* Object   */  0, // not primitive
    /* DBNull   */  0, // not primitive
    /* Boolean  */  Primitives.Boolean,
    /* Char     */  Primitives.Char | Primitives.UInt16 | Primitives.UInt32 | Primitives.Int32 | Primitives.UInt64 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* SByte    */  Primitives.SByte | Primitives.Int16 | Primitives.Int32 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* Byte     */  Primitives.Byte | Primitives.Char | Primitives.UInt16 | Primitives.Int16 | Primitives.UInt32 | Primitives.Int32 | Primitives.UInt64 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* Int16    */  Primitives.Int16 | Primitives.Int32 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* UInt16   */  Primitives.UInt16 | Primitives.UInt32 | Primitives.Int32 | Primitives.UInt64 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* Int32    */  Primitives.Int32 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* UInt32   */  Primitives.UInt32 | Primitives.UInt64 | Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* Int64    */  Primitives.Int64 | Primitives.Single | Primitives.Double,
    /* UInt64   */  Primitives.UInt64 | Primitives.Single | Primitives.Double,
    /* Single   */  Primitives.Single | Primitives.Double,
    /* Double   */  Primitives.Double,
    /* Decimal  */  Primitives.Decimal,
    /* DateTime */  Primitives.DateTime,
    /* [Unused] */  0,
    /* String   */  Primitives.String,
};

// One flag bit per TypeCode value.
// NOTE(review): the upstream runtime source casts the shift amounts, e.g.
// 1 << (int)TypeCode.Boolean; the casts appear to have been lost in this copy —
// confirm against the original file before building.
[Flags]
private enum Primitives
{
    Boolean = 1 << TypeCode.Boolean,
    Char = 1 << TypeCode.Char,
    SByte = 1 << TypeCode.SByte,
    Byte = 1 << TypeCode.Byte,
    Int16 = 1 << TypeCode.Int16,
    UInt16 = 1 << TypeCode.UInt16,
    Int32 = 1 << TypeCode.Int32,
    UInt32 = 1 << TypeCode.UInt32,
    Int64 = 1 << TypeCode.Int64,
    UInt64 = 1 << TypeCode.UInt64,
    Single = 1 << TypeCode.Single,
    Double = 1 << TypeCode.Double,
    Decimal = 1 << TypeCode.Decimal,
    DateTime = 1 << TypeCode.DateTime,
    String = 1 << TypeCode.String,
}

// Opaque state returned from BindToMethod and consumed by ReorderArgumentArray:
// records the name-based argument mapping and whether a params array was built.
internal class BinderState
{
    internal readonly int[] _argsMap;
    internal readonly int _originalSize;
    internal readonly bool _isParamArray;

    internal BinderState(int[] argsMap, int originalSize, bool isParamArray)
    {
        _argsMap = argsMap;
        _originalSize = originalSize;
        _isParamArray = isParamArray;
    }
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;

using Microsoft.PowerShell.Commands;

namespace System.Management.Automation
{
    /// <summary>
    /// This class holds the integer constants used in Session State.
    /// </summary>
    internal static class SessionStateConstants
    {
        /// <summary>The default maximum for the number of variables.</summary>
        internal const int DefaultVariableCapacity = 4096;

        /// <summary>Max # of variables allowed in a scope in Session State.</summary>
        internal const int MaxVariablesCapacity = 32768;

        /// <summary>Min # of variables allowed in a scope in Session State.</summary>
        internal const int MinVariablesCapacity = 1024;

        /// <summary>The default maximum for the number of aliases.</summary>
        internal const int DefaultAliasCapacity = 4096;

        /// <summary>Max # of aliases allowed in a scope in Session State.</summary>
        internal const int MaxAliasCapacity = 32768;

        /// <summary>Min # of aliases allowed in a scope in Session State.</summary>
        internal const int MinAliasCapacity = 1024;

        /// <summary>The default maximum for the number of functions.</summary>
        internal const int DefaultFunctionCapacity = 4096;

        /// <summary>Max # of functions allowed in a scope in Session State.</summary>
        internal const int MaxFunctionCapacity = 32768;

        /// <summary>Min # of functions allowed in a scope in Session State.</summary>
        internal const int MinFunctionCapacity = 1024;

        /// <summary>The default maximum for the number of drives.</summary>
        internal const int DefaultDriveCapacity = 4096;

        /// <summary>Max # of drives allowed in a scope in Session State.</summary>
        internal const int MaxDriveCapacity = 32768;

        /// <summary>Min # of drives allowed in a scope in Session State.</summary>
        internal const int MinDriveCapacity = 1024;

        /// <summary>The default maximum for the number of errors.</summary>
        internal const int DefaultErrorCapacity = 256;

        /// <summary>Max # of errors allowed in a scope in Session State.</summary>
        internal const int MaxErrorCapacity = 32768;

        /// <summary>Min # of errors allowed in a scope in Session State.</summary>
        internal const int MinErrorCapacity = 256;

        /// <summary>The default capacity for a Dictionary store.</summary>
        internal const int DefaultDictionaryCapacity = 100;

        /// <summary>Default load factor on a hash table.</summary>
        internal const float DefaultHashTableLoadFactor = 0.25F;
    }

    /// <summary>
    /// This class has static methods that are used in Session State.
    /// </summary>
    internal static class SessionStateUtilities
    {
        /// <summary>
        /// Converts the specified array into a collection of the specified type.
        /// </summary>
        /// <param name="array">The array to be converted; null yields an empty collection.</param>
        /// <returns>A collection of the elements that were in the array.</returns>
        internal static Collection<T> ConvertArrayToCollection<T>(T[] array)
        {
            Collection<T> result = new Collection<T>();
            if (array != null)
            {
                foreach (T element in array)
                {
                    result.Add(element);
                }
            }

            return result;
        }

        /// <summary>
        /// Compares the elements in the specified collection with the value specified. If
        /// the comparer is specified it is used for the comparison, else the
        /// .Equals method is used.
        /// </summary>
        /// <param name="collection">The collection to check for the value.</param>
        /// <param name="value">The value to check for.</param>
        /// <param name="comparer">If specified the comparer will be used instead of .Equals.</param>
        /// <returns>True if the value is contained in the collection, false otherwise.</returns>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="collection"/> is null.
        /// </exception>
        internal static bool CollectionContainsValue(IEnumerable collection, object value, IComparer comparer)
        {
            if (collection == null)
            {
                // FIX: use nameof instead of a string literal so the message
                // stays correct under rename refactoring (CA1507/CA2208).
                throw new ArgumentNullException(nameof(collection));
            }

            bool result = false;
            foreach (object item in collection)
            {
                if (comparer != null)
                {
                    if (comparer.Compare(item, value) == 0)
                    {
                        result = true;
                        break;
                    }
                }
                else
                {
                    if (item.Equals(value))
                    {
                        result = true;
                        break;
                    }
                }
            }

            return result;
        }

        /// <summary>
        /// Constructs a collection of WildcardPatterns for the specified
        /// string collection. Null or empty pattern strings are skipped.
        /// </summary>
        /// <param name="globPatterns">The string patterns to construct the WildcardPatterns for.</param>
        /// <param name="options">The options to create the WildcardPatterns with.</param>
        /// <returns>
        /// A collection of WildcardPatterns that represent the string patterns
        /// that were passed.
        /// </returns>
        internal static Collection<WildcardPattern> CreateWildcardsFromStrings(
            IEnumerable<string> globPatterns,
            WildcardOptions options)
        {
            Collection<WildcardPattern> result = new Collection<WildcardPattern>();

            if (globPatterns != null)
            {
                // Loop through the patterns and construct a wildcard pattern for each one
                foreach (string pattern in globPatterns)
                {
                    if (!string.IsNullOrEmpty(pattern))
                    {
                        result.Add(
                            WildcardPattern.Get(
                                pattern,
                                options));
                    }
                }
            }

            return result;
        }

        /// <summary>
        /// Determines if the specified text matches any of the patterns.
        /// </summary>
        /// <param name="text">The text to check against the wildcard patterns.</param>
        /// <param name="patterns">
        /// An array of wildcard patterns. If the array is empty or null the result
        /// is <paramref name="defaultValue"/>.
        /// </param>
        /// <param name="defaultValue">
        /// The default value that should be returned if <paramref name="patterns"/>
        /// is empty or null.
        /// </param>
        /// <returns>
        /// True if the text matches any of the patterns OR if patterns is null or empty and defaultValue is True.
        /// </returns>
        internal static bool MatchesAnyWildcardPattern(
            string text,
            IEnumerable<WildcardPattern> patterns,
            bool defaultValue)
        {
            bool result = false;
            bool patternsNonEmpty = false;

            if (patterns != null)
            {
                // Loop through each of the patterns until a match is found
                foreach (WildcardPattern pattern in patterns)
                {
                    patternsNonEmpty = true;
                    if (pattern.IsMatch(text))
                    {
                        result = true;
                        break;
                    }
                }
            }

            if (!patternsNonEmpty)
            {
                // Since no pattern was specified return the default value
                result = defaultValue;
            }

            return result;
        }

        /// <summary>
        /// Converts an OpenMode enum value to a FileMode.
        /// </summary>
        /// <param name="openMode">The OpenMode value to be converted.</param>
        /// <returns>The FileMode representation of the OpenMode.</returns>
        internal static FileMode GetFileModeFromOpenMode(OpenMode openMode)
        {
            FileMode result = FileMode.Create;
            switch (openMode)
            {
                case OpenMode.Add:
                    result = FileMode.Append;
                    break;

                case OpenMode.New:
                    result = FileMode.CreateNew;
                    break;

                case OpenMode.Overwrite:
                    result = FileMode.Create;
                    break;
            }

            return result;
        }
    }
}

namespace Microsoft.PowerShell.Commands
{
    /// <summary>
    /// The enum used by commands to allow the user to specify how
    /// a file (or other item) should be opened.
    /// </summary>
    public enum OpenMode
    {
        /// <summary>
        /// This opens the file for appending (similar to FileMode.Append)
        /// </summary>
        Add,

        /// <summary>
        /// The file must be created new. If the file exists it is an error (similar to FileMode.CreateNew)
        /// </summary>
        New,

        /// <summary>
        /// Creates a new file, if the file already exists it is overwritten (similar to FileMode.Create)
        /// </summary>
        Overwrite
    }
}
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */

using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Data;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Input;
using System.Windows.Interop;
using ado_net_gemfirexd_client.Models;
using ado_net_gemfirexd_client.ViewModels;

namespace ado_net_gemfirexd_client.Views
{
    /// <summary>
    /// Interaction logic for BulkInsertUpdateDeleteDataView.xaml.
    /// Lets the user pick tables from a list and run bulk insert, update or
    /// delete operations against them, each table on its own thread inside its
    /// own AppDomain.
    /// </summary>
    public partial class BulkInsertUpdateDeleteDataView : Window
    {
        // Win32 constants used by Window_Loaded to strip the system menu.
        private const int GWL_STYLE = -16;
        private const int WS_SYSMENU = 0x80000;

        [DllImport("user32.dll", SetLastError = true)]
        private static extern int GetWindowLong(IntPtr hWnd, int nIndex);

        [DllImport("user32.dll")]
        private static extern int SetWindowLong(IntPtr hWnd, int nIndex, int dwNewLong);

        // Table names shown in the "tables" list box.
        private ObservableCollection<string> _tableList = new ObservableCollection<string>();

        // View model that loads grid data and performs the bulk operations.
        BulkInsertUpdateDeleteViewModel _stvm = new BulkInsertUpdateDeleteViewModel();

        // Most recently selected table name.
        private string _selectedTable;

        public BulkInsertUpdateDeleteDataView()
        {
            InitializeComponent();
            ExtendedInitialization();
            tables.DataContext = _tableList;
            tables.ItemsSource = _tableList;
        }

        /// <summary>
        /// Populates the table list with the fixed set of demo tables.
        /// </summary>
        private void ExtendedInitialization()
        {
            _tableList.Add("Sales.SalesReason");
            _tableList.Add("dbo.TestData1");
            _tableList.Add("dbo.TestData2");
            _tableList.Add("dbo.TestData3");
            _tableList.Add("dbo.TestData4");
            _tableList.Add("dbo.TestData5");
        }

        /// <summary>
        /// Removes the system menu (and close box) from the window chrome.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            var hwnd = new WindowInteropHelper(this).Handle;
            SetWindowLong(hwnd, GWL_STYLE, GetWindowLong(hwnd, GWL_STYLE) & ~WS_SYSMENU);
        }

        private void Table_SelectionChanged(object sender, SelectionChangedEventArgs e)
        {
            ShowRowsForCurrentSelection();
        }

        private void tables_MouseDoubleClick(object sender, MouseButtonEventArgs e)
        {
            ShowRowsForCurrentSelection();
        }

        /// <summary>
        /// Loads the rows of the currently selected table into the grid.
        /// Shared by the selection-changed and double-click handlers, which
        /// previously duplicated this logic verbatim.
        /// </summary>
        private void ShowRowsForCurrentSelection()
        {
            if (!tables.HasItems)
            {
                return;
            }

            _selectedTable = (tables.SelectedItem == null)
                ? tables.Items.CurrentItem.ToString()
                : tables.SelectedItem.ToString();

            if (!string.IsNullOrEmpty(_selectedTable))
            {
                _stvm.LoadTableRowListData(_selectedTable);
                DataRow.DataContext = _stvm.GridData.AsDataView();
            }
        }

        private void InsertBulkRows_Click(object sender, RoutedEventArgs e)
        {
            RunBulkOperation("I");
        }

        private void UpdateBulkRows_Click(object sender, RoutedEventArgs e)
        {
            RunBulkOperation("U");
        }

        private void DeleteBulkRows_Click(object sender, RoutedEventArgs e)
        {
            RunBulkOperation("D");
        }

        private void IUDTransaction_Click(object sender, RoutedEventArgs e)
        {
            if (!tables.HasItems)
            {
                return;
            }

            var selectedQ = GetSelectedTables();
            if (selectedQ.Count > 0)
            {
                ExcuteTransaction(selectedQ);
            }
            else
            {
                MessageBox.Show("Please select a item from the table list first");
            }
        }

        /// <summary>
        /// Shared body of the Insert/Update/Delete click handlers, which
        /// previously duplicated this logic verbatim.
        /// </summary>
        /// <param name="operation">"I", "U" or "D".</param>
        private void RunBulkOperation(string operation)
        {
            if (!tables.HasItems)
            {
                return;
            }

            var selectedQ = GetSelectedTables();
            if (selectedQ.Count > 0)
            {
                ExcuteBulkTasks(operation, selectedQ);
            }
            else
            {
                MessageBox.Show("Please select a item from the table list first");
            }
        }

        /// <summary>
        /// Returns the table names currently selected in the list box.
        /// </summary>
        private List<string> GetSelectedTables()
        {
            return tables.SelectedItems.OfType<string>().ToList();
        }

        /// <summary>
        /// Runs the statements for the selected tables inside a single transaction.
        /// NOTE(review): "Excute" is a long-standing misspelling kept to match
        /// DataAccess.ExcuteTransaction.
        /// </summary>
        private void ExcuteTransaction(List<string> selectedQ)
        {
            // The original try/catch only rethrew the exception, so it has been
            // removed; failures propagate to the caller unchanged.
            var da = new DataAccess();
            da.ExcuteTransaction(selectedQ);
        }

        private void CloseClick(object sender, RoutedEventArgs e)
        {
            Close();
        }

        /// <summary>
        /// Runs one bulk operation per selected table, each on its own thread in
        /// its own AppDomain; blocks until all threads finish, then unloads the
        /// domains.
        /// </summary>
        /// <param name="operation">"I", "U" or "D".</param>
        /// <param name="selectedQ">The tables to operate on.</param>
        private void ExcuteBulkTasks(string operation, List<string> selectedQ)
        {
            int tc = selectedQ.Count;
            var domains = new AppDomain[tc];
            var threads = new Thread[tc];

            for (int i = 0; i < tc; i++)
            {
                domains[i] = AppDomain.CreateDomain("myDomain_" + i.ToString());
                // The worker reads these slots back via AppDomain.GetData in Login().
                domains[i].SetData("MyMessage", selectedQ[i]);
                domains[i].SetData("Operation", operation);
                threads[i] = new Thread(ThreadProc);
            }

            for (int i = 0; i < tc; i++)
                threads[i].Start(domains[i]);

            for (int i = 0; i < tc; i++)
                threads[i].Join();

            for (int i = 0; i < tc; i++)
                AppDomain.Unload(domains[i]);
        }

        // Thread entry point: marshals execution into the AppDomain passed as state.
        [LoaderOptimization(LoaderOptimization.MultiDomainHost)]
        private static void ThreadProc(object state)
        {
            var domain = (AppDomain)state;
            domain.DoCallBack(Login);
        }

        /// <summary>
        /// Executes inside a worker AppDomain: reads the table name and operation
        /// code from the domain's data slots and performs the bulk operation.
        /// </summary>
        private static void Login()
        {
            try
            {
                var message = (string)AppDomain.CurrentDomain.GetData("MyMessage");
                var op = (string)AppDomain.CurrentDomain.GetData("Operation");
                var da = new DataAccess();
                switch (op)
                {
                    case "I":
                        da.InsertBulkData(message);
                        break;
                    case "D":
                        da.DeleteBulkData(message);
                        break;
                    case "U":
                        da.UpdateBulkData(message);
                        break;
                    case "T":
                        // Transactions run on the UI thread via ExcuteTransaction.
                        break;
                }
            }
            catch (Exception ex)
            {
                // Best-effort: a failed bulk task must not tear down the worker
                // thread, so the exception is recorded and swallowed.
                string err = ex.Message;
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.CSharp.UnitTests
{
    // IOperation tests for dynamic indexer access expressions: each test binds the
    // /*<bind>*/.../*</bind>*/ span in `source` and asserts the exact operation
    // tree and diagnostics produced for it. The expected-tree strings are compared
    // verbatim by the test harness, so their text must not be altered.
    public partial class IOperationTests : SemanticModelTestBase
    {
        // A single dynamic argument forces dynamic dispatch of the indexer.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_DynamicArgument()
        {
            string source = @" class C { void M(C c, dynamic d) { var x = /*<bind>*/c[d]/*</bind>*/; } public int this[int i] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c[d]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(1): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Multiple applicable indexers: overload resolution is deferred to runtime.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_MultipleApplicableSymbols()
        {
            string source = @" class C { void M(C c, dynamic d) { var x = /*<bind>*/c[d]/*</bind>*/; } public int this[int i] => 0; public int this[long i] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c[d]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(1): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Mixed dynamic and static arguments with multiple candidate indexers.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_MultipleArgumentsAndApplicableSymbols()
        {
            string source = @" class C { void M(C c, dynamic d) { char ch = 'c'; var x = /*<bind>*/c[d, ch]/*</bind>*/; } public int this[int i, char ch] => 0; public int this[long i, char ch] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c[d, ch]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(2): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ILocalReferenceOperation: ch (OperationKind.LocalReference, Type: System.Char) (Syntax: 'ch') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Named arguments are surfaced through ArgumentNames on the operation.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_ArgumentNames()
        {
            string source = @" class C { void M(C c, dynamic d, dynamic e) { var x = /*<bind>*/c[i: d, ch: e]/*</bind>*/; } public int this[int i, char ch] => 0; public int this[long i, char ch] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c[i: d, ch: e]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(2): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') IParameterReferenceOperation: e (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'e') ArgumentNames(2): ""i"" ""ch"" ArgumentRefKinds(2): None Ref ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // ref arguments are surfaced through ArgumentRefKinds; a ref indexer
        // parameter is illegal, so the diagnostic is also expected.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_ArgumentRefKinds()
        {
            string source = @" class C { void M(C c, dynamic d, dynamic e) { var x = /*<bind>*/c[i: d, ch: ref e]/*</bind>*/; } public int this[int i, ref dynamic ch] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c[i: d, ch: ref e]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(2): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') IParameterReferenceOperation: e (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'e') ArgumentNames(2): ""i"" ""ch"" ArgumentRefKinds(2): None Ref ";
            var expectedDiagnostics = new DiagnosticDescription[]
            {
                // CS0631: ref and out are not valid in this context
                // public int this[int i, ref dynamic ch] => 0;
                Diagnostic(ErrorCode.ERR_IllegalRefParam, "ref").WithLocation(9, 28)
            };

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // The receiver itself is dynamic; the argument is statically typed.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_WithDynamicReceiver()
        {
            string source = @" class C { void M(dynamic d, int i) { var x = /*<bind>*/d[i]/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'd[i]') Expression: IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') Arguments(1): IParameterReferenceOperation: i (OperationKind.ParameterReference, Type: System.Int32) (Syntax: 'i') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Receiver is a dynamic member access (c.M2), producing a nested
        // IDynamicMemberReferenceOperation as the indexer's Expression.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_WithDynamicMemberReceiver()
        {
            string source = @" class C { void M(dynamic c, int i) { var x = /*<bind>*/c.M2[i]/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c.M2[i]') Expression: IDynamicMemberReferenceOperation (Member Name: ""M2"", Containing Type: null) (OperationKind.DynamicMemberReference, Type: dynamic) (Syntax: 'c.M2') Type Arguments(0) Instance Receiver: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'c') Arguments(1): IParameterReferenceOperation: i (OperationKind.ParameterReference, Type: System.Int32) (Syntax: 'i') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Receiver is a statically-bound field whose declared type is dynamic,
        // so the Expression is an IFieldReferenceOperation.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_WithDynamicTypedMemberReceiver()
        {
            string source = @" class C { dynamic M2 = null; void M(C c, int i) { var x = /*<bind>*/c.M2[i]/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c.M2[i]') Expression: IFieldReferenceOperation: dynamic C.M2 (OperationKind.FieldReference, Type: dynamic) (Syntax: 'c.M2') Instance Receiver: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(1): IParameterReferenceOperation: i (OperationKind.ParameterReference, Type: System.Int32) (Syntax: 'i') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Exercises Arguments, ArgumentNames and ArgumentRefKinds together
        // (ref + named arguments); the ref indexer parameters also produce CS0631.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_AllFields()
        {
            string source = @" class C { void M(C c, dynamic d) { int i = 0; var x = /*<bind>*/c[ref i, c: d]/*</bind>*/; } public int this[ref int i, char c] => 0; public int this[ref int i, long c] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic) (Syntax: 'c[ref i, c: d]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(2): ILocalReferenceOperation: i (OperationKind.LocalReference, Type: System.Int32) (Syntax: 'i') IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(2): ""null"" ""c"" ArgumentRefKinds(2): Ref None ";
            var expectedDiagnostics = new DiagnosticDescription[]
            {
                // CS0631: ref and out are not valid in this context
                // public int this[ref int i, char c] => 0;
                Diagnostic(ErrorCode.ERR_IllegalRefParam, "ref").WithLocation(10, 21),
                // CS0631: ref and out are not valid in this context
                // public int this[ref int i, long c] => 0;
                Diagnostic(ErrorCode.ERR_IllegalRefParam, "ref").WithLocation(11, 21)
            };

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // An anonymous method argument to a dynamic call is illegal (CS1977);
        // the operation tree is still produced, marked IsInvalid.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_ErrorBadDynamicMethodArgLambda()
        {
            string source = @" using System; class C { public void M(C c) { dynamic y = null; var x = /*<bind>*/c[delegate { }, y]/*</bind>*/; } public int this[Action a, Action y] => 0; } ";
            string expectedOperationTree = @" IDynamicIndexerAccessOperation (OperationKind.DynamicIndexerAccess, Type: dynamic, IsInvalid) (Syntax: 'c[delegate { }, y]') Expression: IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C) (Syntax: 'c') Arguments(2): IAnonymousFunctionOperation (Symbol: lambda expression) (OperationKind.AnonymousFunction, Type: null, IsInvalid) (Syntax: 'delegate { }') IBlockOperation (1 statements) (OperationKind.Block, Type: null, IsInvalid) (Syntax: '{ }') IReturnOperation (OperationKind.Return, Type: null, IsInvalid, IsImplicit) (Syntax: '{ }') ReturnedValue: null ILocalReferenceOperation: y (OperationKind.LocalReference, Type: dynamic) (Syntax: 'y') ArgumentNames(0) ArgumentRefKinds(0) ";
            var expectedDiagnostics = new DiagnosticDescription[]
            {
                // CS1977: Cannot use a lambda expression as an argument to a dynamically dispatched operation without first casting it to a delegate or expression tree type.
                // var x = /*<bind>*/c[delegate { }, y]/*</bind>*/;
                Diagnostic(ErrorCode.ERR_BadDynamicMethodArgLambda, "delegate { }").WithLocation(9, 29)
            };

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // No indexer takes one argument, so binding fails entirely and an
        // IInvalidOperation (not a dynamic access) is produced.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicIndexerAccessExpression_OverloadResolutionFailure()
        {
            string source = @" using System; class C { void M(C c, dynamic d) { var x = /*<bind>*/c[d]/*</bind>*/; } public int this[int i, int j] => 0; public int this[int i, int j, int k] => 0; } ";
            string expectedOperationTree = @" IInvalidOperation (OperationKind.Invalid, Type: System.Int32, IsInvalid) (Syntax: 'c[d]') Children(2): IParameterReferenceOperation: c (OperationKind.ParameterReference, Type: C, IsInvalid) (Syntax: 'c') IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic, IsInvalid) (Syntax: 'd') ";
            var expectedDiagnostics = new DiagnosticDescription[]
            {
                // CS1501: No overload for method 'this' takes 1 arguments
                // var x = /*<bind>*/c[d]/*</bind>*/;
                Diagnostic(ErrorCode.ERR_BadArgCount, "c[d]").WithArguments("this", "1").WithLocation(8, 27)
            };

            VerifyOperationTreeAndDiagnosticsForTest<ElementAccessExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Runtime.Intrinsics; namespace System.Runtime.Intrinsics.X86 { /// <summary> /// This class provides access to Intel SSE4.1 hardware instructions via intrinsics /// </summary> [CLSCompliant(false)] public static class Sse41 { public static bool IsSupported { get { return false; } } /// <summary> /// __m128i _mm_blend_epi16 (__m128i a, __m128i b, const int imm8) /// PBLENDW xmm, xmm/m128 imm8 /// </summary> public static Vector128<short> Blend(Vector128<short> left, Vector128<short> right, byte control) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_blend_epi16 (__m128i a, __m128i b, const int imm8) /// PBLENDW xmm, xmm/m128 imm8 /// </summary> public static Vector128<ushort> Blend(Vector128<ushort> left, Vector128<ushort> right, byte control) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_blend_ps (__m128 a, __m128 b, const int imm8) /// BLENDPS xmm, xmm/m128, imm8 /// </summary> public static Vector128<float> Blend(Vector128<float> left, Vector128<float> right, byte control) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128d _mm_blend_pd (__m128d a, __m128d b, const int imm8) /// BLENDPD xmm, xmm/m128, imm8 /// </summary> public static Vector128<double> Blend(Vector128<double> left, Vector128<double> right, byte control) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_blendv_epi8 (__m128i a, __m128i b, __m128i mask) /// PBLENDVB xmm, xmm/m128, xmm /// </summary> public static Vector128<sbyte> BlendVariable(Vector128<sbyte> left, Vector128<sbyte> right, Vector128<sbyte> mask) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_blendv_epi8 (__m128i a, __m128i b, __m128i mask) /// PBLENDVB xmm, xmm/m128, xmm 
/// </summary> public static Vector128<byte> BlendVariable(Vector128<byte> left, Vector128<byte> right, Vector128<byte> mask) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_blendv_ps (__m128 a, __m128 b, __m128 mask) /// BLENDVPS xmm, xmm/m128, xmm0 /// </summary> public static Vector128<float> BlendVariable(Vector128<float> left, Vector128<float> right, Vector128<float> mask) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128d _mm_blendv_pd (__m128d a, __m128d b, __m128d mask) /// BLENDVPD xmm, xmm/m128, xmm0 /// </summary> public static Vector128<double> BlendVariable(Vector128<double> left, Vector128<double> right, Vector128<double> mask) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_ceil_ps (__m128 a) /// ROUNDPS xmm, xmm/m128, imm8(10) /// </summary> public static Vector128<float> Ceiling(Vector128<float> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128d _mm_ceil_pd (__m128d a) /// ROUNDPD xmm, xmm/m128, imm8(10) /// </summary> public static Vector128<double> Ceiling(Vector128<double> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128d _mm_ceil_sd (__m128d a) /// ROUNDSD xmm, xmm/m128, imm8(10) /// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic. /// </summary> public static Vector128<double> CeilingScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_ceil_ss (__m128 a) /// ROUNDSD xmm, xmm/m128, imm8(10) /// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic. 
/// </summary> public static Vector128<float> CeilingScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128d _mm_ceil_sd (__m128d a, __m128d b) /// ROUNDSD xmm, xmm/m128, imm8(10) /// </summary> public static Vector128<double> CeilingScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_ceil_ss (__m128 a, __m128 b) /// ROUNDSS xmm, xmm/m128, imm8(10) /// </summary> public static Vector128<float> CeilingScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cmpeq_epi64 (__m128i a, __m128i b) /// PCMPEQQ xmm, xmm/m128 /// </summary> public static Vector128<long> CompareEqual(Vector128<long> left, Vector128<long> right) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cmpeq_epi64 (__m128i a, __m128i b) /// PCMPEQQ xmm, xmm/m128 /// </summary> public static Vector128<ulong> CompareEqual(Vector128<ulong> left, Vector128<ulong> right) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepi8_epi16 (__m128i a) /// PMOVSXBW xmm, xmm/m64 /// </summary> public static Vector128<short> ConvertToVector128Int16(Vector128<sbyte> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepu8_epi16 (__m128i a) /// PMOVZXBW xmm, xmm/m64 /// </summary> public static Vector128<short> ConvertToVector128Int16(Vector128<byte> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepi8_epi32 (__m128i a) /// PMOVSXBD xmm, xmm/m32 /// </summary> public static Vector128<int> ConvertToVector128Int32(Vector128<sbyte> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepu8_epi32 (__m128i a) /// PMOVZXBD xmm, xmm/m32 /// </summary> public static Vector128<int> ConvertToVector128Int32(Vector128<byte> value) { throw new 
PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepi16_epi32 (__m128i a) /// PMOVSXWD xmm, xmm/m64 /// </summary> public static Vector128<int> ConvertToVector128Int32(Vector128<short> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepu16_epi32 (__m128i a) /// PMOVZXWD xmm, xmm/m64 /// </summary> public static Vector128<int> ConvertToVector128Int32(Vector128<ushort> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepi8_epi64 (__m128i a) /// PMOVSXBQ xmm, xmm/m16 /// </summary> public static Vector128<long> ConvertToVector128Int64(Vector128<sbyte> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepu8_epi64 (__m128i a) /// PMOVZXBQ xmm, xmm/m16 /// </summary> public static Vector128<long> ConvertToVector128Int64(Vector128<byte> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepi16_epi64 (__m128i a) /// PMOVSXWQ xmm, xmm/m32 /// </summary> public static Vector128<long> ConvertToVector128Int64(Vector128<short> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepu16_epi64 (__m128i a) /// PMOVZXWQ xmm, xmm/m32 /// </summary> public static Vector128<long> ConvertToVector128Int64(Vector128<ushort> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepi32_epi64 (__m128i a) /// PMOVSXDQ xmm, xmm/m64 /// </summary> public static Vector128<long> ConvertToVector128Int64(Vector128<int> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128i _mm_cvtepu32_epi64 (__m128i a) /// PMOVZXDQ xmm, xmm/m64 /// </summary> public static Vector128<long> ConvertToVector128Int64(Vector128<uint> value) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_dp_ps (__m128 a, __m128 b, const int imm8) /// DPPS xmm, xmm/m128, imm8 /// </summary> public static Vector128<float> DotProduct(Vector128<float> left, 
Vector128<float> right, byte control) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128d _mm_dp_pd (__m128d a, __m128d b, const int imm8) /// DPPD xmm, xmm/m128, imm8 /// </summary> public static Vector128<double> DotProduct(Vector128<double> left, Vector128<double> right, byte control) { throw new PlatformNotSupportedException(); } /// <summary> /// int _mm_extract_epi8 (__m128i a, const int imm8) /// PEXTRB reg/m8, xmm, imm8 /// </summary> public static sbyte Extract(Vector128<sbyte> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// int _mm_extract_epi8 (__m128i a, const int imm8) /// PEXTRB reg/m8, xmm, imm8 /// </summary> public static byte Extract(Vector128<byte> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// int _mm_extract_epi32 (__m128i a, const int imm8) /// PEXTRD reg/m32, xmm, imm8 /// </summary> public static int Extract(Vector128<int> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// int _mm_extract_epi32 (__m128i a, const int imm8) /// PEXTRD reg/m32, xmm, imm8 /// </summary> public static uint Extract(Vector128<uint> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// __int64 _mm_extract_epi64 (__m128i a, const int imm8) /// PEXTRQ reg/m64, xmm, imm8 /// </summary> public static long Extract(Vector128<long> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// __int64 _mm_extract_epi64 (__m128i a, const int imm8) /// PEXTRQ reg/m64, xmm, imm8 /// </summary> public static ulong Extract(Vector128<ulong> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// int _mm_extract_ps (__m128 a, const int imm8) /// EXTRACTPS xmm, xmm/m32, imm8 /// </summary> public static float Extract(Vector128<float> value, byte index) { throw new PlatformNotSupportedException(); } /// <summary> /// __m128 _mm_floor_ps (__m128 a) /// ROUNDPS xmm, xmm/m128, 
/// imm8(9)
/// </summary>
// NOTE(review): this chunk is the tail of a "PlatformNotSupported" stub class whose header sits
// above this view (presumably the Sse41 intrinsics software fallback — confirm against the class
// header). Every member below unconditionally throws PlatformNotSupportedException; none may
// contain real logic. Line structure restored here: the original text had lost its newlines,
// so the '///' doc comments were swallowing the code that followed them.
public static Vector128<float> Floor(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_floor_pd (__m128d a)
/// ROUNDPD xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<double> Floor(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_floor_sd (__m128d a)
/// ROUNDSD xmm, xmm/m128, imm8(9)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<double> FloorScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_floor_ss (__m128 a)
/// ROUNDSS xmm, xmm/m128, imm8(9)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<float> FloorScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_floor_sd (__m128d a, __m128d b)
/// ROUNDSD xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<double> FloorScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_floor_ss (__m128 a, __m128 b)
/// ROUNDSS xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<float> FloorScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_insert_epi8 (__m128i a, int i, const int imm8)
/// PINSRB xmm, reg/m8, imm8
/// </summary>
public static Vector128<sbyte> Insert(Vector128<sbyte> value, sbyte data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_insert_epi8 (__m128i a, int i, const int imm8)
/// PINSRB xmm, reg/m8, imm8
/// </summary>
public static Vector128<byte> Insert(Vector128<byte> value, byte data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_insert_epi32 (__m128i a, int i, const int imm8)
/// PINSRD xmm, reg/m32, imm8
/// </summary>
public static Vector128<int> Insert(Vector128<int> value, int data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_insert_epi32 (__m128i a, int i, const int imm8)
/// PINSRD xmm, reg/m32, imm8
/// </summary>
public static Vector128<uint> Insert(Vector128<uint> value, uint data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_insert_epi64 (__m128i a, __int64 i, const int imm8)
/// PINSRQ xmm, reg/m64, imm8
/// </summary>
public static Vector128<long> Insert(Vector128<long> value, long data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_insert_epi64 (__m128i a, __int64 i, const int imm8)
/// PINSRQ xmm, reg/m64, imm8
/// </summary>
public static Vector128<ulong> Insert(Vector128<ulong> value, ulong data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_insert_ps (__m128 a, __m128 b, const int imm8)
/// INSERTPS xmm, xmm/m32, imm8
/// </summary>
public static Vector128<float> Insert(Vector128<float> value, float data, byte index) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_max_epi8 (__m128i a, __m128i b)
/// PMAXSB xmm, xmm/m128
/// </summary>
public static Vector128<sbyte> Max(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_max_epu16 (__m128i a, __m128i b)
/// PMAXUW xmm, xmm/m128
/// </summary>
public static Vector128<ushort> Max(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_max_epi32 (__m128i a, __m128i b)
/// PMAXSD xmm, xmm/m128
/// </summary>
public static Vector128<int> Max(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_max_epu32 (__m128i a, __m128i b)
/// PMAXUD xmm, xmm/m128
/// </summary>
public static Vector128<uint> Max(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_min_epi8 (__m128i a, __m128i b)
/// PMINSB xmm, xmm/m128
/// </summary>
public static Vector128<sbyte> Min(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_min_epu16 (__m128i a, __m128i b)
/// PMINUW xmm, xmm/m128
/// </summary>
public static Vector128<ushort> Min(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_min_epi32 (__m128i a, __m128i b)
/// PMINSD xmm, xmm/m128
/// </summary>
public static Vector128<int> Min(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_min_epu32 (__m128i a, __m128i b)
/// PMINUD xmm, xmm/m128
/// </summary>
public static Vector128<uint> Min(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_minpos_epu16 (__m128i a)
/// PHMINPOSUW xmm, xmm/m128
/// </summary>
public static Vector128<ushort> MinHorizontal(Vector128<ushort> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_mpsadbw_epu8 (__m128i a, __m128i b, const int imm8)
/// MPSADBW xmm, xmm/m128, imm8
/// </summary>
public static Vector128<ushort> MultipleSumAbsoluteDifferences(Vector128<byte> left, Vector128<byte> right, byte mask) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_mul_epi32 (__m128i a, __m128i b)
/// PMULDQ xmm, xmm/m128
/// </summary>
public static Vector128<long> Multiply(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_mullo_epi32 (__m128i a, __m128i b)
/// PMULLD xmm, xmm/m128
/// </summary>
public static Vector128<int> MultiplyLow(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_packus_epi32 (__m128i a, __m128i b)
/// PACKUSDW xmm, xmm/m128
/// </summary>
public static Vector128<ushort> PackUnsignedSaturate(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ps (__m128 a, int rounding)
/// ROUNDPS xmm, xmm/m128, imm8(8)
/// _MM_FROUND_TO_NEAREST_INT |_MM_FROUND_NO_EXC
/// </summary>
public static Vector128<float> RoundToNearestInteger(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_TO_NEG_INF |_MM_FROUND_NO_EXC; ROUNDPS xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<float> RoundToNegativeInfinity(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_TO_POS_INF |_MM_FROUND_NO_EXC; ROUNDPS xmm, xmm/m128, imm8(10)
/// </summary>
public static Vector128<float> RoundToPositiveInfinity(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_TO_ZERO |_MM_FROUND_NO_EXC; ROUNDPS xmm, xmm/m128, imm8(11)
/// </summary>
public static Vector128<float> RoundToZero(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_CUR_DIRECTION; ROUNDPS xmm, xmm/m128, imm8(4)
/// </summary>
public static Vector128<float> RoundCurrentDirection(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_pd (__m128d a, int rounding)
/// ROUNDPD xmm, xmm/m128, imm8(8)
/// _MM_FROUND_TO_NEAREST_INT |_MM_FROUND_NO_EXC
/// </summary>
public static Vector128<double> RoundToNearestInteger(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_TO_NEG_INF |_MM_FROUND_NO_EXC; ROUNDPD xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<double> RoundToNegativeInfinity(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_TO_POS_INF |_MM_FROUND_NO_EXC; ROUNDPD xmm, xmm/m128, imm8(10)
/// </summary>
public static Vector128<double> RoundToPositiveInfinity(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_TO_ZERO |_MM_FROUND_NO_EXC; ROUNDPD xmm, xmm/m128, imm8(11)
/// </summary>
public static Vector128<double> RoundToZero(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// _MM_FROUND_CUR_DIRECTION; ROUNDPD xmm, xmm/m128, imm8(4)
/// </summary>
public static Vector128<double> RoundCurrentDirection(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, _MM_FROUND_CUR_DIRECTION)
/// ROUNDSD xmm, xmm/m128, imm8(4)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<double> RoundCurrentDirectionScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, _MM_FROUND_TO_NEAREST_INT |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(8)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<double> RoundToNearestIntegerScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, _MM_FROUND_TO_NEG_INF |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(9)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<double> RoundToNegativeInfinityScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, _MM_FROUND_TO_POS_INF |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(10)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<double> RoundToPositiveInfinityScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, _MM_FROUND_TO_ZERO |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(11)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<double> RoundToZeroScalar(Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, __m128d b, _MM_FROUND_CUR_DIRECTION)
/// ROUNDSD xmm, xmm/m128, imm8(4)
/// </summary>
public static Vector128<double> RoundCurrentDirectionScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, __m128d b, _MM_FROUND_TO_NEAREST_INT |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(8)
/// </summary>
public static Vector128<double> RoundToNearestIntegerScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, __m128d b, _MM_FROUND_TO_NEG_INF |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<double> RoundToNegativeInfinityScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, __m128d b, _MM_FROUND_TO_POS_INF |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(10)
/// </summary>
public static Vector128<double> RoundToPositiveInfinityScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128d _mm_round_sd (__m128d a, __m128d b, _MM_FROUND_TO_ZERO |_MM_FROUND_NO_EXC)
/// ROUNDSD xmm, xmm/m128, imm8(11)
/// </summary>
public static Vector128<double> RoundToZeroScalar(Vector128<double> upper, Vector128<double> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, _MM_FROUND_CUR_DIRECTION)
/// ROUNDSS xmm, xmm/m128, imm8(4)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<float> RoundCurrentDirectionScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(8)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<float> RoundToNearestIntegerScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(9)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<float> RoundToNegativeInfinityScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(10)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<float> RoundToPositiveInfinityScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(11)
/// The above native signature does not exist. We provide this additional overload for the recommended use case of this intrinsic.
/// </summary>
public static Vector128<float> RoundToZeroScalar(Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, __m128 b, _MM_FROUND_CUR_DIRECTION)
/// ROUNDSS xmm, xmm/m128, imm8(4)
/// </summary>
public static Vector128<float> RoundCurrentDirectionScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, __m128 b, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(8)
/// </summary>
public static Vector128<float> RoundToNearestIntegerScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, __m128 b, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(9)
/// </summary>
public static Vector128<float> RoundToNegativeInfinityScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, __m128 b, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(10)
/// </summary>
public static Vector128<float> RoundToPositiveInfinityScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128 _mm_round_ss (__m128 a, __m128 b, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC)
/// ROUNDSS xmm, xmm/m128, imm8(11)
/// </summary>
public static Vector128<float> RoundToZeroScalar(Vector128<float> upper, Vector128<float> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<sbyte> LoadAlignedVector128NonTemporal(sbyte* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<byte> LoadAlignedVector128NonTemporal(byte* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<short> LoadAlignedVector128NonTemporal(short* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<ushort> LoadAlignedVector128NonTemporal(ushort* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<int> LoadAlignedVector128NonTemporal(int* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<uint> LoadAlignedVector128NonTemporal(uint* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<long> LoadAlignedVector128NonTemporal(long* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// __m128i _mm_stream_load_si128 (const __m128i* mem_addr)
/// MOVNTDQA xmm, m128
/// </summary>
public static unsafe Vector128<ulong> LoadAlignedVector128NonTemporal(ulong* address) { throw new PlatformNotSupportedException(); }
/// <summary>
/// int _mm_test_all_ones (__m128i a)
/// HELPER
/// </summary>
// One overload per 128-bit element type; only the first carries the native-signature doc.
public static bool TestAllOnes(Vector128<sbyte> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<byte> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<short> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<ushort> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<int> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<uint> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<long> value) { throw new PlatformNotSupportedException(); }
public static bool TestAllOnes(Vector128<ulong> value) { throw new PlatformNotSupportedException(); }
/// <summary>
/// int _mm_test_all_zeros (__m128i a, __m128i mask)
/// PTEST xmm, xmm/m128
/// </summary>
public static bool TestAllZeros(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<byte> left, Vector128<byte> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<short> left, Vector128<short> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<long> left, Vector128<long> right) { throw new PlatformNotSupportedException(); }
public static bool TestAllZeros(Vector128<ulong> left, Vector128<ulong> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// int _mm_testc_si128 (__m128i a, __m128i b)
/// PTEST xmm, xmm/m128
/// </summary>
public static bool TestC(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<byte> left, Vector128<byte> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<short> left, Vector128<short> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<long> left, Vector128<long> right) { throw new PlatformNotSupportedException(); }
public static bool TestC(Vector128<ulong> left, Vector128<ulong> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// int _mm_test_mix_ones_zeros (__m128i a, __m128i mask)
/// PTEST xmm, xmm/m128
/// </summary>
public static bool TestMixOnesZeros(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<byte> left, Vector128<byte> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<short> left, Vector128<short> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<long> left, Vector128<long> right) { throw new PlatformNotSupportedException(); }
public static bool TestMixOnesZeros(Vector128<ulong> left, Vector128<ulong> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// int _mm_testnzc_si128 (__m128i a, __m128i b)
/// PTEST xmm, xmm/m128
/// </summary>
public static bool TestNotZAndNotC(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<byte> left, Vector128<byte> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<short> left, Vector128<short> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<long> left, Vector128<long> right) { throw new PlatformNotSupportedException(); }
public static bool TestNotZAndNotC(Vector128<ulong> left, Vector128<ulong> right) { throw new PlatformNotSupportedException(); }
/// <summary>
/// int _mm_testz_si128 (__m128i a, __m128i b)
/// PTEST xmm, xmm/m128
/// </summary>
public static bool TestZ(Vector128<sbyte> left, Vector128<sbyte> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<byte> left, Vector128<byte> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<short> left, Vector128<short> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<ushort> left, Vector128<ushort> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<int> left, Vector128<int> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<uint> left, Vector128<uint> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<long> left, Vector128<long> right) { throw new PlatformNotSupportedException(); }
public static bool TestZ(Vector128<ulong> left, Vector128<ulong> right) { throw new PlatformNotSupportedException(); }
}
}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
// NOTE(review): SubSonic-generated read-only wrapper over the "Sales by Category" database view.
// Do not hand-edit generated members; regenerate from the schema instead. Line structure was
// restored here (the original text had lost its newlines).
namespace NorthwindRepository{
    /// <summary>
    /// Strongly-typed collection for the SalesByCategory class.
    /// </summary>
    [Serializable]
    public partial class SalesByCategoryCollection : ReadOnlyList<SalesByCategory, SalesByCategoryCollection>
    {
        public SalesByCategoryCollection() {}
    }
    /// <summary>
    /// This is Read-only wrapper class for the Sales by Category view.
    /// </summary>
    [Serializable]
    public partial class SalesByCategory : ReadOnlyRecord<SalesByCategory>, IReadOnlyRecord
    {
        #region Default Settings
        // Ensures the static schema is built before any instance/query work.
        protected static void SetSQLProps()
        {
            GetTableSchema();
        }
        #endregion
        #region Schema Accessor
        // Lazily-initialized schema shared by all instances (BaseSchema lives in ReadOnlyRecord).
        public static TableSchema.Table Schema
        {
            get
            {
                if (BaseSchema == null)
                {
                    SetSQLProps();
                }
                return BaseSchema;
            }
        }
        // Builds the schema exactly once; guarded by IsSchemaInitialized from the base class.
        private static void GetTableSchema()
        {
            if(!IsSchemaInitialized)
            {
                //Schema declaration
                TableSchema.Table schema = new TableSchema.Table("Sales by Category", TableType.View, DataService.GetInstance("NorthwindRepository"));
                schema.Columns = new TableSchema.TableColumnCollection();
                schema.SchemaName = @"dbo";
                //columns
                TableSchema.TableColumn colvarCategoryID = new TableSchema.TableColumn(schema);
                colvarCategoryID.ColumnName = "CategoryID";
                colvarCategoryID.DataType = DbType.Int32;
                colvarCategoryID.MaxLength = 0;
                colvarCategoryID.AutoIncrement = false;
                colvarCategoryID.IsNullable = false;
                colvarCategoryID.IsPrimaryKey = false;
                colvarCategoryID.IsForeignKey = false;
                colvarCategoryID.IsReadOnly = false;
                schema.Columns.Add(colvarCategoryID);
                TableSchema.TableColumn colvarCategoryName = new TableSchema.TableColumn(schema);
                colvarCategoryName.ColumnName = "CategoryName";
                colvarCategoryName.DataType = DbType.String;
                colvarCategoryName.MaxLength = 15;
                colvarCategoryName.AutoIncrement = false;
                colvarCategoryName.IsNullable = false;
                colvarCategoryName.IsPrimaryKey = false;
                colvarCategoryName.IsForeignKey = false;
                colvarCategoryName.IsReadOnly = false;
                schema.Columns.Add(colvarCategoryName);
                TableSchema.TableColumn colvarProductName = new TableSchema.TableColumn(schema);
                colvarProductName.ColumnName = "ProductName";
                colvarProductName.DataType = DbType.String;
                colvarProductName.MaxLength = 40;
                colvarProductName.AutoIncrement = false;
                colvarProductName.IsNullable = false;
                colvarProductName.IsPrimaryKey = false;
                colvarProductName.IsForeignKey = false;
                colvarProductName.IsReadOnly = false;
                schema.Columns.Add(colvarProductName);
                // ProductSales is the only nullable column of the view.
                TableSchema.TableColumn colvarProductSales = new TableSchema.TableColumn(schema);
                colvarProductSales.ColumnName = "ProductSales";
                colvarProductSales.DataType = DbType.Currency;
                colvarProductSales.MaxLength = 0;
                colvarProductSales.AutoIncrement = false;
                colvarProductSales.IsNullable = true;
                colvarProductSales.IsPrimaryKey = false;
                colvarProductSales.IsForeignKey = false;
                colvarProductSales.IsReadOnly = false;
                schema.Columns.Add(colvarProductSales);
                BaseSchema = schema;
                //add this schema to the provider
                //so we can query it later
                DataService.Providers["NorthwindRepository"].AddSchema("Sales by Category",schema);
            }
        }
        #endregion
        #region Query Accessor
        // Factory for a SubSonic Query bound to this view's schema.
        public static Query CreateQuery()
        {
            return new Query(Schema);
        }
        #endregion
        #region .ctors
        public SalesByCategory()
        {
            SetSQLProps();
            SetDefaults();
            MarkNew();
        }
        public SalesByCategory(bool useDatabaseDefaults)
        {
            SetSQLProps();
            if(useDatabaseDefaults)
            {
                ForceDefaults();
            }
            MarkNew();
        }
        // Loads the record matching the given key value.
        public SalesByCategory(object keyID)
        {
            SetSQLProps();
            LoadByKey(keyID);
        }
        // Loads the first record where columnName equals columnValue.
        public SalesByCategory(string columnName, object columnValue)
        {
            SetSQLProps();
            LoadByParam(columnName,columnValue);
        }
        #endregion
        #region Props
        [XmlAttribute("CategoryID")]
        [Bindable(true)]
        public int CategoryID
        {
            get { return GetColumnValue<int>("CategoryID"); }
            set { SetColumnValue("CategoryID", value); }
        }
        [XmlAttribute("CategoryName")]
        [Bindable(true)]
        public string CategoryName
        {
            get { return GetColumnValue<string>("CategoryName"); }
            set { SetColumnValue("CategoryName", value); }
        }
        [XmlAttribute("ProductName")]
        [Bindable(true)]
        public string ProductName
        {
            get { return GetColumnValue<string>("ProductName"); }
            set { SetColumnValue("ProductName", value); }
        }
        // Nullable: the underlying view column allows NULL.
        [XmlAttribute("ProductSales")]
        [Bindable(true)]
        public decimal? ProductSales
        {
            get { return GetColumnValue<decimal?>("ProductSales"); }
            set { SetColumnValue("ProductSales", value); }
        }
        #endregion
        #region Columns Struct
        // Column-name constants for building queries without magic strings.
        public struct Columns
        {
            public static string CategoryID = @"CategoryID";
            public static string CategoryName = @"CategoryName";
            public static string ProductName = @"ProductName";
            public static string ProductSales = @"ProductSales";
        }
        #endregion
        #region IAbstractRecord Members
        public new CT GetColumnValue<CT>(string columnName)
        {
            return base.GetColumnValue<CT>(columnName);
        }
        public object GetColumnValue(string columnName)
        {
            return base.GetColumnValue<object>(columnName);
        }
        #endregion
    }
}
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text.RegularExpressions;

namespace CSemVer
{
    /// <summary>
    /// Defines standard informational version (usually stored in the <see cref="FileVersionInfo.ProductVersion"/>):
    /// the two <see cref="SVersion"/> (the short and long forms), the <see cref="CommitSha"/> and the <see cref="CommitDate"/>.
    /// <para>
    /// The constructor can be used directly on any string, or <see cref="Parse"/> can be called (and throws an
    /// <see cref="ArgumentException"/> if the result is not <see cref="IsValidSyntax"/>), or the informational version
    /// can be extracted directly from a file thanks to <see cref="ReadFromFile"/>.
    /// </para>
    /// <para>
    /// Syntax check is very strict (the <see cref="Zero"/> string is a sample) and should remain strict.
    /// What is missing in the equivalence check between NuGet and SemVer version: this requires a parse
    /// of the NuGet version and it has yet to be done.
    /// </para>
    /// </summary>
    public class InformationalVersion
    {
        // Pattern: "<semver> (<nuget>) - SHA1: <sha1> - CommitDate: <date>".
        static Regex _r = new Regex( @"^(?<1>.*?) \((?<2>.*?)\) - SHA1: (?<3>.*?) - CommitDate: (?<4>.*?)$" );

        /// <summary>
        /// The zero <see cref="InformationalVersion"/>.
        /// See <see cref="ZeroInformationalVersion"/>.
        /// </summary>
        static public InformationalVersion Zero = new InformationalVersion();

        /// <summary>
        /// The zero assembly version is "0.0.0".
        /// </summary>
        static public readonly string ZeroAssemblyVersion = "0.0.0";

        /// <summary>
        /// The zero file version is "0.0.0.0".
        /// </summary>
        static public readonly string ZeroFileVersion = "0.0.0.0";

        /// <summary>
        /// The zero SHA1 is "0000000000000000000000000000000000000000".
        /// </summary>
        static public readonly string ZeroCommitSha = "0000000000000000000000000000000000000000";

        /// <summary>
        /// The zero commit date is <see cref="DateTime.MinValue"/> in <see cref="DateTimeKind.Utc"/>.
        /// </summary>
        static public readonly DateTime ZeroCommitDate = DateTime.SpecifyKind( DateTime.MinValue, DateTimeKind.Utc );

        /// <summary>
        /// The Zero standard informational version is "0.0.0-0 (0.0.0-0) - SHA1: 0000000000000000000000000000000000000000 - CommitDate: 0001-01-01 00:00:00Z".
        /// <para>
        /// These default values may be set in a csproj:
        /// <code>
        /// &lt;Version&gt;0.0.0-0&lt;/Version&gt;
        /// &lt;AssemblyVersion&gt;0.0.0&lt;/AssemblyVersion&gt;
        /// &lt;FileVersion&gt;0.0.0.0&lt;/FileVersion&gt;
        /// &lt;InformationalVersion&gt;0.0.0-0 (0.0.0-0) - SHA1: 0000000000000000000000000000000000000000 - CommitDate: 0001-01-01 00:00:00Z&lt;/InformationalVersion&gt;
        /// </code>
        /// </para>
        /// </summary>
        static public readonly string ZeroInformationalVersion = "0.0.0-0 (0.0.0-0) - SHA1: 0000000000000000000000000000000000000000 - CommitDate: 0001-01-01 00:00:00Z";

        /// <summary>
        /// Initializes a new <see cref="InformationalVersion"/> by parsing a string.
        /// This never throws: <see cref="IsValidSyntax"/> may be false and <see cref="ParseErrorMessage"/> exposes
        /// the error message.
        /// </summary>
        /// <param name="informationalVersion">Informational version. Can be null.</param>
        public InformationalVersion( string informationalVersion )
        {
            if( (OriginalInformationalVersion = informationalVersion) != null )
            {
                Match m = _r.Match( informationalVersion );
                if( m.Success )
                {
                    RawSemVersion = m.Groups[1].Value;
                    RawNuGetVersion = m.Groups[2].Value;
                    CommitSha = m.Groups[3].Value;
                    SemVersion = SVersion.TryParse( RawSemVersion );
                    NuGetVersion = SVersion.TryParse( RawNuGetVersion );
                    DateTime t;
                    if( DateTime.TryParseExact( m.Groups[4].Value, "u", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal|DateTimeStyles.AdjustToUniversal, out t ) )
                    {
                        CommitDate = t;
                        // Validation cascade: first failing check wins; IsValidSyntax only set when all pass.
                        if( t.Kind != DateTimeKind.Utc ) ParseErrorMessage = $"The CommitDate must be Utc: {m.Groups[4].Value} must be {DateTime.SpecifyKind( t, DateTimeKind.Utc ).ToString("u")}.";
                        else if( !SemVersion.IsValid ) ParseErrorMessage = "The SemVersion is invalid: " + SemVersion.ErrorMessage;
                        else if( !NuGetVersion.IsValid ) ParseErrorMessage = "The NuGetVersion is invalid: " + NuGetVersion.ErrorMessage;
                        else if( CommitSha.Length != 40 || !CommitSha.All( IsHexDigit ) ) ParseErrorMessage = "The CommitSha is invalid (must be 40 hex digit).";
                        else IsValidSyntax = true;
                    }
                    // TODO(review): message below is missing a space ("invalid.It must" -> "invalid. It must").
                    else ParseErrorMessage = "The CommitDate is invalid.It must be a UTC DateTime in \"u\" format.";
                }
                else ParseErrorMessage = "The String to parse does not match the standard CSemVer informational version pattern.";
            }
            else ParseErrorMessage = "String to parse is null.";
        }

        // Private: builds an invalid instance carrying only an error message (used by ReadFromFile/ReadFromAssembly).
        InformationalVersion( string parseErrorMessage, bool forPrivateError )
        {
            ParseErrorMessage = parseErrorMessage;
        }

        // Private: builds the valid Zero instance.
        InformationalVersion()
        {
            OriginalInformationalVersion = ZeroInformationalVersion;
            NuGetVersion = SemVersion = SVersion.ZeroVersion;
            RawNuGetVersion = RawSemVersion = SemVersion.NormalizedText;
            CommitSha = ZeroCommitSha;
            CommitDate = ZeroCommitDate;
            IsValidSyntax = true;
        }

        /// <summary>
        /// Gets whether <see cref="OriginalInformationalVersion"/> has been successfully parsed:
        /// both <see cref="SemVersion"/> and <see cref="NuGetVersion"/> are syntactically valid <see cref="SVersion"/>,
        /// the <see cref="CommitSha"/> is a 40 hexadecimal string and <see cref="CommitDate"/> has been successfully parsed.
        /// </summary>
        public bool IsValidSyntax { get; }

        /// <summary>
        /// Gets an error message whenever <see cref="IsValidSyntax"/> is false.
        /// Null otherwise.
        /// </summary>
        public string ParseErrorMessage { get; }

        /// <summary>
        /// Gets the original informational version (can be null).
        /// </summary>
        public string OriginalInformationalVersion { get; }

        /// <summary>
        /// Gets the semantic version string extracted from <see cref="OriginalInformationalVersion"/>.
        /// Null if the OriginalInformationalVersion attribute was not standard.
        /// </summary>
        public string RawSemVersion { get; }

        /// <summary>
        /// Gets the parsed <see cref="RawSemVersion"/> (that may be not <see cref="SVersion.IsValid"/>)
        /// or null if the OriginalInformationalVersion attribute was not standard.
        /// </summary>
        public SVersion SemVersion { get; }

        /// <summary>
        /// Gets the NuGet version extracted from the <see cref="OriginalInformationalVersion"/>.
        /// Null if the OriginalInformationalVersion attribute was not standard.
        /// </summary>
        public string RawNuGetVersion { get; }

        /// <summary>
        /// Gets the parsed <see cref="RawNuGetVersion"/> (that may be not <see cref="SVersion.IsValid"/>)
        /// or null if the OriginalInformationalVersion attribute was not standard.
        /// </summary>
        public SVersion NuGetVersion { get; }

        /// <summary>
        /// Gets the SHA1 extracted from the <see cref="OriginalInformationalVersion"/>.
        /// Null if the OriginalInformationalVersion attribute was not standard.
        /// </summary>
        public string CommitSha { get; }

        /// <summary>
        /// Gets the commit date extracted from the <see cref="InformationalVersion"/>.
        /// <see cref="DateTime.MinValue"/> if the OriginalInformationalVersion attribute was not standard.
        /// This date is required to be in Utc in "u" DateTime format.
        /// </summary>
        public DateTime CommitDate { get; }

        /// <summary>
        /// Overridden to return the <see cref="ParseErrorMessage"/> or the <see cref="OriginalInformationalVersion"/>.
        /// </summary>
        /// <returns>The textual representation.</returns>
        public override string ToString() => ParseErrorMessage ?? OriginalInformationalVersion;

        /// <summary>
        /// Parses the given string. Throws an <see cref="ArgumentException"/> if the syntax is invalid.
        /// To avoid exception, simply use the <see cref="InformationalVersion"/> constructor.
        /// </summary>
        /// <param name="s">The string to parse.</param>
        /// <returns>A valid (<see cref="IsValidSyntax"/>) informational version.</returns>
        static public InformationalVersion Parse( string s )
        {
            var i = new InformationalVersion( s );
            if( !i.IsValidSyntax ) throw new ArgumentException( i.ParseErrorMessage, nameof( s ) );
            return i;
        }

        /// <summary>
        /// Reads the <see cref="InformationalVersion"/> from a file, using <see cref="FileVersionInfo.GetVersionInfo"/>.
        /// This does not throw on read or parse errors (a null or white space <paramref name="filePath"/> still throws),
        /// instead the returned <see cref="IsValidSyntax"/> is false and <see cref="ParseErrorMessage"/>
        /// contains the error description.
        /// </summary>
        /// <param name="filePath">The path to the file. Must not be null or empty.</param>
        /// <returns>The informational version that may be invalid.</returns>
        static public InformationalVersion ReadFromFile( string filePath )
        {
            if( String.IsNullOrWhiteSpace( filePath ) )
            {
                throw new ArgumentNullException( nameof( filePath ) );
            }
            if( !File.Exists( filePath ) )
            {
                return new InformationalVersion( "File not found.", true );
            }
            try
            {
                var p = FileVersionInfo.GetVersionInfo( filePath )?.ProductVersion;
                return p != null
                        ? new InformationalVersion( p )
                        : new InformationalVersion( "The file has no FileVersionInfo.", true );
            }
            catch( Exception ex )
            {
                return new InformationalVersion( "Exception:" + ex.Message, true );
            }
        }

        /// <summary>
        /// Reads the <see cref="InformationalVersion"/> from a loaded assembly: its <see cref="AssemblyInformationalVersionAttribute"/>
        /// is used.
        /// </summary>
        /// <param name="a">The assembly. Must not be null.</param>
        /// <returns>The informational version that may be invalid.</returns>
        static public InformationalVersion ReadFromAssembly( Assembly a )
        {
            if( a == null ) throw new ArgumentNullException( nameof( a ) );
            try
            {
                var attr = (AssemblyInformationalVersionAttribute)Attribute.GetCustomAttribute( a, typeof( AssemblyInformationalVersionAttribute ) );
                return attr != null
                        ? new InformationalVersion( attr.InformationalVersion )
                        : new InformationalVersion( "Unable to find AssemblyInformationalVersionAttribute.", true );
            }
            catch( Exception ex )
            {
                return new InformationalVersion( "Exception:" + ex.Message, true );
            }
        }

        /// <summary>
        /// Builds a standard Informational version string.
        /// </summary>
        /// <param name="semVer">The semantic version. Must be not null nor empty (no syntactic validation is done).</param>
        /// <param name="nugetVer">The nuget version.
Must be not null nor empty (no syntaxic validation is done).</param> /// <param name="commitSha">The SHA1 of the commit (must be 40 hex digits).</param> /// <param name="commitDateUtc">The commit date (must be in UTC).</param> /// <returns>The informational version.</returns> static public string BuildInformationalVersion( string semVer, string nugetVer, string commitSha, DateTime commitDateUtc ) { if( string.IsNullOrWhiteSpace( semVer ) ) throw new ArgumentException( nameof( semVer ) ); if( string.IsNullOrWhiteSpace( nugetVer ) ) throw new ArgumentException( nameof( nugetVer ) ); if( commitSha == null || commitSha.Length != 40 || !commitSha.All( IsHexDigit ) ) throw new ArgumentException( "Must be a 40 hex digits string.", nameof( commitSha ) ); if( commitDateUtc.Kind != DateTimeKind.Utc ) throw new ArgumentException( "Must be a UTC date.", nameof( commitDateUtc ) ); return $"{semVer} ({nugetVer}) - SHA1: {commitSha} - CommitDate: {commitDateUtc.ToString( "u" )}"; } static bool IsHexDigit( char c ) => (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'); } }
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace NwaDg.Web.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements produced for generated arrays, collections, dictionaries and queryables.
        internal const int DefaultCollectionSize = 2;
        // Generator used for primitive/simple types (int, string, Guid, Uri, ...).
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            // A fresh reference map per top-level call: used to break circular object graphs.
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            // NOTE: the order of these checks is significant. Simple types come first,
            // then arrays/generics, then non-generic IDictionary before non-generic IList
            // (a Hashtable is not an IList), and complex POCOs are the final fallback.
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Dispatches a constructed generic type (Nullable<>, KeyValuePair<,>, Tuple<...>,
        // generic collection/dictionary interfaces) to the matching generator;
        // falls back to treating it as a complex object.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // For the interface itself, instantiate a List<T> as the concrete type.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Generates a Tuple<...> by generating each generic argument; returns null only
        // when every component failed to generate.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True when the open generic definition is one of the Tuple<...> arities (1 to 8).
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Generates a KeyValuePair<K,V>; returns null only when both key and value failed.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Generates a T[] of the requested size; returns null when no element could be generated.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Generates a dictionary by reflecting its Add/Contains(Key) methods so both
        // generic and non-generic (Hashtable) dictionaries are supported.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                // Skip duplicate keys rather than letting Add throw.
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Returns the first declared enum value, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Generates an IQueryable / IQueryable<T> by building a list (or object[]) and
        // wrapping it with Queryable.AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Bind the generic AsQueryable<T>(IEnumerable<T>) overload via reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Generates a collection via its Add method; returns null when every element is null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Generates the underlying value type of a Nullable<T>.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // Generates a POCO: default-constructs it, records it in the reference map
        // (so circular references reuse the same instance), then fills public
        // properties and fields.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Assigns generated sample values to every writable public instance property.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Assigns generated sample values to every public instance field.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Table-driven generator for primitive/simple framework types. The running
        // _index makes successive values distinct ("sample string 1", "sample string 2", ...).
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
using YAF.Lucene.Net.Support;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;

namespace YAF.Lucene.Net.Search
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using DirectoryReader = YAF.Lucene.Net.Index.DirectoryReader;
    using IOUtils = YAF.Lucene.Net.Util.IOUtils;

    /// <summary>
    /// Keeps track of current plus old <see cref="IndexSearcher"/>s, disposing
    /// the old ones once they have timed out.
    ///
    /// Use it like this:
    ///
    /// <code>
    ///     SearcherLifetimeManager mgr = new SearcherLifetimeManager();
    /// </code>
    ///
    /// Per search-request, if it's a "new" search request, then
    /// obtain the latest searcher you have (for example, by
    /// using <see cref="SearcherManager"/>), and then record this
    /// searcher:
    ///
    /// <code>
    ///     // Record the current searcher, and save the returned
    ///     // token into user's search results (eg as a hidden
    ///     // HTML form field):
    ///     long token = mgr.Record(searcher);
    /// </code>
    ///
    /// When a follow-up search arrives, for example the user
    /// clicks next page, drills down/up, etc., take the token
    /// that you saved from the previous search and:
    ///
    /// <code>
    ///     // If possible, obtain the same searcher as the last
    ///     // search:
    ///     IndexSearcher searcher = mgr.Acquire(token);
    ///     if (searcher != null)
    ///     {
    ///         // Searcher is still here
    ///         try
    ///         {
    ///             // do searching...
    ///         }
    ///         finally
    ///         {
    ///             mgr.Release(searcher);
    ///             // Do not use searcher after this!
    ///             searcher = null;
    ///         }
    ///     }
    ///     else
    ///     {
    ///         // Searcher was pruned -- notify user session timed
    ///         // out, or, pull fresh searcher again
    ///     }
    /// </code>
    ///
    /// Finally, in a separate thread, ideally the same thread
    /// that's periodically reopening your searchers, you should
    /// periodically prune old searchers:
    ///
    /// <code>
    ///     mgr.Prune(new PruneByAge(600.0));
    /// </code>
    ///
    /// <para><b>NOTE</b>: keeping many searchers around means
    /// you'll use more resources (open files, RAM) than a single
    /// searcher.  However, as long as you are using
    /// <see cref="DirectoryReader.OpenIfChanged(DirectoryReader)"/>, the searchers
    /// will usually share almost all segments and the added resource usage
    /// is contained.  When a large merge has completed, and
    /// you reopen, because that is a large change, the new
    /// searcher will use higher additional RAM than other
    /// searchers; but large merges don't complete very often and
    /// it's unlikely you'll hit two of them in your expiration
    /// window.  Still you should budget plenty of heap in the
    /// runtime to have a good safety margin.</para>
    /// </summary>
    public class SearcherLifetimeManager : IDisposable
    {
        // Conversion factor from Time.NanoTime() ticks to seconds.
        internal const double NANOS_PER_SEC = 1000000000.0;

        /// <summary>
        /// Holds one recorded searcher together with its reader version (used as the
        /// public token) and the wall-clock second it was recorded. IncRefs the reader
        /// on construction; DecRefs it on Dispose.
        /// </summary>
        private sealed class SearcherTracker : IComparable<SearcherTracker>, IDisposable
        {
            public IndexSearcher Searcher { get; private set; }
            public double RecordTimeSec { get; private set; }
            public long Version { get; private set; }

            public SearcherTracker(IndexSearcher searcher)
            {
                Searcher = searcher;
                Version = ((DirectoryReader)searcher.IndexReader).Version;
                searcher.IndexReader.IncRef();
                // Use nanoTime not currentTimeMillis since it [in
                // theory] reduces risk from clock shift
                RecordTimeSec = Time.NanoTime() / NANOS_PER_SEC;
            }

            // Newer searchers sort before older ones:
            public int CompareTo(SearcherTracker other)
            {
                return other.RecordTimeSec.CompareTo(RecordTimeSec);
            }

            public void Dispose()
            {
                lock (this)
                {
                    Searcher.IndexReader.DecRef();
                }
            }
        }

        private volatile bool _closed;

        // TODO: we could get by w/ just a "set"; need to have
        // Tracker hash by its version and have compareTo(Long)
        // compare to its version
        private readonly ConcurrentDictionary<long, Lazy<SearcherTracker>> _searchers = new ConcurrentDictionary<long, Lazy<SearcherTracker>>();

        // Throws if Dispose() has already been called.
        private void EnsureOpen()
        {
            if (_closed)
            {
                throw new ObjectDisposedException(this.GetType().GetTypeInfo().FullName, "this SearcherLifetimeManager instance is closed");
            }
        }

        /// <summary>
        /// Records that you are now using this <see cref="IndexSearcher"/>.
        /// Always call this when you've obtained a possibly new
        /// <see cref="IndexSearcher"/>, for example from
        /// <see cref="SearcherManager"/>.  It's fine if you already passed the
        /// same searcher to this method before.
        ///
        /// <para>This returns the <see cref="long"/> token that you can later pass
        /// to <see cref="Acquire(long)"/> to retrieve the same <see cref="IndexSearcher"/>.
        /// You should record this <see cref="long"/> token in the search results
        /// sent to your user, such that if the user performs a
        /// follow-on action (clicks next page, drills down, etc.)
        /// the token is returned.</para>
        /// </summary>
        public virtual long Record(IndexSearcher searcher)
        {
            EnsureOpen();
            // TODO: we don't have to use IR.getVersion to track;
            // could be risky (if it's buggy); we could get better
            // bug isolation if we assign our own private ID:
            var version = ((DirectoryReader)searcher.IndexReader).Version;
            var factoryMethodCalled = false;
            // Lazy<T> makes the GetOrAdd value factory effectively run-once, so two
            // threads recording the same version never create two trackers.
            var tracker = _searchers.GetOrAdd(version, l => new Lazy<SearcherTracker>(() => { factoryMethodCalled = true; return new SearcherTracker(searcher); })).Value;
            if (!factoryMethodCalled && tracker.Searcher != searcher)
            {
                throw new ArgumentException("the provided searcher has the same underlying reader version yet the searcher instance differs from before (new=" + searcher + " vs old=" + tracker.Searcher);
            }

            return version;
        }

        /// <summary>
        /// Retrieve a previously recorded <see cref="IndexSearcher"/>, if it
        /// has not yet been closed.
        ///
        /// <para><b>NOTE</b>: this may return <c>null</c> when the
        /// requested searcher has already timed out.  When this
        /// happens you should notify your user that their session
        /// timed out and that they'll have to restart their
        /// search.</para>
        ///
        /// <para>If this returns a non-null result, you must match
        /// later call <see cref="Release(IndexSearcher)"/> on this searcher, best
        /// from a finally clause.</para>
        /// </summary>
        public virtual IndexSearcher Acquire(long version)
        {
            EnsureOpen();
            Lazy<SearcherTracker> tracker;
            // TryIncRef fails (returns false) if the reader was already fully released.
            if (_searchers.TryGetValue(version, out tracker) && tracker.IsValueCreated && tracker.Value.Searcher.IndexReader.TryIncRef())
            {
                return tracker.Value.Searcher;
            }

            return null;
        }

        /// <summary>
        /// Release a searcher previously obtained from
        /// <see cref="Acquire(long)"/>.
        ///
        /// <para/><b>NOTE</b>: it's fine to call this after Dispose().
        /// </summary>
        public virtual void Release(IndexSearcher s)
        {
            s.IndexReader.DecRef();
        }

        /// <summary>
        /// See <see cref="Prune(IPruner)"/>. </summary>
        public interface IPruner
        {
            /// <summary>
            /// Return <c>true</c> if this searcher should be removed. </summary>
            /// <param name="ageSec"> How much time has passed since this
            ///        searcher was the current (live) searcher </param>
            /// <param name="searcher"> Searcher </param>
            bool DoPrune(double ageSec, IndexSearcher searcher);
        }

        /// <summary>
        /// Simple pruner that drops any searcher older by
        /// more than the specified seconds, than the newest
        /// searcher.
        /// </summary>
        public sealed class PruneByAge : IPruner
        {
            private readonly double maxAgeSec;

            public PruneByAge(double maxAgeSec)
            {
                if (maxAgeSec < 0)
                {
                    // Fixed: the message claimed "must be > 0" although the guard accepts 0.
                    throw new System.ArgumentException("maxAgeSec must be >= 0 (got " + maxAgeSec + ")");
                }
                this.maxAgeSec = maxAgeSec;
            }

            public bool DoPrune(double ageSec, IndexSearcher searcher)
            {
                return ageSec > maxAgeSec;
            }
        }

        /// <summary>
        /// Calls provided <see cref="IPruner"/> to prune entries.  The
        /// entries are passed to the <see cref="IPruner"/> in sorted (newest to
        /// oldest <see cref="IndexSearcher"/>) order.
        ///
        /// <para/><b>NOTE</b>: you must periodically call this, ideally
        /// from the same background thread that opens new
        /// searchers.
        /// </summary>
        public virtual void Prune(IPruner pruner)
        {
            lock (this)
            {
                // Cannot just pass searchers.values() to ArrayList ctor
                // (not thread-safe since the values can change while
                // ArrayList is init'ing itself); must instead iterate
                // ourselves:
                var trackers = _searchers.Values.Select(item => item.Value).ToList();
                trackers.Sort();
                var lastRecordTimeSec = 0.0;
                double now = Time.NanoTime() / NANOS_PER_SEC;
                foreach (var tracker in trackers)
                {
                    double ageSec;
                    if (lastRecordTimeSec == 0.0)
                    {
                        ageSec = 0.0;
                    }
                    else
                    {
                        ageSec = now - lastRecordTimeSec;
                    }
                    // First tracker is always age 0.0 sec, since it's
                    // still "live"; second tracker's age (= seconds since
                    // it was "live") is now minus first tracker's
                    // recordTime, etc:
                    if (pruner.DoPrune(ageSec, tracker.Searcher))
                    {
                        //System.out.println("PRUNE version=" + tracker.version + " age=" + ageSec + " ms=" + System.currentTimeMillis());
                        Lazy<SearcherTracker> _;
                        _searchers.TryRemove(tracker.Version, out _);
                        tracker.Dispose();
                    }
                    lastRecordTimeSec = tracker.RecordTimeSec;
                }
            }
        }

        /// <summary>
        /// Close this to future searching; any searches still in
        /// process in other threads won't be affected, and they
        /// should still call <see cref="Release(IndexSearcher)"/> after they are
        /// done.
        ///
        /// <para/><b>NOTE</b>: you must ensure no other threads are
        /// calling <see cref="Record(IndexSearcher)"/> while you call Dispose();
        /// otherwise it's possible not all searcher references
        /// will be freed.
        /// </summary>
        public virtual void Dispose()
        {
            lock (this)
            {
                _closed = true;
                IList<SearcherTracker> toClose = new List<SearcherTracker>(_searchers.Values.Select(item => item.Value));

                // Remove up front in case exc below, so we don't
                // over-decRef on double-close:
                foreach (var tracker in toClose)
                {
                    Lazy<SearcherTracker> _;
                    _searchers.TryRemove(tracker.Version, out _);
                }

                IOUtils.Dispose(toClose);

                // Make some effort to catch mis-use:
                if (_searchers.Count != 0)
                {
                    throw new InvalidOperationException("another thread called record while this SearcherLifetimeManager instance was being closed; not all searchers were closed");
                }
            }
        }
    }
}
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
using System; using System.Diagnostics.Contracts; using Microsoft.Research.ClousotRegression; namespace StructsAndProperties { public enum Kind { A, B } public struct S { Kind kind; public int Foo; [Pure] public static Kind GetKind(S s) { Contract.Ensures(Contract.Result<Kind>() == s.kind); return s.kind; } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 23, MethodILOffset = 0)] [RegressionOutcome (Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 30, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 15, MethodILOffset = 35)] public S(Kind kind) { Contract.Ensures(GetKind(Contract.ValueAtReturn(out this)) == kind); this.kind = kind; this.Foo = 0; } public void Impure() { } [Pure] public void Pure() { } } class StructProperties { [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 18, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 10, MethodILOffset = 18)] public void Test1(S s) { Contract.Requires(S.GetKind(s) == Kind.A); Test2(s); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is valid", PrimaryILOffset = 25, MethodILOffset = 0)] public void Test2(S s) { Contract.Requires(S.GetKind(s) == Kind.A); Contract.Assert(S.GetKind(s) == Kind.A); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is valid", PrimaryILOffset = 27, MethodILOffset = 0)] public void Test3(S s) { Contract.Requires(S.GetKind(s) == Kind.A); S t = s; Contract.Assert(S.GetKind(t) == Kind.A); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is 
valid", PrimaryILOffset = 17, MethodILOffset = 0)] public void Test4(S s) { S t = s; Contract.Assert(S.GetKind(t) == S.GetKind(s)); } [ClousotRegressionTest("regular"), RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 14, MethodILOffset = 30)] public S Test5(Kind kind) { Contract.Ensures(S.GetKind(Contract.Result<S>()) == kind); return new S(kind); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 3, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is valid", PrimaryILOffset = 18, MethodILOffset = 0)] public void Test6() { S s = Test5(Kind.B); Contract.Assert(S.GetKind(s) == Kind.B); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 3, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is valid", PrimaryILOffset = 18, MethodILOffset = 0)] public void Test7(Kind kind) { S s = Test5(kind); Contract.Assert(S.GetKind(s) == kind); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 18, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is valid", PrimaryILOffset = 33, MethodILOffset = 0)] public void Test8(S s) { Contract.Requires(S.GetKind(s) == Kind.A); s.Pure(); Contract.Assert(S.GetKind(s) == Kind.A); } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 20, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "assert is valid", PrimaryILOffset = 35, MethodILOffset = 0)] public void Test9(S s) { Contract.Requires(S.GetKind(s) == Kind.A); S t = s; s.Impure(); 
Contract.Assert(S.GetKind(t) == Kind.A); // okay } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 18, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 26, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: S.GetKind(s) == Kind.A", PrimaryILOffset = 10, MethodILOffset = 26)] public void Test1N(S s) { Contract.Requires(S.GetKind(s) == Kind.A); s.Impure(); Test2(s); // should not be proven } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 19, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = "assert unproven", PrimaryILOffset = 33, MethodILOffset = 0)] public void Test2N(S s) { Contract.Requires(S.GetKind(s) == Kind.A); s.Foo = 5; Contract.Assert(S.GetKind(s) == Kind.A); // should not be proven } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 5, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = "assert unproven", PrimaryILOffset = 25, MethodILOffset = 0)] public void Test3N(S s) { S t = s; s.Impure(); Contract.Assert(S.GetKind(t) == S.GetKind(s)); // should not be proven } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 23, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 31, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"ensures unproven: S.GetKind(Contract.Result<S>()) == kind", PrimaryILOffset = 14, MethodILOffset = 42)] public S 
Test4N(Kind kind) { Contract.Ensures(S.GetKind(Contract.Result<S>()) == kind); S s = new S(kind); s.Impure(); return s; // post should fail. } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 3, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 11, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = "assert unproven", PrimaryILOffset = 26, MethodILOffset = 0)] public void Test5N() { S s = Test5(Kind.B); s.Impure(); Contract.Assert(S.GetKind(s) == Kind.B); // should fail } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 3, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 11, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = "assert unproven", PrimaryILOffset = 26, MethodILOffset = 0)] public void Test6N(Kind kind) { S s = Test5(kind); s.Impure(); Contract.Assert(S.GetKind(s) == kind); // should fail } } public struct MyNullable<T> { T value; bool hasValue; public bool HasValue { get { Contract.Ensures(Contract.Result<bool>() == this.hasValue); return this.hasValue; } } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 23, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 30, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 10, MethodILOffset = 35)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 
15, MethodILOffset = 35)] public MyNullable(T value) { Contract.Ensures(Contract.ValueAtReturn(out this).HasValue); this.value = value; this.hasValue = true; } public T Value { get { Contract.Requires(this.HasValue); return value; } } [ClousotRegressionTest("cci1only")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 9, MethodILOffset = 30)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 14, MethodILOffset = 30)] public static implicit operator MyNullable<T>(T value) { Contract.Ensures(Contract.Result<MyNullable<T>>().HasValue); return new MyNullable<T>(value); } } public class MyNullableTest { [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 3, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: this.HasValue", PrimaryILOffset = 7, MethodILOffset = 3)] public static void Test1(MyNullable<int> optInt) { int x = optInt.Value; } [ClousotRegressionTest("regular")] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 3, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 18, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 7, MethodILOffset = 18)] public static int UseNullable(MyNullable<int> optY) { if (optY.HasValue) { return optY.Value; } return 0; } [ClousotRegressionTest] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 4, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 12, MethodILOffset = 0)] [RegressionOutcome(Outcome = 
ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 7, MethodILOffset = 12)] public static int UseNullable1(int x) { MyNullable<int> y = new MyNullable<int>(x); return y.Value; } [ClousotRegressionTest] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 10, MethodILOffset = 0)] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 7, MethodILOffset = 10)] public static int UseNullable2(int x) { MyNullable<int> y = x; return y.Value; } [ClousotRegressionTest] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 8, MethodILOffset = 7)] public static void UseNullable3(int x) { Pass(x); } [ClousotRegressionTest] [RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 8, MethodILOffset = 7)] public static void UseNullable4(int x) { Pass(new MyNullable<int>(x)); } public static void Pass(MyNullable<int> x) { Contract.Requires(x.HasValue); } } }
// NpgsqlTypes.NpgsqlTypesHelper.cs // // Author: // Glen Parker <glenebob@nwlink.com> // // Copyright (C) 2004 The Npgsql Development Team // npgsql-general@gborg.postgresql.org // http://gborg.postgresql.org/project/npgsql/projdisplay.php // // Permission to use, copy, modify, and distribute this software and its // documentation for any purpose, without fee, and without a written // agreement is hereby granted, provided that the above copyright notice // and this paragraph and the following two paragraphs appear in all copies. // // IN NO EVENT SHALL THE NPGSQL DEVELOPMENT TEAM BE LIABLE TO ANY PARTY // FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, // INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS // DOCUMENTATION, EVEN IF THE NPGSQL DEVELOPMENT TEAM HAS BEEN ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // // THE NPGSQL DEVELOPMENT TEAM SPECIFICALLY DISCLAIMS ANY WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY // AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS // ON AN "AS IS" BASIS, AND THE NPGSQL DEVELOPMENT TEAM HAS NO OBLIGATIONS // TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. // This file provides implementations of PostgreSQL specific data types that cannot // be mapped to standard .NET classes. 
using System;
using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Net.NetworkInformation;
using Revenj.DatabasePersistence.Postgres.Npgsql;

namespace Revenj.DatabasePersistence.Postgres.NpgsqlTypes
{
	/// <summary>
	/// Represents a PostgreSQL Point type
	/// </summary>
	public struct NpgsqlPoint : IEquatable<NpgsqlPoint>
	{
		private Single _x;
		private Single _y;

		public NpgsqlPoint(Single x, Single y)
		{
			_x = x;
			_y = y;
		}

		public Single X
		{
			get { return _x; }
			set { _x = value; }
		}

		public Single Y
		{
			get { return _y; }
			set { _y = value; }
		}

		// Exact floating-point comparison is intentional: points are equal
		// only when both coordinates match bit-for-bit.
		public bool Equals(NpgsqlPoint other)
		{
			return X == other.X && Y == other.Y;
		}

		public override bool Equals(object obj)
		{
			return obj != null && obj is NpgsqlPoint && Equals((NpgsqlPoint) obj);
		}

		public static bool operator ==(NpgsqlPoint x, NpgsqlPoint y)
		{
			return x.Equals(y);
		}

		public static bool operator !=(NpgsqlPoint x, NpgsqlPoint y)
		{
			return !(x == y);
		}

		// Rotate Y's hash by half the int width so (a, b) and (b, a)
		// hash differently.
		public override int GetHashCode()
		{
			return X.GetHashCode() ^ PGUtil.RotateShift(Y.GetHashCode(), sizeof (int)/2);
		}
	}

	/// <summary>
	/// Represents a PostgreSQL box type, stored as its upper-right and
	/// lower-left corner points.
	/// </summary>
	public struct NpgsqlBox : IEquatable<NpgsqlBox>
	{
		private NpgsqlPoint _upperRight;
		private NpgsqlPoint _lowerLeft;

		public NpgsqlBox(NpgsqlPoint upperRight, NpgsqlPoint lowerLeft)
		{
			_upperRight = upperRight;
			_lowerLeft = lowerLeft;
		}

		// Note the CSS-like argument order: (Top, Right, Bottom, Left).
		public NpgsqlBox(float Top, float Right, float Bottom, float Left)
			: this(new NpgsqlPoint(Right, Top), new NpgsqlPoint(Left, Bottom))
		{
		}

		public NpgsqlPoint UpperRight
		{
			get { return _upperRight; }
			set { _upperRight = value; }
		}

		public NpgsqlPoint LowerLeft
		{
			get { return _lowerLeft; }
			set { _lowerLeft = value; }
		}

		// Derived edges and extents; read-only views over the two corners.
		public float Left { get { return LowerLeft.X; } }
		public float Right { get { return UpperRight.X; } }
		public float Bottom { get { return LowerLeft.Y; } }
		public float Top { get { return UpperRight.Y; } }
		public float Width { get { return Right - Left; } }
		public float Height { get { return Top - Bottom; } }

		// A box is "empty" when either extent collapses to zero.
		public bool IsEmpty { get { return Width == 0 || Height == 0; } }

		public bool Equals(NpgsqlBox other)
		{
			return UpperRight == other.UpperRight && LowerLeft == other.LowerLeft;
		}

		public override bool Equals(object obj)
		{
			return obj != null && obj is NpgsqlBox && Equals((NpgsqlBox) obj);
		}

		public static bool operator ==(NpgsqlBox x, NpgsqlBox y)
		{
			return x.Equals(y);
		}

		public static bool operator !=(NpgsqlBox x, NpgsqlBox y)
		{
			return !(x == y);
		}

		// NOTE(review): this mixes three scalar edges (Top, Right, Bottom)
		// with LowerLeft, a whole point — presumably Left was intended for
		// the last term. Behavior kept as-is: it is still consistent with
		// Equals (equal boxes share LowerLeft, hence equal hashes).
		public override int GetHashCode()
		{
			return
				Top.GetHashCode() ^ PGUtil.RotateShift(Right.GetHashCode(), sizeof (int)/4) ^
				PGUtil.RotateShift(Bottom.GetHashCode(), sizeof (int)/2) ^
				PGUtil.RotateShift(LowerLeft.GetHashCode(), sizeof (int)*3/4);
		}
	}

	/// <summary>
	/// Represents a PostgreSQL Line Segment type.
	/// </summary>
	public struct NpgsqlLSeg : IEquatable<NpgsqlLSeg>
	{
		public NpgsqlPoint Start;
		public NpgsqlPoint End;

		public NpgsqlLSeg(NpgsqlPoint start, NpgsqlPoint end)
		{
			Start = start;
			End = end;
		}

		public override String ToString()
		{
			return String.Format("({0}, {1})", Start, End);
		}

		// Spread the four coordinate hashes by rotating each by a different
		// quarter of the int's bit width.
		public override int GetHashCode()
		{
			return
				Start.X.GetHashCode() ^ PGUtil.RotateShift(Start.Y.GetHashCode(), sizeof (int)/4) ^
				PGUtil.RotateShift(End.X.GetHashCode(), sizeof (int)/2) ^
				PGUtil.RotateShift(End.Y.GetHashCode(), sizeof (int)*3/4);
		}

		public bool Equals(NpgsqlLSeg other)
		{
			return Start == other.Start && End == other.End;
		}

		public override bool Equals(object obj)
		{
			return obj != null && obj is NpgsqlLSeg && Equals((NpgsqlLSeg) obj);
		}

		public static bool operator ==(NpgsqlLSeg x, NpgsqlLSeg y)
		{
			return x.Equals(y);
		}

		public static bool operator !=(NpgsqlLSeg x, NpgsqlLSeg y)
		{
			return !(x == y);
		}
	}

	/// <summary>
	/// Represents a PostgreSQL Path type.
/// </summary> public struct NpgsqlPath : IList<NpgsqlPoint>, IEquatable<NpgsqlPath> { private bool _open; private readonly List<NpgsqlPoint> _points; public NpgsqlPath(IEnumerable<NpgsqlPoint> points, bool open) { _points = new List<NpgsqlPoint>(points); _open = open; } public NpgsqlPath(IEnumerable<NpgsqlPoint> points) : this(points, false) { } public NpgsqlPath(NpgsqlPoint[] points) : this((IEnumerable<NpgsqlPoint>)points, false) { } public NpgsqlPath(bool open) { _points = new List<NpgsqlPoint>(); _open = open; } public NpgsqlPath(int capacity, bool open) { _points = new List<NpgsqlPoint>(capacity); _open = open; } public NpgsqlPath(int capacity) : this(capacity, false) { } public bool Open { get { return _open; } set { _open = value; } } public NpgsqlPoint this[int index] { get { return _points[index]; } set { _points[index] = value; } } public int Count { get { return _points.Count; } } public bool IsReadOnly { get { return false; } } public int IndexOf(NpgsqlPoint item) { return _points.IndexOf(item); } public void Insert(int index, NpgsqlPoint item) { _points.Insert(index, item); } public void RemoveAt(int index) { _points.RemoveAt(index); } public void Add(NpgsqlPoint item) { _points.Add(item); } public void Clear() { _points.Clear(); } public bool Contains(NpgsqlPoint item) { return _points.Contains(item); } public void CopyTo(NpgsqlPoint[] array, int arrayIndex) { _points.CopyTo(array, arrayIndex); } public bool Remove(NpgsqlPoint item) { return _points.Remove(item); } public IEnumerator<NpgsqlPoint> GetEnumerator() { return _points.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } public bool Equals(NpgsqlPath other) { if (Open != other.Open || Count != other.Count) return false; else if(ReferenceEquals(_points, other._points))//Short cut for shallow copies. 
return true; for (int i = 0; i != Count; ++i) { if (this[i] != other[i]) { return false; } } return true; } public override bool Equals(object obj) { return obj != null && obj is NpgsqlPath && Equals((NpgsqlPath) obj); } public static bool operator ==(NpgsqlPath x, NpgsqlPath y) { return x.Equals(y); } public static bool operator !=(NpgsqlPath x, NpgsqlPath y) { return !(x == y); } public override int GetHashCode() { int ret = 266370105;//seed with something other than zero to make paths of all zeros hash differently. foreach (NpgsqlPoint point in this) { //The ideal amount to shift each value is one that would evenly spread it throughout //the resultant bytes. Using the current result % 32 is essentially using a random value //but one that will be the same on subsequent calls. ret ^= PGUtil.RotateShift(point.GetHashCode(), ret%sizeof (int)); } return Open ? ret : -ret; } } /// <summary> /// Represents a PostgreSQL Polygon type. /// </summary> public struct NpgsqlPolygon : IList<NpgsqlPoint>, IEquatable<NpgsqlPolygon> { private readonly List<NpgsqlPoint> _points; public NpgsqlPolygon(IEnumerable<NpgsqlPoint> points) { _points = new List<NpgsqlPoint>(points); } public NpgsqlPolygon(NpgsqlPoint[] points) : this ((IEnumerable<NpgsqlPoint>) points) { } public NpgsqlPoint this[int index] { get { return _points[index]; } set { _points[index] = value; } } public int Count { get { return _points.Count; } } public bool IsReadOnly { get { return false; } } public int IndexOf(NpgsqlPoint item) { return _points.IndexOf(item); } public void Insert(int index, NpgsqlPoint item) { _points.Insert(index, item); } public void RemoveAt(int index) { _points.RemoveAt(index); } public void Add(NpgsqlPoint item) { _points.Add(item); } public void Clear() { _points.Clear(); } public bool Contains(NpgsqlPoint item) { return _points.Contains(item); } public void CopyTo(NpgsqlPoint[] array, int arrayIndex) { _points.CopyTo(array, arrayIndex); } public bool Remove(NpgsqlPoint item) { return 
_points.Remove(item); } public IEnumerator<NpgsqlPoint> GetEnumerator() { return _points.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } public bool Equals(NpgsqlPolygon other) { if (Count != other.Count) return false; else if(ReferenceEquals(_points, _points))//Shortcut for copies of each other. return true; for (int i = 0; i != Count; ++i) { if (this[i] != other[i]) { return false; } } return true; } public override bool Equals(object obj) { return obj is NpgsqlPolygon && Equals((NpgsqlPolygon) obj); } public static bool operator ==(NpgsqlPolygon x, NpgsqlPolygon y) { return x.Equals(y); } public static bool operator !=(NpgsqlPolygon x, NpgsqlPolygon y) { return !(x == y); } public override int GetHashCode() { int ret = 266370105;//seed with something other than zero to make paths of all zeros hash differently. foreach (NpgsqlPoint point in this) { //The ideal amount to shift each value is one that would evenly spread it throughout //the resultant bytes. Using the current result % 32 is essentially using a random value //but one that will be the same on subsequent calls. ret ^= PGUtil.RotateShift(point.GetHashCode(), ret%sizeof (int)); } return ret; } } /// <summary> /// Represents a PostgreSQL Circle type. 
	/// </summary>
	public struct NpgsqlCircle : IEquatable<NpgsqlCircle>
	{
		public NpgsqlPoint Center;
		public Double Radius;

		public NpgsqlCircle(NpgsqlPoint center, Double radius)
		{
			Center = center;
			Radius = radius;
		}

		// Exact floating-point comparison is intentional, as elsewhere in
		// this file.
		public bool Equals(NpgsqlCircle other)
		{
			return Center == other.Center && Radius == other.Radius;
		}

		public override bool Equals(object obj)
		{
			return obj is NpgsqlCircle && Equals((NpgsqlCircle) obj);
		}

		public override String ToString()
		{
			return string.Format("({0}), {1}", Center, Radius);
		}

		public static bool operator ==(NpgsqlCircle x, NpgsqlCircle y)
		{
			return x.Equals(y);
		}

		public static bool operator !=(NpgsqlCircle x, NpgsqlCircle y)
		{
			return !(x == y);
		}

		public override int GetHashCode()
		{
			return
				Center.X.GetHashCode() ^ PGUtil.RotateShift(Center.Y.GetHashCode(), sizeof (int)/4) ^
				PGUtil.RotateShift(Radius.GetHashCode(), sizeof (int)/2);
		}
	}

	/// <summary>
	/// Represents a PostgreSQL inet type.
	/// </summary>
	public struct NpgsqlInet : IEquatable<NpgsqlInet>
	{
		public IPAddress addr;
		// CIDR prefix length; 32 is treated throughout as "plain host
		// address" (no mask).
		public int mask;

		public NpgsqlInet(IPAddress addr, int mask)
		{
			this.addr = addr;
			this.mask = mask;
		}

		// NOTE(review): defaults to /32, the IPv4 host mask; an IPv6 host
		// address would conventionally be /128 — confirm against callers
		// before relying on this for IPv6 values.
		public NpgsqlInet(IPAddress addr)
		{
			this.addr = addr;
			this.mask = 32;
		}

		/// <summary>
		/// Parses "address" or "address/prefix" (CIDR) notation.
		/// </summary>
		public NpgsqlInet(string addr)
		{
			if (addr.IndexOf('/') > 0)
			{
				string[] addrbits = addr.Split('/');
				if (addrbits.GetUpperBound(0) != 1)
				{
					throw new FormatException("Invalid number of parts in CIDR specification");
				}
				this.addr = IPAddress.Parse(addrbits[0]);
				this.mask = int.Parse(addrbits[1]);
			}
			else
			{
				this.addr = IPAddress.Parse(addr);
				this.mask = 32;
			}
		}

		public override String ToString()
		{
			// Only append the prefix when it is not the implicit /32 host mask.
			if (mask != 32)
			{
				return string.Format("{0}/{1}", addr, mask);
			}
			return addr.ToString();
		}

		// Casting a network (mask != 32) to a bare address would lose the
		// prefix, so it is rejected.
		public static explicit operator IPAddress(NpgsqlInet x)
		{
			if (x.mask != 32)
			{
				throw new InvalidCastException("Cannot cast CIDR network to address");
			}
			return x.addr;
		}

		public static implicit operator NpgsqlInet(IPAddress ipaddress)
		{
			return new NpgsqlInet(ipaddress);
		}

		public bool Equals(NpgsqlInet other)
		{
			return addr.Equals(other.addr) && mask == other.mask;
		}

		public override bool Equals(object obj)
		{
			return obj != null && obj is NpgsqlInet && Equals((NpgsqlInet) obj);
		}

		public override int GetHashCode()
		{
			return PGUtil.RotateShift(addr.GetHashCode(), mask%32);
		}

		public static bool operator ==(NpgsqlInet x, NpgsqlInet y)
		{
			return x.Equals(y);
		}

		public static bool operator !=(NpgsqlInet x, NpgsqlInet y)
		{
			return !(x == y);
		}
	}

	/// <summary>
	/// Represents a PostgreSQL MacAddress type.
	/// </summary>
	public struct NpgsqlMacAddress : IEquatable<NpgsqlMacAddress>
	{
		public PhysicalAddress macAddr;

		public NpgsqlMacAddress(PhysicalAddress macAddr)
		{
			this.macAddr = macAddr;
		}

		/// <summary>
		/// Builds a MAC address from a string, ignoring any separator
		/// characters; null/empty input maps to PhysicalAddress.None.
		/// </summary>
		/// <param name="macAddr">The macAddr parameter must contain a string that can only consist of numbers
		/// and upper-case letters as hexadecimal digits. (See PhysicalAddress.Parse method on MSDN)</param>
		public NpgsqlMacAddress(string macAddr)
		{
			if (!string.IsNullOrEmpty(macAddr))
			{
				// NOTE(review): the variable name says "lower" but the value is
				// upper-cased (PhysicalAddress.Parse requires upper-case hex).
				string lowerMacAddr = macAddr.ToUpper();
				System.Text.StringBuilder sb = new System.Text.StringBuilder();
				// Keep only hex digits, dropping separators like ':' or '-'.
				foreach (char c in lowerMacAddr)
				{
					if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F'))
					{
						sb.Append(c);
					}
				}
				this.macAddr = PhysicalAddress.Parse(sb.ToString());
			}
			else
			{
				this.macAddr = PhysicalAddress.None;
			}
		}

		public override String ToString()
		{
			return macAddr.ToString();
		}

		public static explicit operator PhysicalAddress(NpgsqlMacAddress x)
		{
			return x.macAddr;
		}

		public static implicit operator NpgsqlMacAddress(PhysicalAddress macAddr)
		{
			return new NpgsqlMacAddress(macAddr);
		}

		public bool Equals(NpgsqlMacAddress other)
		{
			return macAddr.Equals(other.macAddr);
		}

		public override bool Equals(object obj)
		{
			return obj != null && obj is NpgsqlMacAddress && Equals((NpgsqlMacAddress)obj);
		}

		public override int GetHashCode()
		{
			int ret = 266370105; //seed with something other than zero to make paths of all zeros hash differently.
			return PGUtil.RotateShift(macAddr.GetHashCode(), ret);
		}

		public static bool operator ==(NpgsqlMacAddress x, NpgsqlMacAddress y)
		{
			return x.Equals(y);
		}

		public static bool operator !=(NpgsqlMacAddress x, NpgsqlMacAddress y)
		{
			return !(x == y);
		}
	}
}
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gaxgrpc = Google.Api.Gax.Grpc; using lro = Google.LongRunning; using grpccore = Grpc.Core; using moq = Moq; using st = System.Threading; using stt = System.Threading.Tasks; using xunit = Xunit; namespace Google.Cloud.Compute.V1.Tests { /// <summary>Generated unit tests.</summary> public sealed class GeneratedForwardingRulesClientTest { [xunit::FactAttribute] public void GetRequestObject() { moq::Mock<ForwardingRules.ForwardingRulesClient> mockGrpcClient = new moq::Mock<ForwardingRules.ForwardingRulesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetForwardingRuleRequest request = new GetForwardingRuleRequest { Region = "regionedb20d96", Project = "projectaa6ff846", ForwardingRule = "forwarding_rule51d5478e", }; ForwardingRule expectedResponse = new ForwardingRule { Id = 11672635353343658936UL, Kind = "kindf7aa39d9", Name = "name1c9368b0", CreationTimestamp = "creation_timestamp235e59a1", IPAddress = "I_p_addressf1537179", Ports = { "ports9860f047", }, IsMirroringCollector = false, Region = "regionedb20d96", LabelFingerprint = "label_fingerprint06ccff3a", PscConnectionStatus = ForwardingRule.Types.PscConnectionStatus.UndefinedPscConnectionStatus, Target = "targetaefbae42", PortRange = "port_ranged4420f7d", 
ServiceDirectoryRegistrations = { new ForwardingRuleServiceDirectoryRegistration(), }, Network = "networkd22ce091", Fingerprint = "fingerprint009e6052", PscConnectionId = 1768355415909345202UL, IpVersion = ForwardingRule.Types.IpVersion.UndefinedIpVersion, BackendService = "backend_serviceed490d45", Subnetwork = "subnetworkf55bf572", ServiceName = "service_named5df05d5", LoadBalancingScheme = ForwardingRule.Types.LoadBalancingScheme.UndefinedLoadBalancingScheme, ServiceLabel = "service_label5f95d0c0", Description = "description2cf9da67", AllPorts = false, SelfLink = "self_link7e87f12d", MetadataFilters = { new MetadataFilter(), }, IPProtocol = ForwardingRule.Types.IPProtocol.Udp, AllowGlobalAccess = false, Labels = { { "key8a0b6e3c", "value60c16320" }, }, NetworkTier = ForwardingRule.Types.NetworkTier.Standard, }; mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ForwardingRulesClient client = new ForwardingRulesClientImpl(mockGrpcClient.Object, null); ForwardingRule response = client.Get(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetRequestObjectAsync() { moq::Mock<ForwardingRules.ForwardingRulesClient> mockGrpcClient = new moq::Mock<ForwardingRules.ForwardingRulesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetForwardingRuleRequest request = new GetForwardingRuleRequest { Region = "regionedb20d96", Project = "projectaa6ff846", ForwardingRule = "forwarding_rule51d5478e", }; ForwardingRule expectedResponse = new ForwardingRule { Id = 11672635353343658936UL, Kind = "kindf7aa39d9", Name = "name1c9368b0", CreationTimestamp = "creation_timestamp235e59a1", IPAddress = "I_p_addressf1537179", Ports = { "ports9860f047", }, IsMirroringCollector = false, Region = "regionedb20d96", LabelFingerprint = 
"label_fingerprint06ccff3a", PscConnectionStatus = ForwardingRule.Types.PscConnectionStatus.UndefinedPscConnectionStatus, Target = "targetaefbae42", PortRange = "port_ranged4420f7d", ServiceDirectoryRegistrations = { new ForwardingRuleServiceDirectoryRegistration(), }, Network = "networkd22ce091", Fingerprint = "fingerprint009e6052", PscConnectionId = 1768355415909345202UL, IpVersion = ForwardingRule.Types.IpVersion.UndefinedIpVersion, BackendService = "backend_serviceed490d45", Subnetwork = "subnetworkf55bf572", ServiceName = "service_named5df05d5", LoadBalancingScheme = ForwardingRule.Types.LoadBalancingScheme.UndefinedLoadBalancingScheme, ServiceLabel = "service_label5f95d0c0", Description = "description2cf9da67", AllPorts = false, SelfLink = "self_link7e87f12d", MetadataFilters = { new MetadataFilter(), }, IPProtocol = ForwardingRule.Types.IPProtocol.Udp, AllowGlobalAccess = false, Labels = { { "key8a0b6e3c", "value60c16320" }, }, NetworkTier = ForwardingRule.Types.NetworkTier.Standard, }; mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ForwardingRule>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ForwardingRulesClient client = new ForwardingRulesClientImpl(mockGrpcClient.Object, null); ForwardingRule responseCallSettings = await client.GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ForwardingRule responseCancellationToken = await client.GetAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void Get() { moq::Mock<ForwardingRules.ForwardingRulesClient> mockGrpcClient = new moq::Mock<ForwardingRules.ForwardingRulesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new 
moq::Mock<lro::Operations.OperationsClient>().Object); GetForwardingRuleRequest request = new GetForwardingRuleRequest { Region = "regionedb20d96", Project = "projectaa6ff846", ForwardingRule = "forwarding_rule51d5478e", }; ForwardingRule expectedResponse = new ForwardingRule { Id = 11672635353343658936UL, Kind = "kindf7aa39d9", Name = "name1c9368b0", CreationTimestamp = "creation_timestamp235e59a1", IPAddress = "I_p_addressf1537179", Ports = { "ports9860f047", }, IsMirroringCollector = false, Region = "regionedb20d96", LabelFingerprint = "label_fingerprint06ccff3a", PscConnectionStatus = ForwardingRule.Types.PscConnectionStatus.UndefinedPscConnectionStatus, Target = "targetaefbae42", PortRange = "port_ranged4420f7d", ServiceDirectoryRegistrations = { new ForwardingRuleServiceDirectoryRegistration(), }, Network = "networkd22ce091", Fingerprint = "fingerprint009e6052", PscConnectionId = 1768355415909345202UL, IpVersion = ForwardingRule.Types.IpVersion.UndefinedIpVersion, BackendService = "backend_serviceed490d45", Subnetwork = "subnetworkf55bf572", ServiceName = "service_named5df05d5", LoadBalancingScheme = ForwardingRule.Types.LoadBalancingScheme.UndefinedLoadBalancingScheme, ServiceLabel = "service_label5f95d0c0", Description = "description2cf9da67", AllPorts = false, SelfLink = "self_link7e87f12d", MetadataFilters = { new MetadataFilter(), }, IPProtocol = ForwardingRule.Types.IPProtocol.Udp, AllowGlobalAccess = false, Labels = { { "key8a0b6e3c", "value60c16320" }, }, NetworkTier = ForwardingRule.Types.NetworkTier.Standard, }; mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ForwardingRulesClient client = new ForwardingRulesClientImpl(mockGrpcClient.Object, null); ForwardingRule response = client.Get(request.Project, request.Region, request.ForwardingRule); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetAsync() { 
moq::Mock<ForwardingRules.ForwardingRulesClient> mockGrpcClient = new moq::Mock<ForwardingRules.ForwardingRulesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetForwardingRuleRequest request = new GetForwardingRuleRequest { Region = "regionedb20d96", Project = "projectaa6ff846", ForwardingRule = "forwarding_rule51d5478e", }; ForwardingRule expectedResponse = new ForwardingRule { Id = 11672635353343658936UL, Kind = "kindf7aa39d9", Name = "name1c9368b0", CreationTimestamp = "creation_timestamp235e59a1", IPAddress = "I_p_addressf1537179", Ports = { "ports9860f047", }, IsMirroringCollector = false, Region = "regionedb20d96", LabelFingerprint = "label_fingerprint06ccff3a", PscConnectionStatus = ForwardingRule.Types.PscConnectionStatus.UndefinedPscConnectionStatus, Target = "targetaefbae42", PortRange = "port_ranged4420f7d", ServiceDirectoryRegistrations = { new ForwardingRuleServiceDirectoryRegistration(), }, Network = "networkd22ce091", Fingerprint = "fingerprint009e6052", PscConnectionId = 1768355415909345202UL, IpVersion = ForwardingRule.Types.IpVersion.UndefinedIpVersion, BackendService = "backend_serviceed490d45", Subnetwork = "subnetworkf55bf572", ServiceName = "service_named5df05d5", LoadBalancingScheme = ForwardingRule.Types.LoadBalancingScheme.UndefinedLoadBalancingScheme, ServiceLabel = "service_label5f95d0c0", Description = "description2cf9da67", AllPorts = false, SelfLink = "self_link7e87f12d", MetadataFilters = { new MetadataFilter(), }, IPProtocol = ForwardingRule.Types.IPProtocol.Udp, AllowGlobalAccess = false, Labels = { { "key8a0b6e3c", "value60c16320" }, }, NetworkTier = ForwardingRule.Types.NetworkTier.Standard, }; mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<ForwardingRule>(stt::Task.FromResult(expectedResponse), null, null, null, null)); 
ForwardingRulesClient client = new ForwardingRulesClientImpl(mockGrpcClient.Object, null); ForwardingRule responseCallSettings = await client.GetAsync(request.Project, request.Region, request.ForwardingRule, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); ForwardingRule responseCancellationToken = await client.GetAsync(request.Project, request.Region, request.ForwardingRule, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } } }
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using BTDB.StreamLayer;

namespace BTDB.KVDBLayer
{
    /// <summary>
    /// IFileCollection kept entirely in memory: every "file" is an append-only list
    /// of fixed-size byte buffers. Intended for tests and non-persistent databases.
    /// </summary>
    public class InMemoryFileCollection : IFileCollection
    {
        // disable invalid warning about using volatile inside Interlocked.CompareExchange
#pragma warning disable 420
        // Snapshot map of file index -> File. Never mutated in place: updates copy the
        // dictionary and swap it in via Interlocked.CompareExchange, so readers
        // (GetFile/GetCount/Enumerate) need no lock.
        volatile Dictionary<uint, File> _files = new Dictionary<uint, File>();
        // Source of unique file indexes; advanced with Interlocked.Increment in AddFile.
        int _maxFileId;

        public InMemoryFileCollection()
        {
            _maxFileId = 0;
        }

        // One in-memory file: a growing sequence of OneBufSize-byte buffers.
        class File : IFileCollectionFile
        {
            readonly InMemoryFileCollection _owner;
            readonly uint _index;
            // Backing buffers; _lock guards membership changes and size reads.
            readonly List<byte[]> _data = new List<byte[]>();
            readonly object _lock = new object();
            // Single shared appender; its current position defines the file size.
            readonly Writer _writer;
            const int OneBufSize = 128 * 1024;

            public File(InMemoryFileCollection owner, uint index)
            {
                _owner = owner;
                _index = index;
                _writer = new Writer(this);
            }

            public uint Index
            {
                get { return _index; }
            }

            // Sequential reader; the readable size is fixed at construction time,
            // so data appended later is not visible to this reader.
            sealed class Reader : AbstractBufferedReader
            {
                readonly File _file;
                ulong _ofs;               // offset just past the buffer currently exposed
                readonly ulong _totalSize;

                public Reader(File file)
                {
                    _file = file;
                    _totalSize = file.GetSize();
                    _ofs = 0;
                    FillBuffer();
                }

                protected override void FillBuffer()
                {
                    if (_ofs == _totalSize)
                    {
                        // Signal end-of-stream to the base reader.
                        Pos = -1;
                        End = -1;
                        return;
                    }
                    Buf = _file._data[(int)(_ofs / OneBufSize)];
                    // The last buffer may be only partially filled.
                    End = (int)Math.Min(_totalSize - _ofs, OneBufSize);
                    _ofs += (ulong)End;
                    Pos = 0;
                }

                public override long GetCurrentPosition()
                {
                    // _ofs already counts the whole current buffer; back off the unread tail.
                    return (long)_ofs - End + Pos;
                }
            }

            public AbstractBufferedReader GetExclusiveReader()
            {
                return new Reader(this);
            }

            public void RandomRead(byte[] data, int offset, int size, ulong position)
            {
                while (size > 0)
                {
                    byte[] buf;
                    lock (_lock)
                    {
                        // Reading past the currently written size is an error.
                        if (position + (ulong)size > (ulong)_writer.GetCurrentPosition()) throw new EndOfStreamException();
                        buf = _data[(int)(position / OneBufSize)];
                    }
                    var bufofs = (int)(position % OneBufSize);
                    // Copy at most to the end of the current buffer, then continue in the next.
                    var copy = Math.Min(size, OneBufSize - bufofs);
                    Array.Copy(buf, bufofs, data, offset, copy);
                    offset += copy;
                    size -= copy;
                    position += (ulong)copy;
                }
            }

            // Appender: always keeps one (possibly partially used) buffer registered
            // in _data so RandomRead can reach bytes as soon as they are written.
            sealed class Writer : AbstractBufferedWriter
            {
                readonly File _file;
                ulong _ofs; // bytes in completed (fully written) buffers

                public Writer(File file)
                {
                    _file = file;
                    Pos = 0;
                    Buf = new byte[OneBufSize];
                    End = OneBufSize;
                    lock (_file._lock)
                    {
                        _file._data.Add(Buf);
                    }
                }

                public override void FlushBuffer()
                {
                    // Only roll over when the current buffer is completely full.
                    if (Pos != End) return;
                    _ofs += OneBufSize;
                    Pos = 0;
                    Buf = new byte[OneBufSize];
                    lock (_file._lock)
                    {
                        _file._data.Add(Buf);
                    }
                }

                public override long GetCurrentPosition()
                {
                    return (long)(_ofs + (ulong)Pos);
                }

                // Test hook: pretend the first buffer holds only `size` valid bytes.
                // Only legal before the first buffer roll-over (_ofs == 0).
                internal void SimulateCorruptionBySetSize(int size)
                {
                    if (size > OneBufSize || _ofs != 0) throw new ArgumentOutOfRangeException();
                    Pos = size;
                }
            }

            public AbstractBufferedWriter GetAppenderWriter()
            {
                return _writer;
            }

            public void HardFlush()
            {
                // Nothing to flush: the data already lives in memory.
            }

            public void SetSize(long size)
            {
                if ((ulong)size != GetSize()) throw new InvalidOperationException("For in memory collection SetSize should never be set to something else than GetSize");
            }

            public void Truncate()
            {
                // No-op for the in-memory implementation.
            }

            public ulong GetSize()
            {
                lock (_lock)
                {
                    return (ulong)_writer.GetCurrentPosition();
                }
            }

            public void Remove()
            {
                // Lock-free removal: copy the snapshot without this file and CAS it in,
                // retrying if another thread swapped the map meanwhile.
                Dictionary<uint, File> newFiles;
                Dictionary<uint, File> oldFiles;
                do
                {
                    oldFiles = _owner._files;
                    File value;
                    if (!oldFiles.TryGetValue(_index, out value)) return;
                    newFiles = new Dictionary<uint, File>(oldFiles);
                    newFiles.Remove(_index);
                } while (Interlocked.CompareExchange(ref _owner._files, newFiles, oldFiles) != oldFiles);
            }

            internal void SimulateCorruptionBySetSize(int size)
            {
                _writer.SimulateCorruptionBySetSize(size);
            }
        }

        public IFileCollectionFile AddFile(string humanHint)
        {
            // humanHint is ignored: in-memory files are identified only by index.
            var index = (uint)Interlocked.Increment(ref _maxFileId);
            var file = new File(this, index);
            // Same copy-and-CAS publication pattern as File.Remove.
            Dictionary<uint, File> newFiles;
            Dictionary<uint, File> oldFiles;
            do
            {
                oldFiles = _files;
                newFiles = new Dictionary<uint, File>(oldFiles) { { index, file } };
            } while (Interlocked.CompareExchange(ref _files, newFiles, oldFiles) != oldFiles);
            return file;
        }

        public uint GetCount()
        {
            return (uint)_files.Count;
        }

        public IFileCollectionFile GetFile(uint index)
        {
            File value;
            return _files.TryGetValue(index, out value) ? value : null;
        }

        public IEnumerable<IFileCollectionFile> Enumerate()
        {
            // Safe without locking: published snapshots are never mutated afterwards.
            return _files.Values;
        }

        public void Dispose()
        {
            // Nothing to release: all state is managed memory.
        }

        // Test hook: corrupt the first file (index 1 is the first id handed out).
        internal void SimulateCorruptionBySetSize(int size)
        {
            _files[1].SimulateCorruptionBySetSize(size);
        }
    }
}
//
// Encog(tm) Core v3.3 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System.Linq;
using Encog.ML.Data;
using Encog.ML.Data.Basic;

namespace Encog.Util.Arrayutil
{
    /// <summary>
    /// Produce a time-series from an array: each element of the result pairs an
    /// input window of <see cref="InputWindow"/> values with the following
    /// <see cref="PredictWindow"/> values.
    /// </summary>
    public class TemporalWindowArray
    {
        /// <summary>
        /// The fields that are to be processed (null until Analyze is called).
        /// </summary>
        private TemporalWindowField[] _fields;

        /// <summary>
        /// The size of the input window.
        /// </summary>
        private int _inputWindow;

        /// <summary>
        /// The size of the prediction window.
        /// </summary>
        private int _predictWindow;

        /// <summary>
        /// Construct a time-series from an array.
        /// </summary>
        /// <param name="theInputWindow">The size of the input window.</param>
        /// <param name="thePredictWindow">The size of the predict window.</param>
        public TemporalWindowArray(int theInputWindow, int thePredictWindow)
        {
            _inputWindow = theInputWindow;
            _predictWindow = thePredictWindow;
        }

        /// <value>The fields that are to be processed.</value>
        public TemporalWindowField[] Fields
        {
            get { return _fields; }
        }

        /// <value>the inputWindow to set</value>
        public int InputWindow
        {
            get { return _inputWindow; }
            set { _inputWindow = value; }
        }

        /// <value>the predictWindow to set</value>
        public int PredictWindow
        {
            get { return _predictWindow; }
            set { _predictWindow = value; }
        }

        /// <summary>
        /// Analyze the 1D array: registers a single input-and-predict field.
        /// Note: the array contents are not inspected by this overload.
        /// </summary>
        /// <param name="array">The array to analyze.</param>
        public void Analyze(double[] array)
        {
            _fields = new TemporalWindowField[1];
            _fields[0] = new TemporalWindowField("0") {Action = TemporalType.InputAndPredict};
        }

        /// <summary>
        /// Analyze the 2D array: one input-and-predict field per column,
        /// using the width of the first row.
        /// </summary>
        /// <param name="array">The 2D array to analyze.</param>
        public void Analyze(double[][] array)
        {
            int length = array[0].Length;
            _fields = new TemporalWindowField[length];
            for (int i = 0; i < length; i++)
            {
                _fields[i] = new TemporalWindowField("" + i) {Action = TemporalType.InputAndPredict};
            }
        }

        /// <summary>
        /// Count the number of input fields, or fields used to predict.
        /// </summary>
        /// <returns>The number of input fields.</returns>
        public int CountInputFields()
        {
            return _fields.Count(field => field.Input);
        }

        /// <summary>
        /// Count the number of fields that are in the prediction.
        /// </summary>
        /// <returns>The number of fields predicted.</returns>
        public int CountPredictFields()
        {
            return _fields.Count(field => field.Predict);
        }

        /// <summary>
        /// Process the array into a sliding-window data set.
        /// </summary>
        /// <param name="data">The array to process.</param>
        /// <returns>A neural data set that contains the time-series.</returns>
        public IMLDataSet Process(double[] data)
        {
            var result = new BasicMLDataSet();
            int stopPoint = data.Length - (_inputWindow + _predictWindow);
            // NOTE(review): the final possible window start (i == stopPoint) is never
            // used; kept as-is to preserve historical output - confirm intent.
            for (int i = 0; i < stopPoint; i++)
            {
                result.Add(CreatePair(data, i));
            }
            return result;
        }

        /// <summary>
        /// Processes the specified data array in an IMLDataset.
        /// You can send a [][] array directly with this method.
        /// </summary>
        /// <param name="data">The data.</param>
        /// <returns>A data set with one pair per row (see ProcessToPair).</returns>
        public IMLDataSet Process(double[][] data)
        {
            var result = new BasicMLDataSet();
            foreach (double[] doubles in data)
            {
                // NOTE(review): rows shorter than one full window yield a null pair,
                // exactly as the original code did - confirm callers tolerate this.
                result.Add(ProcessToPair(doubles));
            }
            return result;
        }

        /// <summary>
        /// Process the data array and return an IMLDataPair.
        /// Historically this method built a pair for every window start but kept only
        /// the last one; it now computes that final pair directly, which preserves
        /// the output exactly while avoiding the wasted O(n*window) work.
        /// </summary>
        /// <param name="data">The array to process.</param>
        /// <returns>The pair for the last window start, or null when the array is too
        /// short for the original loop to have produced any pair.</returns>
        public IMLDataPair ProcessToPair(double[] data)
        {
            int stopPoint = data.Length - (_inputWindow + _predictWindow);
            if (stopPoint < 1)
            {
                // The original loop body never executed in this case.
                return null;
            }
            // The original loop's last iteration used i == stopPoint - 1.
            return CreatePair(data, stopPoint - 1);
        }

        /// <summary>
        /// Build one input/ideal pair whose input window starts at the given index.
        /// </summary>
        /// <param name="data">The source array.</param>
        /// <param name="index">Start index of the input window.</param>
        /// <returns>The pair for this window.</returns>
        private IMLDataPair CreatePair(double[] data, int index)
        {
            var inputData = new BasicMLData(_inputWindow);
            var idealData = new BasicMLData(_predictWindow);

            // handle input window
            for (int j = 0; j < _inputWindow; j++)
            {
                inputData[j] = data[index++];
            }

            // handle predict window
            for (int j = 0; j < _predictWindow; j++)
            {
                idealData[j] = data[index++];
            }

            return new BasicMLDataPair(inputData, idealData);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // See the LICENSE file in the project root for more information. // // // Authors: // Marek Habersack <mhabersack@novell.com> // // Copyright (C) 2010 Novell, Inc. (http://novell.com/) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Runtime.Caching;
using System.Runtime.Caching.Hosting;
using System.Text;
using Xunit;

using MonoTests.Common;

namespace MonoTests.System.Runtime.Caching
{
    // Tests for HostFileChangeMonitor (constructor validation, host notification
    // plumbing, and UniqueId composition).
    // NOTE(review): several [Fact] methods below are declared "private static";
    // xUnit only discovers public test methods by default, so those appear to be
    // deliberately disabled - confirm before changing their visibility.
    public class HostFileChangeMonitorTest
    {
        // Invalid path lists (relative path, null entry, empty entry, null list,
        // empty list) must all throw from the constructor.
        [Fact]
        public void Constructor_Exceptions()
        {
            string relPath = Path.Combine("relative", "file", "path");
            var paths = new List<string> {
                relPath
            };

            Assert.Throws<ArgumentException>(() =>
            {
                new HostFileChangeMonitor(paths);
            });

            paths.Clear();
            paths.Add(null);
            Assert.Throws<ArgumentException>(() =>
            {
                new HostFileChangeMonitor(paths);
            });

            paths.Clear();
            paths.Add(string.Empty);
            Assert.Throws<ArgumentException>(() =>
            {
                new HostFileChangeMonitor(paths);
            });

            Assert.Throws<ArgumentNullException>(() =>
            {
                new HostFileChangeMonitor(null);
            });

            paths.Clear();
            Assert.Throws<ArgumentException>(() =>
            {
                new HostFileChangeMonitor(paths);
            });
        }

        // A missing file whose parent directory is also missing throws; a missing
        // file under an existing directory is accepted and monitored.
        [Fact]
        private static void Constructor_MissingFiles_Handler()
        {
            HostFileChangeMonitor monitor;
            string missingFile = Path.GetFullPath(Path.Combine(Guid.NewGuid().ToString("N"), "file", "path"));

            var paths = new List<string> {
                missingFile
            };

            // Actually thrown by FileSystemWatcher constructor - note that the exception message suggests the file's
            // parent directory is being watched, not the file itself:
            //
            // MonoTests.System.Runtime.Caching.HostFileChangeMonitorTest.Constructor_MissingFiles:
            // System.ArgumentException : The directory name c:\missing\file is invalid.
            // at System.IO.FileSystemWatcher..ctor(String path, String filter)
            // at System.IO.FileSystemWatcher..ctor(String path)
            // at System.Runtime.Caching.FileChangeNotificationSystem.System.Runtime.Caching.Hosting.IFileChangeNotificationSystem.StartMonitoring(String filePath, OnChangedCallback onChangedCallback, Object& state, DateTimeOffset& lastWriteTime, Int64& fileSize)
            // at System.Runtime.Caching.HostFileChangeMonitor.InitDisposableMembers()
            // at System.Runtime.Caching.HostFileChangeMonitor..ctor(IList`1 filePaths)
            // at MonoTests.System.Runtime.Caching.HostFileChangeMonitorTest.Constructor_MissingFiles() in c:\users\grendel\documents\visual studio 2010\Projects\System.Runtime.Caching.Test\System.Runtime.Caching.Test\System.Runtime.Caching\HostFileChangeMonitorTest.cs:line 68
            Assert.Throws<ArgumentException>(() =>
            {
                new HostFileChangeMonitor(paths);
            });

            missingFile = Path.GetFullPath(Guid.NewGuid().ToString("N"));

            paths.Clear();
            paths.Add(missingFile);
            monitor = new HostFileChangeMonitor(paths);
            Assert.Equal(1, monitor.FilePaths.Count);
            Assert.Equal(missingFile, monitor.FilePaths[0]);
            // Missing files contribute a fixed pseudo timestamp/size to UniqueId.
            //??
            Assert.Equal(missingFile + "701CE1722770000FFFFFFFFFFFFFFFF", monitor.UniqueId);
            monitor.Dispose();

            // Duplicate entries are preserved verbatim in FilePaths but do not
            // repeat inside UniqueId.
            paths.Add(missingFile);
            monitor = new HostFileChangeMonitor(paths);
            Assert.Equal(2, monitor.FilePaths.Count);
            Assert.Equal(missingFile, monitor.FilePaths[0]);
            Assert.Equal(missingFile, monitor.FilePaths[1]);
            //??
            Assert.Equal(missingFile + "701CE1722770000FFFFFFFFFFFFFFFF", monitor.UniqueId);
            monitor.Dispose();
        }

        // Duplicate paths in the input list must not throw.
        [Fact]
        public void Constructor_Duplicates()
        {
            HostFileChangeMonitor monitor;
            string missingFile = Path.GetFullPath(Guid.NewGuid().ToString("N"));

            var paths = new List<string> {
                missingFile,
                missingFile
            };

            // Just checks if it doesn't throw any exception for dupes
            monitor = new HostFileChangeMonitor(paths);
            monitor.Dispose();
        }

        // Creates a temp directory with two known files.
        // Returns (testDir, firstFile, secondFile, pathList).
        private static Tuple<string, string, string, IList<string>> SetupMonitoring()
        {
            string testPath = Path.Combine(Path.GetTempPath(), "HostFileChangeMonitorTest", "Dispose_Calls_StopMonitoring");
            if (!Directory.Exists(testPath))
                Directory.CreateDirectory(testPath);

            string firstFile = Path.Combine(testPath, "FirstFile.txt");
            string secondFile = Path.Combine(testPath, "SecondFile.txt");

            File.WriteAllText(firstFile, "I am the first file.");
            File.WriteAllText(secondFile, "I am the second file.");

            var paths = new List<string> {
                firstFile,
                secondFile
            };

            return new Tuple<string, string, string, IList<string>>(testPath, firstFile, secondFile, paths);
        }

        // Best-effort removal of everything SetupMonitoring created; never throws.
        private static void CleanupMonitoring(Tuple<string, string, string, IList<string>> setup)
        {
            string testPath = setup != null ? setup.Item1 : null;
            if (string.IsNullOrEmpty(testPath) || !Directory.Exists(testPath))
                return;

            foreach (string f in Directory.EnumerateFiles(testPath))
            {
                try
                {
                    File.Delete(f);
                }
                catch
                {
                    // ignore
                }
            }

            try
            {
                // 2 nested folders were created by SetupMonitoring, so we'll delete both
                var dirInfo = new DirectoryInfo(testPath);
                var parentDirInfo = dirInfo.Parent;
                dirInfo.Delete(recursive: true);
                parentDirInfo.Delete(recursive: true);
            }
            catch
            {
                // ignore
            }
        }

        // The constructor must call the host's StartMonitoring once per file.
        [Fact]
        [ActiveIssue(25168)]
        private static void Constructor_Calls_StartMonitoring_Handler()
        {
            Tuple<string, string, string, IList<string>> setup = null;
            try
            {
                var tns = new TestNotificationSystem();
                ObjectCache.Host = tns;
                setup = SetupMonitoring();
                var monitor = new HostFileChangeMonitor(setup.Item4);

                Assert.True(tns.StartMonitoringCalled);
                Assert.Equal(2U, tns.StartMonitoringCallCount);
            }
            finally
            {
                CleanupMonitoring(setup);
            }
        }

        // A change notification disposes the monitor, which must call
        // StopMonitoring once per file.
        [Fact]
        [ActiveIssue(25168)]
        private static void Dispose_Calls_StopMonitoring_Handler()
        {
            Tuple<string, string, string, IList<string>> setup = null;
            try
            {
                var tns = new TestNotificationSystem();
                ObjectCache.Host = tns;
                setup = SetupMonitoring();
                var monitor = new HostFileChangeMonitor(setup.Item4);
                tns.FakeChanged(setup.Item2);

                Assert.True(tns.StopMonitoringCalled);
                Assert.Equal(2U, tns.StopMonitoringCallCount);
            }
            finally
            {
                CleanupMonitoring(setup);
            }
        }

        // When the host hands back null state, disposal must not call StopMonitoring.
        [Fact]
        [ActiveIssue(25168)]
        private static void Dispose_NullState_NoStopMonitoring_Handler()
        {
            Tuple<string, string, string, IList<string>> setup = null;
            try
            {
                var tns = new TestNotificationSystem();
                tns.UseNullState = true;
                ObjectCache.Host = tns;
                setup = SetupMonitoring();
                var monitor = new HostFileChangeMonitor(setup.Item4);
                tns.FakeChanged(setup.Item2);

                Assert.False(tns.StopMonitoringCalled);
                Assert.Equal(0U, tns.StopMonitoringCallCount);
            }
            finally
            {
                CleanupMonitoring(setup);
            }
        }

        // UniqueId is each path concatenated with hex last-write ticks and length
        // ({0}{1:X}{2:X}); directories use -1 for length; duplicate paths are not
        // repeated in the id.
        [Fact]
        public void UniqueId()
        {
            Tuple<string, string, string, IList<string>> setup = null;
            try
            {
                setup = SetupMonitoring();
                FileInfo fi;
                var monitor = new HostFileChangeMonitor(setup.Item4);
                var sb = new StringBuilder();

                fi = new FileInfo(setup.Item2);
                sb.AppendFormat("{0}{1:X}{2:X}", setup.Item2, fi.LastWriteTimeUtc.Ticks, fi.Length);

                fi = new FileInfo(setup.Item3);
                sb.AppendFormat("{0}{1:X}{2:X}", setup.Item3, fi.LastWriteTimeUtc.Ticks, fi.Length);

                Assert.Equal(sb.ToString(), monitor.UniqueId);

                var list = new List<string>(setup.Item4);
                list.Add(setup.Item1);
                monitor = new HostFileChangeMonitor(list);
                var di = new DirectoryInfo(setup.Item1);
                sb.AppendFormat("{0}{1:X}{2:X}", setup.Item1, di.LastWriteTimeUtc.Ticks, -1L);
                Assert.Equal(sb.ToString(), monitor.UniqueId);

                list.Add(setup.Item1);
                monitor = new HostFileChangeMonitor(list);
                Assert.Equal(sb.ToString(), monitor.UniqueId);
                monitor.Dispose();
            }
            finally
            {
                CleanupMonitoring(setup);
            }
        }
    }
}
/*
 Licensed to the Apache Software Foundation (ASF) under one or more
 contributor license agreements.  See the NOTICE file distributed with
 this work for additional information regarding copyright ownership.
 The ASF licenses this file to You under the Apache License, Version 2.0
 (the "License"); you may not use this file except in compliance with
 the License.  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
using java.lang;
using java.util;

namespace stab.query {

    // Read-only view of the elements sharing one grouping key (reference-typed key).
    public interface Grouping<TKey, TElement> : Iterable<TElement> {
        TKey Key^;
    }

    // Primitive-key specializations below avoid boxing the key value.
    public interface IntGrouping<TElement> : Iterable<TElement> {
        int Key^;
    }

    public interface LongGrouping<TElement> : Iterable<TElement> {
        long Key^;
    }

    public interface FloatGrouping<TElement> : Iterable<TElement> {
        float Key^;
    }

    public interface DoubleGrouping<TElement> : Iterable<TElement> {
        double Key^;
    }

    // Growable-array bucket used by the grouping operators.
    // Next chains buckets that hash to the same slot.
    class DefaultGrouping<TKey, TElement> : Grouping<TKey, TElement> {
        private TElement[] array;
        private int count;

        DefaultGrouping(TKey key) {
            this.Key = key;
#pragma warning disable 313
            this.array = new TElement[4];
#pragma warning restore
        }

        public TKey Key^;

        public Iterator<TElement> iterator() {
            for (int i = 0; i < count; i++) {
                yield return array[i];
            }
        }

        public DefaultGrouping<TKey, TElement> Next;

        // Appends an element, doubling the backing array when full.
        void add(TElement item) {
            if (count == sizeof(array)) {
#pragma warning disable 313
                var t = new TElement[count * 2];
#pragma warning restore
                System.arraycopy(array, 0, t, 0, count);
                array = t;
            }
            array[count++] = item;
        }
    }

    class DefaultIntGrouping<TElement> : IntGrouping<TElement> {
        private TElement[] array;
        private int count;

        DefaultIntGrouping(int key) {
            this.Key = key;
#pragma warning disable 313
            this.array = new TElement[4];
#pragma warning restore
        }

        public int Key^;

        public Iterator<TElement> iterator() {
            for (int i = 0; i < count; i++) {
                yield return array[i];
            }
        }

        public DefaultIntGrouping<TElement> Next;

        void add(TElement item) {
            if (count == sizeof(array)) {
#pragma warning disable 313
                var t = new TElement[count * 2];
#pragma warning restore
                System.arraycopy(array, 0, t, 0, count);
                array = t;
            }
            array[count++] = item;
        }
    }

    class DefaultLongGrouping<TElement> : LongGrouping<TElement> {
        private TElement[] array;
        private int count;

        DefaultLongGrouping(long key) {
            this.Key = key;
#pragma warning disable 313
            this.array = new TElement[4];
#pragma warning restore
        }

        public long Key^;

        public Iterator<TElement> iterator() {
            for (int i = 0; i < count; i++) {
                yield return array[i];
            }
        }

        public DefaultLongGrouping<TElement> Next;

        void add(TElement item) {
            if (count == sizeof(array)) {
#pragma warning disable 313
                var t = new TElement[count * 2];
#pragma warning restore
                System.arraycopy(array, 0, t, 0, count);
                array = t;
            }
            array[count++] = item;
        }
    }

    class DefaultFloatGrouping<TElement> : FloatGrouping<TElement> {
        private TElement[] array;
        private int count;

        DefaultFloatGrouping(float key) {
            this.Key = key;
#pragma warning disable 313
            this.array = new TElement[4];
            // FIX: restore was missing here, leaving warning 313 disabled for the
            // rest of the file; every sibling class restores it after this line.
#pragma warning restore
        }

        public float Key^;

        public Iterator<TElement> iterator() {
            for (int i = 0; i < count; i++) {
                yield return array[i];
            }
        }

        public DefaultFloatGrouping<TElement> Next;

        void add(TElement item) {
            if (count == sizeof(array)) {
#pragma warning disable 313
                var t = new TElement[count * 2];
#pragma warning restore
                System.arraycopy(array, 0, t, 0, count);
                array = t;
            }
            array[count++] = item;
        }
    }

    class DefaultDoubleGrouping<TElement> : DoubleGrouping<TElement> {
        private TElement[] array;
        private int count;

        DefaultDoubleGrouping(double key) {
            this.Key = key;
#pragma warning disable 313
            this.array = new TElement[4];
#pragma warning restore
        }

        public double Key^;

        public Iterator<TElement> iterator() {
            for (int i = 0; i < count; i++) {
                yield return array[i];
            }
        }

        public DefaultDoubleGrouping<TElement> Next;

        void add(TElement item) {
            if (count == sizeof(array)) {
#pragma warning disable 313
                var t = new TElement[count * 2];
#pragma warning restore
                System.arraycopy(array, 0, t, 0, count);
                array = t;
            }
            array[count++] = item;
        }
    }
}
using System;
using System.Globalization;
using System.Threading;

namespace MonetDB.Concurrency
{
    /// <summary>
    /// simple no-arg delegate type; can use this for anonymous methods, e.g.
    /// <code>
    /// SafeThread safeThrd = new SafeThread((SimpleDelegate) delegate { dosomething(); });
    /// </code>
    /// </summary>
    public delegate void SimpleDelegate();

    /// <summary>
    /// delegate for thread-threw-exception event
    /// </summary>
    /// <param name="thrd">the SafeThread that threw the exception</param>
    /// <param name="ex">the exception thrown</param>
    public delegate void ThreadThrewExceptionHandler(SafeThread thrd, Exception ex);

    /// <summary>
    /// delegate for thread-completed event
    /// </summary>
    /// <param name="thrd">the SafeThread that completed processing</param>
    /// <param name="hadException">true if the thread terminated due to an exception</param>
    /// <param name="ex">the exception that terminated the thread, or null if completed successfully</param>
    public delegate void ThreadCompletedHandler(SafeThread thrd, bool hadException, Exception ex);

    /// <summary>
    /// This class implements a Thread wrapper to trap unhandled exceptions
    /// thrown by the thread-start delegate. Add ThreadException event
    /// handlers to be notified of such exceptions and take custom actions
    /// (such as restart, clean-up, et al, depending on what the SafeThread was
    /// doing in your application). Add ThreadCompleted event handlers to be
    /// notified when the thread has completed processing.
    /// </summary>
    public class SafeThread : MarshalByRefObject
    {
        /// <summary>
        /// gets the internal thread being used.
        /// Note: Start() replaces this with a fresh Thread wrapping StartTarget,
        /// so the Thread created by the constructor is discarded at start time.
        /// </summary>
        public Thread ThreadObject { get; private set; }

        /// <summary>
        /// the thread-start object, if any
        /// </summary>
        private readonly ThreadStart _ts;

        /// <summary>
        /// the parameterized thread-start object, if any
        /// </summary>
        private readonly ParameterizedThreadStart _pts;

        /// <summary>
        /// the SimpleDelegate target, if any
        /// </summary>
        private readonly SimpleDelegate _dlg;

        /// <summary>
        /// gets the thread-start argument, if any
        /// </summary>
        public object ThreadStartArg { get; private set; }

        /// <summary>
        /// gets the last exception thrown
        /// </summary>
        public Exception LastException { get; private set; }

        /// <summary>
        /// the name of the internal thread
        /// </summary>
        private string _name;

        /// <summary>
        /// gets/sets the name of the internal thread.
        /// When unset, a stable fallback derived from the hash code is returned.
        /// </summary>
        public string Name
        {
            get { return _name ?? "Thread#" + GetHashCode(); }
            set { _name = value; }
        }

        /// <summary>
        /// object tag - use to hold extra info about the SafeThread
        /// </summary>
        public object Tag { get; set; }

        /// <summary>
        /// default constructor for SafeThread
        /// </summary>
        public SafeThread()
        {
            ShouldReportThreadAbort = false;
        }

        /// <summary>
        /// SafeThread constructor using ThreadStart object
        /// </summary>
        /// <param name="ts">ThreadStart object to use</param>
        public SafeThread(ThreadStart ts) : this()
        {
            _ts = ts;
            ThreadObject = new Thread(ts);
        }

        /// <summary>
        /// SafeThread constructor using ParameterizedThreadStart object
        /// </summary>
        /// <param name="pts">ParameterizedThreadStart to use</param>
        public SafeThread(ParameterizedThreadStart pts) : this()
        {
            _pts = pts;
            ThreadObject = new Thread(pts);
        }

        /// <summary>
        /// SafeThread constructor using SimpleDelegate object for anonymous methods, e.g.
        /// <code>
        /// SafeThread safeThrd = new SafeThread((SimpleDelegate) delegate { dosomething(); });
        /// </code>
        /// </summary>
        /// <param name="sd">the delegate to run on the thread</param>
        public SafeThread(SimpleDelegate sd) : this()
        {
            _dlg = sd;
            _pts = new ParameterizedThreadStart(this.CallDelegate);
            ThreadObject = new Thread(_pts);
        }

        /// <summary>
        /// thread-threw-exception event
        /// </summary>
        public event ThreadThrewExceptionHandler ThreadException;

        /// <summary>
        /// called when a thread throws an exception.
        /// ThreadAbortException is suppressed unless ShouldReportThreadAbort is set.
        /// </summary>
        /// <param name="ex">Exception thrown</param>
        public void OnThreadException(Exception ex)
        {
            // best-effort: never let a faulty event handler kill the worker thread
            try
            {
                if (ex is ThreadAbortException && !ShouldReportThreadAbort)
                {
                    return;
                }
                if (ThreadException != null)
                {
                    ThreadException.Invoke(this, ex);
                }
            }
            catch (Exception)
            {
            }
        }

        /// <summary>
        /// thread-completed event
        /// </summary>
        public event ThreadCompletedHandler ThreadCompleted;

        /// <summary>
        /// called when a thread completes processing
        /// </summary>
        private void OnThreadCompleted(bool bHadException, Exception ex)
        {
            // best-effort: swallow handler exceptions, same as OnThreadException
            try
            {
                if (ThreadCompleted != null)
                {
                    ThreadCompleted.Invoke(this, bHadException, ex);
                }
            }
            catch (Exception)
            {
            }
        }

        /// <summary>
        /// starts thread with target if any.
        /// Runs on the worker thread; traps any exception from the target,
        /// records it in LastException and raises the two events.
        /// </summary>
        private void StartTarget()
        {
            Exception exceptn = null;
            var bHadException = false;
            try
            {
                // _ts takes precedence over _pts when both are somehow set
                if (_ts != null)
                {
                    _ts.Invoke();
                }
                else if (_pts != null)
                {
                    _pts.Invoke(ThreadStartArg);
                }
            }
            catch (Exception ex)
            {
                bHadException = true;
                exceptn = ex;
                this.LastException = ex;
                OnThreadException(ex);
            }
            finally
            {
                OnThreadCompleted(bHadException, exceptn);
            }
        }

        /// <summary>
        /// thread-start internal method for SimpleDelegate target
        /// </summary>
        /// <param name="arg">unused</param>
        private void CallDelegate(object arg)
        {
            this._dlg.Invoke();
        }

        /// <summary>
        /// starts thread execution.
        /// Creates a brand-new Thread around StartTarget (discarding any Thread
        /// created by the constructor) and applies a deferred apartment state.
        /// </summary>
        public void Start()
        {
            ThreadObject = new Thread(new ThreadStart(StartTarget)) { Name = this.Name };
            if (_aptState != null)
            {
                ThreadObject.TrySetApartmentState((ApartmentState) _aptState);
            }
            ThreadObject.Start();
        }

        /// <summary>
        /// starts thread execution with parameter
        /// </summary>
        /// <param name="val">parameter object</param>
        public void Start(object val)
        {
            ThreadStartArg = val;
            Start();
        }

        /// <summary>
        /// gets/sets a flag to control whether thread-abort exception is reported or not
        /// </summary>
        public bool ShouldReportThreadAbort { get; set; }

        /// <summary>
        /// abort the thread execution.
        /// NOTE(review): Thread.Abort is obsolete and throws on .NET Core/.NET 5+ -
        /// confirm the target runtime before relying on this.
        /// </summary>
        public void Abort()
        {
            ThreadObject.Abort();
        }

        /// <summary>
        /// gets the Culture of the internal thread, or null if no thread exists.
        /// </summary>
        public CultureInfo CurrentCulture => ThreadObject != null ? ThreadObject.CurrentCulture : null;

        /// <summary>
        /// gets the current culture used by the Resource Manager
        /// to look up culture-specific resources at run time, or null if no thread exists.
        /// </summary>
        public CultureInfo CurrentUiCulture => ThreadObject != null ? ThreadObject.CurrentUICulture : null;

        /// <summary>
        /// gets an System.Threading.ExecutionContext object that contains information
        /// about the various contexts of the current thread.
        /// </summary>
        public ExecutionContext ExecutionContext => ThreadObject != null ? ThreadObject.ExecutionContext : null;

        /// <summary>
        /// Returns an System.Threading.ApartmentState value indicating the apartment state,
        /// or ApartmentState.Unknown if no thread exists.
        /// </summary>
        /// <returns>the apartment state of the internal thread</returns>
        public ApartmentState GetApartmentState()
        {
            return ThreadObject != null ? ThreadObject.GetApartmentState() : ApartmentState.Unknown;
        }

        /// <summary>
        /// Interrupts a thread that is in the WaitSleepJoin thread state.
        /// </summary>
        public void Interrupt()
        {
            if (ThreadObject != null)
            {
                ThreadObject.Interrupt();
            }
        }

        /// <summary>
        /// gets a value indicating the execution status of the thread
        /// </summary>
        public bool IsAlive => ThreadObject != null && ThreadObject.IsAlive;

        /// <summary>
        /// Gets or sets a value indicating whether or not a thread is a background thread.
        /// Setting is silently ignored when no thread exists.
        /// </summary>
        public bool IsBackground
        {
            get { return ThreadObject != null && ThreadObject.IsBackground; }
            set
            {
                if (ThreadObject != null)
                {
                    ThreadObject.IsBackground = value;
                }
            }
        }

        /// <summary>
        /// gets a value indicating whether or not a thread belongs to the managed thread pool
        /// </summary>
        public bool IsThreadPoolThread => ThreadObject != null && ThreadObject.IsThreadPoolThread;

        /// <summary>
        /// Blocks the calling thread until a thread terminates,
        /// while continuing to perform standard COM and SendMessage pumping.
        /// </summary>
        public void Join()
        {
            if (ThreadObject != null)
            {
                ThreadObject.Join();
            }
        }

        /// <summary>
        /// Blocks the calling thread until a thread terminates or the specified time elapses,
        /// while continuing to perform standard COM and SendMessage pumping.
        /// </summary>
        /// <param name="millisecondsTimeout">the number of milliseconds to wait for the
        /// thread to terminate</param>
        public bool Join(int millisecondsTimeout)
        {
            return ThreadObject != null && ThreadObject.Join(millisecondsTimeout);
        }

        /// <summary>
        /// Blocks the calling thread until a thread terminates or the specified time elapses,
        /// while continuing to perform standard COM and SendMessage pumping.
        /// </summary>
        /// <param name="timeout">a System.TimeSpan set to the amount of time to wait
        /// for the thread to terminate </param>
        public bool Join(TimeSpan timeout)
        {
            return ThreadObject != null && ThreadObject.Join(timeout);
        }

        /// <summary>
        /// Gets a unique identifier for the current managed thread, or 0 if no thread exists
        /// </summary>
        public int ManagedThreadId => ThreadObject != null ? ThreadObject.ManagedThreadId : 0;

        /// <summary>
        /// gets or sets a value indicating the scheduling priority of a thread.
        /// Returns ThreadPriority.Lowest when no thread exists; setting is then ignored.
        /// </summary>
        public ThreadPriority Priority
        {
            get { return ThreadObject != null ? ThreadObject.Priority : ThreadPriority.Lowest; }
            set
            {
                if (ThreadObject != null)
                {
                    ThreadObject.Priority = value;
                }
            }
        }

        // Deferred apartment state, applied by Start(); boxed object so that
        // null means "never requested".
        private object _aptState;

        /// <summary>
        /// sets the ApartmentState of a thread before it is started.
        /// When no thread exists yet the state is remembered and applied by Start().
        /// </summary>
        /// <param name="state">ApartmentState</param>
        public void SetApartmentState(ApartmentState state)
        {
            if (ThreadObject == null)
            {
                _aptState = state;
            }
            else
            {
                ThreadObject.SetApartmentState(state);
            }
        }

        /// <summary>
        /// gets a value containing the states of the current thread
        /// </summary>
        public ThreadState ThreadState => ThreadObject != null ? ThreadObject.ThreadState : ThreadState.Unstarted;

        /// <summary>
        /// returns a System.String that represents the current System.Object
        /// </summary>
        /// <returns>the internal thread's string form when available</returns>
        public override string ToString()
        {
            return ThreadObject != null ? ThreadObject.ToString() : base.ToString();
        }

        /// <summary>
        /// sets the ApartmentState of a thread before it is started.
        /// NOTE: when no thread exists yet the state is stored for Start() but this
        /// method returns false (it could not be applied immediately).
        /// </summary>
        /// <param name="state">ApartmentState</param>
        public bool TrySetApartmentState(ApartmentState state)
        {
            if (ThreadObject != null)
            {
                return ThreadObject.TrySetApartmentState(state);
            }
            _aptState = state;
            return false;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Text;

/// <summary>
/// Test cases for String.TrimEnd(char[]).
/// </summary>
public class StringTrim3
{
    // Bounds for randomly generated test strings. These never change, so they
    // are compile-time constants rather than instance fields.
    private const int c_MINI_STRING_LENGTH = 8;
    private const int c_MAX_STRING_LENGTH = 256;

    // U+200B stops being trimmable http://msdn2.microsoft.com/en-us/library/t97s7bs3.aspx
    // U+FEFF has been deprecated as a trimmable space
    private readonly string[] spaceStrings = new string[]{"\u0009","\u000A","\u000B","\u000C","\u000D","\u0020",
        "\u00A0","\u2000","\u2001","\u2002","\u2003","\u2004","\u2005",
        "\u2006","\u2007","\u2008","\u2009","\u200A","\u3000"};

    /// <summary>
    /// Test entry point: returns 100 on success, 0 on failure.
    /// </summary>
    public static int Main()
    {
        StringTrim3 st3 = new StringTrim3();
        TestLibrary.TestFramework.BeginTestCase("StringTrim3");

        if (st3.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    /// <summary>
    /// Runs every positive scenario; all must pass.
    /// </summary>
    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        retVal = PosTest4() && retVal;
        retVal = PosTest5() && retVal;
        return retVal;
    }

    #region PostiveTesting

    /// <summary>
    /// TrimEnd on an empty string must return an empty string.
    /// </summary>
    public bool PosTest1()
    {
        bool retVal = true;
        string strA;
        char[] charA;
        string ActualResult;
        TestLibrary.TestFramework.BeginScenario("PosTest1: empty string trimEnd char[]");
        try
        {
            strA = string.Empty;
            charA = new char[] { TestLibrary.Generator.GetChar(-55), TestLibrary.Generator.GetChar(-55), TestLibrary.Generator.GetChar(-55) };
            ActualResult = strA.TrimEnd(charA);
            if (ActualResult != string.Empty)
            {
                TestLibrary.TestFramework.LogError("001", "empty string trimEnd char[] ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("002", "Unexpect exception:" + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>
    /// TrimEnd removes trailing chars from the set until it meets a char that
    /// is not in the set (charEnd here), leaving the head untouched.
    /// </summary>
    public bool PosTest2()
    {
        bool retVal = true;
        string strA;
        char[] charA;
        string ActualResult;
        // Fixed: scenario text previously said "trimStart" although this
        // scenario exercises TrimEnd.
        TestLibrary.TestFramework.BeginScenario("PosTest2:normal string trimEnd char[] one");
        try
        {
            strA = TestLibrary.Generator.GetString(-55, false, c_MINI_STRING_LENGTH, c_MAX_STRING_LENGTH);
            // Draw the probe chars from disjoint ranges so charEnd is never in charA.
            char char1 = this.GetChar(0, c_MINI_STRING_LENGTH);
            char char2 = this.GetChar(c_MINI_STRING_LENGTH, c_MINI_STRING_LENGTH + 68);
            char char3 = this.GetChar(c_MINI_STRING_LENGTH + 68, c_MAX_STRING_LENGTH / 2);
            char charEnd = this.GetChar(c_MAX_STRING_LENGTH / 2, c_MAX_STRING_LENGTH);
            charA = new char[] { char1, char2, char3 };
            string strA1 = char1.ToString() + char3.ToString() + strA + charEnd.ToString() + char1.ToString() + char3.ToString();
            ActualResult = strA1.TrimEnd(charA);
            if (ActualResult.ToString() != char1.ToString() + char3.ToString() + strA.ToString() + charEnd.ToString())
            {
                TestLibrary.TestFramework.LogError("003", "normal string trimEnd char[] one ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("004", "Unexpect exception:" + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>
    /// TrimEnd stops at the last char not in the set (charStart), even when
    /// earlier chars in the tail would be trimmable.
    /// </summary>
    public bool PosTest3()
    {
        bool retVal = true;
        string strA;
        char[] charA;
        string ActualResult;
        TestLibrary.TestFramework.BeginScenario("PosTest3:normal string trimEnd char[] two");
        try
        {
            strA = TestLibrary.Generator.GetString(-55, false, c_MINI_STRING_LENGTH, c_MAX_STRING_LENGTH);
            char char1 = this.GetChar(0, c_MINI_STRING_LENGTH);
            char char2 = this.GetChar(c_MINI_STRING_LENGTH, c_MINI_STRING_LENGTH + 68);
            // Fixed: was GetChar(c_MAX_STRING_LENGTH + 68, c_MAX_STRING_LENGTH / 2),
            // an inverted range (324 > 128) that made GetChar silently fall back to
            // its lower bound. PosTest2 shows the intended range is
            // [c_MINI_STRING_LENGTH + 68, c_MAX_STRING_LENGTH / 2).
            char char3 = this.GetChar(c_MINI_STRING_LENGTH + 68, c_MAX_STRING_LENGTH / 2);
            char charStart = this.GetChar(c_MAX_STRING_LENGTH / 2, c_MAX_STRING_LENGTH);
            charA = new char[] { char1, char2, char3 };
            string strA1 = char1.ToString() + char3.ToString() + charStart.ToString() + strA + char2.ToString() + charStart.ToString() + char1.ToString() + char3.ToString();
            ActualResult = strA1.TrimEnd(charA);
            if (ActualResult.ToString() != char1.ToString() + char3.ToString() + charStart.ToString() + strA + char2.ToString() + charStart.ToString())
            {
                TestLibrary.TestFramework.LogError("005", "normal string trimEnd char[] two ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("006", "Unexpect exception:" + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>
    /// TrimEnd with an empty char[] trims trailing whitespace.
    /// </summary>
    public bool PosTest4()
    {
        bool retVal = true;
        string strA;
        char[] charA;
        string ActualResult;
        TestLibrary.TestFramework.BeginScenario("PosTest4:normal string trimEnd char[] three");
        try
        {
            strA = TestLibrary.Generator.GetString(-55, false, c_MINI_STRING_LENGTH, c_MAX_STRING_LENGTH);
            charA = new char[0];
            string strB = spaceStrings[this.GetInt32(0, spaceStrings.Length)];
            string strA1 = strB + "H" + strA + "D" + strB;
            ActualResult = strA1.TrimEnd(charA);
            if (ActualResult.ToString() != strB + "H" + strA + "D")
            {
                TestLibrary.TestFramework.LogError("007", "normal string trimEnd char[] three ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("008", "Unexpect exception:" + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>
    /// TrimEnd with an empty char[] trims only the trailing whitespace run;
    /// interior whitespace is preserved.
    /// </summary>
    public bool PosTest5()
    {
        bool retVal = true;
        string strA;
        char[] charA;
        string ActualResult;
        TestLibrary.TestFramework.BeginScenario("PosTest5:normal string trimEnd char[] four");
        try
        {
            strA = TestLibrary.Generator.GetString(-55, false, c_MINI_STRING_LENGTH, c_MAX_STRING_LENGTH);
            charA = new char[0];
            string strB = spaceStrings[this.GetInt32(0, spaceStrings.Length)];
            string strA1 = strB + "H" + strB + strA + "D" + strB;
            ActualResult = strA1.TrimEnd(charA);
            if (ActualResult.ToString() != strB + "H" + strB + strA + "D")
            {
                TestLibrary.TestFramework.LogError("009", "normal string trimEnd char[] four ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("010", "Unexpect exception:" + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion

    #region Help method for geting test data

    /// <summary>
    /// Returns a pseudo-random Int32 in [minValue, maxValue); degrades to
    /// minValue when the range is empty or inverted.
    /// </summary>
    private Int32 GetInt32(Int32 minValue, Int32 maxValue)
    {
        // The original wrapped this in a no-op try { } catch { throw; }.
        if (minValue < maxValue)
        {
            return minValue + TestLibrary.Generator.GetInt32(-55) % (maxValue - minValue);
        }
        return minValue;
    }

    /// <summary>
    /// Returns a pseudo-random Char whose code point lies in
    /// [minValue, maxValue); degrades to (char)minValue when the range is
    /// empty or inverted.
    /// </summary>
    private Char GetChar(Int32 minValue, Int32 maxValue)
    {
        if (minValue < maxValue)
        {
            return Convert.ToChar(Convert.ToInt32(TestLibrary.Generator.GetChar(-55)) % (maxValue - minValue) + minValue);
        }
        return Convert.ToChar(minValue);
    }

    /// <summary>
    /// Returns a generated string; either a "valid" string from the test
    /// generator, or a string of random chars drawn from [minValue, maxValue).
    /// Returns String.Empty for a (0, 0) range and null for an inverted range.
    /// (Currently unused by the scenarios above; kept for parity with the
    /// sibling trim tests.)
    /// </summary>
    private string GetString(bool ValidPath, Int32 minValue, Int32 maxValue)
    {
        StringBuilder sVal = new StringBuilder();
        string s;
        if (0 == minValue && 0 == maxValue) return String.Empty;
        if (minValue > maxValue) return null;
        if (ValidPath)
        {
            return TestLibrary.Generator.GetString(-55, ValidPath, minValue, maxValue);
        }
        else
        {
            int length = this.GetInt32(minValue, maxValue);
            for (int i = 0; length > i; i++)
            {
                char c = this.GetChar(minValue, maxValue);
                sVal.Append(c);
            }
            s = sVal.ToString();
            return s;
        }
    }
    #endregion
}
using System;
using System.Data;
using Csla;
using Csla.Data;
using ParentLoadSoftDelete.DataAccess;
using ParentLoadSoftDelete.DataAccess.ERLevel;

namespace ParentLoadSoftDelete.Business.ERLevel
{
    /// <summary>
    /// E08_Region (editable child object).<br/>
    /// This is a generated base class of <see cref="E08_Region"/> business object.
    /// </summary>
    /// <remarks>
    /// This class contains one child collection:<br/>
    /// - <see cref="E09_CityObjects"/> of type <see cref="E09_CityColl"/> (1:M relation to <see cref="E10_City"/>)<br/>
    /// This class is an item of <see cref="E07_RegionColl"/> collection.
    /// NOTE(review): generated code — prefer changing the code generator/template
    /// over hand-editing; customizations belong in the partial-class half.
    /// </remarks>
    [Serializable]
    public partial class E08_Region : BusinessBase<E08_Region>
    {
        #region Static Fields

        // Source of unique negative temporary IDs for new (not yet persisted)
        // objects; the real identity is assigned by the database on insert.
        private static int _lastID;

        #endregion

        #region State Fields

        // FK of the parent E06_Country row, captured from the data reader in
        // Fetch. NotUndoable/NonSerialized: plumbing only, not business state.
        [NotUndoable]
        [NonSerialized]
        internal int parent_Country_ID = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Region_ID"/> property.
        /// </summary>
        public static readonly PropertyInfo<int> Region_IDProperty = RegisterProperty<int>(p => p.Region_ID, "Regions ID");
        /// <summary>
        /// Gets the Regions ID (read-only; assigned by Child_Create or the database).
        /// </summary>
        /// <value>The Regions ID.</value>
        public int Region_ID
        {
            get { return GetProperty(Region_IDProperty); }
        }

        /// <summary>
        /// Maintains metadata about <see cref="Region_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Region_NameProperty = RegisterProperty<string>(p => p.Region_Name, "Regions Name");
        /// <summary>
        /// Gets or sets the Regions Name.
        /// </summary>
        /// <value>The Regions Name.</value>
        public string Region_Name
        {
            get { return GetProperty(Region_NameProperty); }
            set { SetProperty(Region_NameProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="E09_Region_SingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<E09_Region_Child> E09_Region_SingleObjectProperty = RegisterProperty<E09_Region_Child>(p => p.E09_Region_SingleObject, "E09 Region Single Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the E09 Region Single Object ("parent load" child property).
        /// Populated via <see cref="LoadChild(E09_Region_Child)"/> rather than here.
        /// </summary>
        /// <value>The E09 Region Single Object.</value>
        public E09_Region_Child E09_Region_SingleObject
        {
            get { return GetProperty(E09_Region_SingleObjectProperty); }
            private set { LoadProperty(E09_Region_SingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="E09_Region_ASingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<E09_Region_ReChild> E09_Region_ASingleObjectProperty = RegisterProperty<E09_Region_ReChild>(p => p.E09_Region_ASingleObject, "E09 Region ASingle Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the E09 Region ASingle Object ("parent load" child property).
        /// Populated via <see cref="LoadChild(E09_Region_ReChild)"/> rather than here.
        /// </summary>
        /// <value>The E09 Region ASingle Object.</value>
        public E09_Region_ReChild E09_Region_ASingleObject
        {
            get { return GetProperty(E09_Region_ASingleObjectProperty); }
            private set { LoadProperty(E09_Region_ASingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="E09_CityObjects"/> property.
        /// </summary>
        public static readonly PropertyInfo<E09_CityColl> E09_CityObjectsProperty = RegisterProperty<E09_CityColl>(p => p.E09_CityObjects, "E09 City Objects", RelationshipTypes.Child);
        /// <summary>
        /// Gets the E09 City Objects ("parent load" child property).
        /// </summary>
        /// <value>The E09 City Objects.</value>
        public E09_CityColl E09_CityObjects
        {
            get { return GetProperty(E09_CityObjectsProperty); }
            private set { LoadProperty(E09_CityObjectsProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="E08_Region"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="E08_Region"/> object.</returns>
        internal static E08_Region NewE08_Region()
        {
            return DataPortal.CreateChild<E08_Region>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="E08_Region"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="E08_Region"/> object.</returns>
        internal static E08_Region GetE08_Region(SafeDataReader dr)
        {
            E08_Region obj = new E08_Region();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(dr);
            // Seed an empty city collection; the single-object children are
            // attached later through LoadChild — presumably by the parent
            // "parent load" fetch (TODO confirm against the caller).
            obj.LoadProperty(E09_CityObjectsProperty, E09_CityColl.NewE09_CityColl());
            obj.MarkOld();
            return obj;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="E08_Region"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public E08_Region()
        {
            // Use factory methods and do not use direct creation.
            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="E08_Region"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // Temporary negative ID until the database assigns the real one on insert.
            LoadProperty(Region_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
            LoadProperty(E09_Region_SingleObjectProperty, DataPortal.CreateChild<E09_Region_Child>());
            LoadProperty(E09_Region_ASingleObjectProperty, DataPortal.CreateChild<E09_Region_ReChild>());
            LoadProperty(E09_CityObjectsProperty, DataPortal.CreateChild<E09_CityColl>());
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="E08_Region"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(Region_IDProperty, dr.GetInt32("Region_ID"));
            LoadProperty(Region_NameProperty, dr.GetString("Region_Name"));
            // parent properties
            parent_Country_ID = dr.GetInt32("Parent_Country_ID");
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Loads child <see cref="E09_Region_Child"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(E09_Region_Child child)
        {
            LoadProperty(E09_Region_SingleObjectProperty, child);
        }

        /// <summary>
        /// Loads child <see cref="E09_Region_ReChild"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(E09_Region_ReChild child)
        {
            LoadProperty(E09_Region_ASingleObjectProperty, child);
        }

        /// <summary>
        /// Inserts a new <see cref="E08_Region"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object (supplies the Country FK).</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(E06_Country parent)
        {
            using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IE08_RegionDal>();
                using (BypassPropertyChecks)
                {
                    int region_ID = -1;
                    // DAL returns the database-generated identity via the out parameter.
                    dal.Insert(
                        parent.Country_ID,
                        out region_ID,
                        Region_Name
                        );
                    LoadProperty(Region_IDProperty, region_ID);
                }
                OnInsertPost(args);
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="E08_Region"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            // Children may still be dirty even when this object is not, but the
            // generated contract short-circuits on a clean parent.
            if (!IsDirty)
                return;

            using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IE08_RegionDal>();
                using (BypassPropertyChecks)
                {
                    dal.Update(
                        Region_ID,
                        Region_Name
                        );
                }
                OnUpdatePost(args);
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="E08_Region"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                // flushes all pending data operations
                // (children are deleted/flushed BEFORE the parent row is removed)
                FieldManager.UpdateChildren(this);
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IE08_RegionDal>();
                using (BypassPropertyChecks)
                {
                    dal.Delete(ReadProperty(Region_IDProperty));
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.ComponentModel.Composition.Primitives;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Microsoft.Internal;

namespace System.ComponentModel.Composition.Hosting
{
    [DebuggerTypeProxy(typeof(CompositionScopeDefinitionDebuggerProxy))]
    public class CompositionScopeDefinition : ComposablePartCatalog, INotifyComposablePartCatalogChanged
    {
        private ComposablePartCatalog _catalog;
        // Exports usable to create new scopes; null means "all exports" (see PublicSurface).
        private IEnumerable<ExportDefinition> _publicSurface = null;
        private IEnumerable<CompositionScopeDefinition> _children = Enumerable.Empty<CompositionScopeDefinition>();
        // 0/1 flag, flipped exactly once via Interlocked.CompareExchange in Dispose.
        private volatile int _isDisposed = 0;

        /// <summary>
        /// Initializes a new instance of the <see cref="CompositionScopeDefinition"/> class.
        /// </summary>
        protected CompositionScopeDefinition() { }

        /// <summary>
        /// Initializes a new instance of the <see cref="CompositionScopeDefinition"/> class.
        /// </summary>
        /// <param name="catalog">The catalog.</param>
        /// <param name="children">The children.</param>
        public CompositionScopeDefinition(ComposablePartCatalog catalog, IEnumerable<CompositionScopeDefinition> children)
        {
            Requires.NotNull(catalog, nameof(catalog));
            Requires.NullOrNotNullElements(children, nameof(children));

            InitializeCompositionScopeDefinition(catalog, children, null);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="CompositionScopeDefinition"/> class.
        /// </summary>
        /// <param name="catalog">The catalog.</param>
        /// <param name="children">The children.</param>
        /// <param name="publicSurface">The exports that can be used to create new scopes.</param>
        public CompositionScopeDefinition(ComposablePartCatalog catalog, IEnumerable<CompositionScopeDefinition> children, IEnumerable<ExportDefinition> publicSurface)
        {
            Requires.NotNull(catalog, nameof(catalog));
            Requires.NullOrNotNullElements(children, nameof(children));
            Requires.NullOrNotNullElements(publicSurface, nameof(publicSurface));

            InitializeCompositionScopeDefinition(catalog, children, publicSurface);
        }

        /// <summary>
        /// Shared constructor logic: captures the catalog, snapshots the children,
        /// and forwards the inner catalog's change notifications through this instance.
        /// </summary>
        /// <param name="catalog">The catalog.</param>
        /// <param name="children">The children.</param>
        /// <param name="publicSurface">Optional export subset; null keeps the default.</param>
        private void InitializeCompositionScopeDefinition(ComposablePartCatalog catalog, IEnumerable<CompositionScopeDefinition> children, IEnumerable<ExportDefinition> publicSurface)
        {
            _catalog = catalog;
            if (children != null)
            {
                // Snapshot so later mutation of the caller's sequence has no effect.
                _children = children.ToArray();
            }

            if (publicSurface != null)
            {
                _publicSurface = publicSurface;
            }

            INotifyComposablePartCatalogChanged notifyCatalog = _catalog as INotifyComposablePartCatalogChanged;
            if (notifyCatalog != null)
            {
                notifyCatalog.Changed += OnChangedInternal;
                notifyCatalog.Changing += OnChangingInternal;
            }
        }

        /// <summary>
        /// Releases unmanaged and - optionally - managed resources
        /// </summary>
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected override void Dispose(bool disposing)
        {
            try
            {
                if (disposing)
                {
                    // CompareExchange guarantees the unsubscription happens at most
                    // once even under concurrent Dispose calls.
                    if (Interlocked.CompareExchange(ref _isDisposed, 1, 0) == 0)
                    {
                        INotifyComposablePartCatalogChanged notifyCatalog = _catalog as INotifyComposablePartCatalogChanged;
                        if (notifyCatalog != null)
                        {
                            notifyCatalog.Changed -= OnChangedInternal;
                            notifyCatalog.Changing -= OnChangingInternal;
                        }
                    }
                }
            }
            finally
            {
                base.Dispose(disposing);
            }
        }

        /// <summary>
        /// Gets the children.
        /// </summary>
        /// <value>The children.</value>
        public virtual IEnumerable<CompositionScopeDefinition> Children
        {
            get
            {
                ThrowIfDisposed();

                return _children;
            }
        }

        /// <summary>
        /// Gets the export definitions that describe the exports surfaced by the CompositionScopedefinition.
        /// When no explicit public surface was supplied, defaults to every export
        /// of every part in the catalog.
        /// </summary>
        /// <value>
        /// An <see cref="IEnumerable{T}"/> of <see cref="ExportDefinition"/> objects describing
        /// the exports surfaced by the <see cref="CompositionScopeDefinition"/>.
        /// </value>
        /// <remarks>
        /// <note type="inheritinfo">
        /// Overriders of this property must not return <see langword="null"/>.
        /// </note>
        /// </remarks>
        public virtual IEnumerable<ExportDefinition> PublicSurface
        {
            get
            {
                ThrowIfDisposed();

                if (_publicSurface == null)
                {
                    return this.SelectMany((p) => p.ExportDefinitions);
                }

                return _publicSurface;
            }
        }

        /// <summary>
        /// Gets an Enumerator for the ComposablePartDefinitions.
        /// NOTE(review): unlike GetExports, this does not call ThrowIfDisposed —
        /// confirm whether that asymmetry is intentional.
        /// </summary>
        /// <value>The children.</value>
        public override IEnumerator<ComposablePartDefinition> GetEnumerator()
        {
            return _catalog.GetEnumerator();
        }

        /// <summary>
        /// Returns the export definitions that match the constraint defined by the specified definition.
        /// </summary>
        /// <param name="definition">The <see cref="ImportDefinition"/> that defines the conditions of the
        /// <see cref="ExportDefinition"/> objects to return.</param>
        /// <returns>
        /// An <see cref="IEnumerable{T}"/> of <see cref="Tuple{T1, T2}"/> containing the
        /// <see cref="ExportDefinition"/> objects and their associated
        /// <see cref="ComposablePartDefinition"/> for objects that match the constraint defined
        /// by <paramref name="definition"/>.
        /// </returns>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="definition"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ObjectDisposedException">
        /// The <see cref="ComposablePartCatalog"/> has been disposed of.
        /// </exception>
        /// <remarks>
        /// <note type="inheritinfo">
        /// Overriders of this property should never return <see langword="null"/>, if no
        /// <see cref="ExportDefinition"/> match the conditions defined by
        /// <paramref name="definition"/>, return an empty <see cref="IEnumerable{T}"/>.
        /// </note>
        /// </remarks>
        public override IEnumerable<Tuple<ComposablePartDefinition, ExportDefinition>> GetExports(ImportDefinition definition)
        {
            ThrowIfDisposed();

            return _catalog.GetExports(definition);
        }

        /// <summary>
        /// Like <see cref="GetExports"/>, but restricted to exports that are part of
        /// the <see cref="PublicSurface"/>. Matches each surfaced export definition
        /// against the catalog's results by reference identity.
        /// </summary>
        internal IEnumerable<Tuple<ComposablePartDefinition, ExportDefinition>> GetExportsFromPublicSurface(ImportDefinition definition)
        {
            if (definition == null)
            {
                throw new ArgumentNullException(nameof(definition));
            }

            var exports = new List<Tuple<ComposablePartDefinition, ExportDefinition>>();

            foreach (var exportDefinition in PublicSurface)
            {
                if (definition.IsConstraintSatisfiedBy(exportDefinition))
                {
                    // Re-queries the catalog per surfaced export; acceptable because
                    // the public surface is typically small.
                    foreach (var export in GetExports(definition))
                    {
                        if (export.Item2 == exportDefinition)
                        {
                            exports.Add(export);
                            break;
                        }
                    }
                }
            }
            return exports;
        }

        /// <summary>
        /// Notify when the contents of the Catalog has changed.
        /// </summary>
        public event EventHandler<ComposablePartCatalogChangeEventArgs> Changed;

        /// <summary>
        /// Notify when the contents of the Catalog is changing.
        /// </summary>
        public event EventHandler<ComposablePartCatalogChangeEventArgs> Changing;

        /// <summary>
        /// Raises the <see cref="Changed"/> event.
        /// </summary>
        /// <param name="e">The <see cref="System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs"/> instance containing the event data.</param>
        protected virtual void OnChanged(ComposablePartCatalogChangeEventArgs e)
        {
            // Local copy guards against a handler being removed between the
            // null check and the invoke.
            EventHandler<ComposablePartCatalogChangeEventArgs> changedEvent = Changed;
            if (changedEvent != null)
            {
                changedEvent.Invoke(this, e);
            }
        }

        /// <summary>
        /// Raises the <see cref="Changing"/> event.
        /// </summary>
        /// <param name="e">The <see cref="System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs"/> instance containing the event data.</param>
        protected virtual void OnChanging(ComposablePartCatalogChangeEventArgs e)
        {
            EventHandler<ComposablePartCatalogChangeEventArgs> changingEvent = Changing;
            if (changingEvent != null)
            {
                changingEvent.Invoke(this, e);
            }
        }

        // Forwarders that re-raise the inner catalog's notifications as this
        // instance's own events.
        private void OnChangedInternal(object sender, ComposablePartCatalogChangeEventArgs e)
        {
            OnChanged(e);
        }

        private void OnChangingInternal(object sender, ComposablePartCatalogChangeEventArgs e)
        {
            OnChanging(e);
        }

        [DebuggerStepThrough]
        private void ThrowIfDisposed()
        {
            if (_isDisposed == 1)
            {
                throw ExceptionBuilder.CreateObjectDisposed(this);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Linq;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Security.Cryptography;

namespace Internal.Cryptography
{
    // NOTE(review): ShouldUseCache, NativeOidToFriendlyName and
    // NativeFriendlyNameToOid are supplied by the platform-specific half of
    // this partial class (not visible in this chunk).
    internal static partial class OidLookup
    {
        // Runtime-discovered mappings from the native lookup. The name=>OID
        // direction is case-insensitive; the OID=>name direction is exact.
        private static readonly ConcurrentDictionary<string, string> s_lateBoundOidToFriendlyName =
            new ConcurrentDictionary<string, string>();

        private static readonly ConcurrentDictionary<string, string> s_lateBoundFriendlyNameToOid =
            new ConcurrentDictionary<string, string>(StringComparer.OrdinalIgnoreCase);

        //
        // Attempts to retrieve the friendly name for an OID. Returns null if not a known or valid OID.
        // (Comment fixed: it previously described the opposite direction.)
        //
        public static string ToFriendlyName(string oid, OidGroup oidGroup, bool fallBackToAllGroups)
        {
            if (oid == null)
                throw new ArgumentNullException("oid");  // NOTE(review): prefer nameof(oid)

            string mappedName;
            bool shouldUseCache = ShouldUseCache(oidGroup);

            // On Unix shouldUseCache is always true, so no matter what OidGroup is passed in the Windows
            // friendly name will be returned.
            //
            // On Windows shouldUseCache is only true for OidGroup.All, because otherwise the OS may filter
            // out the answer based on the group criteria.
            if (shouldUseCache)
            {
                // Checked in order: static table, compat table, then late-bound cache.
                if (s_oidToFriendlyName.TryGetValue(oid, out mappedName) ||
                    s_compatOids.TryGetValue(oid, out mappedName) ||
                    s_lateBoundOidToFriendlyName.TryGetValue(oid, out mappedName))
                {
                    return mappedName;
                }
            }

            mappedName = NativeOidToFriendlyName(oid, oidGroup, fallBackToAllGroups);

            if (shouldUseCache && mappedName != null)
            {
                s_lateBoundOidToFriendlyName.TryAdd(oid, mappedName);

                // Don't add the reverse here. Just because oid => name doesn't mean name => oid.
                // And don't bother doing the reverse lookup proactively, just wait until they ask for it.
            }

            return mappedName;
        }

        //
        // Attempts to map a friendly name to an OID. Returns null if not a known name.
        // (Comment fixed: it previously described the opposite direction.)
        //
        public static string ToOid(string friendlyName, OidGroup oidGroup, bool fallBackToAllGroups)
        {
            if (friendlyName == null)
                throw new ArgumentNullException("friendlyName");  // NOTE(review): prefer nameof(friendlyName)
            if (friendlyName.Length == 0)
                return null;

            string mappedOid;
            bool shouldUseCache = ShouldUseCache(oidGroup);

            if (shouldUseCache)
            {
                if (s_friendlyNameToOid.TryGetValue(friendlyName, out mappedOid) ||
                    s_lateBoundFriendlyNameToOid.TryGetValue(friendlyName, out mappedOid))
                {
                    return mappedOid;
                }
            }

            mappedOid = NativeFriendlyNameToOid(friendlyName, oidGroup, fallBackToAllGroups);

            if (shouldUseCache && mappedOid != null)
            {
                s_lateBoundFriendlyNameToOid.TryAdd(friendlyName, mappedOid);

                // Don't add the reverse here. Friendly Name => OID is a case insensitive search,
                // so the casing provided as input here may not be the 'correct' one. Just let
                // ToFriendlyName capture the response and cache it itself.
            }

            return mappedOid;
        }

        // This table was originally built by extracting every szOID #define out of wincrypt.h,
        // and running them through new Oid(string) on Windows 10. Then, take the list of everything
        // which produced a FriendlyName value, and run it through two other languages. If all 3 agree
        // on the mapping, consider the value to be non-localized.
        //
        // This original list was produced on English (Win10), cross-checked with Spanish (Win8.1) and
        // Japanese (Win10).
        //
        // Sometimes wincrypt.h has more than one OID which results in the same name. The OIDs whose value
        // doesn't roundtrip (new Oid(new Oid(value).FriendlyName).Value) are contained in s_compatOids.
        //
        // X-Plat: The names (and casing) in this table come from Windows. Part of the intent of this table
        // is to prevent issues wherein an identifier is different between CoreFX\Windows and CoreFX\Unix;
        // since any existing code would be using the Windows identifier, it is the de facto standard.
        private static readonly Dictionary<string, string> s_friendlyNameToOid =
            new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
            {
                { "3des", "1.2.840.113549.3.7" },
                { "aes128", "2.16.840.1.101.3.4.1.2" },
                { "aes128wrap", "2.16.840.1.101.3.4.1.5" },
                { "aes192", "2.16.840.1.101.3.4.1.22" },
                { "aes192wrap", "2.16.840.1.101.3.4.1.25" },
                { "aes256", "2.16.840.1.101.3.4.1.42" },
                { "aes256wrap", "2.16.840.1.101.3.4.1.45" },
                { "brainpoolP160r1", "1.3.36.3.3.2.8.1.1.1" },
                { "brainpoolP160t1", "1.3.36.3.3.2.8.1.1.2" },
                { "brainpoolP192r1", "1.3.36.3.3.2.8.1.1.3" },
                { "brainpoolP192t1", "1.3.36.3.3.2.8.1.1.4" },
                { "brainpoolP224r1", "1.3.36.3.3.2.8.1.1.5" },
                { "brainpoolP224t1", "1.3.36.3.3.2.8.1.1.6" },
                { "brainpoolP256r1", "1.3.36.3.3.2.8.1.1.7" },
                { "brainpoolP256t1", "1.3.36.3.3.2.8.1.1.8" },
                { "brainpoolP320r1", "1.3.36.3.3.2.8.1.1.9" },
                { "brainpoolP320t1", "1.3.36.3.3.2.8.1.1.10" },
                { "brainpoolP384r1", "1.3.36.3.3.2.8.1.1.11" },
                { "brainpoolP384t1", "1.3.36.3.3.2.8.1.1.12" },
                { "brainpoolP512r1", "1.3.36.3.3.2.8.1.1.13" },
                { "brainpoolP512t1", "1.3.36.3.3.2.8.1.1.14" },
                { "C", "2.5.4.6" },
                { "CMS3DESwrap", "1.2.840.113549.1.9.16.3.6" },
                { "CMSRC2wrap", "1.2.840.113549.1.9.16.3.7" },
                { "CN", "2.5.4.3" },
                { "CPS", "1.3.6.1.5.5.7.2.1" },
                { "DC", "0.9.2342.19200300.100.1.25" },
                { "des", "1.3.14.3.2.7" },
                { "Description", "2.5.4.13" },
                { "DH", "1.2.840.10046.2.1" },
                { "dnQualifier", "2.5.4.46" },
                { "DSA", "1.2.840.10040.4.1" },
                { "dsaSHA1", "1.3.14.3.2.27" },
                { "E", "1.2.840.113549.1.9.1" },
                { "ec192wapi", "1.2.156.11235.1.1.2.1" },
                { "ECC", "1.2.840.10045.2.1" },
                { "ECDH_STD_SHA1_KDF", "1.3.133.16.840.63.0.2" },
                { "ECDH_STD_SHA256_KDF", "1.3.132.1.11.1" },
                { "ECDH_STD_SHA384_KDF", "1.3.132.1.11.2" },
                { "ECDSA_P256", "1.2.840.10045.3.1.7" },
                { "ECDSA_P384", "1.3.132.0.34" },
                { "ECDSA_P521", "1.3.132.0.35" },
                { "ESDH", "1.2.840.113549.1.9.16.3.5" },
                { "G", "2.5.4.42" },
                { "I", "2.5.4.43" },
                { "L", "2.5.4.7" },
                { "md2", "1.2.840.113549.2.2" },
                { "md2RSA", "1.2.840.113549.1.1.2" },
                { "md4", "1.2.840.113549.2.4" },
                { "md4RSA", "1.2.840.113549.1.1.3" },
                { "md5", "1.2.840.113549.2.5" },
                { "md5RSA", "1.2.840.113549.1.1.4" },
                { "mgf1", "1.2.840.113549.1.1.8" },
                { "mosaicKMandUpdSig", "2.16.840.1.101.2.1.1.20" },
                { "mosaicUpdatedSig", "2.16.840.1.101.2.1.1.19" },
                { "nistP192", "1.2.840.10045.3.1.1" },
                { "nistP224", "1.3.132.0.33" },
                { "NO_SIGN", "1.3.6.1.5.5.7.6.2" },
                { "O", "2.5.4.10" },
                { "OU", "2.5.4.11" },
                { "Phone", "2.5.4.20" },
                { "POBox", "2.5.4.18" },
                { "PostalCode", "2.5.4.17" },
                { "rc2", "1.2.840.113549.3.2" },
                { "rc4", "1.2.840.113549.3.4" },
                { "RSA", "1.2.840.113549.1.1.1" },
                { "RSAES_OAEP", "1.2.840.113549.1.1.7" },
                { "RSASSA-PSS", "1.2.840.113549.1.1.10" },
                { "S", "2.5.4.8" },
                { "secP160k1", "1.3.132.0.9" },
                { "secP160r1", "1.3.132.0.8" },
                { "secP160r2", "1.3.132.0.30" },
                { "secP192k1", "1.3.132.0.31" },
                { "secP224k1", "1.3.132.0.32" },
                { "secP256k1", "1.3.132.0.10" },
                { "SERIALNUMBER", "2.5.4.5" },
                { "sha1", "1.3.14.3.2.26" },
                { "sha1DSA", "1.2.840.10040.4.3" },
                { "sha1ECDSA", "1.2.840.10045.4.1" },
                { "sha1RSA", "1.2.840.113549.1.1.5" },
                { "sha256", "2.16.840.1.101.3.4.2.1" },
                { "sha256ECDSA", "1.2.840.10045.4.3.2" },
                { "sha256RSA", "1.2.840.113549.1.1.11" },
                { "sha384", "2.16.840.1.101.3.4.2.2" },
                { "sha384ECDSA", "1.2.840.10045.4.3.3" },
                { "sha384RSA", "1.2.840.113549.1.1.12" },
                { "sha512", "2.16.840.1.101.3.4.2.3" },
                { "sha512ECDSA", "1.2.840.10045.4.3.4" },
                { "sha512RSA", "1.2.840.113549.1.1.13" },
                { "SN", "2.5.4.4" },
                { "specifiedECDSA", "1.2.840.10045.4.3" },
                { "STREET", "2.5.4.9" },
                { "T", "2.5.4.12" },
                { "wtls9", "2.23.43.1.4.9" },
                { "X21Address", "2.5.4.24" },
                { "x962P192v2", "1.2.840.10045.3.1.2" },
                { "x962P192v3", "1.2.840.10045.3.1.3" },
                { "x962P239v1", "1.2.840.10045.3.1.4" },
                { "x962P239v2", "1.2.840.10045.3.1.5" },
                { "x962P239v3", "1.2.840.10045.3.1.6" },
            };

        // Exact reverse of s_friendlyNameToOid (safe because the OID values above
        // are unique); case-sensitive on the OID side.
        private static readonly Dictionary<string, string> s_oidToFriendlyName =
            s_friendlyNameToOid.ToDictionary(kvp => kvp.Value, kvp => kvp.Key);

        // One-way OID => name mappings that do not roundtrip (see comment above).
        private static readonly Dictionary<string, string> s_compatOids =
            new Dictionary<string, string>
            {
                { "1.2.840.113549.1.3.1", "DH" },
                { "1.3.14.3.2.12", "DSA" },
                { "1.3.14.3.2.13", "sha1DSA" },
                { "1.3.14.3.2.15", "shaRSA" },
                { "1.3.14.3.2.18", "sha" },
                { "1.3.14.3.2.2", "md4RSA" },
                { "1.3.14.3.2.22", "RSA_KEYX" },
                { "1.3.14.3.2.29", "sha1RSA" },
                { "1.3.14.3.2.3", "md5RSA" },
                { "1.3.14.3.2.4", "md4RSA" },
                { "1.3.14.7.2.3.1", "md2RSA" },
            };
    }
}
using System.ComponentModel;
using System.Collections.ObjectModel;

namespace Reflector
{
    /// <summary>
    /// Root configuration element: a collection of named data sources.
    /// Originally generated by Xsd2Code; the identical change-detection logic that was
    /// repeated in every property setter is now centralized in
    /// <see cref="NotificationBase.SetField{T}"/> with byte-identical notification behavior.
    /// </summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.megapanezote.com", IsNullable=false)]
    public partial class Configuration : NotificationBase
    {
        private ObservableCollection<Datasource> datasourcesField;

        public Configuration()
        {
            this.datasourcesField = new ObservableCollection<Datasource>();
        }

        /// <summary>All configured data sources ("Source" elements in XML).</summary>
        [System.Xml.Serialization.XmlArrayAttribute(Order=1)]
        [System.Xml.Serialization.XmlArrayItemAttribute("Source", IsNullable=false)]
        public ObservableCollection<Datasource> Datasources
        {
            get { return this.datasourcesField; }
            set { SetField(ref this.datasourcesField, value, "Datasources"); }
        }
    }

    /// <summary>A simple name/value pair stored as XML attributes.</summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.megapanezote.com", IsNullable=true)]
    public partial class SettingSource : NotificationBase
    {
        private string nameField;
        private string valueField;

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Name
        {
            get { return this.nameField; }
            set { SetField(ref this.nameField, value, "Name"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Value
        {
            get { return this.valueField; }
            set { SetField(ref this.valueField, value, "Value"); }
        }
    }

    /// <summary>A flat list of text items ("Item" elements in XML).</summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.megapanezote.com", IsNullable=true)]
    public partial class TextSource : NotificationBase
    {
        private ObservableCollection<TextItem> itemField;

        public TextSource()
        {
            this.itemField = new ObservableCollection<TextItem>();
        }

        [System.Xml.Serialization.XmlElementAttribute("Item", Order=0)]
        public ObservableCollection<TextItem> Item
        {
            get { return this.itemField; }
            set { SetField(ref this.itemField, value, "Item"); }
        }
    }

    /// <summary>
    /// A single text entry. <see cref="Value"/> is the element text; the remaining
    /// properties are XML attributes. <see cref="Enabled"/> defaults to true.
    /// </summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.megapanezote.com", IsNullable=true)]
    public partial class TextItem : NotificationBase
    {
        private string valueField;
        private string nameField;
        private string tagField;
        private bool enabledField = true;

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Name
        {
            get { return this.nameField; }
            set { SetField(ref this.nameField, value, "Name"); }
        }

        [System.Xml.Serialization.XmlTextAttribute()]
        public string Value
        {
            get { return this.valueField; }
            set { SetField(ref this.valueField, value, "Value"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Tag
        {
            get { return this.tagField; }
            set { SetField(ref this.tagField, value, "Tag"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        [System.ComponentModel.DefaultValueAttribute(true)]
        public bool Enabled
        {
            get { return this.enabledField; }
            set { SetField(ref this.enabledField, value, "Enabled"); }
        }
    }

    /// <summary>
    /// A dictionary-style entry: a key, a value, and up to six extra tuple items,
    /// all serialized as XML attributes.
    /// </summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace = "http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace = "http://www.megapanezote.com", IsNullable = true)]
    public partial class TupleSource : NotificationBase
    {
        private string keyField;
        private string valueField;
        private string item1Field;
        private string item2Field;
        private string item3Field;
        private string item4Field;
        private string item5Field;
        private string item6Field;

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Key
        {
            get { return this.keyField; }
            set { SetField(ref this.keyField, value, "Key"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Value
        {
            get { return this.valueField; }
            set { SetField(ref this.valueField, value, "Value"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Item1
        {
            get { return this.item1Field; }
            set { SetField(ref this.item1Field, value, "Item1"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Item2
        {
            get { return this.item2Field; }
            set { SetField(ref this.item2Field, value, "Item2"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Item3
        {
            get { return this.item3Field; }
            set { SetField(ref this.item3Field, value, "Item3"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Item4
        {
            get { return this.item4Field; }
            set { SetField(ref this.item4Field, value, "Item4"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Item5
        {
            get { return this.item5Field; }
            set { SetField(ref this.item5Field, value, "Item5"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Item6
        {
            get { return this.item6Field; }
            set { SetField(ref this.item6Field, value, "Item6"); }
        }
    }

    /// <summary>
    /// A named, typed data source. <see cref="Items"/> holds the polymorphic content
    /// (Setting / Text / Dictionary / GroupedText elements).
    /// </summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.megapanezote.com", IsNullable=true)]
    public partial class Datasource : NotificationBase
    {
        private ObservableCollection<object> itemsField;
        private string nameField;
        private enSourceType typeField;
        private bool isReservedField;
        private string basedOnField;

        public Datasource()
        {
            this.itemsField = new ObservableCollection<object>();
            this.isReservedField = false;
        }

        [System.Xml.Serialization.XmlElementAttribute("Setting", typeof(SettingSource), Order=0)]
        [System.Xml.Serialization.XmlElementAttribute("Text", typeof(TextSource), Order=0)]
        [System.Xml.Serialization.XmlElementAttribute("Dictionary", typeof(TupleSource), Order = 0)]
        [System.Xml.Serialization.XmlElementAttribute("GroupedText", typeof(GroupSource), Order = 0)]
        public ObservableCollection<object> Items
        {
            get { return this.itemsField; }
            set { SetField(ref this.itemsField, value, "Items"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Name
        {
            get { return this.nameField; }
            set { SetField(ref this.nameField, value, "Name"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public enSourceType Type
        {
            get { return this.typeField; }
            set { SetField(ref this.typeField, value, "Type"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        [System.ComponentModel.DefaultValueAttribute(false)]
        public bool IsReserved
        {
            get { return this.isReservedField; }
            set { SetField(ref this.isReservedField, value, "IsReserved"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string BasedOn
        {
            get { return this.basedOnField; }
            set { SetField(ref this.basedOnField, value, "BasedOn"); }
        }
    }

    /// <summary>The kind of content a <see cref="Datasource"/> carries.</summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.megapanezote.com")]
    public enum enSourceType
    {
        /// <remarks/>
        TextList,

        /// <remarks/>
        Settings,

        /// <remarks/>
        DictionaryTuple,

        /// <remarks/>
        GroupedTextList,
    }

    /// <summary>A named group of text items plus grouped expressions.</summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace = "http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace = "http://www.megapanezote.com", IsNullable = true)]
    public partial class GroupSource : NotificationBase
    {
        private ObservableCollection<TextItem> textField;
        private ObservableCollection<GroupExpression> textExpressionField;
        private string nameField;

        public GroupSource()
        {
            this.textExpressionField = new ObservableCollection<GroupExpression>();
            this.textField = new ObservableCollection<TextItem>();
        }

        [System.Xml.Serialization.XmlElementAttribute("Text", Order = 0)]
        public ObservableCollection<TextItem> Text
        {
            get { return this.textField; }
            set { SetField(ref this.textField, value, "Text"); }
        }

        [System.Xml.Serialization.XmlElementAttribute("TextExpression", Order = 1)]
        public ObservableCollection<GroupExpression> TextExpression
        {
            get { return this.textExpressionField; }
            set { SetField(ref this.textExpressionField, value, "TextExpression"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Name
        {
            get { return this.nameField; }
            set { SetField(ref this.nameField, value, "Name"); }
        }
    }

    /// <summary>An expression item together with the text items it governs.</summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("Xsd2Code", "3.4.0.38968")]
    [System.SerializableAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace = "http://www.megapanezote.com")]
    [System.Xml.Serialization.XmlRootAttribute(Namespace = "http://www.megapanezote.com", IsNullable = true)]
    public partial class GroupExpression : NotificationBase
    {
        private TextItem expressionField;
        private ObservableCollection<TextItem> textField;
        private string nameField;

        public GroupExpression()
        {
            this.textField = new ObservableCollection<TextItem>();
            this.expressionField = new TextItem();
        }

        [System.Xml.Serialization.XmlElementAttribute(Order = 0)]
        public TextItem Expression
        {
            get { return this.expressionField; }
            set { SetField(ref this.expressionField, value, "Expression"); }
        }

        [System.Xml.Serialization.XmlElementAttribute("Text", Order = 1)]
        public ObservableCollection<TextItem> Text
        {
            get { return this.textField; }
            set { SetField(ref this.textField, value, "Text"); }
        }

        [System.Xml.Serialization.XmlAttributeAttribute()]
        public string Name
        {
            get { return this.nameField; }
            set { SetField(ref this.nameField, value, "Name"); }
        }
    }

    /// <summary>
    /// Base class providing the INotifyPropertyChanged plumbing shared by all
    /// generated configuration types.
    /// </summary>
    public class NotificationBase : INotifyPropertyChanged
    {
        #region INotifyPropertyChanged implementation

        public event PropertyChangedEventHandler PropertyChanged;

        /// <summary>Raises <see cref="PropertyChanged"/> for <paramref name="propertyName"/>.</summary>
        public virtual void OnPropertyChanged(string propertyName)
        {
            // Snapshot the delegate to avoid a race with unsubscription.
            PropertyChangedEventHandler handler = this.PropertyChanged;
            if ((handler != null))
            {
                handler(this, new PropertyChangedEventArgs(propertyName));
            }
        }

        #endregion INotifyPropertyChanged implementation

        /// <summary>
        /// Assigns <paramref name="value"/> to <paramref name="field"/> and raises
        /// PropertyChanged, unless the current non-null value already equals the new one.
        /// This mirrors the original generated setter exactly: a null field is always
        /// overwritten (and always raises the event), even when the new value is also null.
        /// </summary>
        /// <returns>true if the field was changed and the event raised; otherwise false.</returns>
        protected bool SetField<T>(ref T field, T value, string propertyName)
        {
            if (field != null && field.Equals(value))
            {
                return false;
            }
            field = value;
            OnPropertyChanged(propertyName);
            return true;
        }
    }
}
using UnityEngine;
using Kazoo.Physics;
#if UNITY_EDITOR
using UnityEditor;
#endif

namespace Kazoo.Physics
{
    /// <summary>
    /// Apply a spring force to a transform.
    /// Useful for adding secondary motion to bones. Useful for animated characters, but can also be used on any transform.
    /// Uses a Verlet simulation that is not stable at low framerates.
    /// </summary>
    public class BoneSpring : MonoBehaviour
    {
        public SpringConfiguration configuration = new SpringConfiguration();

        [Header("Visualization")]
        public bool visualizeMaxSpringDistance = false;

        // Verlet state: x is the particle's current position, xOld its position from the
        // previous step. Both are expressed in simulationSpace local coordinates when a
        // simulation space is assigned, otherwise in world space.
        // localPStart / localRStart capture the rest pose in Awake for the Fixed* modes.
        // springOrigin is the anchor the spring pulls toward; it is refreshed in Apply()
        // and consumed by Step(), so Step() uses the anchor from the most recent Apply().
        private Vector3 x, xOld, localPStart, springOrigin;
        private Quaternion localRStart;

        private void Awake()
        {
            // Seed both Verlet samples at the current position => zero initial velocity.
            x = configuration.simulationSpace ? configuration.simulationSpace.InverseTransformPoint(transform.position) : transform.position;
            xOld = configuration.simulationSpace ? configuration.simulationSpace.InverseTransformPoint(transform.position) : transform.position;
            if (configuration.updateType == SpringType.FixedPosition)
            {
                localPStart = transform.localPosition;
            }
            else if (configuration.updateType == SpringType.FixedRotation)
            {
                localRStart = transform.localRotation;
            }
        }

        private void FixedUpdate()
        {
            // Inside FixedUpdate, Time.deltaTime returns the fixed timestep.
            if (configuration.stepInFixedUpdate)
            {
                Step(Time.deltaTime);
            }
        }

        /// <summary>
        /// Perform a physics sim step.
        /// </summary>
        /// <param name="dt">Timestep in seconds; non-positive values are ignored.</param>
        public void Step(float dt)
        {
            if (dt <= 0)
            {
                return;
            }
            Vector3 xDiff = (springOrigin - x);
            // Spring Force
            Vector3 springForce = xDiff * (configuration.springStrength * dt);
            Vector3 acceleration = (springForce + configuration.gravityForce) / configuration.mass;
            // Velocity from last frame
            // NOTE(review): ((x - xOld) / dt) * (bounceStrength * dt) algebraically reduces to
            // (x - xOld) * bounceStrength, i.e. the retained-velocity factor is per-step, not
            // per-second — consistent with the class note about framerate sensitivity. Confirm
            // this is intended before changing.
            Vector3 v = ((x - xOld) / dt) * (configuration.bounceStrength * dt);
            xOld = x;
            // Update Virtual Pos
            x += v + (acceleration * dt);
            // Max Distance Clamp
            if ((x - springOrigin).sqrMagnitude > (configuration.maxSpringDistance * configuration.maxSpringDistance))
            {
                // Pull the particle back onto the sphere of radius maxSpringDistance around the anchor.
                x = Vector3.MoveTowards(springOrigin, x, configuration.maxSpringDistance);
            }
        }

        private void LateUpdate()
        {
            // LateUpdate runs after Animator/IK, so the spring result overrides the animated pose.
            if (configuration.applyInLateUpdate)
            {
                Apply();
            }
        }

        /// <summary>
        /// Apply the position to the transform.
        /// </summary>
        public void Apply()
        {
            if (configuration.weight <= 0)
            {
                return;
            }
            // FixedRotation: reset to the captured rest rotation before measuring the anchor.
            if (configuration.updateType == SpringType.FixedRotation)
            {
                transform.localRotation = localRStart;
            }
            // Compute the spring anchor in world space for the current mode.
            Vector3 springWorldOrigin = Vector3.zero;
            if (configuration.updateType == SpringType.AnimatedRotation || configuration.updateType == SpringType.FixedRotation)
            {
                springWorldOrigin = transform.TransformPoint(configuration.localSpringPoint);
            }
            else if (configuration.updateType == SpringType.FixedPosition && transform.parent != null)
            {
                springWorldOrigin = transform.parent.TransformPoint(localPStart);
            }
            else if (configuration.updateType == SpringType.FixedPosition)
            {
                // No parent: the captured local position is already in world space.
                springWorldOrigin = localPStart;
            }
            // Store the anchor in simulation space for the next Step().
            springOrigin = configuration.simulationSpace ? configuration.simulationSpace.InverseTransformPoint(springWorldOrigin) : springWorldOrigin;
            Vector3 xWorld = configuration.simulationSpace ? configuration.simulationSpace.TransformPoint(x) : x;
            // Update Bone
            if (configuration.updateType == SpringType.AnimatedRotation || configuration.updateType == SpringType.FixedRotation)
            {
                // Rotate so that the local spring point aims at the simulated particle,
                // clamped to maxRotationAngleDegrees and blended by weight.
                Quaternion rot = Quaternion.FromToRotation(transform.TransformDirection(configuration.localSpringPoint), xWorld - transform.position) * transform.rotation;
                rot = Quaternion.RotateTowards(transform.rotation, rot, configuration.maxRotationAngleDegrees);
                transform.rotation = Quaternion.Slerp(transform.rotation, rot, configuration.weight);
            }
            else if (configuration.updateType == SpringType.FixedPosition)
            {
                transform.position = Vector3.Lerp(transform.position, xWorld, configuration.weight);
            }
        }

        private void OnValidate()
        {
            // Editor-time sanitation: keep mass strictly positive and distance non-negative.
            configuration.mass = Mathf.Max(0.0001f, configuration.mass);
            configuration.maxSpringDistance = Mathf.Max(0, configuration.maxSpringDistance);
            // Convenience: with exactly one child, auto-derive the spring point from it.
            if (configuration.updateType == SpringType.AnimatedRotation && configuration.setLocalSpringPointFromChildTransform && transform.childCount == 1)
            {
                configuration.localSpringPoint = transform.GetChild(0).localPosition;
            }
        }

        private void OnDrawGizmosSelected()
        {
            // Red: the bone itself. Blue: the spring attachment point. Grey: max-distance sphere.
            Gizmos.color = Color.red;
            Gizmos.DrawWireSphere(transform.position, 0.01f);
            if (configuration.updateType != SpringType.FixedPosition)
            {
                Gizmos.color = Color.blue;
                Gizmos.DrawWireSphere(transform.TransformPoint(configuration.localSpringPoint), 0.005f);
                Gizmos.DrawLine(transform.position, transform.TransformPoint(configuration.localSpringPoint));
            }
            if (visualizeMaxSpringDistance)
            {
                Gizmos.color = Color.grey;
                Gizmos.DrawWireSphere(transform.TransformPoint(configuration.localSpringPoint), configuration.maxSpringDistance);
            }
        }

        public enum SpringType
        {
            /// <summary>
            /// The spring is attached to the animation that is applied in Update or Mecanim.
            /// </summary>
            AnimatedRotation,

            /// <summary>
            /// The spring is attached to the original local position.
            /// </summary>
            FixedPosition,

            /// <summary>
            /// The spring is attached to the original local rotation.
            /// </summary>
            FixedRotation
        }

        /// <summary>Tunable parameters for a <see cref="BoneSpring"/> instance.</summary>
        [System.Serializable]
        public class SpringConfiguration
        {
            public SpringType updateType = SpringType.AnimatedRotation;
            // Optional space the simulation runs in; null means world space.
            public Transform simulationSpace;

            [Header("Spring Space")]
            [Tooltip("The local vector where the spring is attached. This can effect how the spring feels.")]
            public Vector3 localSpringPoint = new Vector3(-0.1f, 0, 0);
            public bool setLocalSpringPointFromChildTransform = true;

            [Header("Simulation")]
            [Tooltip("Mass of particle. Leave this 1 unless you know what you're doing.")]
            public float mass = 1;
            [Tooltip("Distance-based spring force.")]
            public float springStrength = 1000f;
            [Range(0, 1), Tooltip("How much velocity should be retained each frame.")]
            public float bounceStrength = 0.5f;
            public Vector3 gravityForce = new Vector3(0, 0, 0);

            [Header("Constraints")]
            [Tooltip("How far off target we can go.")]
            public float maxSpringDistance = 100f;
            [Range(0, 360)]
            public float maxRotationAngleDegrees = 360f;
            [Range(0, 1)]
            public float weight = 1;

            [Header("Simulation Timing")]
            public bool applyInLateUpdate = true;
            public bool stepInFixedUpdate = true;
        }
    }
}

#if UNITY_EDITOR
/// <summary>Inspector helper that explains the selected update mode and flags a zero-length spring point.</summary>
[CustomEditor(typeof(BoneSpring))]
[CanEditMultipleObjects]
public class BoneSpringEditor : Editor
{
    public override void OnInspectorGUI()
    {
        var spring = target as BoneSpring;
        if (spring.configuration.updateType == BoneSpring.SpringType.FixedPosition)
        {
            EditorGUILayout.HelpBox("Fixed Position: The transform will translate only. It's initial local position on Awake is assumed to be it's goal position.", MessageType.Info);
        }
        else if (spring.configuration.updateType == BoneSpring.SpringType.FixedRotation)
        {
            EditorGUILayout.HelpBox("Fixed Rotation: The transform will rotate only. It's initial local rotation on Awake is assumed to be it's goal rotation.", MessageType.Info);
        }
        else if (spring.configuration.updateType == BoneSpring.SpringType.AnimatedRotation)
        {
            EditorGUILayout.HelpBox("Animated Rotation: The transform will rotate only. It's goal rotation is assumed to be set every frame by Animator or Ik.", MessageType.Info);
        }
        base.OnInspectorGUI();
        if (spring.configuration.updateType != BoneSpring.SpringType.FixedPosition)
        {
            // A zero-length spring point makes FromToRotation degenerate in Apply().
            if (spring.configuration.localSpringPoint.sqrMagnitude <= 0)
            {
                EditorGUILayout.HelpBox("For rotating springs, you must set a non-zero-length 'Local Spring Point'.", MessageType.Warning);
            }
        }
    }
}
#endif
using System.Collections.Generic;
using System.Text;

namespace Lucene.Net.Analysis
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements. See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License. You may obtain a copy of the License at
     *
     * http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Lucene.Net.Analysis.TokenAttributes;
    using NUnit.Framework;
    using Attribute = Lucene.Net.Util.Attribute;
    using BytesRef = Lucene.Net.Util.BytesRef;
    using IAttribute = Lucene.Net.Util.IAttribute;
    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
    using TestUtil = Lucene.Net.Util.TestUtil;

    /// <summary>
    /// Unit tests for <c>Token</c>: construction, buffer management, equality,
    /// cloning/copying, and its role as the implementation of the token attributes.
    /// </summary>
    [TestFixture]
    public class TestToken : LuceneTestCase
    {
        // Verifies each Token constructor overload: buffer is copied (not aliased),
        // offsets/type/flags take either defaults or the supplied values.
        [Test]
        public virtual void TestCtor()
        {
            Token t = new Token();
            char[] content = "hello".ToCharArray();
            t.CopyBuffer(content, 0, content.Length);
            Assert.AreNotSame(t.Buffer, content);
            Assert.AreEqual(0, t.StartOffset);
            Assert.AreEqual(0, t.EndOffset);
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual("word", t.Type);
            Assert.AreEqual(0, t.Flags);
            t = new Token(6, 22);
            t.CopyBuffer(content, 0, content.Length);
            // ToString is asserted twice on purpose: calling it must not mutate the token.
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual(6, t.StartOffset);
            Assert.AreEqual(22, t.EndOffset);
            Assert.AreEqual("word", t.Type);
            Assert.AreEqual(0, t.Flags);
            t = new Token(6, 22, 7);
            t.CopyBuffer(content, 0, content.Length);
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual(6, t.StartOffset);
            Assert.AreEqual(22, t.EndOffset);
            Assert.AreEqual("word", t.Type);
            Assert.AreEqual(7, t.Flags);
            t = new Token(6, 22, "junk");
            t.CopyBuffer(content, 0, content.Length);
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual("hello", t.ToString());
            Assert.AreEqual(6, t.StartOffset);
            Assert.AreEqual(22, t.EndOffset);
            Assert.AreEqual("junk", t.Type);
            Assert.AreEqual(0, t.Flags);
        }

        // ResizeBuffer must grow capacity to at least the requested size while
        // preserving the existing term text.
        [Test]
        public virtual void TestResize()
        {
            Token t = new Token();
            char[] content = "hello".ToCharArray();
            t.CopyBuffer(content, 0, content.Length);
            for (int i = 0; i < 2000; i++)
            {
                t.ResizeBuffer(i);
                Assert.IsTrue(i <= t.Buffer.Length);
                Assert.AreEqual("hello", t.ToString());
            }
        }

        // Exercises buffer growth: doubling content up to 2^20 chars via CopyBuffer
        // and via SetEmpty().Append, then linear growth one char at a time.
        [Test]
        public virtual void TestGrow()
        {
            Token t = new Token();
            StringBuilder buf = new StringBuilder("ab");
            for (int i = 0; i < 20; i++)
            {
                char[] content = buf.ToString().ToCharArray();
                t.CopyBuffer(content, 0, content.Length);
                Assert.AreEqual(buf.Length, t.Length);
                Assert.AreEqual(buf.ToString(), t.ToString());
                buf.Append(buf.ToString()); // doubles each iteration: 2 * 2^19 = 1048576
            }
            Assert.AreEqual(1048576, t.Length);

            // now as a string, second variant
            t = new Token();
            buf = new StringBuilder("ab");
            for (int i = 0; i < 20; i++)
            {
                t.SetEmpty().Append(buf);
                string content = buf.ToString();
                Assert.AreEqual(content.Length, t.Length);
                Assert.AreEqual(content, t.ToString());
                buf.Append(content);
            }
            Assert.AreEqual(1048576, t.Length);

            // Test for slow growth to a long term
            t = new Token();
            buf = new StringBuilder("a");
            for (int i = 0; i < 20000; i++)
            {
                t.SetEmpty().Append(buf);
                string content = buf.ToString();
                Assert.AreEqual(content.Length, t.Length);
                Assert.AreEqual(content, t.ToString());
                buf.Append("a");
            }
            Assert.AreEqual(20000, t.Length);

            // Test for slow growth to a long term
            // NOTE(review): this section is an exact duplicate of the one above; it also
            // exists in the upstream Lucene test, so it is preserved here — confirm against
            // upstream before removing.
            t = new Token();
            buf = new StringBuilder("a");
            for (int i = 0; i < 20000; i++)
            {
                t.SetEmpty().Append(buf);
                string content = buf.ToString();
                Assert.AreEqual(content.Length, t.Length);
                Assert.AreEqual(content, t.ToString());
                buf.Append("a");
            }
            Assert.AreEqual(20000, t.Length);
        }

        // ToString reflects the current buffer contents, including after SetEmpty/Append.
        [Test]
        public virtual void TestToString()
        {
            char[] b = new char[] { 'a', 'l', 'o', 'h', 'a' };
            Token t = new Token("", 0, 5);
            t.CopyBuffer(b, 0, 5);
            Assert.AreEqual("aloha", t.ToString());
            t.SetEmpty().Append("hi there");
            Assert.AreEqual("hi there", t.ToString());
        }

        // Equality is based on term buffer contents, not buffer identity.
        [Test]
        public virtual void TestTermBufferEquals()
        {
            Token t1a = new Token();
            char[] content1a = "hello".ToCharArray();
            t1a.CopyBuffer(content1a, 0, 5);
            Token t1b = new Token();
            char[] content1b = "hello".ToCharArray();
            t1b.CopyBuffer(content1b, 0, 5);
            Token t2 = new Token();
            char[] content2 = "hello2".ToCharArray();
            t2.CopyBuffer(content2, 0, 6);
            Assert.IsTrue(t1a.Equals(t1b));
            Assert.IsFalse(t1a.Equals(t2));
            Assert.IsFalse(t2.Equals(t1b));
        }

        // Mixing string-based and char[]-based updates; Buffer exposes the live
        // backing array, so writes through it are visible in ToString.
        [Test]
        public virtual void TestMixedStringArray()
        {
            Token t = new Token("hello", 0, 5);
            Assert.AreEqual(t.Length, 5);
            Assert.AreEqual(t.ToString(), "hello");
            t.SetEmpty().Append("hello2");
            Assert.AreEqual(t.Length, 6);
            Assert.AreEqual(t.ToString(), "hello2");
            t.CopyBuffer("hello3".ToCharArray(), 0, 6);
            Assert.AreEqual(t.ToString(), "hello3");

            // Buffer is the live array — mutating it changes the token's term.
            char[] buffer = t.Buffer;
            buffer[1] = 'o';
            Assert.AreEqual(t.ToString(), "hollo3");
        }

        // Clone must deep-copy the term buffer and the payload.
        [Test]
        public virtual void TestClone()
        {
            Token t = new Token(0, 5);
            char[] content = "hello".ToCharArray();
            t.CopyBuffer(content, 0, 5);
            char[] buf = t.Buffer;
            Token copy = AssertCloneIsEqual(t);
            Assert.AreEqual(t.ToString(), copy.ToString());
            Assert.AreNotSame(buf, copy.Buffer);

            BytesRef pl = new BytesRef(new byte[] { 1, 2, 3, 4 });
            t.Payload = pl;
            copy = AssertCloneIsEqual(t);
            Assert.AreEqual(pl, copy.Payload);
            Assert.AreNotSame(pl, copy.Payload);
        }

        // CopyTo must deep-copy state into a fresh instance (including an empty token).
        [Test]
        public virtual void TestCopyTo()
        {
            Token t = new Token();
            Token copy = AssertCopyIsEqual(t);
            Assert.AreEqual("", t.ToString());
            Assert.AreEqual("", copy.ToString());

            t = new Token(0, 5);
            char[] content = "hello".ToCharArray();
            t.CopyBuffer(content, 0, 5);
            char[] buf = t.Buffer;
            copy = AssertCopyIsEqual(t);
            Assert.AreEqual(t.ToString(), copy.ToString());
            Assert.AreNotSame(buf, copy.Buffer);

            BytesRef pl = new BytesRef(new byte[] { 1, 2, 3, 4 });
            t.Payload = pl;
            copy = AssertCopyIsEqual(t);
            Assert.AreEqual(pl, copy.Payload);
            Assert.AreNotSame(pl, copy.Payload);
        }

        // Marker attribute used to prove the attribute factory falls back to a
        // non-Token implementation for attributes Token does not implement.
        public interface ISenselessAttribute : Lucene.Net.Util.IAttribute
        {
        }

        public sealed class SenselessAttribute : Attribute, ISenselessAttribute
        {
            public override void CopyTo(IAttribute target)
            {
            }

            public override void Clear()
            {
            }

            public override bool Equals(object o)
            {
                return (o is SenselessAttribute);
            }

            public override int GetHashCode()
            {
                return 0;
            }
        }

        // Token.TOKEN_ATTRIBUTE_FACTORY must hand out Token itself for all the
        // standard token attributes, and a separate impl for unknown attributes.
        [Test]
        public virtual void TestTokenAttributeFactory()
        {
            TokenStream ts = new MockTokenizer(Token.TOKEN_ATTRIBUTE_FACTORY, new System.IO.StringReader("foo bar"), MockTokenizer.WHITESPACE, false, MockTokenizer.DEFAULT_MAX_TOKEN_LENGTH);
            Assert.IsTrue(ts.AddAttribute<ISenselessAttribute>() is SenselessAttribute, "SenselessAttribute is not implemented by SenselessAttributeImpl");
            Assert.IsTrue(ts.AddAttribute<ICharTermAttribute>() is Token, "CharTermAttribute is not implemented by Token");
            Assert.IsTrue(ts.AddAttribute<IOffsetAttribute>() is Token, "OffsetAttribute is not implemented by Token");
            Assert.IsTrue(ts.AddAttribute<IFlagsAttribute>() is Token, "FlagsAttribute is not implemented by Token");
            Assert.IsTrue(ts.AddAttribute<IPayloadAttribute>() is Token, "PayloadAttribute is not implemented by Token");
            Assert.IsTrue(ts.AddAttribute<IPositionIncrementAttribute>() is Token, "PositionIncrementAttribute is not implemented by Token");
            Assert.IsTrue(ts.AddAttribute<ITypeAttribute>() is Token, "TypeAttribute is not implemented by Token");
        }

        // ReflectWith must expose every attribute Token implements, keyed "Interface#member".
        [Test]
        public virtual void TestAttributeReflection()
        {
            Token t = new Token("foobar", 6, 22, 8);
            TestUtil.AssertAttributeReflection(t, new Dictionary<string, object>()
            {
                { typeof(ICharTermAttribute).Name + "#term", "foobar" },
                { typeof(ITermToBytesRefAttribute).Name + "#bytes", new BytesRef("foobar") },
                { typeof(IOffsetAttribute).Name + "#startOffset", 6 },
                { typeof(IOffsetAttribute).Name + "#endOffset", 22 },
                { typeof(IPositionIncrementAttribute).Name + "#positionIncrement", 1 },
                { typeof(IPayloadAttribute).Name + "#payload", null },
                { typeof(ITypeAttribute).Name + "#type", TypeAttribute.DEFAULT_TYPE },
                { typeof(IFlagsAttribute).Name + "#flags", 8 }
            });
        }

        /// <summary>Clones <paramref name="att"/> and asserts clone equality and hashcode parity.</summary>
        public static T AssertCloneIsEqual<T>(T att) where T : Attribute
        {
            T clone = (T)att.Clone();
            Assert.AreEqual(att, clone, "Clone must be equal");
            Assert.AreEqual(att.GetHashCode(), clone.GetHashCode(), "Clone's hashcode must be equal");
            return clone;
        }

        /// <summary>Copies <paramref name="att"/> into a fresh instance via CopyTo and asserts equality.</summary>
        public static T AssertCopyIsEqual<T>(T att) where T : Attribute
        {
            T copy = (T)System.Activator.CreateInstance(att.GetType());
            att.CopyTo(copy);
            Assert.AreEqual(att, copy, "Copied instance must be equal");
            Assert.AreEqual(att.GetHashCode(), copy.GetHashCode(), "Copied instance's hashcode must be equal");
            return copy;
        }
    }
}
/* * This file is part of AceQL C# Client SDK. * AceQL C# Client SDK: Remote SQL access over HTTP with AceQL HTTP. * Copyright (C) 2020, KawanSoft SAS * (http://www.kawansoft.com). All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AceQL.Client.Api.File; using Newtonsoft.Json; using PCLStorage; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using System.Threading.Tasks; namespace AceQL.Client.Api.Util { /// <summary> /// Class RowParser. /// We pass a TextReader instead of a PortableFile as we want the methods to be all sync for end user. 
/// <seealso cref="System.IDisposable" />
/// </summary>
internal class RowParser : IDisposable
{
    // Owns the stream passed to the constructor; both wrappers are released in Dispose().
    private readonly StreamReader streamReader;

    // JSON pull-parser over streamReader; advanced incrementally by BuildTypes()/BuildRowNum().
    private readonly JsonTextReader reader;

    /// <summary>
    /// The trace on/off flag. When true, parsed column values are echoed via ConsoleEmul.
    /// </summary>
    private bool traceOn;

    /// <summary>
    /// The values per column index for the row most recently parsed by <see cref="BuildRowNum"/>.
    /// </summary>
    private Dictionary<int, object> valuesPerColIndex;

    // Column SQL type names keyed by 0-based column index (filled once by BuildTypes()).
    private Dictionary<int, string> typesPerColIndex;

    // Column indexes keyed by column name (filled while parsing row 1 only).
    private Dictionary<string, int> colIndexesPerColName;

    /// <summary>
    /// Constructor. Immediately consumes the "column_types" array from the stream,
    /// leaving the reader positioned for subsequent <see cref="BuildRowNum"/> calls.
    /// </summary>
    /// <param name="readStream">The reading stream on file. Ownership is taken:
    /// it is closed when this instance is disposed.</param>
    public RowParser(Stream readStream)
    {
        streamReader = new StreamReader(readStream);
        reader = new JsonTextReader(streamReader);
        BuildTypes();
    }

    /// <summary>Returns the column type names per column index.</summary>
    internal Dictionary<int, string> GetTypesPerColIndex()
    {
        return typesPerColIndex;
    }

    /// <summary>Returns the values per column index of the last row parsed.</summary>
    internal Dictionary<int, object> GetValuesPerColIndex()
    {
        return valuesPerColIndex;
    }

    /// <summary>Returns the column index per column name (built while parsing row 1).</summary>
    internal Dictionary<string, int> GetColIndexesPerColName()
    {
        return colIndexesPerColName;
    }

    /// <summary>
    /// Scans forward to the "column_types" property and records each type name,
    /// indexed from 0, stopping at the closing array bracket.
    /// </summary>
    private void BuildTypes()
    {
        typesPerColIndex = new Dictionary<int, string>();

        while (reader.Read())
        {
            if (reader.Value == null)
            {
                continue;
            }

            if (reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("column_types"))
            {
                continue;
            }

            int idx = 0;

            while (reader.Read())
            {
                // We are done at end of row
                if (reader.TokenType.Equals(JsonToken.EndArray))
                {
                    return;
                }

                if (reader.Value != null)
                {
                    typesPerColIndex.Add(idx++, reader.Value.ToString());
                }
            }
        }
    }

    /// <summary>
    /// Builds the row number: scans forward to the "row_N" property and fills
    /// <see cref="valuesPerColIndex"/> (and, for row 1 only, <see cref="colIndexesPerColName"/>).
    /// NOTE(review): assumes rows are requested in increasing order, since the reader
    /// is forward-only — confirm against callers.
    /// </summary>
    /// <param name="rowNum">The row number (1-based, per the "row_1" naming in the JSON).</param>
    internal void BuildRowNum(int rowNum)
    {
        // Value needed because we don't want to take columns with "row_xxx" names as row numbers
        bool firstStartArrayPassed = false;
        bool isInsideRowValuesArray = false;

        while (reader.Read())
        {
            if (reader.Value == null)
            {
                if (reader.TokenType == JsonToken.StartArray)
                {
                    if (!firstStartArrayPassed)
                    {
                        firstStartArrayPassed = true;
                    }
                    else
                    {
                        isInsideRowValuesArray = true;
                    }
                }

                if (reader.TokenType == JsonToken.EndArray)
                {
                    isInsideRowValuesArray = false;
                }

                continue;
            }

            if (reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("row_" + rowNum) || isInsideRowValuesArray)
            {
                continue;
            }

            int colIndex = 0;
            String colName = null;

            valuesPerColIndex = new Dictionary<int, object>();

            if (colIndexesPerColName == null)
            {
                colIndexesPerColName = new Dictionary<string, int>();
            }

            while (reader.Read())
            {
                // We are done at end of row
                if (reader.TokenType.Equals(JsonToken.EndArray))
                {
                    return;
                }

                if (reader.TokenType == JsonToken.PropertyName)
                {
                    colName = reader.Value.ToString();
                    reader.Read();

                    // FIX: a JSON null token leaves reader.Value == null; the previous code
                    // called reader.Value.ToString() unconditionally and threw
                    // NullReferenceException. Treat a JSON null like the "NULL" marker.
                    String colValue = reader.Value == null ? null : reader.Value.ToString();

                    if (colValue != null && colValue.Equals("NULL"))
                    {
                        colValue = null;
                    }

                    if (colValue != null)
                    {
                        colValue = colValue.Trim();
                    }

                    Trace("" + colValue);
                    valuesPerColIndex.Add(colIndex, colValue);

                    // Name-to-index map only needs to be built once, on the first row.
                    if (rowNum == 1)
                    {
                        colIndexesPerColName.Add(colName, colIndex);
                    }

                    // Do the increment at end to start indexes at 0
                    colIndex++;
                }
            }
        }
    }

    /// <summary>
    /// Determines whether [is trace on].
    /// </summary>
    /// <returns><c>true</c> if [is trace on]; otherwise, <c>false</c>.</returns>
    internal bool IsTraceOn()
    {
        return traceOn;
    }

    /// <summary>
    /// Sets the trace on.
    /// </summary>
    /// <param name="traceOn">if set to <c>true</c> [trace on].</param>
    internal void SetTraceOn(bool traceOn)
    {
        this.traceOn = traceOn;
    }

    /// <summary>
    /// Traces the specified string when tracing is enabled.
    /// </summary>
    /// <param name="s">The string to trace.</param>
    private void Trace(String s)
    {
        if (traceOn)
        {
            ConsoleEmul.WriteLine(s);
        }
    }

    /// <summary>
    /// Releases the JSON reader and the underlying stream reader.
    /// FIX: the previous implementation only closed the JsonTextReader when
    /// streamReader was non-null, nested inside streamReader's disposal, and
    /// disposed the StreamReader *before* closing the reader that wraps it.
    /// Close the outermost wrapper first, then the stream reader.
    /// </summary>
    public void Dispose()
    {
        if (this.reader != null)
        {
            this.reader.Close();
        }

        if (this.streamReader != null)
        {
            this.streamReader.Dispose();
        }
    }
}
}
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gaxgrpc = Google.Api.Gax.Grpc; using grpccore = Grpc.Core; using moq = Moq; using st = System.Threading; using stt = System.Threading.Tasks; using xunit = Xunit; namespace Google.Cloud.ResourceSettings.V1.Tests { /// <summary>Generated unit tests.</summary> public sealed class GeneratedResourceSettingsServiceClientTest { [xunit::FactAttribute] public void GetSettingRequestObject() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); GetSettingRequest request = new GetSettingRequest { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), View = SettingView.Basic, }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.GetSetting(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting response = client.GetSetting(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async 
stt::Task GetSettingRequestObjectAsync() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); GetSettingRequest request = new GetSettingRequest { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), View = SettingView.Basic, }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.GetSettingAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Setting>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting responseCallSettings = await client.GetSettingAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Setting responseCancellationToken = await client.GetSettingAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetSetting() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); GetSettingRequest request = new GetSettingRequest { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; 
mockGrpcClient.Setup(x => x.GetSetting(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting response = client.GetSetting(request.Name); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetSettingAsync() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); GetSettingRequest request = new GetSettingRequest { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.GetSettingAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Setting>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting responseCallSettings = await client.GetSettingAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Setting responseCancellationToken = await client.GetSettingAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetSettingResourceNames() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); GetSettingRequest request = new 
GetSettingRequest { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.GetSetting(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting response = client.GetSetting(request.SettingName); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetSettingResourceNamesAsync() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); GetSettingRequest request = new GetSettingRequest { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.GetSettingAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Setting>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting responseCallSettings = await client.GetSettingAsync(request.SettingName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Setting responseCancellationToken = await client.GetSettingAsync(request.SettingName, 
st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void UpdateSettingRequestObject() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); UpdateSettingRequest request = new UpdateSettingRequest { Setting = new Setting(), }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.UpdateSetting(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting response = client.UpdateSetting(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task UpdateSettingRequestObjectAsync() { moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient> mockGrpcClient = new moq::Mock<ResourceSettingsService.ResourceSettingsServiceClient>(moq::MockBehavior.Strict); UpdateSettingRequest request = new UpdateSettingRequest { Setting = new Setting(), }; Setting expectedResponse = new Setting { SettingName = SettingName.FromProjectNumberSettingName("[PROJECT_NUMBER]", "[SETTING_NAME]"), Metadata = new SettingMetadata(), LocalValue = new Value(), EffectiveValue = new Value(), Etag = "etage8ad7218", }; mockGrpcClient.Setup(x => x.UpdateSettingAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Setting>(stt::Task.FromResult(expectedResponse), null, null, null, null)); ResourceSettingsServiceClient client = new ResourceSettingsServiceClientImpl(mockGrpcClient.Object, null); Setting 
responseCallSettings = await client.UpdateSettingAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Setting responseCancellationToken = await client.UpdateSettingAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } } }
//==============================================================================
// TorqueLab ->
// Copyright (c) 2015 All Right Reserved, http://nordiklab.com/
//------------------------------------------------------------------------------
// FILTERING:
// - Base ArrayObject containing data:
// new ArrayObject(MaterialFilterAllArray);
// new ArrayObject(MaterialFilterMappedArray);
// new ArrayObject(MaterialFilterUnmappedArray);
// - Tags
//==============================================================================
$Pref::MaterialSelector::DefaultMaterialFile = "art/textures/customMaterials.cs";
//==============================================================================
$Pref::MaterialSelector::CurrentStaticFilter = "MaterialFilterAllArray";
$Pref::MaterialSelector::CurrentFilter = ""; //ALL
$Pref::MaterialSelector::ThumbnailCountIndex = 3;
$Pref::MaterialSelector::ThumbnailCustomCount = "";
$MaterialSelector_ThumbPerPage[0] = "20";
$MaterialSelector_ThumbPerPage[1] = "40";
$MaterialSelector_ThumbPerPage[2] = "75";
$MaterialSelector_ThumbPerPage[3] = "100";
$MaterialSelector_ThumbPerPage[4] = "150";
$MaterialSelector_ThumbPerPage[5] = "200";
$MaterialSelector_ThumbPerPage[6] = "All";
$MaterialSelector_ThumbSize[0] = "32";
$MaterialSelector_ThumbSize[1] = "48";
$MaterialSelector_ThumbSize[2] = "64";
$MaterialSelector_ThumbSize[3] = "80";
$MaterialSelector_ThumbSize[4] = "96";
$MaterialSelector_ThumbSize[5] = "128";
$MaterialSelector_ThumbSize[6] = "160";
//------------------------------------------------------------------------------
//==============================================================================
if (!isObject(MaterialSelectorPerMan))
   new PersistenceManager(MaterialSelectorPerMan);

// TODO(review): the stock engine idiom is `new ScriptObject(MaterialSelector);` —
// confirm `newScriptObject` is a TorqueLab helper before changing.
if (!isObject(MaterialSelector))
   newScriptObject("MaterialSelector");

//------------------------------------------------------------------------------
if (!isObject(UnlistedMaterials)) {
   new ArrayObject(UnlistedMaterials);
   UnlistedMaterials.add( "unlistedMaterials", WarningMaterial );
   UnlistedMaterials.add( "unlistedMaterials", materialEd_previewMaterial );
   UnlistedMaterials.add( "unlistedMaterials", notDirtyMaterial );
   UnlistedMaterials.add( "unlistedMaterials", materialEd_cubemapEd_cubeMapPreview );
   UnlistedMaterials.add( "unlistedMaterials", matEdCubeMapPreviewMat );
   UnlistedMaterials.add( "unlistedMaterials", materialEd_justAlphaMaterial );
   UnlistedMaterials.add( "unlistedMaterials", materialEd_justAlphaShader );
}

//------------------------------------------------------------------------------
// FIX: this function was defined twice, byte-for-byte identical, back to back;
// the exact duplicate has been removed.
// NOTE(review): TorqueScript namespaces are case-insensitive, so this 1-arg
// showDialog is silently *replaced* by the 3-arg MaterialSelector::showDialog
// defined below — it is effectively dead code. Kept for now; confirm no caller
// relies on it before deleting.
function materialSelector::showDialog( %this) {
   MaterialSelector.setVisible(1);
}

//------------------------------------------------------------------------------
function LabMaterialBrowser::onWake( %this) {
   MaterialSelector.setVisible(1);
}

//==============================================================================
function MaterialSelector::showDialog( %this, %selectCallback, %returnType) {
   MatSelector_FilterSamples.visible = false;
   MatSelector_PageTextSample.visible = false;
   MatSelector_PageButtonSample.visible = false;
   MatSelector_MaterialPreviewSample.visible = false;
   MaterialSelector_Creator.visible = false;
   MaterialSelector.setListFilterText("");
   hide(MatSel_SetAsActiveContainer);

   if (MaterialEditorTools.isAwake())
      show(MatSel_SetAsActiveContainer);

   //FIXME Commented because with update it was staying visible inside hidden container and that was causing an issue
   //if( MaterialSelector.isVisible() )
   //return;
   %this.showDialogBase(%selectCallback, %returnType, false);
}

//------------------------------------------------------------------------------
function MaterialSelector::showTerrainDialog( %this, %selectCallback, %returnType) {
   %this.showDialogBase(%selectCallback, %returnType, true);
}

//------------------------------------------------------------------------------
function MaterialSelector::showDialogBase( %this, %selectCallback, %returnType, %useTerrainMaterials) {
   // Set the select callback
   MaterialSelector.selectCallback = %selectCallback;
   MaterialSelector.returnType = %returnType;
   MaterialSelector.currentStaticFilter = $Pref::MaterialSelector::CurrentStaticFilter;
   MaterialSelector.currentFilter = $Pref::MaterialSelector::CurrentFilter;
   MaterialSelector.terrainMaterials = %useTerrainMaterials;

   // Populate the thumbnails-per-page popup from the $MaterialSelector_ThumbPerPage table.
   MaterialSelector-->materialPreviewCountPopup.clear();
   %i = 0;

   while($MaterialSelector_ThumbPerPage[%i] !$= "") {
      MaterialSelector-->materialPreviewCountPopup.add( $MaterialSelector_ThumbPerPage[%i], %i );
      %i++;
   }

   %selected = $Pref::MaterialSelector::ThumbnailCountIndex;

   // An optional user-defined custom count is appended last and pre-selected.
   if ($Pref::MaterialSelector::ThumbnailCustomCount !$= "") {
      MaterialSelector-->materialPreviewCountPopup.add( $Pref::MaterialSelector::ThumbnailCustomCount, %i );
      %selected = %i;
   }

   MaterialSelector-->materialPreviewCountPopup.setSelected( %selected );

   // Populate the thumbnail-size popup from the $MaterialSelector_ThumbSize table.
   %i = 0;

   while($MaterialSelector_ThumbSize[%i] !$= "") {
      MaterialSelector-->materialPreviewSizePopup.add( $MaterialSelector_ThumbSize[%i], %i );
      %i++;
   }

   pushDlg(LabMaterialBrowser);
   MatSelector_MaterialsContainer.clear();
   MaterialSelector.setVisible(1);
   MaterialSelector.buildStaticFilters();
   MaterialSelector.selectedMaterial = "";
   MaterialSelector.loadMaterialFilters();
}

//------------------------------------------------------------------------------
function MaterialSelector::hideDialog( %this ) {
   MaterialSelector.breakdown();
   //oldmatSelector.setVisible(0);
   Canvas.popDialog(LabMaterialBrowser);
}

//------------------------------------------------------------------------------
// Persist the active filters and tear down all dynamically-built UI/arrays.
function MaterialSelector::breakdown( %this ) {
   $Pref::MaterialSelector::CurrentStaticFilter = MaterialSelector.currentStaticFilter;
   $Pref::MaterialSelector::CurrentFilter = MaterialSelector.currentFilter;
   %this.clearFilterArray();
   MaterialSelector-->materialSelection.deleteAllObjects();
   MatEdPreviewArray.delete();
   MaterialSelector-->materialCategories.deleteAllObjects();
   MaterialFilterAllArray.delete();
   MaterialFilterMappedArray.delete();
   MaterialFilterUnmappedArray.delete();
}

//------------------------------------------------------------------------------
// this should create a new material pretty nicely
function MaterialSelector::createNewMaterial( %this ) {
   // look for a newMaterial name to grab
   %material = getUniqueName( "newMaterial" );
   new Material(%material) {
      diffuseMap[0] = "art/textures/core/warnMat";
      mapTo = "unmapped_mat";
      parentGroup = RootGroup;
   };
   // add one to All filter
   MaterialFilterAllArray.add( "", %material.name );
   MaterialFilterAllArrayCheckbox.setText("All ( " @ MaterialFilterAllArray.count() + 1 @ " ) ");
   MaterialFilterUnmappedArray.add( "", %material.name );
   MaterialFilterUnmappedArrayCheckbox.setText("Unmapped ( " @ MaterialFilterUnmappedArray.count() + 1 @ " ) ");

   if( MaterialSelector.currentStaticFilter !$= "MaterialFilterMappedArray" ) {
      // create the new material gui
      %container = new GuiControl() {
         profile = "ToolsDefaultProfile";
         Position = "0 0";
         Extent = "74 85";
         HorizSizing = "right";
         VertSizing = "bottom";
         isContainer = "1";

         new GuiTextCtrl() {
            position = "10 70";
            profile = "ToolsGuiTextCenterProfile";
            extent = "64 16";
            text = %material.name;
         };
      };
      %previewButton = new GuiBitmapButtonCtrl() {
         internalName = %material.name;
         HorizSizing = "right";
         VertSizing = "bottom";
         profile = "ToolsButtonProfile";
         position = "7 4";
         extent = "64 64";
         buttonType = "PushButton";
         bitmap = "art/textures/core/warnMat";
         Command = "";
         text = "Loading...";
         useStates = false;

         new GuiBitmapButtonCtrl() {
            HorizSizing = "right";
            VertSizing = "bottom";
            profile = "ToolsButtonProfile";
            position = "0 0";
            extent = "64 64";
            Variable = "";
            buttonType = "toggleButton";
            bitmap = "tlab/materialEditor/assets/cubemapBtnBorder";
            groupNum = "0";
            text = "";
         };
      };
      %previewBorder = new GuiButtonCtrl() {
         internalName = %material.name@"Border";
         HorizSizing = "right";
         VertSizing = "bottom";
         profile = "ToolsButtonHighlight";
         position = "3 0";
         extent = "72 88";
         Variable = "";
         buttonType = "toggleButton";
         tooltip = %material.name;
         Command = "MaterialSelector.updateSelection( $ThisControl.getParent().getObject(1).internalName, $ThisControl.getParent().getObject(1).bitmap );";
         groupNum = "0";
         text = "";
      };
      %container.add(%previewButton);
      %container.add(%previewBorder);
      // add to the gui control array
      MaterialSelector-->materialSelection.add(%container);
   }

   %material.setFilename($Pref::MaterialSelector::DefaultMaterialFile);
   // select me
   MaterialSelector.updateSelection( %material, "art/textures/core/warnMat.png" );
}

//------------------------------------------------------------------------------
//needs to be deleted with the persistence manager and needs to be blanked out of the matmanager
//also need to update instances... i guess which is the tricky part....
function MaterialSelector::showDeleteDialog( %this ) {
   %material = MaterialSelector.selectedMaterial;
   %secondFilter = "MaterialFilterMappedArray";
   %secondFilterName = "Mapped";

   // If the material is in the Unmapped array, its secondary filter is Unmapped instead.
   for( %i = 0; %i < MaterialFilterUnmappedArray.count(); %i++ ) {
      if( MaterialFilterUnmappedArray.getValue(%i) $= %material ) {
         %secondFilter = "MaterialFilterUnmappedArray";
         %secondFilterName = "Unmapped";
         break;
      }
   }

   if( isObject( %material ) ) {
      MessageBoxYesNoCancel("Delete Material?", "Are you sure you want to delete<br><br>" @ %material.getName() @ "<br><br> Material deletion won't take affect until the engine is quit.", "MaterialSelector.deleteMaterial( " @ %material @ ", " @ %secondFilter @ ", " @ %secondFilterName @" );", "", "" );
   }
}

//------------------------------------------------------------------------------
function MaterialSelector::deleteMaterial( %this, %materialName, %secondFilter, %secondFilterName ) {
   if( !isObject( %materialName ) )
      return;

   // Remove every occurrence of the material from the All filter array.
   for( %i = 0; %i <= MaterialFilterAllArray.countValue( %materialName ); %i++) {
      %index = MaterialFilterAllArray.getIndexFromValue( %materialName );
      MaterialFilterAllArray.erase( %index );
   }

   MaterialFilterAllArrayCheckbox.setText("All ( " @ MaterialFilterAllArray.count() - 1 @ " ) ");
   %checkbox = %secondFilter @ "Checkbox";

   for( %k = 0; %k <= %secondFilter.countValue( %materialName ); %k++) {
      %index = %secondFilter.getIndexFromValue( %materialName );
      %secondFilter.erase( %index );
   }

   %checkbox.setText( %secondFilterName @ " ( " @ %secondFilter.count() - 1 @ " ) ");

   // Decrement the displayed count of every tag filter that referenced this material.
   for( %i = 0; %materialName.getFieldValue("materialTag" @ %i) !$= ""; %i++ ) {
      %materialTag = %materialName.getFieldValue("materialTag" @ %i);

      for( %j = MaterialSelector.staticFilterObjCount; %j < MaterialSelector-->tagFilters.getCount() ; %j++ ) {
         if( %materialTag $= MaterialSelector-->tagFilters.getObject(%j).getObject(0).filter ) {
            %count = getWord( MaterialSelector-->tagFilters.getObject(%j).getObject(0).getText(), 2 );
            %count--;
            MaterialSelector-->tagFilters.getObject(%j).getObject(0).setText( %materialTag @ " ( "@ %count @ " )");
         }
      }
   }

   UnlistedMaterials.add( "unlistedMaterials", %materialName );

   // Only unpersist materials that live in a real material file, not editor-owned assets.
   if( %materialName.getFilename() !$= "" && %materialName.getFilename() !$= "tlab/gui/oldmatSelector.ed.gui" && %materialName.getFilename() !$= "tlab/materialEditor/scripts/materialEditor.ed.cs" ) {
      MaterialSelectorPerMan.removeObjectFromFile(%materialName);
      MaterialSelectorPerMan.saveDirty();
   }

   MaterialSelector.preloadFilter();
   //oldmatSelector.selectMaterial( "WarningMaterial" );
}
// // Azure Media Services REST API v2 - Functions // // Shared Library // using System; using Microsoft.WindowsAzure.MediaServices.Client; using System.IO; using System.Collections.Generic; using System.Linq; using Microsoft.WindowsAzure.Storage.Table; using Microsoft.Azure.WebJobs.Host; using System.Xml.Linq; namespace media_functions_for_logic_app { public class ManifestHelpers { public static Stream GenerateStreamFromString(string s) { MemoryStream stream = new MemoryStream(); StreamWriter writer = new StreamWriter(stream); writer.Write(s); writer.Flush(); stream.Position = 0; return stream; } public class ManifestGenerated { public string FileName; public string Content; } public static ManifestGenerated LoadAndUpdateManifestTemplate(IAsset asset, Microsoft.Azure.WebJobs.ExecutionContext execContext) { var mp4AssetFiles = asset.AssetFiles.ToList().Where(f => f.Name.EndsWith(".mp4", StringComparison.OrdinalIgnoreCase)).ToArray(); var m4aAssetFiles = asset.AssetFiles.ToList().Where(f => f.Name.EndsWith(".m4a", StringComparison.OrdinalIgnoreCase)).ToArray(); var mediaAssetFiles = asset.AssetFiles.ToList().Where(f => f.Name.EndsWith(".mp4", StringComparison.OrdinalIgnoreCase) || f.Name.EndsWith(".m4a", StringComparison.OrdinalIgnoreCase)).ToArray(); if (mediaAssetFiles.Count() != 0) { // Prepare the manifest string mp4fileuniqueaudio = null; // let's load the manifest template string manifestPath = Path.Combine(System.IO.Directory.GetParent(execContext.FunctionDirectory).FullName, "presets", "Manifest.ism"); XDocument doc = XDocument.Load(manifestPath); XNamespace ns = "http://www.w3.org/2001/SMIL20/Language"; var bodyxml = doc.Element(ns + "smil"); var body2 = bodyxml.Element(ns + "body"); var switchxml = body2.Element(ns + "switch"); // audio tracks (m4a) foreach (var file in m4aAssetFiles) { switchxml.Add(new XElement(ns + "audio", new XAttribute("src", file.Name), new XAttribute("title", Path.GetFileNameWithoutExtension(file.Name)))); } if 
(m4aAssetFiles.Count() == 0) { // audio track(s) var mp4AudioAssetFilesName = mp4AssetFiles.Where(f => (f.Name.ToLower().Contains("audio") && !f.Name.ToLower().Contains("video")) || (f.Name.ToLower().Contains("aac") && !f.Name.ToLower().Contains("h264")) ); var mp4AudioAssetFilesSize = mp4AssetFiles.OrderBy(f => f.ContentFileSize); string mp4fileaudio = (mp4AudioAssetFilesName.Count() == 1) ? mp4AudioAssetFilesName.FirstOrDefault().Name : mp4AudioAssetFilesSize.FirstOrDefault().Name; // if there is one file with audio or AAC in the name then let's use it for the audio track switchxml.Add(new XElement(ns + "audio", new XAttribute("src", mp4fileaudio), new XAttribute("title", "audioname"))); if (mp4AudioAssetFilesName.Count() == 1 && mediaAssetFiles.Count() > 1) //looks like there is one audio file and dome other video files { mp4fileuniqueaudio = mp4fileaudio; } } // video tracks foreach (var file in mp4AssetFiles) { if (file.Name != mp4fileuniqueaudio) // we don't put the unique audio file as a video track { switchxml.Add(new XElement(ns + "video", new XAttribute("src", file.Name))); } } // manifest filename string name = CommonPrefix(mediaAssetFiles.Select(f => Path.GetFileNameWithoutExtension(f.Name)).ToArray()); if (string.IsNullOrEmpty(name)) { name = "manifest"; } else if (name.EndsWith("_") && name.Length > 1) // i string ends with "_", let's remove it { name = name.Substring(0, name.Length - 1); } name = name + ".ism"; return new ManifestGenerated() { Content = doc.Declaration.ToString() + Environment.NewLine + doc.ToString(), FileName = name }; } else { return new ManifestGenerated() { Content = null, FileName = string.Empty }; // no mp4 in asset } } public static string CommonPrefix(string[] ss) { if (ss.Length == 0) { return ""; } if (ss.Length == 1) { return ss[0]; } int prefixLength = 0; foreach (char c in ss[0]) { foreach (string s in ss) { if (s.Length <= prefixLength || s[prefixLength] != c) { return ss[0].Substring(0, prefixLength); } } 
prefixLength++; } return ss[0]; // all strings identical } static public ManifestTimingData GetManifestTimingData(CloudMediaContext context, IAsset asset, TraceWriter log) // Parse the manifest and get data from it { ManifestTimingData response = new ManifestTimingData() { IsLive = false, Error = false, TimestampOffset = 0, TimestampList = new List<ulong>() }; try { ILocator mytemplocator = null; Uri myuri = MediaServicesHelper.GetValidOnDemandURI(context, asset); if (myuri == null) { mytemplocator = MediaServicesHelper.CreatedTemporaryOnDemandLocator(asset); myuri = MediaServicesHelper.GetValidOnDemandURI(context, asset); } if (myuri != null) { log.Info($"Asset URI {myuri.ToString()}"); XDocument manifest = XDocument.Load(myuri.ToString()); //log.Info($"manifest {manifest}"); var smoothmedia = manifest.Element("SmoothStreamingMedia"); var videotrack = smoothmedia.Elements("StreamIndex").Where(a => a.Attribute("Type").Value == "video"); // TIMESCALE string timescalefrommanifest = smoothmedia.Attribute("TimeScale").Value; if (videotrack.FirstOrDefault().Attribute("TimeScale") != null) // there is timescale value in the video track. Let's take this one. { timescalefrommanifest = videotrack.FirstOrDefault().Attribute("TimeScale").Value; } ulong timescale = ulong.Parse(timescalefrommanifest); response.TimeScale = (ulong?)timescale; // Timestamp offset if (videotrack.FirstOrDefault().Element("c").Attribute("t") != null) { response.TimestampOffset = ulong.Parse(videotrack.FirstOrDefault().Element("c").Attribute("t").Value); } else { response.TimestampOffset = 0; // no timestamp, so it should be 0 } ulong totalduration = 0; ulong durationpreviouschunk = 0; ulong durationchunk; int repeatchunk; foreach (var chunk in videotrack.Elements("c")) { durationchunk = chunk.Attribute("d") != null ? ulong.Parse(chunk.Attribute("d").Value) : 0; log.Info($"duration d {durationchunk}"); repeatchunk = chunk.Attribute("r") != null ? 
int.Parse(chunk.Attribute("r").Value) : 1; log.Info($"repeat r {repeatchunk}"); totalduration += durationchunk * (ulong)repeatchunk; if (chunk.Attribute("t") != null) { //totalduration = ulong.Parse(chunk.Attribute("t").Value) - response.TimestampOffset; // new timestamp, perhaps gap in live stream.... response.TimestampList.Add(ulong.Parse(chunk.Attribute("t").Value)); log.Info($"t value {ulong.Parse(chunk.Attribute("t").Value)}"); } else { response.TimestampList.Add(response.TimestampList[response.TimestampList.Count() - 1] + durationpreviouschunk); } for (int i = 1; i < repeatchunk; i++) { response.TimestampList.Add(response.TimestampList[response.TimestampList.Count() - 1] + durationchunk); } durationpreviouschunk = durationchunk; } response.TimestampEndLastChunk = response.TimestampList[response.TimestampList.Count() - 1] + durationpreviouschunk; if (smoothmedia.Attribute("IsLive") != null && smoothmedia.Attribute("IsLive").Value == "TRUE") { // Live asset.... No duration to read (but we can read scaling and compute duration if no gap) response.IsLive = true; response.AssetDuration = TimeSpan.FromSeconds((double)totalduration / ((double)timescale)); } else { totalduration = ulong.Parse(smoothmedia.Attribute("Duration").Value); response.AssetDuration = TimeSpan.FromSeconds((double)totalduration / ((double)timescale)); } } else { response.Error = true; } if (mytemplocator != null) mytemplocator.Delete(); } catch (Exception ex) { response.Error = true; } return response; } public static EndTimeInTable RetrieveLastEndTime(CloudTable table, string programID) { TableOperation tableOperation = TableOperation.Retrieve<EndTimeInTable>(programID, "lastEndTime"); TableResult tableResult = table.Execute(tableOperation); return tableResult.Result as EndTimeInTable; } public static void UpdateLastEndTime(CloudTable table, TimeSpan endtime, string programId, int id, ProgramState state) { var EndTimeInTableEntity = new EndTimeInTable(); EndTimeInTableEntity.ProgramId = 
programId; EndTimeInTableEntity.Id = id.ToString(); EndTimeInTableEntity.ProgramState = state.ToString(); EndTimeInTableEntity.LastEndTime = endtime.ToString(); EndTimeInTableEntity.AssignPartitionKey(); EndTimeInTableEntity.AssignRowKey(); TableOperation tableOperation = TableOperation.InsertOrReplace(EndTimeInTableEntity); table.Execute(tableOperation); } public static IAsset GetAssetFromProgram(CloudMediaContext context, string programId) { IAsset asset = null; try { IProgram program = context.Programs.Where(p => p.Id == programId).FirstOrDefault(); if (program != null) { asset = program.Asset; } } catch { } return asset; } // return the exact timespan on GOP static public TimeSpan ReturnTimeSpanOnGOP(ManifestTimingData data, TimeSpan ts) { var response = ts; ulong timestamp = (ulong)(ts.TotalSeconds * data.TimeScale); int i = 0; foreach (var t in data.TimestampList) { if (t < timestamp && i < (data.TimestampList.Count - 1) && timestamp < data.TimestampList[i + 1]) { response = TimeSpan.FromSeconds((double)t / (double)data.TimeScale); break; } i++; } return response; } public class ManifestTimingData { public TimeSpan AssetDuration { get; set; } public ulong TimestampOffset { get; set; } public ulong? 
TimeScale { get; set; } public bool IsLive { get; set; } public bool Error { get; set; } public List<ulong> TimestampList { get; set; } public ulong TimestampEndLastChunk { get; set; } } public class SubclipInfo { public TimeSpan subclipStart { get; set; } public TimeSpan subclipDuration { get; set; } public string programId { get; set; } } public class EndTimeInTable : TableEntity { private string programId; private string lastendtime; private string id; private string programState; public void AssignRowKey() { this.RowKey = "lastEndTime"; } public void AssignPartitionKey() { this.PartitionKey = programId; } public string ProgramId { get { return programId; } set { programId = value; } } public string LastEndTime { get { return lastendtime; } set { lastendtime = value; } } public string Id { get { return id; } set { id = value; } } public string ProgramState { get { return programState; } set { programState = value; } } } } }
// <copyright file="KeyFilterTests.cs" company="Basho Technologies, Inc.">
// Copyright 2011 - OJ Reeves & Jeremiah Peschka
// Copyright 2014 - Basho Technologies, Inc.
//
// This file is provided to you under the Apache License,
// Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain
// a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// </copyright>

namespace RiakClientTests.KeyFilters
{
    using System.Collections.Generic;
    using NUnit.Framework;
    using RiakClient.Models.MapReduce.KeyFilters;

    /// <summary>
    /// Base fixture holding the expected JSON serialisations for every key filter
    /// exercised by the derived test classes below.
    /// </summary>
    [TestFixture, UnitTest]
    public abstract class KeyFilterTests
    {
        internal const string IntToStringJson = @"[""int_to_string""]";
        internal const string StringToIntJson = @"[""string_to_int""]";
        internal const string FloatToStringJson = @"[""float_to_string""]";
        internal const string StringToFloatJson = @"[""string_to_float""]";
        internal const string ToUpperJson = @"[""to_upper""]";
        internal const string ToLowerJson = @"[""to_lower""]";
        internal const string TokenizeJson = @"[""tokenize"",""/"",4]";
        internal const string UrlDecodeJson = @"[""urldecode""]";
        internal const string GreaterThanJson = @"[""greater_than"",50]";
        internal const string LessThanJson = @"[""less_than"",10]";
        internal const string GreaterThanOrEqualToJson = @"[""greater_than_eq"",2000]";
        internal const string LessThanOrEqualToJson = @"[""less_than_eq"",-2]";
        internal const string BetweenJson = @"[""between"",10,20,false]";
        internal const string MatchesJson = @"[""matches"",""solutions""]";
        internal const string NotEqualJson = @"[""neq"",""foo""]";
        internal const string EqualJson = @"[""eq"",""basho""]";
        internal const string SetMemberJson = @"[""set_member"",""basho"",""google"",""yahoo""]";
        internal const string SimilarToJson = @"[""similar_to"",""newyork"",3]";
        internal const string StartsWithJson = @"[""starts_with"",""closed""]";
        internal const string EndsWithJson = @"[""ends_with"",""0603""]";
        internal const string AndJson = @"[""and"",[[""ends_with"",""0603""]],[[""starts_with"",""basho""]]]";
        internal const string OrJson = @"[""or"",[[""eq"",""google""]],[[""less_than"",""g""]]]";
        internal const string NotJson = @"[""not"",[[""matches"",""solution""]]]";
    }

    /// <summary>
    /// Verifies that the simple (value-transform) key filters serialise to the expected JSON.
    /// </summary>
    public class WhenConstructingSimpleKeyFilters : KeyFilterTests
    {
        [Test]
        public void IntToStringCorrectlyConvertsToJson()
        {
            var its = new IntToString();
            its.ToString().ShouldEqual(IntToStringJson);
        }

        [Test]
        public void StringToIntCorrectlyConvertsToJson()
        {
            var sti = new StringToInt();
            sti.ToString().ShouldEqual(StringToIntJson);
        }

        [Test]
        public void FloatToStringCorrectlyConvertsToJson()
        {
            var fts = new FloatToString();
            fts.ToString().ShouldEqual(FloatToStringJson);
        }

        [Test]
        public void StringToFloatCorrectlyConvertsToJson()
        {
            var stf = new StringToFloat();
            stf.ToString().ShouldEqual(StringToFloatJson);
        }

        [Test]
        public void ToUpperCorrectlyConvertsToJson()
        {
            var tu = new ToUpper();
            tu.ToString().ShouldEqual(ToUpperJson);
        }

        [Test]
        public void ToLowerCorrectlyConvertsToJson()
        {
            var tl = new ToLower();
            tl.ToString().ShouldEqual(ToLowerJson);
        }

        [Test]
        public void TokenizeCorrectlyConvertsToJson()
        {
            var tokenize = new Tokenize("/", 4);
            tokenize.ToString().ShouldEqual(TokenizeJson);
        }

        [Test]
        public void UrlDecodeCorrectlyConvertsToJson()
        {
            var ud = new UrlDecode();
            ud.ToString().ShouldEqual(UrlDecodeJson);
        }
    }

    /// <summary>
    /// Verifies that the simple predicate key filters serialise to the expected JSON.
    /// </summary>
    public class WhenConstructingSimplePredicates : KeyFilterTests
    {
        [Test]
        public void GreaterThanCorrectlyConvertsToJson()
        {
            var gt = new GreaterThan<int>(50);
            gt.ToString().ShouldEqual(GreaterThanJson);
        }

        [Test]
        public void LessThanCorrectlyConvertsToJson()
        {
            var lt = new LessThan<int>(10);
            lt.ToString().ShouldEqual(LessThanJson);
        }

        [Test]
        public void GreaterThanOrEqualCorrectlyConvertsToJson()
        {
            var gte = new GreaterThanOrEqualTo<int>(2000);
            gte.ToString().ShouldEqual(GreaterThanOrEqualToJson);
        }

        [Test]
        public void LessThanOrEqualCorrectlyConvertsToJson()
        {
            var lte = new LessThanOrEqualTo<int>(-2);
            lte.ToString().ShouldEqual(LessThanOrEqualToJson);
        }

        [Test]
        public void BetweenCorrectlyConvertsToJson()
        {
            var between = new Between<int>(10, 20, false);
            between.ToString().ShouldEqual(BetweenJson);
        }

        [Test]
        public void MatchesCorrectlyConvertsToJson()
        {
            var matches = new Matches("solutions");
            matches.ToString().ShouldEqual(MatchesJson);
        }

        [Test]
        public void NotEqualCorrectlyConvertsToJson()
        {
            var neq = new NotEqual<string>("foo");
            neq.ToString().ShouldEqual(NotEqualJson);
        }

        [Test]
        public void EqualCorrectlyConvertsToJson()
        {
            var eq = new Equal<string>("basho");
            eq.ToString().ShouldEqual(EqualJson);
        }

        [Test]
        public void SetMemberCorrectlyConvertsToJson()
        {
            var setMember = new SetMember<string>(new List<string> { "basho", "google", "yahoo" });
            setMember.ToString().ShouldEqual(SetMemberJson);
        }

        [Test]
        public void SimilarToCorrectlyConvertsToJson()
        {
            var st = new SimilarTo<string>("newyork", 3);
            st.ToString().ShouldEqual(SimilarToJson);
        }

        [Test]
        public void StartsWithCorrectlyConvertsToJson()
        {
            var sw = new StartsWith("closed");
            sw.ToString().ShouldEqual(StartsWithJson);
        }

        [Test]
        public void EndsWithCorrectlyConvertsToJson()
        {
            var ew = new EndsWith("0603");
            ew.ToString().ShouldEqual(EndsWithJson);
        }
    }

    /// <summary>
    /// Placeholder tests for the composite predicates (and/or/not); the bodies are
    /// commented out and the tests explicitly ignored.
    /// </summary>
    public class WhenConstructingComplexPredicates : KeyFilterTests
    {
        [Test]
        [Ignore("Commented-out test")]
        public void AndCorrectlyConvertsToJson()
        {
            //var and = new And(new EndsWith("0603"), new StartsWith("basho"));
            //and.ToString().ShouldEqual(AndJson);
        }

        [Test]
        [Ignore("Commented-out test")]
        public void OrCorrectlyConvertsToJson()
        {
            //var or = new Or(new Equal<string>("google"), new LessThan<string>("g"));
            //or.ToString().ShouldEqual(OrJson);
        }

        [Test]
        [Ignore("Commented-out test")]
        public void NotCorrectlyConvertsToJson()
        {
            //var not = new Not(new Matches("solution"));
            //not.ToString().ShouldEqual(NotJson);
        }
    }
}
using J2N.Collections.Generic.Extensions;
using J2N.Numerics;
using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

namespace Lucene.Net.Util
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Class that Posting and PostingVector use to write byte
    /// streams into shared fixed-size <see cref="T:byte[]"/> arrays. The idea
    /// is to allocate slices of increasing lengths. For
    /// example, the first slice is 5 bytes, the next slice is
    /// 14, etc. We start by writing our bytes into the first
    /// 5 bytes. When we hit the end of the slice, we allocate
    /// the next slice and then write the address of the new
    /// slice into the last 4 bytes of the previous slice (the
    /// "forwarding address").
    /// <para/>
    /// Each slice is filled with 0's initially, and we mark
    /// the end with a non-zero byte. This way the methods
    /// that are writing into the slice don't need to record
    /// its length and instead allocate a new slice once they
    /// hit a non-zero byte.
    /// <para/>
    /// @lucene.internal
    /// </summary>
    public sealed class ByteBlockPool
    {
        public static readonly int BYTE_BLOCK_SHIFT = 15;
        public static readonly int BYTE_BLOCK_SIZE = 1 << BYTE_BLOCK_SHIFT;
        public static readonly int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1;

        /// <summary>
        /// Abstract class for allocating and freeing byte
        /// blocks.
        /// </summary>
        public abstract class Allocator
        {
            protected readonly int m_blockSize;

            protected Allocator(int blockSize)
            {
                this.m_blockSize = blockSize;
            }

            public abstract void RecycleByteBlocks(byte[][] blocks, int start, int end);

            // LUCENENET TODO: API - Change to use IList<byte[]>
            public virtual void RecycleByteBlocks(IList<byte[]> blocks)
            {
                // Bridge overload: materialize the list and recycle the whole range.
                var b = blocks.ToArray();
                RecycleByteBlocks(b, 0, b.Length);
            }

            public virtual byte[] GetByteBlock()
            {
                return new byte[m_blockSize];
            }
        }

        /// <summary>
        /// A simple <see cref="Allocator"/> that never recycles. </summary>
        public sealed class DirectAllocator : Allocator
        {
            public DirectAllocator()
                : this(BYTE_BLOCK_SIZE)
            {
            }

            public DirectAllocator(int blockSize)
                : base(blockSize)
            {
            }

            public override void RecycleByteBlocks(byte[][] blocks, int start, int end)
            {
                // Intentionally a no-op: blocks are left for the GC.
            }
        }

        /// <summary>
        /// A simple <see cref="Allocator"/> that never recycles, but
        /// tracks how much total RAM is in use.
        /// </summary>
        public class DirectTrackingAllocator : Allocator
        {
            private readonly Counter bytesUsed;

            public DirectTrackingAllocator(Counter bytesUsed)
                : this(BYTE_BLOCK_SIZE, bytesUsed)
            {
            }

            public DirectTrackingAllocator(int blockSize, Counter bytesUsed)
                : base(blockSize)
            {
                this.bytesUsed = bytesUsed;
            }

            public override byte[] GetByteBlock()
            {
                bytesUsed.AddAndGet(m_blockSize);
                return new byte[m_blockSize];
            }

            public override void RecycleByteBlocks(byte[][] blocks, int start, int end)
            {
                // Account for the released blocks and drop the references so they can be collected.
                bytesUsed.AddAndGet(-((end - start) * m_blockSize));
                for (var i = start; i < end; i++)
                {
                    blocks[i] = null;
                }
            }
        }

        /// <summary>
        /// Array of buffers currently used in the pool. Buffers are allocated if
        /// needed; don't modify this outside of this class.
        /// </summary>
        [WritableArray]
        [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
        public byte[][] Buffers
        {
            get => buffers;
            set => buffers = value;
        }
        private byte[][] buffers = new byte[10][];

        /// <summary>
        /// index into the buffers array pointing to the current buffer used as the head </summary>
        private int bufferUpto = -1; // Which buffer we are upto

        /// <summary>
        /// Where we are in head buffer </summary>
        public int ByteUpto { get; set; }

        /// <summary>
        /// Current head buffer
        /// </summary>
        [WritableArray]
        [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
        public byte[] Buffer
        {
            get => buffer;
            set => buffer = value;
        }
        private byte[] buffer;

        /// <summary>
        /// Current head offset </summary>
        public int ByteOffset { get; set; }

        private readonly Allocator allocator;

        public ByteBlockPool(Allocator allocator)
        {
            // set defaults: positioned "before" the first buffer so the first
            // NextBuffer() call lands on buffer 0 with ByteOffset 0.
            ByteUpto = BYTE_BLOCK_SIZE;
            ByteOffset = -BYTE_BLOCK_SIZE;
            this.allocator = allocator;
        }

        /// <summary>
        /// Resets the pool to its initial state reusing the first buffer and fills all
        /// buffers with <c>0</c> bytes before they reused or passed to
        /// <see cref="Allocator.RecycleByteBlocks(byte[][], int, int)"/>. Calling
        /// <see cref="ByteBlockPool.NextBuffer()"/> is not needed after reset.
        /// </summary>
        public void Reset()
        {
            Reset(true, true);
        }

        /// <summary>
        /// Expert: Resets the pool to its initial state reusing the first buffer. Calling
        /// <see cref="ByteBlockPool.NextBuffer()"/> is not needed after reset. </summary>
        /// <param name="zeroFillBuffers"> if <c>true</c> the buffers are filled with <c>0</c>.
        /// this should be set to <c>true</c> if this pool is used with slices. </param>
        /// <param name="reuseFirst"> if <c>true</c> the first buffer will be reused and calling
        /// <see cref="ByteBlockPool.NextBuffer()"/> is not needed after reset if the
        /// block pool was used before ie. <see cref="ByteBlockPool.NextBuffer()"/> was called before. </param>
        public void Reset(bool zeroFillBuffers, bool reuseFirst)
        {
            if (bufferUpto != -1)
            {
                // We allocated at least one buffer

                if (zeroFillBuffers)
                {
                    for (int i = 0; i < bufferUpto; i++)
                    {
                        // Fully zero fill buffers that we fully used
                        Arrays.Fill(buffers[i], (byte)0);
                    }
                    // Partial zero fill the final buffer
                    Arrays.Fill(buffers[bufferUpto], 0, ByteUpto, (byte)0);
                }

                if (bufferUpto > 0 || !reuseFirst)
                {
                    int offset = reuseFirst ? 1 : 0;
                    // Recycle all but the first buffer
                    allocator.RecycleByteBlocks(buffers, offset, 1 + bufferUpto);
                    Arrays.Fill(buffers, offset, 1 + bufferUpto, null);
                }

                if (reuseFirst)
                {
                    // Re-use the first buffer
                    bufferUpto = 0;
                    ByteUpto = 0;
                    ByteOffset = 0;
                    buffer = buffers[0];
                }
                else
                {
                    // Back to the pristine "no buffer" state set by the constructor.
                    bufferUpto = -1;
                    ByteUpto = BYTE_BLOCK_SIZE;
                    ByteOffset = -BYTE_BLOCK_SIZE;
                    buffer = null;
                }
            }
        }

        /// <summary>
        /// Advances the pool to its next buffer. This method should be called once
        /// after the constructor to initialize the pool. In contrast to the
        /// constructor a <see cref="ByteBlockPool.Reset()"/> call will advance the pool to
        /// its first buffer immediately.
        /// </summary>
        public void NextBuffer()
        {
            if (1 + bufferUpto == buffers.Length)
            {
                // Grow the buffers array (references only, not the blocks themselves).
                var newBuffers = new byte[ArrayUtil.Oversize(buffers.Length + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][];
                Array.Copy(buffers, 0, newBuffers, 0, buffers.Length);
                buffers = newBuffers;
            }
            buffer = buffers[1 + bufferUpto] = allocator.GetByteBlock();
            bufferUpto++;

            ByteUpto = 0;
            ByteOffset += BYTE_BLOCK_SIZE;
        }

        /// <summary>
        /// Allocates a new slice with the given size.</summary>
        /// <seealso cref="ByteBlockPool.FIRST_LEVEL_SIZE"/>
        public int NewSlice(int size)
        {
            if (ByteUpto > BYTE_BLOCK_SIZE - size)
            {
                NextBuffer();
            }
            int upto = ByteUpto;
            ByteUpto += size;
            // 16 marks the end of a level-0 slice (non-zero sentinel, level in low 4 bits).
            buffer[ByteUpto - 1] = 16;
            return upto;
        }

        // Size of each slice.  These arrays should be at most 16
        // elements (index is encoded with 4 bits).  First array
        // is just a compact way to encode X+1 with a max.  Second
        // array is the length of each slice, ie first slice is 5
        // bytes, next slice is 14 bytes, etc.

        /// <summary>
        /// An array holding the offset into the <see cref="ByteBlockPool.LEVEL_SIZE_ARRAY"/>
        /// to quickly navigate to the next slice level.
        /// </summary>
        public static readonly int[] NEXT_LEVEL_ARRAY = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 9 };

        /// <summary>
        /// An array holding the level sizes for byte slices.
        /// </summary>
        public static readonly int[] LEVEL_SIZE_ARRAY = new int[] { 5, 14, 20, 30, 40, 40, 80, 80, 120, 200 };

        /// <summary>
        /// The first level size for new slices </summary>
        /// <seealso cref="ByteBlockPool.NewSlice(int)"/>
        public static readonly int FIRST_LEVEL_SIZE = LEVEL_SIZE_ARRAY[0];

        /// <summary>
        /// Creates a new byte slice with the given starting size and
        /// returns the slices offset in the pool.
        /// </summary>
        public int AllocSlice(byte[] slice, int upto)
        {
            // The low 4 bits of the sentinel byte encode the current slice level.
            int level = slice[upto] & 15;
            int newLevel = NEXT_LEVEL_ARRAY[level];
            int newSize = LEVEL_SIZE_ARRAY[newLevel];

            // Maybe allocate another block
            if (ByteUpto > BYTE_BLOCK_SIZE - newSize)
            {
                NextBuffer();
            }

            int newUpto = ByteUpto;
            int offset = newUpto + ByteOffset;
            ByteUpto += newSize;

            // Copy forward the past 3 bytes (which we are about
            // to overwrite with the forwarding address):
            buffer[newUpto] = slice[upto - 3];
            buffer[newUpto + 1] = slice[upto - 2];
            buffer[newUpto + 2] = slice[upto - 1];

            // Write forwarding address at end of last slice:
            slice[upto - 3] = (byte)offset.TripleShift(24);
            slice[upto - 2] = (byte)offset.TripleShift(16);
            slice[upto - 1] = (byte)offset.TripleShift(8);
            slice[upto] = (byte)offset;

            // Write new level:
            buffer[ByteUpto - 1] = (byte)(16 | newLevel);

            return newUpto + 3;
        }

        // Fill in a BytesRef from term's length & bytes encoded in
        // byte block
        public void SetBytesRef(BytesRef term, int textStart)
        {
            var bytes = term.Bytes = buffers[textStart >> BYTE_BLOCK_SHIFT];
            var pos = textStart & BYTE_BLOCK_MASK;
            if ((bytes[pos] & 0x80) == 0)
            {
                // length is 1 byte
                term.Length = bytes[pos];
                term.Offset = pos + 1;
            }
            else
            {
                // length is 2 bytes (7 low bits in the first byte, 8 more in the second)
                term.Length = (bytes[pos] & 0x7f) + ((bytes[pos + 1] & 0xff) << 7);
                term.Offset = pos + 2;
            }
            if (Debugging.AssertsEnabled) Debugging.Assert(term.Length >= 0);
        }

        /// <summary>
        /// Appends the bytes in the provided <see cref="BytesRef"/> at
        /// the current position.
        /// </summary>
        public void Append(BytesRef bytes)
        {
            var length = bytes.Length;
            if (length == 0)
            {
                return;
            }
            int offset = bytes.Offset;
            // How far past the current block's end this append would run.
            int overflow = (length + ByteUpto) - BYTE_BLOCK_SIZE;
            do
            {
                if (overflow <= 0)
                {
                    // Remainder fits in the current block.
                    Array.Copy(bytes.Bytes, offset, buffer, ByteUpto, length);
                    ByteUpto += length;
                    break;
                }
                else
                {
                    // Fill the current block, then advance to a fresh one.
                    int bytesToCopy = length - overflow;
                    if (bytesToCopy > 0)
                    {
                        Array.Copy(bytes.Bytes, offset, buffer, ByteUpto, bytesToCopy);
                        offset += bytesToCopy;
                        length -= bytesToCopy;
                    }
                    NextBuffer();
                    overflow = overflow - BYTE_BLOCK_SIZE;
                }
            } while (true);
        }

        /// <summary>
        /// Reads bytes bytes out of the pool starting at the given offset with the given
        /// length into the given byte array at offset <c>off</c>.
        /// <para>Note: this method allows to copy across block boundaries.</para>
        /// </summary>
        public void ReadBytes(long offset, byte[] bytes, int off, int length)
        {
            if (length == 0)
            {
                return;
            }
            var bytesOffset = off;
            var bytesLength = length;
            // Locate the block and position within it for the pool-wide offset.
            var bufferIndex = (int)(offset >> BYTE_BLOCK_SHIFT);
            var buffer = buffers[bufferIndex];
            var pos = (int)(offset & BYTE_BLOCK_MASK);
            var overflow = (pos + length) - BYTE_BLOCK_SIZE;
            do
            {
                if (overflow <= 0)
                {
                    // Remainder is within the current block.
                    Array.Copy(buffer, pos, bytes, bytesOffset, bytesLength);
                    break;
                }
                else
                {
                    // Copy up to the block boundary, then continue from the next block.
                    int bytesToCopy = length - overflow;
                    Array.Copy(buffer, pos, bytes, bytesOffset, bytesToCopy);
                    pos = 0;
                    bytesLength -= bytesToCopy;
                    bytesOffset += bytesToCopy;
                    buffer = buffers[++bufferIndex];
                    overflow = overflow - BYTE_BLOCK_SIZE;
                }
            } while (true);
        }
    }
}
//----------------------------------------------------------------------------- // Filename: Digest.cs // // Description: Implements Digest Authentication as defined in RFC2617. // // History: // 08 Sep 2005 Aaron Clauson Created. // // License: // This software is licensed under the BSD License http://www.opensource.org/licenses/bsd-license.php // // Copyright (c) 2006-2008 Aaron Clauson (aaron@sipsorcery.com), SIP Sorcery PTY LTD, Hobart, Australia (www.sipsorcery.com) // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that // the following conditions are met: // // Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of SIP Sorcery PTY LTD. // nor the names of its contributors may be used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, // BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, // OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, // OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
using System;
using System.Collections;
using System.Net;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using SIPSorcery.Sys;

#if UNITTEST
using NUnit.Framework;
#endif

namespace SIPSorcery.SIP
{
    /// <summary>
    /// Identifies which SIP authorisation/authentication header a digest belongs to.
    /// </summary>
    public enum SIPAuthorisationHeadersEnum
    {
        Unknown = 0,
        Authorize = 1,
        ProxyAuthenticate = 2,
        ProxyAuthorization = 3,
        WWWAuthenticate = 4,
    }

    /// <summary>
    /// Represents an HTTP/SIP digest-authentication challenge or response (RFC 2617)
    /// as carried in Authorization, WWW-Authenticate and Proxy-Authenticate headers.
    /// </summary>
    public class SIPAuthorisationDigest
    {
        public const string AUTH_ALGORITHM = "MD5";
        public const string QOP_AUTHENTICATION_VALUE = "auth";
        private const int NONCE_DEFAULT_COUNT = 1;

        // Characters stripped from parsed header field values (whitespace and quoting).
        private static char[] m_headerFieldRemoveChars = new char[] { ' ', '"', '\'' };

        public SIPAuthorisationHeadersEnum AuthorisationType { get; private set; }          // This is the type of authorisation request received.
        public SIPAuthorisationHeadersEnum AuthorisationResponseType { get; private set; }  // If set, the type of authorisation response header to use; otherwise the same as the request.

        public string Realm;
        public string Username;
        public string Password;
        public string DestinationURL;
        public string URI;
        public string Nonce;
        public string RequestType;
        public string Response;
        public string Algorithhm;   // (sic) name kept for interface compatibility.
        public string Cnonce;       // Client nonce (used with WWW-Authenticate and qop=auth).
        public string Qop;          // Quality of Protection. Values permitted are auth (authentication) and auth-int (authentication with integrity protection).
        private int NonceCount = 0; // Client nonce count.
        public string Opaque;

        /// <summary>
        /// Calculates the digest response for the current credentials and challenge.
        /// Side effects: sets <see cref="Algorithhm"/> and <see cref="AuthorisationResponseType"/>;
        /// when qop=auth initialises NonceCount and Cnonce if unset; generates a Nonce if none
        /// was supplied.
        /// </summary>
        public string Digest
        {
            get
            {
                Algorithhm = AUTH_ALGORITHM;

                // For some authorisation requests the response header type differs from the
                // request header type (e.g. Proxy-Authenticate is answered with Proxy-Authorization).
                if (AuthorisationType == SIPAuthorisationHeadersEnum.ProxyAuthenticate)
                {
                    AuthorisationResponseType = SIPAuthorisationHeadersEnum.ProxyAuthorization;
                }
                else if (AuthorisationType == SIPAuthorisationHeadersEnum.WWWAuthenticate)
                {
                    AuthorisationResponseType = SIPAuthorisationHeadersEnum.Authorize;
                }

                // If the authorisation header has specified quality of protection equal to "auth"
                // a client nonce and nonce count need to be supplied.
                string nonceCountStr = null;
                if (Qop == QOP_AUTHENTICATION_VALUE)
                {
                    NonceCount = (NonceCount != 0) ? NonceCount : NONCE_DEFAULT_COUNT;
                    nonceCountStr = GetPaddedNonceCount(NonceCount);

                    if (Cnonce == null || Cnonce.Trim().Length == 0)
                    {
                        Cnonce = Crypto.GetRandomInt().ToString();
                    }
                }

                if (Nonce == null)
                {
                    Nonce = Crypto.GetRandomString(12);
                }

                return HTTPDigest.DigestCalcResponse(
                    AUTH_ALGORITHM,
                    Username,
                    Realm,
                    Password,
                    URI,
                    Nonce,
                    nonceCountStr,
                    Cnonce,
                    Qop,
                    RequestType,
                    null,
                    null);
            }
        }

        public SIPAuthorisationDigest()
        {
            AuthorisationType = SIPAuthorisationHeadersEnum.ProxyAuthorization;
        }

        public SIPAuthorisationDigest(SIPAuthorisationHeadersEnum authorisationType)
        {
            AuthorisationType = authorisationType;
        }

        /// <summary>
        /// Parses the comma-separated name=value fields of a Digest header value into a
        /// new <see cref="SIPAuthorisationDigest"/> instance. Unrecognised fields are ignored.
        /// </summary>
        public static SIPAuthorisationDigest ParseAuthorisationDigest(SIPAuthorisationHeadersEnum authorisationType, string authorisationRequest)
        {
            SIPAuthorisationDigest authRequest = new SIPAuthorisationDigest(authorisationType);

            // Strip the leading "Digest" scheme token before splitting the fields.
            string noDigestHeader = Regex.Replace(authorisationRequest, @"^\s*Digest\s*", "", RegexOptions.IgnoreCase);
            string[] headerFields = noDigestHeader.Split(',');

            if (headerFields != null && headerFields.Length > 0)
            {
                foreach (string headerField in headerFields)
                {
                    int equalsIndex = headerField.IndexOf('=');

                    if (equalsIndex != -1 && equalsIndex < headerField.Length)
                    {
                        string headerName = headerField.Substring(0, equalsIndex).Trim();
                        string headerValue = headerField.Substring(equalsIndex + 1).Trim(m_headerFieldRemoveChars);

                        if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_REALM_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Realm = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_NONCE_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Nonce = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_USERNAME_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Username = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_RESPONSE_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Response = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_URI_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.URI = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_CNONCE_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Cnonce = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_NONCECOUNT_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            Int32.TryParse(headerValue, out authRequest.NonceCount);
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_QOP_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Qop = headerValue.ToLower();
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_OPAQUE_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Opaque = headerValue;
                        }
                        else if (Regex.Match(headerName, "^" + AuthHeaders.AUTH_ALGORITHM_KEY + "$", RegexOptions.IgnoreCase).Success)
                        {
                            authRequest.Algorithhm = headerValue;
                        }
                    }
                }
            }

            return authRequest;
        }

        public SIPAuthorisationDigest(SIPAuthorisationHeadersEnum authorisationType, string realm, string username, string password, string uri, string nonce, string request)
        {
            AuthorisationType = authorisationType;
            Realm = realm;
            Username = username;
            Password = password;
            URI = uri;
            Nonce = nonce;
            RequestType = request;
        }

        /// <summary>
        /// Sets the client credentials and request details used to compute <see cref="Digest"/>.
        /// </summary>
        public void SetCredentials(string username, string password, string uri, string method)
        {
            Username = username;
            Password = password;
            URI = uri;
            RequestType = method;
        }

        /// <summary>
        /// Serialises the digest back to a header value, emitting only the fields that are set.
        /// </summary>
        public override string ToString()
        {
            string authHeader = AuthHeaders.AUTH_DIGEST_KEY + " ";

            authHeader += (Username != null && Username.Trim().Length != 0) ? AuthHeaders.AUTH_USERNAME_KEY + "=\"" + Username + "\"" : null;
            authHeader += (authHeader.IndexOf('=') != -1) ? "," + AuthHeaders.AUTH_REALM_KEY + "=\"" + Realm + "\"" : AuthHeaders.AUTH_REALM_KEY + "=\"" + Realm + "\"";
            authHeader += (Nonce != null) ? "," + AuthHeaders.AUTH_NONCE_KEY + "=\"" + Nonce + "\"" : null;
            authHeader += (URI != null && URI.Trim().Length != 0) ? "," + AuthHeaders.AUTH_URI_KEY + "=\"" + URI + "\"" : null;
            authHeader += (Response != null && Response.Length != 0) ? "," + AuthHeaders.AUTH_RESPONSE_KEY + "=\"" + Response + "\"" : null;
            authHeader += (Cnonce != null) ? "," + AuthHeaders.AUTH_CNONCE_KEY + "=\"" + Cnonce + "\"" : null;
            authHeader += (NonceCount != 0) ? "," + AuthHeaders.AUTH_NONCECOUNT_KEY + "=" + GetPaddedNonceCount(NonceCount) : null;
            authHeader += (Qop != null) ? "," + AuthHeaders.AUTH_QOP_KEY + "=" + Qop : null;
            authHeader += (Opaque != null) ? "," + AuthHeaders.AUTH_OPAQUE_KEY + "=\"" + Opaque + "\"" : null;
            authHeader += (Algorithhm != null) ? "," + AuthHeaders.AUTH_ALGORITHM_KEY + "=" + Algorithhm : null;

            return authHeader;
        }

        /// <summary>
        /// Formats a nonce count as the 8-digit, zero-padded value required for the RFC 2617
        /// "nc" parameter (8LHEX).
        /// </summary>
        private string GetPaddedNonceCount(int count)
        {
            // Bug fix: the original computed the pad width from the NonceCount field's digit
            // length instead of the count parameter's. Identical for current callers (which
            // always pass NonceCount) but wrong for any other argument, and the Substring
            // approach threw for counts wider than 8 digits. PadLeft is equivalent for all
            // valid inputs and degrades gracefully.
            return count.ToString().PadLeft(8, '0');
        }
    }

    /// <summary>
    /// Stateless helpers implementing the HTTP Digest calculations from RFC 2617.
    /// </summary>
    public class HTTPDigest
    {
        /// <summary>
        /// Calculate H(A1) as per HTTP Digest specification.
        /// </summary>
        public static string DigestCalcHA1(
            string username,
            string realm,
            string password)
        {
            string a1 = String.Format("{0}:{1}:{2}", username, realm, password);
            return GetMD5HashBinHex(a1);
        }

        /// <summary>
        /// Calculate H(A2) as per HTTP Digest specification.
        /// </summary>
        public static string DigestCalcHA2(
            string method,
            string uri)
        {
            string A2 = String.Format("{0}:{1}", method, uri);
            return GetMD5HashBinHex(A2);
        }

        /// <summary>
        /// Calculates the digest response value. When nonce count, cnonce and qop are all
        /// supplied the extended RFC 2617 form is used; otherwise the legacy RFC 2069 form.
        /// </summary>
        public static string DigestCalcResponse(
            string algorithm,
            string username,
            string realm,
            string password,
            string uri,
            string nonce,
            string nonceCount,
            string cnonce,
            string qop,         // qop-value: "", "auth", "auth-int".
            string method,
            string digestURL,
            string hEntity)
        {
            string HA1 = DigestCalcHA1(username, realm, password);
            string HA2 = DigestCalcHA2(method, uri);

            string unhashedDigest = null;
            if (nonceCount != null && cnonce != null && qop != null)
            {
                unhashedDigest = String.Format("{0}:{1}:{2}:{3}:{4}:{5}",
                    HA1,
                    nonce,
                    nonceCount,
                    cnonce,
                    qop,
                    HA2);
            }
            else
            {
                unhashedDigest = String.Format("{0}:{1}:{2}",
                    HA1,
                    nonce,
                    HA2);
            }

            return GetMD5HashBinHex(unhashedDigest);
        }

        /// <summary>
        /// Returns the MD5 hash of the UTF-8 encoding of <paramref name="val"/> as a
        /// 32-character lowercase hex string.
        /// </summary>
        public static string GetMD5HashBinHex(string val)
        {
            // Bug fix: the original never disposed the MD5CryptoServiceProvider (resource leak).
            using (MD5 md5 = new MD5CryptoServiceProvider())
            {
                byte[] bHA1 = md5.ComputeHash(Encoding.UTF8.GetBytes(val));
                StringBuilder hex = new StringBuilder(32);
                for (int i = 0; i < 16; i++)
                {
                    // "x2" produces the same lowercase 2-digit output as the original "{0:x02}".
                    hex.Append(bHA1[i].ToString("x2"));
                }
                return hex.ToString();
            }
        }
    }
}
//-----------------------------------------------------------------------
// <copyright file="AuthTests.cs" company="Marimer LLC">
// Copyright (c) Marimer LLC. All rights reserved.
// Website: https://cslanet.com
// </copyright>
// <summary>no summary</summary>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.IsolatedStorage;
using System.Reflection;
using System.Text;
using Csla;
using Csla.Serialization;
using Csla.Rules;
using Csla.Test.Security;
using UnitDriven;
using System.Diagnostics;

#if NUNIT
using NUnit.Framework;
using TestClass = NUnit.Framework.TestFixtureAttribute;
using TestInitialize = NUnit.Framework.SetUpAttribute;
using TestCleanup = NUnit.Framework.TearDownAttribute;
using TestMethod = NUnit.Framework.TestAttribute;
#elif MSTEST
using Microsoft.VisualStudio.TestTools.UnitTesting;
#endif

namespace Csla.Test.Authorization
{
#if TESTING
  [DebuggerNonUserCode]
  [DebuggerStepThrough]
#endif
  /// <summary>
  /// Tests for CSLA per-property and per-type authorization rules:
  /// read/write denial, behavior across Clone and Begin/Cancel edit cycles,
  /// and rule-set selection for static HasPermission checks.
  /// </summary>
  [TestClass()]
  public class AuthTests
  {
    // Shared test subject; several tests mutate it in sequence, so assertion
    // order within each test matters.
    private DataPortal.DpRoot root = DataPortal.DpRoot.NewRoot();

    /// <summary>
    /// Restores the default rule set after each test so rule-set tests
    /// do not leak state into each other.
    /// </summary>
    [TestCleanup]
    public void Cleanup()
    {
      ApplicationContext.RuleSet = ApplicationContext.DefaultRuleSet;
    }

    /// <summary>
    /// Verifies that per-property authorization rules survive Clone().
    /// Denied accesses do not throw here; DpRoot records the denial message,
    /// which is asserted against.
    /// </summary>
    [TestMethod()]
    public void TestAuthCloneRules()
    {
      ApplicationContext.GlobalContext.Clear();
#pragma warning disable CS0436 // Type conflicts with imported type
      Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type
      Assert.AreEqual(true, Csla.ApplicationContext.User.IsInRole("Admin"));

      #region "Pre Cloning Tests"

      //Is it denying read properly?
      Assert.AreEqual("[DenyReadOnProperty] Can't read property", root.DenyReadOnProperty,
        "Read should have been denied 1");

      //Is it denying write properly?
      root.DenyWriteOnProperty = "DenyWriteOnProperty";
      Assert.AreEqual("[DenyWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied 2");

      //Is it denying both read and write properly?
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't read property", root.DenyReadWriteOnProperty,
        "Read should have been denied 3");
      root.DenyReadWriteOnProperty = "DenyReadWriteONproperty";
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied 4");

      //Is it allowing both read and write properly?
      Assert.AreEqual(root.AllowReadWriteOnProperty, root.Auth,
        "Read should have been allowed 5");
      root.AllowReadWriteOnProperty = "No value";
      Assert.AreEqual("No value", root.Auth,
        "Write should have been allowed 6");

      #endregion

      #region "After Cloning Tests"

      //Do they work under cloning as well?
      DataPortal.DpRoot NewRoot = root.Clone();
      ApplicationContext.GlobalContext.Clear();

      //Is it denying read properly?
      Assert.AreEqual("[DenyReadOnProperty] Can't read property", NewRoot.DenyReadOnProperty,
        "Read should have been denied 7");

      //Is it denying write properly?
      NewRoot.DenyWriteOnProperty = "DenyWriteOnProperty";
      Assert.AreEqual("[DenyWriteOnProperty] Can't write variable", NewRoot.Auth,
        "Write should have been denied 8");

      //Is it denying both read and write properly?
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't read property", NewRoot.DenyReadWriteOnProperty,
        "Read should have been denied 9");
      NewRoot.DenyReadWriteOnProperty = "DenyReadWriteONproperty";
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't write variable", NewRoot.Auth,
        "Write should have been denied 10");

      //Is it allowing both read and write properly?
      Assert.AreEqual(NewRoot.AllowReadWriteOnProperty, NewRoot.Auth,
        "Read should have been allowed 11");
      NewRoot.AllowReadWriteOnProperty = "AllowReadWriteOnProperty";
      Assert.AreEqual("AllowReadWriteOnProperty", NewRoot.Auth,
        "Write should have been allowed 12");

      #endregion

#pragma warning disable CS0436 // Type conflicts with imported type
      Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type
    }

    /// <summary>
    /// Verifies that per-property authorization rules keep working across a
    /// BeginEdit/CancelEdit cycle on the shared root object.
    /// </summary>
    [TestMethod()]
    public void TestAuthBeginEditRules()
    {
      ApplicationContext.GlobalContext.Clear();
#pragma warning disable CS0436 // Type conflicts with imported type
      Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type
      Assert.AreEqual(true, System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));
      root.Data = "Something new";
      root.BeginEdit();

      #region "Pre-Testing"

      root.Data = "Something new 1";

      //Is it denying read properly?
      Assert.AreEqual("[DenyReadOnProperty] Can't read property", root.DenyReadOnProperty,
        "Read should have been denied");

      //Is it denying write properly?
      root.DenyWriteOnProperty = "DenyWriteOnProperty";
      Assert.AreEqual("[DenyWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied");

      //Is it denying both read and write properly?
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't read property", root.DenyReadWriteOnProperty,
        "Read should have been denied");
      root.DenyReadWriteOnProperty = "DenyReadWriteONproperty";
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied");

      //Is it allowing both read and write properly?
      Assert.AreEqual(root.AllowReadWriteOnProperty, root.Auth,
        "Read should have been allowed");
      root.AllowReadWriteOnProperty = "No value";
      Assert.AreEqual("No value", root.Auth,
        "Write should have been allowed");

      #endregion

      #region "Cancel Edit"

      //Cancel the edit and see if the authorization rules still work
      root.CancelEdit();

      //Is it denying read properly?
      Assert.AreEqual("[DenyReadOnProperty] Can't read property", root.DenyReadOnProperty,
        "Read should have been denied");

      //Is it denying write properly?
      root.DenyWriteOnProperty = "DenyWriteOnProperty";
      Assert.AreEqual("[DenyWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied");

      //Is it denying both read and write properly?
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't read property", root.DenyReadWriteOnProperty,
        "Read should have been denied");
      root.DenyReadWriteOnProperty = "DenyReadWriteONproperty";
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied");

      //Is it allowing both read and write properly?
      Assert.AreEqual(root.AllowReadWriteOnProperty, root.Auth,
        "Read should have been allowed");
      root.AllowReadWriteOnProperty = "No value";
      Assert.AreEqual("No value", root.Auth,
        "Write should have been allowed");

      #endregion

      #region "Apply Edit"

      //Apply this edit and see if the authorization rules still work
      // NOTE(review): ApplyEdit() is never actually called in this region -
      // TODO confirm whether that is intentional.

      //Is it denying read properly?
      Assert.AreEqual("[DenyReadOnProperty] Can't read property", root.DenyReadOnProperty,
        "Read should have been denied");

      //Is it denying write properly?
      root.DenyWriteOnProperty = "DenyWriteOnProperty";
      Assert.AreEqual("[DenyWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied");

      //Is it denying both read and write properly?
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't read property", root.DenyReadWriteOnProperty,
        "Read should have been denied");
      root.DenyReadWriteOnProperty = "DenyReadWriteONproperty";
      Assert.AreEqual("[DenyReadWriteOnProperty] Can't write variable", root.Auth,
        "Write should have been denied");

      //Is it allowing both read and write properly?
      Assert.AreEqual(root.AllowReadWriteOnProperty, root.Auth,
        "Read should have been allowed");
      root.AllowReadWriteOnProperty = "No value";
      Assert.AreEqual("No value", root.Auth,
        "Write should have been allowed");

      #endregion

#pragma warning disable CS0436 // Type conflicts with imported type
      Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type
    }

    /// <summary>
    /// Exercises edits and cloning while logging in and out, verifying that
    /// no authorization exception is thrown when writes happen as Admin.
    /// </summary>
    [TestMethod()]
    public void TestAuthorizationAfterEditCycle()
    {
      Csla.ApplicationContext.GlobalContext.Clear();
      Csla.Test.Security.PermissionsRoot pr = Csla.Test.Security.PermissionsRoot.NewPermissionsRoot();

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type

      pr.FirstName = "something";

      pr.BeginEdit();
      pr.FirstName = "ba";
      pr.CancelEdit();

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type

      Csla.Test.Security.PermissionsRoot prClone = pr.Clone();

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type

      prClone.FirstName = "somethiansdfasdf";

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type
    }

    /// <summary>
    /// Reading FirstName without being logged in must throw SecurityException.
    /// </summary>
    [ExpectedException(typeof(Csla.Security.SecurityException))]
    [TestMethod]
    public void TestUnauthorizedAccessToGet()
    {
      Csla.ApplicationContext.GlobalContext.Clear();
      PermissionsRoot pr = PermissionsRoot.NewPermissionsRoot();

      //this should throw an exception, since only admins have access to this property
      string something = pr.FirstName;
    }

    /// <summary>
    /// Writing FirstName without being logged in must throw SecurityException.
    /// </summary>
    [ExpectedException(typeof(Csla.Security.SecurityException))]
    [TestMethod]
    public void TestUnauthorizedAccessToSet()
    {
      PermissionsRoot pr = PermissionsRoot.NewPermissionsRoot();

      //will cause an exception, because only admins can write to property
      pr.FirstName = "test";
    }

    /// <summary>
    /// Read and write of FirstName succeed while logged in as Admin.
    /// </summary>
    [TestMethod]
    public void TestAuthorizedAccess()
    {
      Csla.ApplicationContext.GlobalContext.Clear();

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type

      PermissionsRoot pr = PermissionsRoot.NewPermissionsRoot();

      //should work, because we are now logged in as an admin
      pr.FirstName = "something";
      string something = pr.FirstName;

      Assert.AreEqual(true, System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));

      //set to null so the other testmethods continue to throw exceptions
#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type

      Assert.AreEqual(false, System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));
    }

    /// <summary>
    /// Executing a method guarded by an authorization rule succeeds as Admin.
    /// </summary>
    [TestMethod]
    public void TestAuthExecute()
    {
      Csla.ApplicationContext.GlobalContext.Clear();

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type

      PermissionsRoot pr = PermissionsRoot.NewPermissionsRoot();

      //should work, because we are now logged in as an admin
      pr.DoWork();

      Assert.AreEqual(true, System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));

      //set to null so the other testmethods continue to throw exceptions
#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type

      Assert.AreEqual(false, System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));
    }

    /// <summary>
    /// Executing a guarded method while not logged in must throw.
    /// </summary>
    [TestMethod]
    [ExpectedException(typeof(Csla.Security.SecurityException))]
    public void TestUnAuthExecute()
    {
      Csla.ApplicationContext.GlobalContext.Clear();

      Assert.AreEqual(false, Csla.ApplicationContext.User.IsInRole("Admin"));

      PermissionsRoot pr = PermissionsRoot.NewPermissionsRoot();

      //should fail, because we're not an admin
      pr.DoWork();
    }

    /// <summary>
    /// Checks static HasPermission results per rule set when the rules were
    /// registered explicitly per rule-set name (PermissionsRoot).
    /// </summary>
    [TestMethod]
    public void TestAuthRuleSetsOnStaticHasPermissionMethodsWhenAddingAuthzRuleSetExplicitly()
    {
      var root = PermissionsRoot.NewPermissionsRoot();
#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type
      Assert.IsTrue(System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));
      Assert.IsFalse(System.Threading.Thread.CurrentPrincipal.IsInRole("User"));

      // implicit usage of ApplicationContext.RuleSet
      ApplicationContext.RuleSet = ApplicationContext.DefaultRuleSet;
      Assert.IsFalse(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot)));
      ApplicationContext.RuleSet = "custom1";
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot)));
      ApplicationContext.RuleSet = "custom2";
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot)));
      ApplicationContext.RuleSet = ApplicationContext.DefaultRuleSet;

      // directly specifying which ruleset to use
      Assert.IsFalse(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot), ApplicationContext.DefaultRuleSet));
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot), "custom1"));
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot), "custom2"));

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type
    }

    /// <summary>
    /// Same rule-set checks as above, but for a type whose rules are added
    /// using the ambient ApplicationContext.RuleSet (PermissionsRoot2).
    /// </summary>
    [TestMethod]
    public void TestAuthRuleSetsOnStaticHasPermissionMethodsWhenAddingAuthzRuleSetUsingApplicationContextRuleSet()
    {
      var root = PermissionsRoot2.NewPermissionsRoot();
#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogin();
#pragma warning restore CS0436 // Type conflicts with imported type
      Assert.IsTrue(System.Threading.Thread.CurrentPrincipal.IsInRole("Admin"));
      Assert.IsFalse(System.Threading.Thread.CurrentPrincipal.IsInRole("User"));

      //BusinessRules.AddRule(typeof(PermissionsRoot), new IsInRole(AuthorizationActions.DeleteObject, "User"), ApplicationContext.DefaultRuleSet);
      //BusinessRules.AddRule(typeof(PermissionsRoot), new IsInRole(AuthorizationActions.DeleteObject, "Admin"), "custom1");
      //BusinessRules.AddRule(typeof(PermissionsRoot), new IsInRole(AuthorizationActions.DeleteObject, "User", "Admin"), "custom2");

      // implicit usage of ApplicationContext.RuleSet
      ApplicationContext.RuleSet = ApplicationContext.DefaultRuleSet;
      Assert.IsFalse(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot2)));
      ApplicationContext.RuleSet = "custom1";
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot2)));
      ApplicationContext.RuleSet = "custom2";
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot2)));
      ApplicationContext.RuleSet = ApplicationContext.DefaultRuleSet;

      // directly specifying which ruleset to use
      Assert.IsFalse(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot2), ApplicationContext.DefaultRuleSet));
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot2), "custom1"));
      Assert.IsTrue(BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(PermissionsRoot2), "custom2"));

#pragma warning disable CS0436 // Type conflicts with imported type
      Csla.Test.Security.TestPrincipal.SimulateLogout();
#pragma warning restore CS0436 // Type conflicts with imported type
    }

    /// <summary>
    /// When AddObjectAuthorizationRules throws, the rules must be cleaned up
    /// so the next HasPermission call invokes (and fails in) it again;
    /// RootException.Counter counts the invocations.
    /// </summary>
    [TestMethod]
    public void TestAuthRulesCleanupAndAddAgainWhenExceptionIsThrownInAddObjectBusinessRules()
    {
      RootException.Counter = 0;
      ApplicationContext.RuleSet = ApplicationContext.DefaultRuleSet;

      // AddObjectAuthorizations should throw exception
      try
      {
        BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(RootException));
      }
      catch (Exception ex)
      {
        Assert.IsInstanceOfType(ex, typeof(TargetInvocationException));
        Assert.IsInstanceOfType(ex.InnerException, typeof(ArgumentException));
      }

      // AddObjectAuthorizations should be called again and
      // should throw exception again
      try
      {
        BusinessRules.HasPermission(AuthorizationActions.DeleteObject, typeof(RootException));
      }
      catch (Exception ex)
      {
        Assert.IsInstanceOfType(ex, typeof(TargetInvocationException));
        Assert.IsInstanceOfType(ex.InnerException, typeof(ArgumentException));
      }

      Assert.IsTrue(RootException.Counter == 2);
    }

    /// <summary>
    /// Removing a child whose rule denies DeleteObject; the list item's
    /// NoAuth rule applies but removal itself is exercised here.
    /// </summary>
    [TestMethod]
    public void AuthorizeRemoveFromList()
    {
      var root = new RootList();
      root.RemoveAt(0);
    }

    /// <summary>
    /// Per-type authorization via a custom DataPortal activator, checked
    /// against the concrete type.
    /// </summary>
    [TestMethod]
    public void PerTypeAuthEditObject()
    {
      ApplicationContext.DataPortalActivator = new PerTypeAuthDPActivator();
      try
      {
        Assert.IsFalse(BusinessRules.HasPermission(AuthorizationActions.EditObject, typeof(PerTypeAuthRoot)));
      }
      finally
      {
        // Always reset the activator so other tests are unaffected.
        Csla.ApplicationContext.DataPortalActivator = null;
      }
    }

    /// <summary>
    /// Per-type authorization via a custom DataPortal activator, checked
    /// against the interface the activator resolves to the concrete type.
    /// </summary>
    [TestMethod]
    public void PerTypeAuthEditObjectViaInterface()
    {
      ApplicationContext.DataPortalActivator = new PerTypeAuthDPActivator();
      try
      {
        Assert.IsFalse(BusinessRules.HasPermission(AuthorizationActions.EditObject, typeof(IPerTypeAuthRoot)));
      }
      finally
      {
        // Always reset the activator so other tests are unaffected.
        Csla.ApplicationContext.DataPortalActivator = null;
      }
    }
  }

  /// <summary>
  /// Test activator that maps IPerTypeAuthRoot to PerTypeAuthRoot and
  /// otherwise passes the requested type through unchanged.
  /// </summary>
  public class PerTypeAuthDPActivator : Server.IDataPortalActivator
  {
    public object CreateInstance(Type requestedType)
    {
      return Activator.CreateInstance(ResolveType(requestedType));
    }

    public void FinalizeInstance(object obj)
    {
    }

    public void InitializeInstance(object obj)
    {
    }

    public Type ResolveType(Type requestedType)
    {
      if (requestedType.Equals(typeof(IPerTypeAuthRoot)))
        return typeof(PerTypeAuthRoot);
      else
        return requestedType;
    }
  }

  /// <summary>
  /// Marker interface used to test per-type authorization resolution.
  /// </summary>
  public interface IPerTypeAuthRoot
  { }

  /// <summary>
  /// Business object whose EditObject permission requires the "Test" role.
  /// </summary>
  [Serializable]
  public class PerTypeAuthRoot : BusinessBase<PerTypeAuthRoot>, IPerTypeAuthRoot
  {
    [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
    public static void AddObjectAuthorizationRules()
    {
      Csla.Rules.BusinessRules.AddRule(
        typeof(PerTypeAuthRoot),
        new Csla.Rules.CommonRules.IsInRole(Csla.Rules.AuthorizationActions.EditObject, "Test"));
    }
  }

  /// <summary>
  /// List that is created with a single ChildItem already in it.
  /// </summary>
  [Serializable]
  public class RootList : BusinessListBase<RootList, ChildItem>
  {
    public RootList()
    {
      using (SuppressListChangedEvents)
      {
        Add(Csla.DataPortal.CreateChild<ChildItem>());
      }
    }
  }

  /// <summary>
  /// Child item whose DeleteObject permission is always denied by NoAuth.
  /// </summary>
  [Serializable]
  public class ChildItem : BusinessBase<ChildItem>
  {
    protected override void AddBusinessRules()
    {
      base.AddBusinessRules();
      BusinessRules.AddRule(new NoAuth(AuthorizationActions.DeleteObject));
    }

    // Authorization rule that unconditionally denies the configured action.
    private class NoAuth : Csla.Rules.AuthorizationRule
    {
      public NoAuth(AuthorizationActions action)
        : base(action)
      { }

      protected override void Execute(AuthorizationContext context)
      {
        context.HasPermission = false;
      }
    }
  }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Principal;
using Xunit;

namespace System.Security.Claims
{
    /// <summary>
    /// Tests for the ClaimsPrincipal constructors: default, from IIdentity,
    /// from IPrincipal (claims-based and not), from identity collections,
    /// and argument validation.
    /// NOTE(review): NonClaimsIdentity and AssertExtensions are declared
    /// elsewhere in the test assembly.
    /// </summary>
    public class ClaimsPrincipalTests
    {
        [Fact]
        public void Ctor_Default()
        {
            // A default principal has no identities, no claims and a null Identity.
            var cp = new ClaimsPrincipal();
            Assert.NotNull(cp.Identities);
            Assert.Equal(0, cp.Identities.Count());
            Assert.NotNull(cp.Claims);
            Assert.Equal(0, cp.Claims.Count());
            Assert.Null(cp.Identity);
        }

        [Fact]
        public void Ctor_IIdentity()
        {
            // A ClaimsIdentity is kept by reference (Same, not equal-copy).
            var id = new ClaimsIdentity(
               new List<Claim> { new Claim("claim_type", "claim_value") },
               "");
            var cp = new ClaimsPrincipal(id);
            Assert.NotNull(cp.Identities);
            Assert.Equal(1, cp.Identities.Count());
            Assert.Same(id, cp.Identities.First());
            Assert.Same(id, cp.Identity);
            Assert.NotNull(cp.Claims);
            Assert.Equal(1, cp.Claims.Count());
            Assert.True(cp.Claims.Any(claim => claim.Type == "claim_type" && claim.Value == "claim_value"));
        }

        [Fact]
        public void Ctor_IIdentity_NonClaims()
        {
            // A non-claims IIdentity is wrapped in a new ClaimsIdentity whose
            // Name claim carries the original name.
            var id = new NonClaimsIdentity() { Name = "NonClaimsIdentity_Name" };
            var cp = new ClaimsPrincipal(id);
            Assert.NotNull(cp.Identities);
            Assert.Equal(1, cp.Identities.Count());
            Assert.NotSame(id, cp.Identities.First());
            Assert.NotSame(id, cp.Identity);
            Assert.Equal(id.Name, cp.Identity.Name);
            Assert.NotNull(cp.Claims);
            Assert.Equal(1, cp.Claims.Count());
            Assert.True(cp.Claims.Any(claim => claim.Type == ClaimsIdentity.DefaultNameClaimType && claim.Value == "NonClaimsIdentity_Name"));
        }

        [Fact]
        public void Ctor_IPrincipal()
        {
            // Identities of a claims-based principal are adopted by reference.
            var baseId = new ClaimsIdentity(
               new List<Claim> { new Claim("claim_type", "claim_value") },
               "");
            var basePrincipal = new ClaimsPrincipal();
            basePrincipal.AddIdentity(baseId);
            var cp = new ClaimsPrincipal(basePrincipal);
            Assert.NotNull(cp.Identities);
            Assert.Equal(1, cp.Identities.Count());
            Assert.Same(baseId, cp.Identities.First());
            Assert.Same(baseId, cp.Identity);
            Assert.NotNull(cp.Claims);
            Assert.Equal(1, cp.Claims.Count());
            Assert.True(cp.Claims.Any(claim => claim.Type == "claim_type" && claim.Value == "claim_value"), "#7");
        }

        [Fact]
        public void Ctor_NonClaimsIPrincipal_NonClaimsIdentity()
        {
            // A non-claims principal with a non-claims identity: the identity
            // is wrapped, not adopted.
            var id = new NonClaimsIdentity() { Name = "NonClaimsIdentity_Name" };
            var basePrincipal = new NonClaimsPrincipal { Identity = id };
            var cp = new ClaimsPrincipal(basePrincipal);
            Assert.NotNull(cp.Identities);
            Assert.Equal(1, cp.Identities.Count());
            Assert.NotSame(id, cp.Identities.First());
            Assert.NotSame(id, cp.Identity);
            Assert.Equal(id.Name, cp.Identity.Name);
            Assert.NotNull(cp.Claims);
            Assert.Equal(1, cp.Claims.Count());
            Assert.True(cp.Claims.Any(claim => claim.Type == ClaimsIdentity.DefaultNameClaimType && claim.Value == "NonClaimsIdentity_Name"));
        }

        [Fact]
        public void Ctor_NonClaimsIPrincipal_NoIdentity()
        {
            // A non-claims principal with a null Identity still yields one
            // (empty, unauthenticated) identity.
            var p = new ClaimsPrincipal(new NonClaimsPrincipal());
            Assert.NotNull(p.Identities);
            Assert.Equal(1, p.Identities.Count());
            Assert.NotNull(p.Claims);
            Assert.Equal(0, p.Claims.Count());
            Assert.NotNull(p.Identity);
            Assert.False(p.Identity.IsAuthenticated);
        }

        [Fact]
        public void Ctor_IPrincipal_NoIdentity()
        {
            // Copying an empty ClaimsPrincipal yields no identities at all.
            var cp = new ClaimsPrincipal(new ClaimsPrincipal());
            Assert.NotNull(cp.Identities);
            Assert.Equal(0, cp.Identities.Count());
            Assert.NotNull(cp.Claims);
            Assert.Equal(0, cp.Claims.Count());
            Assert.Null(cp.Identity);
        }

        [Fact]
        public void Ctor_IPrincipal_MultipleIdentities()
        {
            // All three identities are adopted; only the GenericIdentity carries
            // a (Name) claim, so Claims has exactly one entry. Identity is the
            // first identity added to the source principal.
            var baseId1 = new ClaimsIdentity("baseId1");
            var baseId2 = new GenericIdentity("generic_name", "baseId2");
            var baseId3 = new ClaimsIdentity("customType");

            var basePrincipal = new ClaimsPrincipal(baseId1);
            basePrincipal.AddIdentity(baseId2);
            basePrincipal.AddIdentity(baseId3);

            var cp = new ClaimsPrincipal(basePrincipal);
            Assert.NotNull(cp.Identities);
            Assert.Equal(3, cp.Identities.Count());
            Assert.NotNull(cp.Claims);
            Assert.Equal(1, cp.Claims.Count());
            Assert.Equal(baseId1, cp.Identity);
            Assert.True(cp.Claims.Any(claim => claim.Type == ClaimsIdentity.DefaultNameClaimType && claim.Value == "generic_name"));
            Assert.Equal(baseId2.Claims.First(), cp.Claims.First());
        }

        [Fact]
        public void Ctor_IEnumerableClaimsIdentity_Empty()
        {
            // An empty identity collection behaves like the default constructor.
            var cp = new ClaimsPrincipal(new ClaimsIdentity[0]);
            Assert.NotNull(cp.Identities);
            Assert.Equal(0, cp.Identities.Count());
            Assert.NotNull(cp.Claims);
            Assert.Equal(0, cp.Claims.Count());
            Assert.Null(cp.Identity);
        }

        [Fact]
        public void Ctor_IEnumerableClaimsIdentity_Multiple()
        {
            // Claims aggregate across all identities, in identity order.
            var baseId1 = new ClaimsIdentity("baseId1");
            var baseId2 = new GenericIdentity("generic_name2", "baseId2");
            var baseId3 = new GenericIdentity("generic_name3", "baseId3");

            var cp = new ClaimsPrincipal(new List<ClaimsIdentity> { baseId1, baseId2, baseId3 });
            Assert.NotNull(cp.Identities);
            Assert.Equal(3, cp.Identities.Count());
            Assert.NotNull(cp.Claims);
            Assert.Equal(2, cp.Claims.Count());
            Assert.Equal(baseId1, cp.Identity);
            Assert.True(cp.Claims.Any(claim => claim.Type == ClaimsIdentity.DefaultNameClaimType && claim.Value == "generic_name2"));
            Assert.True(cp.Claims.Any(claim => claim.Type == ClaimsIdentity.DefaultNameClaimType && claim.Value == "generic_name3"));
            Assert.Equal(baseId2.Claims.First(), cp.Claims.First());
            Assert.Equal(baseId3.Claims.Last(), cp.Claims.Last());
        }

        [Fact]
        public void Ctor_ArgumentValidation()
        {
            // Each constructor overload rejects null with the matching parameter name.
            AssertExtensions.Throws<ArgumentNullException>("identities", () => new ClaimsPrincipal((IEnumerable<ClaimsIdentity>)null));
            AssertExtensions.Throws<ArgumentNullException>("identity", () => new ClaimsPrincipal((IIdentity)null));
            AssertExtensions.Throws<ArgumentNullException>("principal", () => new ClaimsPrincipal((IPrincipal)null));
            AssertExtensions.Throws<ArgumentNullException>("reader", () => new ClaimsPrincipal((BinaryReader)null));
        }

        // Minimal IPrincipal that is not claims-based, used to exercise the
        // wrapping code paths above.
        private class NonClaimsPrincipal : IPrincipal
        {
            public IIdentity Identity { get; set; }

            public bool IsInRole(string role)
            {
                throw new NotImplementedException();
            }
        }
    }
}
// ****************************************************************
// This is free software licensed under the NUnit license. You
// may obtain a copy of the license as well as information regarding
// copyright ownership at http://nunit.org.
// ****************************************************************

using System;
using System.Collections;
using System.Xml;
using System.Xml.Schema;
using System.IO;
using System.Threading;
using NUnit.Core;

namespace NUnit.Util
{
    /// <summary>
    /// Class that represents an NUnit test project
    /// </summary>
    public class NUnitProject
    {
        #region Constants

        // File extension that identifies an NUnit project file.
        public static readonly string Extension = ".nunit";

        #endregion

        #region Instance variables

        /// <summary>
        /// Path to the file storing this project
        /// </summary>
        private string projectPath;

        /// <summary>
        /// Application Base for the project. Since this
        /// can be null, always fetch from the property
        /// rather than using the field directly.
        /// </summary>
        private string basePath;

        /// <summary>
        /// Whether the project is dirty
        /// </summary>
        private bool isDirty = false;

        /// <summary>
        /// Whether changes have been made requiring a reload
        /// </summary>
        private bool reloadRequired = false;

        /// <summary>
        /// Collection of configs for the project
        /// </summary>
        private ProjectConfigCollection configs;

        /// <summary>
        /// True for NUnit-related projects that follow the config
        /// of the NUnit build under which they are running.
        /// </summary>
        private bool autoConfig;

        /// <summary>
        /// The currently active configuration
        /// </summary>
        private ProjectConfig activeConfig;

        /// <summary>
        /// Flag indicating that this project is a
        /// temporary wrapper for an assembly.
        /// </summary>
        private bool isAssemblyWrapper = false;

        /// <summary>
        /// The ProcessModel to be used in loading this project
        /// </summary>
        private ProcessModel processModel;

        /// <summary>
        /// The DomainUsage setting to be used in loading this project
        /// </summary>
        private DomainUsage domainUsage;

        #endregion

        #region Constructor

        /// <summary>
        /// Creates a project for the given path; the path is normalized
        /// to a full path immediately.
        /// </summary>
        public NUnitProject( string projectPath )
        {
            this.projectPath = Path.GetFullPath( projectPath );
            configs = new ProjectConfigCollection( this );
        }

        #endregion

        #region Properties and Events

        /// <summary>
        /// The path to which a project will be saved.
        /// </summary>
        public string ProjectPath
        {
            get { return projectPath; }
            set
            {
                projectPath = Path.GetFullPath( value );
                isDirty = true;
            }
        }

        /// <summary>
        /// The directory containing the project file; used as the base
        /// path when none was explicitly specified.
        /// </summary>
        public string DefaultBasePath
        {
            get { return Path.GetDirectoryName( projectPath ); }
        }

        /// <summary>
        /// Indicates whether a base path was specified for the project
        /// </summary>
        public bool BasePathSpecified
        {
            get
            {
                return basePath != null && basePath != string.Empty;
            }
        }

        /// <summary>
        /// The base path for the project. Constructor sets
        /// it to the directory part of the project path.
        /// </summary>
        public string BasePath
        {
            get
            {
                if ( !BasePathSpecified )
                    return DefaultBasePath;
                return basePath;
            }
            set
            {
                basePath = value;

                // Relative values are resolved against the project directory.
                if (basePath != null && basePath != string.Empty
                    && !Path.IsPathRooted(basePath))
                {
                    basePath = Path.Combine(
                        DefaultBasePath,
                        basePath);
                }

                basePath = PathUtils.Canonicalize(basePath);
                HasChangesRequiringReload = IsDirty = true;
            }
        }

        /// <summary>
        /// The name of the project.
        /// </summary>
        public string Name
        {
            get { return Path.GetFileNameWithoutExtension( projectPath ); }
        }

        /// <summary>
        /// True when the project follows the configuration of the NUnit
        /// build it runs under.
        /// </summary>
        public bool AutoConfig
        {
            get { return autoConfig; }
            set { autoConfig = value; }
        }

        /// <summary>
        /// The currently active configuration, self-healing if the
        /// previously active one was removed.
        /// </summary>
        public ProjectConfig ActiveConfig
        {
            get
            {
                // In case the previous active config was removed
                if ( activeConfig != null && !configs.Contains( activeConfig ) )
                    activeConfig = null;

                // In case no active config is set or it was removed
                if (activeConfig == null && configs.Count > 0)
                    activeConfig = configs[0];

                return activeConfig;
            }
        }

        // Safe access to name of the active config
        public string ActiveConfigName
        {
            get
            {
                ProjectConfig config = ActiveConfig;
                return config == null ? null : config.Name;
            }
        }

        /// <summary>
        /// True when the project has an active config with at least one assembly.
        /// </summary>
        public bool IsLoadable
        {
            get
            {
                return ActiveConfig != null &&
                    ActiveConfig.Assemblies.Count > 0;
            }
        }

        // A project made from a single assembly is treated
        // as a transparent wrapper for some purposes until
        // a change is made to it.
        public bool IsAssemblyWrapper
        {
            get { return isAssemblyWrapper; }
            set { isAssemblyWrapper = value; }
        }

        /// <summary>
        /// Name of the config file associated with this project. For an
        /// assembly wrapper this is the assembly file name + ".config".
        /// </summary>
        public string ConfigurationFile
        {
            get
            {
                // TODO: Check this
                return isAssemblyWrapper
                    ? Path.GetFileName( projectPath ) + ".config"
                    : Path.GetFileNameWithoutExtension( projectPath ) + ".config";
            }
        }

        /// <summary>
        /// Dirty flag. Marking an assembly wrapper dirty converts it to a
        /// real .nunit project and forces a reload.
        /// </summary>
        public bool IsDirty
        {
            get { return isDirty; }
            set
            {
                isDirty = value;

                if (isAssemblyWrapper && value == true)
                {
                    projectPath = Path.ChangeExtension(projectPath, ".nunit");
                    isAssemblyWrapper = false;
                    HasChangesRequiringReload = true;
                }
            }
        }

        /// <summary>
        /// True when changes were made that require reloading the tests.
        /// </summary>
        public bool HasChangesRequiringReload
        {
            get { return reloadRequired; }
            set { reloadRequired = value; }
        }

        /// <summary>
        /// Process model used to load this project; changing it dirties
        /// the project and requires a reload.
        /// </summary>
        public ProcessModel ProcessModel
        {
            get { return processModel; }
            set
            {
                processModel = value;
                HasChangesRequiringReload = IsDirty = true;
            }
        }

        /// <summary>
        /// AppDomain usage for this project; changing it dirties the
        /// project and requires a reload.
        /// </summary>
        public DomainUsage DomainUsage
        {
            get { return domainUsage; }
            set
            {
                domainUsage = value;
                HasChangesRequiringReload = IsDirty = true;
            }
        }

        /// <summary>
        /// The collection of configurations belonging to this project.
        /// </summary>
        public ProjectConfigCollection Configs
        {
            get { return configs; }
        }

        #endregion

        #region Static Methods

        /// <summary>
        /// True when the path has the .nunit extension.
        /// </summary>
        public static bool IsNUnitProjectFile(string path)
        {
            return Path.GetExtension(path) == Extension;
        }

        /// <summary>
        /// Derives the project file path corresponding to an arbitrary file
        /// (same directory and base name, .nunit extension).
        /// </summary>
        public static string ProjectPathFromFile(string path)
        {
            string fileName = Path.GetFileNameWithoutExtension(path) + NUnitProject.Extension;
            return Path.Combine(Path.GetDirectoryName(path), fileName);
        }

        #endregion

        #region Instance Methods

        /// <summary>
        /// Activates the config at the given index; dirties and requires reload.
        /// </summary>
        public void SetActiveConfig( int index )
        {
            activeConfig = configs[index];
            HasChangesRequiringReload = IsDirty = true;
        }

        /// <summary>
        /// Activates the config with the given name, if present; dirties
        /// and requires reload only on a match.
        /// </summary>
        public void SetActiveConfig( string name )
        {
            foreach( ProjectConfig config in configs )
            {
                if ( config.Name == name )
                {
                    activeConfig = config;
                    HasChangesRequiringReload = IsDirty = true;
                    break;
                }
            }
        }

        /// <summary>
        /// Merges the configs and assemblies of a Visual Studio project
        /// into this project, creating configs as needed.
        /// </summary>
        public void Add( VSProject vsProject )
        {
            foreach( VSProjectConfig vsConfig in vsProject.Configs )
            {
                string name = vsConfig.Name;

                if ( !configs.Contains( name ) )
                    configs.Add( name );

                ProjectConfig config = this.Configs[name];

                foreach ( string assembly in vsConfig.Assemblies )
                    config.Assemblies.Add( assembly );
            }
        }

        /// <summary>
        /// Loads the project from its XML file. Malformed files are reported
        /// as ProjectFormatException with line/position information; a missing
        /// file propagates as FileNotFoundException.
        /// </summary>
        public void Load()
        {
            XmlTextReader reader = new XmlTextReader( projectPath );

            string activeConfigName = null;
            ProjectConfig currentConfig = null;

            try
            {
                reader.MoveToContent();
                if ( reader.NodeType != XmlNodeType.Element || reader.Name != "NUnitProject" )
                    throw new ProjectFormatException(
                        "Invalid project format: <NUnitProject> expected.",
                        reader.LineNumber, reader.LinePosition );

                while( reader.Read() )
                    if ( reader.NodeType == XmlNodeType.Element )
                        switch( reader.Name )
                        {
                            case "Settings":
                                if ( reader.NodeType == XmlNodeType.Element )
                                {
                                    activeConfigName = reader.GetAttribute( "activeconfig" );

                                    string autoConfig = reader.GetAttribute("autoconfig");
                                    if (autoConfig != null)
                                        this.AutoConfig = autoConfig.ToLower() == "true";
                                    // AutoConfig overrides the stored active config name.
                                    if (this.AutoConfig)
                                        activeConfigName = NUnitConfiguration.BuildConfiguration;

                                    string appbase = reader.GetAttribute( "appbase" );
                                    if ( appbase != null )
                                        this.BasePath = appbase;

                                    string processModel = reader.GetAttribute("processModel");
                                    if (processModel != null)
                                        this.ProcessModel = (ProcessModel)Enum.Parse(typeof(ProcessModel), processModel);

                                    string domainUsage = reader.GetAttribute("domainUsage");
                                    if (domainUsage != null)
                                        this.DomainUsage = (DomainUsage)Enum.Parse(typeof(DomainUsage), domainUsage);
                                }
                                break;

                            case "Config":
                                if ( reader.NodeType == XmlNodeType.Element )
                                {
                                    string configName = reader.GetAttribute( "name" );
                                    currentConfig = new ProjectConfig( configName );
                                    currentConfig.BasePath = reader.GetAttribute( "appbase" );
                                    currentConfig.ConfigurationFile = reader.GetAttribute( "configfile" );

                                    string binpath = reader.GetAttribute( "binpath" );
                                    currentConfig.PrivateBinPath = binpath;
                                    string type = reader.GetAttribute( "binpathtype" );
                                    // Missing binpathtype: infer Auto/Manual from binpath presence.
                                    if ( type == null )
                                        if ( binpath == null )
                                            currentConfig.BinPathType = BinPathType.Auto;
                                        else
                                            currentConfig.BinPathType = BinPathType.Manual;
                                    else
                                        currentConfig.BinPathType = (BinPathType)Enum.Parse( typeof( BinPathType ), type, true );

                                    string runtime = reader.GetAttribute("runtimeFramework");
                                    if ( runtime != null )
                                        currentConfig.RuntimeFramework = RuntimeFramework.Parse(runtime);

                                    Configs.Add(currentConfig);
                                    if ( configName == activeConfigName )
                                        activeConfig = currentConfig;
                                }
                                else if ( reader.NodeType == XmlNodeType.EndElement )
                                    currentConfig = null;
                                break;

                            case "assembly":
                                if ( reader.NodeType == XmlNodeType.Element && currentConfig != null )
                                {
                                    string path = reader.GetAttribute( "path" );
                                    currentConfig.Assemblies.Add(
                                        Path.Combine( currentConfig.BasePath, path ) );
                                }
                                break;

                            default:
                                break;
                        }

                // A freshly loaded project is clean.
                this.IsDirty = false;
                this.reloadRequired = false;
            }
            catch( FileNotFoundException )
            {
                throw;
            }
            catch( XmlException e )
            {
                throw new ProjectFormatException(
                    string.Format( "Invalid project format: {0}", e.Message ),
                    e.LineNumber, e.LinePosition );
            }
            catch( Exception e )
            {
                throw new ProjectFormatException(
                    string.Format( "Invalid project format: {0} Line {1}, Position {2}",
                    e.Message, reader.LineNumber, reader.LinePosition ),
                    reader.LineNumber, reader.LinePosition );
            }
            finally
            {
                reader.Close();
            }
        }

        /// <summary>
        /// Saves the project as indented XML to its project path. Only
        /// non-default settings and attributes are written.
        /// </summary>
        public void Save()
        {
            projectPath = ProjectPathFromFile( projectPath );

            XmlTextWriter writer = new XmlTextWriter( projectPath, System.Text.Encoding.UTF8 );
            writer.Formatting = Formatting.Indented;

            writer.WriteStartElement( "NUnitProject" );

            if ( configs.Count > 0 || this.BasePath != this.DefaultBasePath )
            {
                writer.WriteStartElement( "Settings" );
                if ( configs.Count > 0 )
                    writer.WriteAttributeString( "activeconfig", ActiveConfigName );
                if ( this.BasePath != this.DefaultBasePath )
                    writer.WriteAttributeString( "appbase", this.BasePath );
                if (this.AutoConfig)
                    writer.WriteAttributeString("autoconfig", "true");
                if (this.ProcessModel != ProcessModel.Default)
                    writer.WriteAttributeString("processModel", this.ProcessModel.ToString());
                if (this.DomainUsage != DomainUsage.Default)
                    writer.WriteAttributeString("domainUsage", this.DomainUsage.ToString());
                writer.WriteEndElement();
            }

            foreach( ProjectConfig config in Configs )
            {
                writer.WriteStartElement( "Config" );
                writer.WriteAttributeString( "name", config.Name );

                string appbase = config.BasePath;
                if ( !PathUtils.SamePathOrUnder( this.BasePath, appbase ) )
                    writer.WriteAttributeString( "appbase", appbase );
                else if ( config.RelativeBasePath != null )
                    writer.WriteAttributeString( "appbase", config.RelativeBasePath );

                string configFile = config.ConfigurationFile;
                if ( configFile != null && configFile != this.ConfigurationFile )
                    writer.WriteAttributeString( "configfile", config.ConfigurationFile );

                if ( config.BinPathType == BinPathType.Manual )
                    writer.WriteAttributeString( "binpath", config.PrivateBinPath );
                else
                    writer.WriteAttributeString( "binpathtype", config.BinPathType.ToString() );

                if (config.RuntimeFramework != null)
                    writer.WriteAttributeString("runtimeFramework", config.RuntimeFramework.ToString());

                foreach( string assembly in config.Assemblies )
                {
                    writer.WriteStartElement( "assembly" );
                    writer.WriteAttributeString( "path", PathUtils.RelativePath( config.BasePath, assembly ) );
                    writer.WriteEndElement();
                }

                writer.WriteEndElement();
            }

            writer.WriteEndElement();

            writer.Close();
            this.IsDirty = false;

            // Once we save a project, it's no longer
            // loaded as an assembly wrapper on reload.
            this.isAssemblyWrapper = false;
        }

        /// <summary>
        /// Saves the project under a new path (updates ProjectPath first).
        /// </summary>
        public void Save( string projectPath )
        {
            this.ProjectPath = projectPath;
            Save();
        }

        #endregion
    }
}
using PholioVisualisation.Analysis.TrendMarkers;
using PholioVisualisation.DataAccess;
using PholioVisualisation.DataConstruction;
using PholioVisualisation.Formatting;
using PholioVisualisation.Parsers;
using PholioVisualisation.PholioObjects;
using PholioVisualisation.RequestParameters;
using PholioVisualisation.ServiceActions;
using PholioVisualisation.Services;
using PholioVisualisation.ServicesWeb.Managers;
using PholioVisualisation.ServicesWeb.Validations;
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;

namespace PholioVisualisation.ServicesWeb.Controllers
{
    /// <summary>
    /// Data generator controller. Exposes CSV download, latest-data, trend,
    /// partition and statistics endpoints under the "api" route prefix.
    /// Most actions follow the same pattern: build request parameters, delegate
    /// to a builder/action, and log then rethrow (or wrap) any exception.
    /// </summary>
    [RoutePrefix("api")]
    public class DataController : DataBaseController
    {
        /// <summary>
        /// Get data for specific indicators in CSV format.
        /// </summary>
        /// <remarks>This service returns data in CSV not JSON format so the response will not be viewable on this page</remarks>
        /// <param name="indicator_ids">Comma separated list of indicator IDs [Maximum 100]</param>
        /// <param name="child_area_type_id">Child area type ID</param>
        /// <param name="parent_area_type_id">Parent area type ID</param>
        /// <param name="profile_id">Profile ID [optional]</param>
        /// <param name="parent_area_code">The parent area code [default is England]</param>
        /// <param name="category_area_code">Ignore this parameter</param>
        [HttpGet]
        [Route("all_data/csv/by_indicator_id")]
        public HttpResponseMessage GetDataFileForIndicatorList(string indicator_ids, int child_area_type_id,
            int parent_area_type_id, int profile_id = ProfileIds.Undefined,
            string parent_area_code = AreaCodes.England, string category_area_code = null)
        {
            var receivedParameters = new DataServicesParameters(DataServiceUse.AllDataFileForIndicatorList,
                child_area_type_id, parent_area_type_id, parent_area_code, string.Empty, string.Empty,
                indicator_ids, null, category_area_code, profile_id);

            // Reject the request up front if the parameter combination is invalid.
            if (!receivedParameters.IsValid())
            {
                return new HttpResponseMessage(HttpStatusCode.BadRequest)
                {
                    Content = receivedParameters.GetExceptionStringContentMessages()
                };
            }

            try
            {
                var dataInternalServiceManager = new DataServicesManager(receivedParameters,
                    GroupDataReader, ProfileReader, AreasReader);
                var exportParameters = dataInternalServiceManager.ExportParameters;
                var onDemandParameters = dataInternalServiceManager.OnDemandParameters;
                return GetOnDemandIndicatorDataResponse(AreasReader, exportParameters, onDemandParameters);
            }
            catch (Exception ex)
            {
                // Unlike the JSON endpoints, CSV downloads translate failures
                // into a 500 response rather than rethrowing.
                Log(ex);
                return new HttpResponseMessage(HttpStatusCode.InternalServerError)
                {
                    Content = new StringContent(ex.Message)
                };
            }
        }

        /// <summary>
        /// Get data for all the indicators in a profile group in CSV format
        /// </summary>
        /// <remarks>This service returns data in CSV not JSON format so the response will not be viewable on this page</remarks>
        /// <param name="child_area_type_id">Child area type ID</param>
        /// <param name="parent_area_type_id">Parent area type ID</param>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="parent_area_code">The parent area code [default is England]</param>
        /// <param name="category_area_code">Ignore this parameter</param>
        [HttpGet]
        [Route("all_data/csv/by_group_id")]
        public HttpResponseMessage GetDataFileForGroup(int child_area_type_id, int parent_area_type_id,
            int group_id, string parent_area_code = AreaCodes.England, string category_area_code = null)
        {
            var receivedParameters = new DataServicesParameters(DataServiceUse.AllDataFileForGroup,
                child_area_type_id, parent_area_type_id, parent_area_code, "", "",
                null, null, category_area_code, null, group_id);

            if (!receivedParameters.IsValid())
            {
                return new HttpResponseMessage(HttpStatusCode.BadRequest)
                {
                    Content = receivedParameters.GetExceptionStringContentMessages()
                };
            }

            try
            {
                var dataInternalServiceManager = new DataServicesManager(receivedParameters,
                    GroupDataReader, ProfileReader, AreasReader);
                var exportParameters = dataInternalServiceManager.ExportParameters;
                var onDemandParameters = dataInternalServiceManager.OnDemandParameters;
                return GetOnDemandIndicatorDataResponse(AreasReader, exportParameters, onDemandParameters);
            }
            catch (Exception ex)
            {
                Log(ex);
                return new HttpResponseMessage(HttpStatusCode.InternalServerError)
                {
                    Content = new StringContent(ex.Message)
                };
            }
        }

        /// <summary>
        /// Get data for all the indicators in a profile in CSV format
        /// </summary>
        /// <remarks>This service returns data in CSV not JSON format so the response will not be viewable on this page</remarks>
        /// <param name="child_area_type_id">Child area type ID</param>
        /// <param name="parent_area_type_id">Parent area type ID</param>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="parent_area_code">The parent area code [default is England]</param>
        /// <param name="category_area_code">Ignore this parameter</param>
        [HttpGet]
        [Route("all_data/csv/by_profile_id")]
        public HttpResponseMessage GetDataFileForProfile(int child_area_type_id, int parent_area_type_id,
            int profile_id, string parent_area_code = AreaCodes.England, string category_area_code = null)
        {
            var receivedParameters = new DataServicesParameters(DataServiceUse.AllDataFileForProfile,
                child_area_type_id, parent_area_type_id, parent_area_code, "", "",
                null, null, category_area_code, profile_id);

            if (!receivedParameters.IsValid())
            {
                return new HttpResponseMessage(HttpStatusCode.BadRequest)
                {
                    Content = receivedParameters.GetExceptionStringContentMessages()
                };
            }

            try
            {
                var dataInternalServiceManager = new DataServicesManager(receivedParameters,
                    GroupDataReader, ProfileReader, AreasReader);
                var exportParameters = dataInternalServiceManager.ExportParameters;
                var onDemandParameters = dataInternalServiceManager.OnDemandParameters;
                return GetOnDemandIndicatorDataResponse(AreasReader, exportParameters, onDemandParameters);
            }
            catch (Exception ex)
            {
                Log(ex);
                return new HttpResponseMessage(HttpStatusCode.InternalServerError)
                {
                    Content = new StringContent(ex.Message)
                };
            }
        }

        /// <summary>
        /// Get the most recent data for a profile group
        /// </summary>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("latest_data/all_indicators_in_profile_group_for_child_areas")]
        public IList<GroupRoot> GetGroupDataAtDataPoint(int profile_id, int group_id,
            int area_type_id, string parent_area_code)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, group_id.ToString());
                nameValues.Add(ParameterNames.ProfileId, profile_id.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                var parameters = new GroupDataAtDataPointParameters(nameValues);
                return new JsonBuilderGroupDataAtDataPoint(parameters).GetGroupRoots();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get the most recent data for a list of indicator IDs
        /// </summary>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="indicator_ids">Comma separated list of indicator IDs</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="restrict_to_profile_ids">Comma separated list of profile IDs</param>
        [HttpGet]
        [Route("latest_data/specific_indicators_for_child_areas")]
        public IList<GroupRoot> GetGroupDataAtDataPoint(int area_type_id, string parent_area_code,
            string indicator_ids, int profile_id, string restrict_to_profile_ids = "")
        {
            try
            {
                var comparatorMap = GetComparatorMapForParentArea(area_type_id, parent_area_code);

                // AreaCode is deliberately null: the search covers all child
                // areas of the parent rather than one specific area.
                var builder = new GroupDataBuilderByIndicatorIds
                {
                    IndicatorIds = new IntListStringParser(indicator_ids).IntList,
                    ProfileId = profile_id,
                    RestrictSearchProfileIds = GetProfileIds(restrict_to_profile_ids),
                    ComparatorMap = comparatorMap,
                    AreaCode = null,
                    AreaTypeId = area_type_id
                };
                return new JsonBuilderGroupDataAtDataPointBySearch(builder).GetGroupRoots();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get the most recent data for a list of indicator IDs
        /// </summary>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="indicator_ids">Comma separated list of indicator IDs</param>
        /// <param name="area_code">Area code</param>
        /// <param name="restrict_to_profile_ids">Comma separated list of profile IDs</param>
        [HttpGet]
        [Route("latest_data/specific_indicators_for_single_area")]
        public IList<GroupRoot> GetGroupDataAtDataPointForSpecificArea(int area_type_id, string area_code,
            string indicator_ids, string restrict_to_profile_ids = "")
        {
            try
            {
                var comparatorMap = new ComparatorMapBuilder(area_type_id).ComparatorMap;
                var builder = new GroupDataBuilderByIndicatorIds
                {
                    IndicatorIds = new IntListStringParser(indicator_ids).IntList,
                    ProfileId = ProfileIds.Undefined,
                    RestrictSearchProfileIds = GetProfileIds(restrict_to_profile_ids),
                    ComparatorMap = comparatorMap,
                    AreaCode = area_code,
                    AreaTypeId = area_type_id
                };
                return new JsonBuilderGroupDataAtDataPointBySearch(builder).GetGroupRoots();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get data values for a group for one specific area
        /// </summary>
        /// <remarks>
        /// Get CoreDataSet objects for every group root in a domain
        /// </remarks>
        /// <param name="area_code">Area code</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="group_id">Group ID</param>
        [HttpGet]
        [Route("latest_data/all_indicators_in_profile_group_for_single_area")]
        public List<CoreDataSet> GetGroupDataAtDataPointOfSpecificAreas(string area_code,
            int area_type_id, int profile_id, int group_id)
        {
            try
            {
                IAreasReader areasReader = ReaderFactory.GetAreasReader();
                IArea parentArea = AreaFactory.NewArea(areasReader, area_code);

                // For an ordinary (non-country, non-category, non-nearest-neighbour)
                // area the group data is keyed on its parent, so resolve that first.
                if (parentArea.IsCountry == false && parentArea is CategoryArea == false &&
                    parentArea is NearestNeighbourArea == false)
                {
                    parentArea = areasReader.GetParentAreas(area_code).First();
                }

                GroupData data = new GroupDataAtDataPointRepository().GetGroupDataProcessed(
                    parentArea.Code, area_type_id, profile_id, group_id);
                IList<GroupRoot> roots = data.GroupRoots;

                var dataForArea = new List<CoreDataSet>();
                foreach (GroupRoot groupRoot in roots)
                {
                    CoreDataSet coreData;
                    if (parentArea.IsCountry)
                    {
                        // Country-level requests use the national comparator data.
                        coreData = groupRoot.GetNationalGrouping().ComparatorData;
                    }
                    else
                    {
                        // NOTE(review): FirstOrDefault may yield null when the area
                        // has no data for this root; nulls are added to the result.
                        IList<CoreDataSet> dataList = groupRoot.Data;
                        coreData = dataList.FirstOrDefault(x => x.AreaCode == area_code);
                    }
                    dataForArea.Add(coreData);
                }

                return dataForArea;
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get a list of the core data for all the areas within a parent area
        /// </summary>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="comparator_id">Comparator ID</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="template_profile_id">ID of the profile to use as template if accessing search results</param>
        /// <param name="data_point_offset">Offset in years, quarters or months from the most recent time point [default is 0]</param>
        [HttpGet]
        [Route("latest_data/single_indicator_for_all_areas")]
        public IList<CoreDataSet> GetAreaValues(int group_id, int area_type_id, string parent_area_code,
            int comparator_id, int indicator_id, int sex_id, int age_id,
            int profile_id = ProfileIds.Undefined, int template_profile_id = ProfileIds.Undefined,
            int data_point_offset = 0)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, group_id.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                nameValues.Add(ParameterNames.ComparatorId, comparator_id.ToString());
                nameValues.Add(ParameterNames.IndicatorId, indicator_id.ToString());
                nameValues.Add(ParameterNames.SexId, sex_id.ToString());
                nameValues.Add(ParameterNames.AgeId, age_id.ToString());
                nameValues.Add(ParameterNames.ProfileId, profile_id.ToString());
                nameValues.Add(ParameterNames.TemplateProfileId, template_profile_id.ToString());
                nameValues.Add(IndicatorStatsParameters.ParameterDataPointOffset, data_point_offset.ToString());
                var parameters = new AreaValuesParameters(nameValues);
                return new JsonBuilderAreaValues(parameters).GetValues();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get an ordered list of indicators with core data for requested profiles groups and areas
        /// </summary>
        /// <param name="group_ids">Comma separated list of profile group IDs</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="area_codes">Comma separated list of area codes</param>
        /// <param name="comparator_area_codes">Comma separated list of comparator area codes</param>
        /// <param name="include_time_periods">Whether to include time periods in response [yes/no - no is default]</param>
        /// <param name="latest_data_only">Whether to include only the latest data [yes/no - no is default]</param>
        [HttpGet]
        [Route("latest_data/all_indicators_in_multiple_profile_groups_for_multiple_areas")]
        public Dictionary<int, Dictionary<string, IList<SimpleAreaData>>> GetAreaData(string group_ids,
            int area_type_id, string area_codes, string comparator_area_codes = null,
            string include_time_periods = null, string latest_data_only = null)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, group_ids);
                nameValues.Add(ParameterNames.AreaTypeId, area_type_id.ToString());
                nameValues.Add(ParameterNames.AreaCode, area_codes);
                nameValues.Add(AreaDataParameters.ParameterComparatorAreaCodes, comparator_area_codes);
                nameValues.Add(AreaDataParameters.ParameterIncludeTimePeriods, include_time_periods);
                nameValues.Add(AreaDataParameters.ParameterLatestDataOnly, latest_data_only);
                var parameters = new AreaDataParameters(nameValues);
                return new JsonBuilderAreaData(parameters).GetAreaData();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get the recent trends for every area under a parent area.
        /// </summary>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="age_id">Age ID</param>
        [HttpGet]
        [Route("recent_trends/for_child_areas")]
        public Dictionary<string, TrendMarkerResult> GetTrendMarkers(string parent_area_code,
            int group_id, int area_type_id, int indicator_id, int sex_id, int age_id)
        {
            try
            {
                // Create dependencies
                var trendMarkersProvider = new TrendMarkersProvider(ReaderFactory.GetTrendDataReader(),
                    new TrendMarkerCalculator());
                var areaListProvider = new FilteredChildAreaListProvider(ReaderFactory.GetAreasReader());
                var singleGroupingProvider = GetSingleGroupingProvider(GroupDataReader);

                // The profile is derived from the group's metadata rather than
                // being passed in by the caller.
                var groupMetadataList = GroupDataReader.GetGroupingMetadataList(new List<int> { group_id });
                var profileId = groupMetadataList.First().ProfileId;

                return new TrendMarkersAction(areaListProvider, trendMarkersProvider, singleGroupingProvider)
                    .GetTrendMarkers(parent_area_code, profileId, group_id, area_type_id,
                        indicator_id, sex_id, age_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get the trend data for a profile group
        /// </summary>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("trend_data/all_indicators_in_profile_group_for_child_areas")]
        public IList<TrendRoot> GetTrendData(int profile_id, int group_id, int area_type_id,
            string parent_area_code)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, group_id.ToString());
                nameValues.Add(ParameterNames.ProfileId, profile_id.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                var parameters = new TrendDataParameters(nameValues);
                return new JsonBuilderTrendData(parameters).GetTrendData();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get the trend data for a list of indicator IDs
        /// </summary>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="indicator_ids">Comma separated list of indicator IDs</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="restrict_to_profile_ids">Comma separated list of profile IDs</param>
        [HttpGet]
        [Route("trend_data/specific_indicators_for_child_areas")]
        public IList<TrendRoot> GetTrendData(int area_type_id, string parent_area_code,
            string indicator_ids, string restrict_to_profile_ids = "")
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(DataParameters.ParameterIndicatorIds, indicator_ids);
                nameValues.Add(ParameterNames.AreaTypeId, area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                // A search (rather than a specific profile) is used to locate the indicators.
                nameValues.Add(ParameterNames.ProfileId, ProfileIds.Search.ToString());
                var parameters = new TrendDataBySearchParameters(nameValues);
                parameters.RestrictResultsToProfileIdList = GetProfileIds(restrict_to_profile_ids);
                return new JsonBuilderTrendDataBySearch(parameters).GetTrendData();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get all the most recently available category data
        /// </summary>
        /// <remarks>
        /// Used in the inequality tab of Fingertips
        /// </remarks>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_data/by_category")]
        public PartitionDataForAllCategories GetMostRecentDataForAllCategories(int profile_id,
            string area_code, int indicator_id, int sex_id, int age_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllCategoriesBuilder().GetPartitionData(profile_id,
                    area_code, indicator_id, sex_id, age_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get all the most recently available data for all ages
        /// </summary>
        /// <remarks>
        /// Used in the inequality tab of Fingertips
        /// </remarks>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_data/by_age")]
        public PartitionDataForAllAges GetMostRecentDataForAllAges(int profile_id, string area_code,
            int indicator_id, int sex_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllAgesBuilder().GetPartitionData(profile_id,
                    area_code, indicator_id, sex_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get all the most recently available data for all sexes
        /// </summary>
        /// <remarks>
        /// Used in the inequality tab of Fingertips
        /// </remarks>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_data/by_sex")]
        public PartitionDataForAllSexes GetMostRecentDataForAllSexes(int profile_id, string area_code,
            int indicator_id, int age_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllSexesBuilder().GetPartitionData(profile_id,
                    area_code, indicator_id, age_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get trend data partitioned by age
        /// </summary>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_trend_data/by_age")]
        public PartitionTrendData TrendDataForInequalitiesByAge(int profile_id, string area_code,
            int indicator_id, int sex_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllAgesBuilder().GetPartitionTrendData(
                    profile_id, area_code, indicator_id, sex_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get trend data partitioned by sex
        /// </summary>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_trend_data/by_sex")]
        public PartitionTrendData TrendDataForInequalitiesBySex(int profile_id, string area_code,
            int indicator_id, int age_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllSexesBuilder().GetPartitionTrendData(
                    profile_id, area_code, indicator_id, age_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get trend data partitioned by category
        /// </summary>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="category_type_id">Category type ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_trend_data/by_category")]
        public PartitionTrendData TrendDataForInequalitiesByCategory(int profile_id, string area_code,
            int indicator_id, int age_id, int sex_id, int category_type_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllCategoriesBuilder().GetPartitionTrendData(profile_id,
                    area_code, indicator_id, age_id, sex_id, category_type_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get trend data partitioned by categories
        /// </summary>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_code">Area code</param>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("partition_trend_data/by_categories")]
        public IList<PartitionTrendData> TrendDataForInequalitiesByCategories(int profile_id,
            string area_code, int indicator_id, int age_id, int sex_id, int area_type_id)
        {
            try
            {
                return new PartitionDataForAllCategoriesBuilder().GetPartitionTrendDataForAllCategories(
                    profile_id, area_code, indicator_id, age_id, sex_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get certain details of the specified indicators.
        /// </summary>
        /// <remarks>
        /// The indicators are differentiated by age and sex where appropriate. Returns a list of group root summaries.
        /// These can optionally be limited to belonging to a specific profile.
        /// </remarks>
        /// <param name="indicator_ids">Comma separated list of indicator IDs</param>
        /// <param name="profile_id">Profile ID [optional]</param>
        [HttpGet]
        [Route("grouproot_summaries/by_indicator_id")]
        public IList<GroupRootSummary> GetGroupDataForProfile(string indicator_ids,
            int profile_id = ProfileIds.Undefined)
        {
            try
            {
                var summaries = new GroupRootSummaryBuilder(GroupDataReader)
                    .BuildForIndicatorIds(new IntListStringParser(indicator_ids).IntList, profile_id)
                    .ToList();
                summaries.Sort();
                return summaries;
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get summaries of a specific list of indicators.
        /// </summary>
        /// <remarks>
        /// The indicators are differentiated by age and sex where appropriate. Returns a list of group root summaries.
        /// </remarks>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("grouproot_summaries/by_profile_id")]
        public IList<GroupRootSummary> GetGroupDataForProfile(int profile_id, int area_type_id)
        {
            try
            {
                var summaries = new GroupRootSummaryBuilder(GroupDataReader)
                    .BuildForProfileAndAreaType(profile_id, area_type_id)
                    .ToList();
                summaries.Sort();
                return summaries;
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get the confidence limits for a funnel plot
        /// </summary>
        /// <param name="comparator_value">Comparator value</param>
        /// <param name="population_min">Minimum population value</param>
        /// <param name="population_max">Maximum population value</param>
        /// <param name="unit_value">Unit value</param>
        /// <param name="year_range">Year range</param>
        [HttpGet]
        [Route("spc_for_dsr_limits")]
        public SpcForDsrLimitsResponseObject GetSpcForDsrLimits(double comparator_value = 0,
            double population_min = 0, double population_max = 0, double unit_value = 0,
            int year_range = 0)
        {
            try
            {
                return new SpcForDsrLimitsAction().GetResponse(comparator_value, population_min,
                    population_max, unit_value, year_range);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get population data used in Profiles for population, ccg's and population without data in db
        /// </summary>
        /// <param name="area_code">Area code</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="data_point_offset">Time period offset from the data point (i.e. latest available time period) [Default is 0]</param>
        [HttpGet]
        [Route("quinary_population")]
        public Dictionary<string, object> GetQuinaryPopulation(string area_code, int area_type_id,
            int data_point_offset = 0)
        {
            try
            {
                return new QuinaryPopulationDataAction().GetPopulationOnly(area_code, area_type_id,
                    data_point_offset);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Summary information for a GP practice
        /// </summary>
        /// <param name="area_code">Area code</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="data_point_offset">Time period offset from the data point (i.e. latest available time period) [Default is 0]</param>
        [HttpGet]
        [Route("quinary_population_summary")]
        public Dictionary<string, object> GetQuinaryPopulationSummary(string area_code, int area_type_id,
            int data_point_offset = 0)
        {
            try
            {
                return new QuinaryPopulationDataAction().GetSummaryOnly(area_code, area_type_id,
                    data_point_offset);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get a list of minimum and maximum value limits for a group
        /// </summary>
        /// <remarks>
        /// Useful for setting limits on charts.
        /// </remarks>
        /// <param name="group_id">Group ID</param>
        /// <param name="area_type_id">Area type ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        [HttpGet]
        [Route("value_limits")]
        public IList<Limits> GetValueLimits(int group_id = 0, int area_type_id = 0,
            string parent_area_code = null)
        {
            try
            {
                return new ValueLimitsAction().GetResponse(group_id, area_type_id, parent_area_code);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get a formatted time period
        /// </summary>
        /// <param name="year">Year</param>
        /// <param name="quarter">Quarter</param>
        /// <param name="month">Month</param>
        /// <param name="year_range">Year range</param>
        /// <param name="year_type_id">Year type ID</param>
        [HttpGet]
        [Route("time_period")]
        public string GetTimePeriod(int year, int quarter, int month, int year_range, int year_type_id)
        {
            try
            {
                return TimePeriodFormatter.GetTimePeriodString(new TimePeriod
                {
                    Month = month,
                    Quarter = quarter,
                    Year = year,
                    YearRange = year_range
                }, year_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get an ordered list of profile group roots for a profile
        /// </summary>
        /// <remarks>
        /// Group roots are returned without data. This service is used for find out the order of indicators within a group.
        /// </remarks>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="area_type_id">Area type ID</param>
        [HttpGet]
        [Route("profile_group_roots")]
        public IList<GroupRoot> GetGroupRoots(int group_id, int area_type_id)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, group_id.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, area_type_id.ToString());
                var parameters = new GroupRootsParameters(nameValues);
                return new JsonBuilderGroupRoots(parameters).GetGroupRoots();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get descriptive statistics for indicators (deprecated endpoint).
        /// </summary>
        [HttpGet]
        [Route("indicator_statistics")]
        [Obsolete("Deprecated, use indicator_statistics/by_indicator_id or indicator_statistics/by_profile_id instead")]
        public Dictionary<int, IndicatorStats> GetIndicatorStatistics(int child_area_type_id,
            string parent_area_code, int? profile_id = null, int? group_id = null,
            string indicator_ids = null, string restrict_to_profile_ids = "", int data_point_offset = 0)
        {
            // Deprecated in favour of more specific methods
            var groupId = group_id ?? 1;
            var profileId = profile_id ?? ProfileIds.Search;

            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, groupId.ToString());
                nameValues.Add(ParameterNames.ProfileId, profileId.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, child_area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                nameValues.Add(DataParameters.ParameterIndicatorIds, indicator_ids);
                nameValues.Add(IndicatorStatsParameters.ParameterDataPointOffset, data_point_offset.ToString());
                var parameters = new IndicatorStatsParameters(nameValues);
                parameters.RestrictResultsToProfileIdList = GetProfileIds(restrict_to_profile_ids);
                return new JsonBuilderIndicatorStats(parameters).GetIndicatorStats();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get descriptive statistics (min, max, median, interquartile range) for the latest values for
        /// a list of indicators
        /// </summary>
        /// <param name="child_area_type_id">Child area type ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="indicator_ids">Comma separated list of indicator IDs</param>
        /// <param name="restrict_to_profile_ids">Comma separated list of profile IDs</param>
        /// <param name="data_point_offset">Offset in years, quarters or months from the most recent time point [default is 0]</param>
        [HttpGet]
        [Route("indicator_statistics/by_indicator_id")]
        public Dictionary<int, IndicatorStats> GetIndicatorStatisticsByIndicatorId(int child_area_type_id,
            string parent_area_code, string indicator_ids = null, string restrict_to_profile_ids = "",
            int data_point_offset = 0)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                // Indicator-based lookup: the group and profile are the generic "search" values.
                nameValues.Add(ParameterNames.GroupIds, GroupIds.Search.ToString());
                nameValues.Add(ParameterNames.ProfileId, ProfileIds.Search.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, child_area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                nameValues.Add(DataParameters.ParameterIndicatorIds, indicator_ids);
                nameValues.Add(IndicatorStatsParameters.ParameterDataPointOffset, data_point_offset.ToString());
                var parameters = new IndicatorStatsParameters(nameValues);
                parameters.RestrictResultsToProfileIdList = GetProfileIds(restrict_to_profile_ids);
                return new JsonBuilderIndicatorStats(parameters).GetIndicatorStats();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get descriptive statistics (min, max, median, interquartile range) for the latest values for
        /// all the indicators in a profile group
        /// </summary>
        /// <param name="group_id">Profile group ID</param>
        /// <param name="child_area_type_id">Child area type ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="profile_id">Profile ID</param>
        /// <param name="data_point_offset">Offset in years, quarters or months from the most recent time point [default is 0]</param>
        [HttpGet]
        [Route("indicator_statistics/by_profile_id")]
        public Dictionary<int, IndicatorStats> GetIndicatorStatisticsByProfileId(int child_area_type_id,
            string parent_area_code, int profile_id, int group_id, int data_point_offset = 0)
        {
            try
            {
                NameValueCollection nameValues = new NameValueCollection();
                nameValues.Add(ParameterNames.GroupIds, group_id.ToString());
                nameValues.Add(ParameterNames.ProfileId, profile_id.ToString());
                nameValues.Add(ParameterNames.AreaTypeId, child_area_type_id.ToString());
                nameValues.Add(ParameterNames.ParentAreaCode, parent_area_code);
                nameValues.Add(IndicatorStatsParameters.ParameterDataPointOffset, data_point_offset.ToString());
                var parameters = new IndicatorStatsParameters(nameValues);
                return new JsonBuilderIndicatorStats(parameters).GetIndicatorStats();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Get descriptive statistics (min, max, median, interquartile range) for the values of
        /// a specific indicator for each available time period
        /// </summary>
        /// <param name="indicator_id">Indicator ID</param>
        /// <param name="sex_id">Sex ID</param>
        /// <param name="age_id">Age ID</param>
        /// <param name="child_area_type_id">Child area type ID</param>
        /// <param name="parent_area_code">Parent area code</param>
        /// <param name="profile_id">Profile ID [optional]</param>
        [HttpGet]
        [Route("indicator_statistics/trends_for_single_indicator")]
        public IList<IndicatorStats> GetIndicatorStatisticsTrendsForIndicator(int indicator_id,
            int sex_id, int age_id, int child_area_type_id, string parent_area_code,
            int profile_id = ProfileIds.Undefined)
        {
            try
            {
                // Indicator + sex + age uniquely identifies the grouping of interest.
                var groupingDifferentiator = new GroupingDifferentiator
                {
                    IndicatorId = indicator_id,
                    SexId = sex_id,
                    AgeId = age_id
                };
                var parentArea = new ParentArea(parent_area_code, child_area_type_id);
                var indicatorMetadata = IndicatorMetadataProvider.Instance.GetIndicatorMetadata(indicator_id);
                var singleGroupingProvider = GetSingleGroupingProvider(GroupDataReader);
                return new BoxPlotPointListBuilder(singleGroupingProvider)
                    .GetBoxPlotPoints(groupingDifferentiator, parentArea, profile_id, indicatorMetadata);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Returns the available data for each combination of indicator ID and area type ID.
        /// </summary>
        /// <remarks>If only the indicator ID is specified then the results will be returned
        /// for every area type for which data is available.
        /// </remarks>
        /// <param name="indicator_id">Filter by indicator ID</param>
        /// <param name="area_type_id">Filter by area type ID [optional]</param>
        [HttpGet]
        [Route("available_data")]
        public IList<GroupingData> GetAvailableDataForGrouping(int? indicator_id = null,
            int? area_type_id = null)
        {
            try
            {
                return GroupDataReader.GetAvailableDataByIndicatorIdAndAreaTypeId(indicator_id, area_type_id);
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Returns the time and person deleted and uploaded data for a given indicator
        /// </summary>
        /// <param name="indicator_id">Indicator ID</param>
        [HttpGet]
        [Route("data_changes")]
        public DataChange GetDataChanges(int indicator_id)
        {
            try
            {
                // NOTE(review): may be null when no audit data exists for the indicator.
                var dataChange = new AuditProvider().GetLatestAuditData(indicator_id);
                if (dataChange != null)
                {
                    // Only serialise user names in test environment
                    dataChange.ShouldSerializeUserNames = !ApplicationConfiguration.Instance.IsEnvironmentLive;
                }
                return dataChange;
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Returns the list of all ages
        /// </summary>
        [HttpGet]
        [Route("all_ages")]
        public IList<Age> GetAllAges()
        {
            try
            {
                var reader = ReaderFactory.GetPholioReader();
                return reader.GetAllAges();
            }
            catch (Exception ex)
            {
                Log(ex);
                throw;
            }
        }

        /// <summary>
        /// Remove when have DI framework
        /// </summary>
        private static SingleGroupingProvider GetSingleGroupingProvider(IGroupDataReader groupDataReader)
        {
            var groupIdProvider = new GroupIdProvider(ReaderFactory.GetProfileReader());
            var singleGroupingProvider = new SingleGroupingProvider(groupDataReader, groupIdProvider);
            return singleGroupingProvider;
        }

        /// <summary>
        /// Builds the comparator map for all child areas of the given parent area.
        /// </summary>
        /// <param name="area_type_id">Area type ID of the child areas</param>
        /// <param name="parent_area_code">Parent area code</param>
        public static ComparatorMap GetComparatorMapForParentArea(int area_type_id, string parent_area_code)
        {
            var parentArea = new ParentArea(parent_area_code, area_type_id);
            return new ComparatorMapBuilder(parentArea).ComparatorMap;
        }
    }
}
// Copyright (c) 2015, Outercurve Foundation. // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // - Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // - Neither the name of the Outercurve Foundation nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:2.0.50727.42 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. 
// </auto-generated>
//------------------------------------------------------------------------------

//
// This source code was auto-generated by wsdl, Version=2.0.50727.42.
//
namespace WebsitePanel.EnterpriseServer {
    using System.Diagnostics;
    using System.Web.Services;
    using System.ComponentModel;
    using System.Web.Services.Protocols;
    using System;
    using System.Xml.Serialization;

    using WebsitePanel.Providers;

    /// <remarks/>
    // NOTE(review): wsdl.exe-generated SOAP client proxy for the esImport web service.
    // Do not hand-edit the members below; regenerate from the service WSDL instead.
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Web.Services.WebServiceBindingAttribute(Name = "esImportSoap", Namespace = "http://smbsaas/websitepanel/enterpriseserver")]
    public partial class esImport : Microsoft.Web.Services3.WebServicesClientProtocol {

        // One completion callback per SOAP operation; created lazily on first async call.
        private System.Threading.SendOrPostCallback GetImportableItemTypesOperationCompleted;

        private System.Threading.SendOrPostCallback GetImportableItemsOperationCompleted;

        private System.Threading.SendOrPostCallback ImportItemsOperationCompleted;

        /// <remarks/>
        public esImport() {
            // Default endpoint URL; NOTE(review): presumably overridden via configuration
            // at runtime — confirm against callers.
            this.Url = "http://localhost/EnterpriseServer/esImport.asmx";
        }

        /// <remarks/>
        public event GetImportableItemTypesCompletedEventHandler GetImportableItemTypesCompleted;

        /// <remarks/>
        public event GetImportableItemsCompletedEventHandler GetImportableItemsCompleted;

        /// <remarks/>
        public event ImportItemsCompletedEventHandler ImportItemsCompleted;

        /// <remarks/>
        // Synchronous SOAP call: returns the importable item types for a package.
        [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/enterpriseserver/GetImportableItemTypes", RequestNamespace = "http://smbsaas/websitepanel/enterpriseserver", ResponseNamespace = "http://smbsaas/websitepanel/enterpriseserver", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)]
        public ServiceProviderItemType[] GetImportableItemTypes(int packageId) {
            object[] results = this.Invoke("GetImportableItemTypes", new object[] {
                        packageId});
            return ((ServiceProviderItemType[])(results[0]));
        }

        /// <remarks/>
        public System.IAsyncResult BeginGetImportableItemTypes(int packageId, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("GetImportableItemTypes", new object[] {
                        packageId}, callback, asyncState);
        }

        /// <remarks/>
        public ServiceProviderItemType[] EndGetImportableItemTypes(System.IAsyncResult asyncResult) {
            object[] results = this.EndInvoke(asyncResult);
            return ((ServiceProviderItemType[])(results[0]));
        }

        /// <remarks/>
        public void GetImportableItemTypesAsync(int packageId) {
            this.GetImportableItemTypesAsync(packageId, null);
        }

        /// <remarks/>
        public void GetImportableItemTypesAsync(int packageId, object userState) {
            if ((this.GetImportableItemTypesOperationCompleted == null)) {
                this.GetImportableItemTypesOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetImportableItemTypesOperationCompleted);
            }
            this.InvokeAsync("GetImportableItemTypes", new object[] {
                        packageId}, this.GetImportableItemTypesOperationCompleted, userState);
        }

        private void OnGetImportableItemTypesOperationCompleted(object arg) {
            if ((this.GetImportableItemTypesCompleted != null)) {
                System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg));
                this.GetImportableItemTypesCompleted(this, new GetImportableItemTypesCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState));
            }
        }

        /// <remarks/>
        // Synchronous SOAP call: returns the importable items for a package and item type.
        [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/enterpriseserver/GetImportableItems", RequestNamespace = "http://smbsaas/websitepanel/enterpriseserver", ResponseNamespace = "http://smbsaas/websitepanel/enterpriseserver", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)]
        public string[] GetImportableItems(int packageId, int itemTypeId) {
            object[] results = this.Invoke("GetImportableItems", new object[] {
                        packageId,
                        itemTypeId});
            return ((string[])(results[0]));
        }

        /// <remarks/>
        public System.IAsyncResult BeginGetImportableItems(int packageId, int itemTypeId, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("GetImportableItems", new object[] {
                        packageId,
                        itemTypeId}, callback, asyncState);
        }

        /// <remarks/>
        public string[] EndGetImportableItems(System.IAsyncResult asyncResult) {
            object[] results = this.EndInvoke(asyncResult);
            return ((string[])(results[0]));
        }

        /// <remarks/>
        public void GetImportableItemsAsync(int packageId, int itemTypeId) {
            this.GetImportableItemsAsync(packageId, itemTypeId, null);
        }

        /// <remarks/>
        public void GetImportableItemsAsync(int packageId, int itemTypeId, object userState) {
            if ((this.GetImportableItemsOperationCompleted == null)) {
                this.GetImportableItemsOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetImportableItemsOperationCompleted);
            }
            this.InvokeAsync("GetImportableItems", new object[] {
                        packageId,
                        itemTypeId}, this.GetImportableItemsOperationCompleted, userState);
        }

        private void OnGetImportableItemsOperationCompleted(object arg) {
            if ((this.GetImportableItemsCompleted != null)) {
                System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg));
                this.GetImportableItemsCompleted(this, new GetImportableItemsCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState));
            }
        }

        /// <remarks/>
        // Synchronous SOAP call: imports the given items into the package.
        [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/enterpriseserver/ImportItems", RequestNamespace = "http://smbsaas/websitepanel/enterpriseserver", ResponseNamespace = "http://smbsaas/websitepanel/enterpriseserver", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)]
        public int ImportItems(bool async, string taskId, int packageId, string[] items) {
            object[] results = this.Invoke("ImportItems", new object[] {
                        async,
                        taskId,
                        packageId,
                        items});
            return ((int)(results[0]));
        }

        /// <remarks/>
        public System.IAsyncResult BeginImportItems(bool async, string taskId, int packageId, string[] items, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("ImportItems", new object[] {
                        async,
                        taskId,
                        packageId,
                        items}, callback, asyncState);
        }

        /// <remarks/>
        public int EndImportItems(System.IAsyncResult asyncResult) {
            object[] results = this.EndInvoke(asyncResult);
            return ((int)(results[0]));
        }

        /// <remarks/>
        public void ImportItemsAsync(bool async, string taskId, int packageId, string[] items) {
            this.ImportItemsAsync(async, taskId, packageId, items, null);
        }

        /// <remarks/>
        public void ImportItemsAsync(bool async, string taskId, int packageId, string[] items, object userState) {
            if ((this.ImportItemsOperationCompleted == null)) {
                this.ImportItemsOperationCompleted = new System.Threading.SendOrPostCallback(this.OnImportItemsOperationCompleted);
            }
            this.InvokeAsync("ImportItems", new object[] {
                        async,
                        taskId,
                        packageId,
                        items}, this.ImportItemsOperationCompleted, userState);
        }

        private void OnImportItemsOperationCompleted(object arg) {
            if ((this.ImportItemsCompleted != null)) {
                System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg));
                this.ImportItemsCompleted(this, new ImportItemsCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState));
            }
        }

        /// <remarks/>
        public new void CancelAsync(object userState) {
            base.CancelAsync(userState);
        }
    }

    /// <remarks/>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    public delegate void GetImportableItemTypesCompletedEventHandler(object sender, GetImportableItemTypesCompletedEventArgs e);

    /// <remarks/>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    public partial class GetImportableItemTypesCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs {

        private object[] results;

        internal GetImportableItemTypesCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) :
                base(exception, cancelled, userState) {
            this.results = results;
        }

        /// <remarks/>
        public ServiceProviderItemType[] Result {
            get {
                // Throws the stored exception (or cancellation) before exposing the result.
                this.RaiseExceptionIfNecessary();
                return ((ServiceProviderItemType[])(this.results[0]));
            }
        }
    }

    /// <remarks/>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    public delegate void GetImportableItemsCompletedEventHandler(object sender, GetImportableItemsCompletedEventArgs e);

    /// <remarks/>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    public partial class GetImportableItemsCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs {

        private object[] results;

        internal GetImportableItemsCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) :
                base(exception, cancelled, userState) {
            this.results = results;
        }

        /// <remarks/>
        public string[] Result {
            get {
                this.RaiseExceptionIfNecessary();
                return ((string[])(this.results[0]));
            }
        }
    }

    /// <remarks/>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    public delegate void ImportItemsCompletedEventHandler(object sender, ImportItemsCompletedEventArgs e);

    /// <remarks/>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    public partial class ImportItemsCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs {

        private object[] results;

        internal ImportItemsCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) :
                base(exception, cancelled, userState) {
            this.results = results;
        }

        /// <remarks/>
        public int Result {
            get {
                this.RaiseExceptionIfNecessary();
                return ((int)(this.results[0]));
            }
        }
    }
}
//#define TRACE_SERIALIZATION

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.Serialization;
using System.Text;
using Orleans.CodeGeneration;
using Orleans.GrainDirectory;
using Orleans.Runtime;

namespace Orleans.Serialization
{
    /// <summary>
    /// Reader for Orleans binary token streams.
    /// Reads little-endian primitive values (via BitConverter) sequentially from a
    /// list of byte-array segments, transparently crossing segment boundaries.
    /// </summary>
    public class BinaryTokenStreamReader
    {
        private readonly IList<ArraySegment<byte>> buffers;     // the input segments, in read order
        private int currentSegmentIndex;                        // index into buffers of the segment being read
        private ArraySegment<byte> currentSegment;
        private byte[] currentBuffer;                           // currentSegment.Array (null once exhausted)
        private int currentOffset;                              // absolute offset into currentBuffer
        private int totalProcessedBytes;                        // bytes consumed from fully-read segments
        private readonly int totalLength;                       // sum of all segment counts

        private static readonly ArraySegment<byte> emptySegment = new ArraySegment<byte>(new byte[0]);

        /// <summary>
        /// Create a new BinaryTokenStreamReader to read from the specified input byte array.
        /// </summary>
        /// <param name="input">Input binary data to be tokenized.</param>
        public BinaryTokenStreamReader(byte[] input)
            : this(new List<ArraySegment<byte>> { new ArraySegment<byte>(input) })
        {
        }

        /// <summary>
        /// Create a new BinaryTokenStreamReader to read from the specified input buffers.
        /// </summary>
        /// <param name="buffs">The list of ArraySegments to use for the data.</param>
        public BinaryTokenStreamReader(IList<ArraySegment<byte>> buffs)
        {
            buffers = buffs;
            totalProcessedBytes = 0;
            currentSegmentIndex = 0;
            currentSegment = buffs[0];
            currentBuffer = currentSegment.Array;
            currentOffset = currentSegment.Offset;
            totalLength = buffs.Sum(b => b.Count);
            Trace("Starting new stream reader");
        }

        /// <summary>
        /// Create a new BinaryTokenStreamReader to read from the specified input buffer.
        /// </summary>
        /// <param name="buff">ArraySegment to use for the data.</param>
        public BinaryTokenStreamReader(ArraySegment<byte> buff)
            : this(new[] { buff })
        {
        }

        /// <summary> Current read position in the stream. </summary>
        public int CurrentPosition
        {
            // Offset within the current segment plus all bytes consumed from prior segments.
            get { return currentOffset + totalProcessedBytes - currentSegment.Offset; }
        }

        /// <summary>
        /// Creates a copy of the current stream reader.
        /// NOTE(review): the copy shares the buffer list but starts reading from the
        /// BEGINNING (the constructor resets position), not from this reader's position.
        /// </summary>
        /// <returns>The new copy</returns>
        public BinaryTokenStreamReader Copy()
        {
            return new BinaryTokenStreamReader(this.buffers);
        }

        // Advance to the next segment in the list; past the last segment the reader
        // points at an empty sentinel segment.
        private void StartNextSegment()
        {
            totalProcessedBytes += currentSegment.Count;
            currentSegmentIndex++;
            if (currentSegmentIndex < buffers.Count)
            {
                currentSegment = buffers[currentSegmentIndex];
                currentBuffer = currentSegment.Array;
                currentOffset = currentSegment.Offset;
            }
            else
            {
                currentSegment = emptySegment;
                currentBuffer = null;
                currentOffset = 0;
            }
        }

        // Convenience overload that discards the safeToUse flag.
        private ArraySegment<byte> CheckLength(int n)
        {
            bool ignore;
            return CheckLength(n, out ignore);
        }

        // Consume n bytes and return them as a segment. If the bytes lie within the
        // current segment, a view over the shared input buffer is returned and
        // safeToUse is false (the caller must copy before keeping a reference).
        // If the read spans segments, the bytes are copied into a fresh array and
        // safeToUse is true. Throws SerializationException on reads past the end.
        private ArraySegment<byte> CheckLength(int n, out bool safeToUse)
        {
            safeToUse = false;
            if (n == 0)
            {
                safeToUse = true;
                return emptySegment;
            }

            if ((CurrentPosition + n > totalLength))
            {
                throw new SerializationException(
                    String.Format("Attempt to read past the end of the input stream: CurrentPosition={0}, n={1}, totalLength={2}", CurrentPosition, n, totalLength));
            }

            if (currentSegmentIndex >= buffers.Count)
            {
                throw new SerializationException(
                    String.Format("Attempt to read past buffers.Count: currentSegmentIndex={0}, buffers.Count={1}.", currentSegmentIndex, buffers.Count));
            }

            if (currentOffset == currentSegment.Offset + currentSegment.Count)
            {
                StartNextSegment();
            }

            // Fast path: the whole read fits in the current segment.
            if (currentOffset + n <= currentSegment.Offset + currentSegment.Count)
            {
                var result = new ArraySegment<byte>(currentBuffer, currentOffset, n);
                currentOffset += n;
                if (currentOffset >= currentSegment.Offset + currentSegment.Count)
                {
                    StartNextSegment();
                }
                return result;
            }

            // Slow path: gather bytes from multiple segments into a temporary array.
            var temp = new byte[n];
            var i = 0;
            while (i < n)
            {
                var bytesFromThisBuffer = Math.Min(currentSegment.Offset + currentSegment.Count - currentOffset, n - i);
                Buffer.BlockCopy(currentBuffer, currentOffset, temp, i, bytesFromThisBuffer);
                i += bytesFromThisBuffer;
                currentOffset += bytesFromThisBuffer;
                if (currentOffset >= currentSegment.Offset + currentSegment.Count)
                {
                    StartNextSegment();
                }
            }
            safeToUse = true;
            return new ArraySegment<byte>(temp);
        }

        /// <summary> Read an <c>Int32</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public int ReadInt()
        {
            var buff = CheckLength(sizeof(int));
            var val = BitConverter.ToInt32(buff.Array, buff.Offset);
            Trace("--Read int {0}", val);
            return val;
        }

        /// <summary> Read an <c>UInt32</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public uint ReadUInt()
        {
            var buff = CheckLength(sizeof(uint));
            var val = BitConverter.ToUInt32(buff.Array, buff.Offset);
            Trace("--Read uint {0}", val);
            return val;
        }

        /// <summary> Read an <c>Int16</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public short ReadShort()
        {
            var buff = CheckLength(sizeof(short));
            var val = BitConverter.ToInt16(buff.Array, buff.Offset);
            Trace("--Read short {0}", val);
            return val;
        }

        /// <summary> Read an <c>UInt16</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public ushort ReadUShort()
        {
            var buff = CheckLength(sizeof(ushort));
            var val = BitConverter.ToUInt16(buff.Array, buff.Offset);
            Trace("--Read ushort {0}", val);
            return val;
        }

        /// <summary> Read an <c>Int64</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public long ReadLong()
        {
            var buff = CheckLength(sizeof(long));
            var val = BitConverter.ToInt64(buff.Array, buff.Offset);
            Trace("--Read long {0}", val);
            return val;
        }

        /// <summary> Read an <c>UInt64</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public ulong ReadULong()
        {
            var buff = CheckLength(sizeof(ulong));
            var val = BitConverter.ToUInt64(buff.Array, buff.Offset);
            Trace("--Read ulong {0}", val);
            return val;
        }

        /// <summary> Read an <c>float</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public float ReadFloat()
        {
            var buff = CheckLength(sizeof(float));
            var val = BitConverter.ToSingle(buff.Array, buff.Offset);
            Trace("--Read float {0}", val);
            return val;
        }

        /// <summary> Read an <c>double</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public double ReadDouble()
        {
            var buff = CheckLength(sizeof(double));
            var val = BitConverter.ToDouble(buff.Array, buff.Offset);
            Trace("--Read double {0}", val);
            return val;
        }

        /// <summary> Read an <c>decimal</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public decimal ReadDecimal()
        {
            // A decimal is serialized as four Int32 parts (see decimal(int[]) constructor).
            var buff = CheckLength(4 * sizeof(int));
            var raw = new int[4];
            Trace("--Read decimal");
            var n = buff.Offset;
            for (var i = 0; i < 4; i++)
            {
                raw[i] = BitConverter.ToInt32(buff.Array, n);
                n += sizeof(int);
            }
            return new decimal(raw);
        }

        /// <summary> Read an <c>string</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public string ReadString()
        {
            // Length prefix: 0 = empty string, -1 = null, otherwise UTF-8 byte count.
            var n = ReadInt();
            if (n == 0)
            {
                Trace("--Read empty string");
                return String.Empty;
            }

            string s = null;
            // a length of -1 indicates that the string is null.
            if (-1 != n)
            {
                var buff = CheckLength(n);
                s = Encoding.UTF8.GetString(buff.Array, buff.Offset, n);
            }
            Trace("--Read string '{0}'", s);
            return s;
        }

        /// <summary> Read the next bytes from the stream. </summary>
        /// <param name="count">Number of bytes to read.</param>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public byte[] ReadBytes(int count)
        {
            if (count == 0)
            {
                return new byte[0];
            }
            bool safeToUse;
            var buff = CheckLength(count, out safeToUse);
            Trace("--Read byte array of length {0}", count);
            if (!safeToUse)
            {
                // The segment aliases the shared input buffer — copy before returning.
                var result = new byte[count];
                Array.Copy(buff.Array, buff.Offset, result, 0, count);
                return result;
            }
            else
            {
                // CheckLength built a fresh array of exactly 'count' bytes; return it directly.
                return buff.Array;
            }
        }

        /// <summary> Read the next bytes from the stream. </summary>
        /// <param name="destination">Output array to store the returned data in.</param>
        /// <param name="offset">Offset into the destination array to write to.</param>
        /// <param name="count">Number of bytes to read.</param>
        public void ReadByteArray(byte[] destination, int offset, int count)
        {
            if (offset + count > destination.Length)
            {
                throw new ArgumentOutOfRangeException("count", "Reading into an array that is too small");
            }
            var buff = CheckLength(count);
            Buffer.BlockCopy(buff.Array, buff.Offset, destination, offset, count);
        }

        /// <summary> Read an <c>char</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public char ReadChar()
        {
            // Chars are serialized as 16-bit values.
            Trace("--Read char");
            return Convert.ToChar(ReadShort());
        }

        /// <summary> Read an <c>byte</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public byte ReadByte()
        {
            var buff = CheckLength(1);
            Trace("--Read byte");
            return buff.Array[buff.Offset];
        }

        /// <summary> Read an <c>sbyte</c> value from the stream.
/// </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public sbyte ReadSByte()
        {
            var buff = CheckLength(1);
            Trace("--Read sbyte");
            return unchecked((sbyte)(buff.Array[buff.Offset]));
        }

        /// <summary> Read an <c>IPAddress</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public IPAddress ReadIPAddress()
        {
            // Addresses are serialized as 16 bytes; an IPv4 address is stored in the
            // last 4 bytes with the first 12 bytes zero.
            var buff = CheckLength(16);
            bool v4 = true;
            for (var i = 0; i < 12; i++)
            {
                if (buff.Array[buff.Offset + i] != 0)
                {
                    v4 = false;
                    break;
                }
            }
            if (v4)
            {
                var v4Bytes = new byte[4];
                for (var i = 0; i < 4; i++)
                {
                    v4Bytes[i] = buff.Array[buff.Offset + 12 + i];
                }
                return new IPAddress(v4Bytes);
            }
            else
            {
                var v6Bytes = new byte[16];
                for (var i = 0; i < 16; i++)
                {
                    v6Bytes[i] = buff.Array[buff.Offset + i];
                }
                return new IPAddress(v6Bytes);
            }
        }

        /// <summary> Read an <c>IPEndPoint</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public IPEndPoint ReadIPEndPoint()
        {
            var addr = ReadIPAddress();
            var port = ReadInt();
            return new IPEndPoint(addr, port);
        }

        /// <summary> Read an <c>SiloAddress</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public SiloAddress ReadSiloAddress()
        {
            var ep = ReadIPEndPoint();
            var gen = ReadInt();
            return SiloAddress.New(ep, gen);
        }

        /// <summary> Read an <c>GrainId</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal GrainId ReadGrainId()
        {
            UniqueKey key = ReadUniqueKey();
            return GrainId.GetGrainId(key);
        }

        /// <summary> Read an <c>ActivationId</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal ActivationId ReadActivationId()
        {
            UniqueKey key = ReadUniqueKey();
            return ActivationId.GetActivationId(key);
        }

        // Reads the three 64-bit components of a UniqueKey plus its key extension string.
        internal UniqueKey ReadUniqueKey()
        {
            ulong n0 = ReadULong();
            ulong n1 = ReadULong();
            ulong typeCodeData = ReadULong();
            string keyExt = ReadString();
            return UniqueKey.NewKey(n0, n1, typeCodeData, keyExt);
        }

        // Guids are serialized as their 16 raw bytes.
        internal Guid ReadGuid()
        {
            byte[] bytes = ReadBytes(16);
            return new Guid(bytes);
        }

        // MultiClusterStatus is serialized as a single byte.
        internal MultiClusterStatus ReadMultiClusterStatus()
        {
            byte val = ReadByte();
            return (MultiClusterStatus) val;
        }

        /// <summary> Read an <c>ActivationAddress</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal ActivationAddress ReadActivationAddress()
        {
            var silo = ReadSiloAddress();
            var grain = ReadGrainId();
            var act = ReadActivationId();
            var mcstatus = ReadMultiClusterStatus();
            // Zero values are sentinels for "not set" and are mapped back to null.
            if (silo.Equals(SiloAddress.Zero))
                silo = null;

            if (act.Equals(ActivationId.Zero))
                act = null;

            return ActivationAddress.GetAddress(silo, grain, act, mcstatus);
        }

        /// <summary>
        /// Read a block of data into the specified output <c>Array</c>.
        /// </summary>
        /// <param name="array">Array to output the data to.</param>
        /// <param name="n">Number of bytes to read.</param>
        public void ReadBlockInto(Array array, int n)
        {
            var buff = CheckLength(n);
            Buffer.BlockCopy(buff.Array, buff.Offset, array, 0, n);
            Trace("--Read block of {0} bytes", n);
        }

        /// <summary>
        /// Peek at the next token in this input stream without consuming it.
        /// NOTE(review): unlike ReadToken, no end-of-stream length check is performed here.
        /// </summary>
        /// <returns>Next token that will be read from the stream.</returns>
        internal SerializationTokenType PeekToken()
        {
            if (currentOffset == currentSegment.Count + currentSegment.Offset)
                StartNextSegment();

            return (SerializationTokenType)currentBuffer[currentOffset];
        }

        /// <summary> Read a <c>SerializationTokenType</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal SerializationTokenType ReadToken()
        {
            var buff = CheckLength(1);
            Trace("--Read token {0}", (SerializationTokenType)buff.Array[buff.Offset]);
            return (SerializationTokenType)buff.Array[buff.Offset];
        }

        // Reads one token and, if it denotes a simple (self-contained) value, reads that
        // value as well. Returns false — with result == null — when the token requires
        // full deserialization by the caller.
        internal bool TryReadSimpleType(out object result, out SerializationTokenType token)
        {
            token = ReadToken();
            byte[] bytes;
            switch (token)
            {
                case SerializationTokenType.True: result = true; break;
                case SerializationTokenType.False: result = false; break;
                case SerializationTokenType.Null: result = null; break;
                case SerializationTokenType.Object: result = new object(); break;
                case SerializationTokenType.Int: result = ReadInt(); break;
                case SerializationTokenType.Uint: result = ReadUInt(); break;
                case SerializationTokenType.Short: result = ReadShort(); break;
                case SerializationTokenType.Ushort: result = ReadUShort(); break;
                case SerializationTokenType.Long: result = ReadLong(); break;
                case SerializationTokenType.Ulong: result = ReadULong(); break;
                case SerializationTokenType.Byte: result = ReadByte(); break;
                case SerializationTokenType.Sbyte: result = ReadSByte(); break;
                case SerializationTokenType.Float: result = ReadFloat(); break;
                case SerializationTokenType.Double: result = ReadDouble(); break;
                case SerializationTokenType.Decimal: result = ReadDecimal(); break;
                case SerializationTokenType.String: result = ReadString(); break;
                case SerializationTokenType.Character: result = ReadChar(); break;
                case SerializationTokenType.Guid: bytes = ReadBytes(16); result = new Guid(bytes); break;
                case SerializationTokenType.Date: result = DateTime.FromBinary(ReadLong()); break;
                case SerializationTokenType.TimeSpan: result = new TimeSpan(ReadLong()); break;
                case SerializationTokenType.GrainId: result = ReadGrainId(); break;
                case SerializationTokenType.ActivationId: result = ReadActivationId(); break;
                case SerializationTokenType.SiloAddress: result = ReadSiloAddress(); break;
                case SerializationTokenType.ActivationAddress: result = ReadActivationAddress(); break;
                case SerializationTokenType.IpAddress: result = ReadIPAddress(); break;
                case SerializationTokenType.IpEndPoint: result = ReadIPEndPoint(); break;
                case SerializationTokenType.CorrelationId: result = new CorrelationId(ReadBytes(CorrelationId.SIZE_BYTES)); break;
                default: result = null; return false;
            }
            return true;
        }

        /// <summary> Read a <c>Type</c> value from the stream. </summary>
        /// <param name="expected">Expected Type, if known.</param>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public Type ReadFullTypeHeader(Type expected = null)
        {
            var token = ReadToken();

            if (token == SerializationTokenType.ExpectedType)
            {
                return expected;
            }

            var t = CheckSpecialTypeCode(token);
            if (t != null)
            {
                return t;
            }

            if (token == SerializationTokenType.SpecifiedType)
            {
#if TRACE_SERIALIZATION
                var tt = ReadSpecifiedTypeHeader();
                Trace("--Read specified type header for type {0}", tt);
                return tt;
#else
                return ReadSpecifiedTypeHeader();
#endif
            }

            // NOTE(review): message reads "...'token" — a space appears to be missing
            // after the closing quote; left unchanged here as it is a runtime string.
            throw new SerializationException("Invalid '" + token + "'token in input stream where full type header is expected");
        }

        // Maps a token to the Type it denotes for the fixed set of specially-encoded
        // types; returns null for any other token.
        internal static Type CheckSpecialTypeCode(SerializationTokenType token)
        {
            switch (token)
            {
                case SerializationTokenType.Boolean: return typeof(bool);
                case SerializationTokenType.Int: return typeof(int);
                case SerializationTokenType.Short: return typeof(short);
                case SerializationTokenType.Long: return typeof(long);
                case SerializationTokenType.Sbyte: return typeof(sbyte);
                case SerializationTokenType.Uint: return typeof(uint);
                case SerializationTokenType.Ushort: return typeof(ushort);
                case SerializationTokenType.Ulong: return typeof(ulong);
                case SerializationTokenType.Byte: return typeof(byte);
                case SerializationTokenType.Float: return typeof(float);
                case SerializationTokenType.Double: return typeof(double);
                case SerializationTokenType.Decimal: return typeof(decimal);
                case SerializationTokenType.String: return typeof(string);
                case SerializationTokenType.Character: return typeof(char);
                case SerializationTokenType.Guid: return typeof(Guid);
                case SerializationTokenType.Date: return typeof(DateTime);
                case SerializationTokenType.TimeSpan: return typeof(TimeSpan);
                case SerializationTokenType.IpAddress: return typeof(IPAddress);
                case SerializationTokenType.IpEndPoint: return typeof(IPEndPoint);
                case SerializationTokenType.GrainId: return typeof(GrainId);
                case SerializationTokenType.ActivationId: return typeof(ActivationId);
                case SerializationTokenType.SiloAddress: return typeof(SiloAddress);
                case SerializationTokenType.ActivationAddress: return typeof(ActivationAddress);
                case SerializationTokenType.CorrelationId: return typeof(CorrelationId);
#if false // Note: not yet implemented as simple types on the Writer side
                case SerializationTokenType.Object: return typeof(Object);
                case SerializationTokenType.ByteArray: return typeof(byte[]);
                case SerializationTokenType.ShortArray: return typeof(short[]);
                case SerializationTokenType.IntArray: return typeof(int[]);
                case SerializationTokenType.LongArray: return typeof(long[]);
                case SerializationTokenType.UShortArray: return typeof(ushort[]);
                case SerializationTokenType.UIntArray: return typeof(uint[]);
                case SerializationTokenType.ULongArray: return typeof(ulong[]);
                case SerializationTokenType.FloatArray: return typeof(float[]);
                case SerializationTokenType.DoubleArray: return typeof(double[]);
                case SerializationTokenType.CharArray: return typeof(char[]);
                case SerializationTokenType.BoolArray: return typeof(bool[]);
#endif
                default:
                    break;
            }
            return null;
        }

        /// <summary> Read a <c>Type</c> value from the stream.
/// </summary>
internal Type ReadSpecifiedTypeHeader()
{
    // Assumes that the SpecifiedType token has already been read
    var token = ReadToken();
    switch (token)
    {
        case SerializationTokenType.Boolean: return typeof(bool);
        case SerializationTokenType.Int: return typeof(int);
        case SerializationTokenType.Short: return typeof(short);
        case SerializationTokenType.Long: return typeof(long);
        case SerializationTokenType.Sbyte: return typeof(sbyte);
        case SerializationTokenType.Uint: return typeof(uint);
        case SerializationTokenType.Ushort: return typeof(ushort);
        case SerializationTokenType.Ulong: return typeof(ulong);
        case SerializationTokenType.Byte: return typeof(byte);
        case SerializationTokenType.Float: return typeof(float);
        case SerializationTokenType.Double: return typeof(double);
        case SerializationTokenType.Decimal: return typeof(decimal);
        case SerializationTokenType.String: return typeof(string);
        case SerializationTokenType.Character: return typeof(char);
        case SerializationTokenType.Guid: return typeof(Guid);
        case SerializationTokenType.Date: return typeof(DateTime);
        case SerializationTokenType.TimeSpan: return typeof(TimeSpan);
        case SerializationTokenType.IpAddress: return typeof(IPAddress);
        case SerializationTokenType.IpEndPoint: return typeof(IPEndPoint);
        case SerializationTokenType.GrainId: return typeof(GrainId);
        case SerializationTokenType.ActivationId: return typeof(ActivationId);
        case SerializationTokenType.SiloAddress: return typeof(SiloAddress);
        case SerializationTokenType.ActivationAddress: return typeof(ActivationAddress);
        case SerializationTokenType.CorrelationId: return typeof(CorrelationId);
        case SerializationTokenType.Request: return typeof(InvokeMethodRequest);
        case SerializationTokenType.Response: return typeof(Response);
        case SerializationTokenType.StringObjDict: return typeof(Dictionary<string, object>);
        case SerializationTokenType.Object: return typeof(Object);

        // Tuple arity is encoded as an offset from the Tuple base token; the
        // generic arguments follow in the stream as full type headers.
        case SerializationTokenType.Tuple + 1:
            Trace("----Reading type info for a Tuple'1");
            return typeof(Tuple<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.Tuple + 2:
            Trace("----Reading type info for a Tuple'2");
            return typeof(Tuple<,>).MakeGenericType(ReadGenericArguments(2));
        case SerializationTokenType.Tuple + 3:
            Trace("----Reading type info for a Tuple'3");
            return typeof(Tuple<,,>).MakeGenericType(ReadGenericArguments(3));
        case SerializationTokenType.Tuple + 4:
            Trace("----Reading type info for a Tuple'4");
            return typeof(Tuple<,,,>).MakeGenericType(ReadGenericArguments(4));
        case SerializationTokenType.Tuple + 5:
            Trace("----Reading type info for a Tuple'5");
            return typeof(Tuple<,,,,>).MakeGenericType(ReadGenericArguments(5));
        case SerializationTokenType.Tuple + 6:
            Trace("----Reading type info for a Tuple'6");
            return typeof(Tuple<,,,,,>).MakeGenericType(ReadGenericArguments(6));
        case SerializationTokenType.Tuple + 7:
            Trace("----Reading type info for a Tuple'7");
            return typeof(Tuple<,,,,,,>).MakeGenericType(ReadGenericArguments(7));

        // Array rank is likewise encoded as an offset from the Array base
        // token; the element type follows as a full type header.
        case SerializationTokenType.Array + 1:
            var et1 = ReadFullTypeHeader();
            return et1.MakeArrayType();
        case SerializationTokenType.Array + 2:
            var et2 = ReadFullTypeHeader();
            return et2.MakeArrayType(2);
        case SerializationTokenType.Array + 3:
            var et3 = ReadFullTypeHeader();
            return et3.MakeArrayType(3);
        case SerializationTokenType.Array + 4:
            var et4 = ReadFullTypeHeader();
            return et4.MakeArrayType(4);
        case SerializationTokenType.Array + 5:
            var et5 = ReadFullTypeHeader();
            return et5.MakeArrayType(5);
        case SerializationTokenType.Array + 6:
            var et6 = ReadFullTypeHeader();
            return et6.MakeArrayType(6);
        case SerializationTokenType.Array + 7:
            var et7 = ReadFullTypeHeader();
            return et7.MakeArrayType(7);
        case SerializationTokenType.Array + 8:
            var et8 = ReadFullTypeHeader();
            return et8.MakeArrayType(8);

        // Generic collections: the generic arguments follow in the stream.
        case SerializationTokenType.List: return typeof(List<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.Dictionary: return typeof(Dictionary<,>).MakeGenericType(ReadGenericArguments(2));
        case SerializationTokenType.KeyValuePair: return typeof(KeyValuePair<,>).MakeGenericType(ReadGenericArguments(2));
        case SerializationTokenType.Set: return typeof(HashSet<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.SortedList: return typeof(SortedList<,>).MakeGenericType(ReadGenericArguments(2));
        case SerializationTokenType.SortedSet: return typeof(SortedSet<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.Stack: return typeof(Stack<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.Queue: return typeof(Queue<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.LinkedList: return typeof(LinkedList<>).MakeGenericType(ReadGenericArguments(1));
        case SerializationTokenType.Nullable: return typeof(Nullable<>).MakeGenericType(ReadGenericArguments(1));

        // Primitive single-dimensional arrays get dedicated tokens.
        case SerializationTokenType.ByteArray: return typeof(byte[]);
        case SerializationTokenType.ShortArray: return typeof(short[]);
        case SerializationTokenType.IntArray: return typeof(int[]);
        case SerializationTokenType.LongArray: return typeof(long[]);
        case SerializationTokenType.UShortArray: return typeof(ushort[]);
        case SerializationTokenType.UIntArray: return typeof(uint[]);
        case SerializationTokenType.ULongArray: return typeof(ulong[]);
        case SerializationTokenType.FloatArray: return typeof(float[]);
        case SerializationTokenType.DoubleArray: return typeof(double[]);
        case SerializationTokenType.CharArray: return typeof(char[]);
        case SerializationTokenType.BoolArray: return typeof(bool[]);

        // Any other type is written by name and resolved through the
        // SerializationManager's type-name cache.
        case SerializationTokenType.NamedType:
            var typeName = ReadString();
            try
            {
                return SerializationManager.ResolveTypeName(typeName);
            }
            catch (TypeAccessException ex)
            {
                // Re-wrap so the offending type name appears in the message.
                throw new TypeAccessException("Named type \"" + typeName + "\" is invalid: " + ex.Message);
            }
        default:
            break;
    }
    throw new SerializationException("Unexpected '" + token + "' found when expecting a type reference");
}

/// <summary>
/// Reads <paramref name="n"/> consecutive full type headers, used as the
/// generic arguments of a constructed type being deserialized.
/// </summary>
private Type[] ReadGenericArguments(int n)
{
    Trace("About to read {0} generic arguments", n);
    var args = new Type[n];
    for (var i = 0; i < n; i++)
    {
        args[i] = ReadFullTypeHeader();
    }
    Trace("Finished reading {0} generic arguments", n);
    return args;
}

// Lazily-opened trace log; only used when TRACE_SERIALIZATION is defined.
private StreamWriter trace;

/// <summary>
/// Appends a formatted message plus the current stream offset to a per-run
/// trace file. Compiled out entirely unless TRACE_SERIALIZATION is defined.
/// </summary>
[Conditional("TRACE_SERIALIZATION")]
private void Trace(string format, params object[] args)
{
    if (trace == null)
    {
        // NOTE(review): hard-coded d:\ path -- debug-only aid, fails on machines
        // without a D: drive; harmless in normal builds since the whole method
        // is removed by the Conditional attribute.
        var path = String.Format("d:\\Trace-{0}.{1}.{2}.txt", DateTime.UtcNow.Hour, DateTime.UtcNow.Minute, DateTime.UtcNow.Ticks);
        Console.WriteLine("Opening trace file at '{0}'", path);
        trace = File.CreateText(path);
    }
    trace.Write(format, args);
    trace.WriteLine(" at offset {0}", CurrentPosition);
    trace.Flush();
}
}
}
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------

// **NOTE** This file was generated by a tool and any changes will be overwritten.

namespace Microsoft.Graph
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Net.Http;
    using System.Threading;
    using System.Linq.Expressions;

    /// <summary>
    /// The type WorkbookRangeRequest.
    /// </summary>
    public partial class WorkbookRangeRequest : BaseRequest, IWorkbookRangeRequest
    {
        /// <summary>
        /// Constructs a new WorkbookRangeRequest.
        /// </summary>
        /// <param name="requestUrl">The URL for the built request.</param>
        /// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
        /// <param name="options">Query and header option name value pairs for the request.</param>
        public WorkbookRangeRequest(string requestUrl, IBaseClient client, IEnumerable<Option> options)
            : base(requestUrl, client, options)
        {
        }

        /// <summary>
        /// Creates the specified WorkbookRange using POST.
        /// </summary>
        /// <param name="workbookRangeToCreate">The WorkbookRange to create.</param>
        /// <returns>The created WorkbookRange.</returns>
        public System.Threading.Tasks.Task<WorkbookRange> CreateAsync(WorkbookRange workbookRangeToCreate)
        {
            // Delegate to the cancellable overload with a no-op token.
            return this.CreateAsync(workbookRangeToCreate, CancellationToken.None);
        }

        /// <summary>
        /// Creates the specified WorkbookRange using POST.
        /// </summary>
        /// <param name="workbookRangeToCreate">The WorkbookRange to create.</param>
        /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
        /// <returns>The created WorkbookRange.</returns>
        public async System.Threading.Tasks.Task<WorkbookRange> CreateAsync(WorkbookRange workbookRangeToCreate, CancellationToken cancellationToken)
        {
            this.ContentType = "application/json";
            this.Method = "POST";
            var created = await this.SendAsync<WorkbookRange>(workbookRangeToCreate, cancellationToken).ConfigureAwait(false);
            this.InitializeCollectionProperties(created);
            return created;
        }

        /// <summary>
        /// Deletes the specified WorkbookRange.
        /// </summary>
        /// <returns>The task to await.</returns>
        public System.Threading.Tasks.Task DeleteAsync()
        {
            // Delegate to the cancellable overload with a no-op token.
            return this.DeleteAsync(CancellationToken.None);
        }

        /// <summary>
        /// Deletes the specified WorkbookRange.
        /// </summary>
        /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
        /// <returns>The task to await.</returns>
        public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken)
        {
            this.Method = "DELETE";
            await this.SendAsync<WorkbookRange>(null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Gets the specified WorkbookRange.
        /// </summary>
        /// <returns>The WorkbookRange.</returns>
        public System.Threading.Tasks.Task<WorkbookRange> GetAsync()
        {
            // Delegate to the cancellable overload with a no-op token.
            return this.GetAsync(CancellationToken.None);
        }

        /// <summary>
        /// Gets the specified WorkbookRange.
        /// </summary>
        /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
        /// <returns>The WorkbookRange.</returns>
        public async System.Threading.Tasks.Task<WorkbookRange> GetAsync(CancellationToken cancellationToken)
        {
            this.Method = "GET";
            var fetched = await this.SendAsync<WorkbookRange>(null, cancellationToken).ConfigureAwait(false);
            this.InitializeCollectionProperties(fetched);
            return fetched;
        }

        /// <summary>
        /// Updates the specified WorkbookRange using PATCH.
        /// </summary>
        /// <param name="workbookRangeToUpdate">The WorkbookRange to update.</param>
        /// <returns>The updated WorkbookRange.</returns>
        public System.Threading.Tasks.Task<WorkbookRange> UpdateAsync(WorkbookRange workbookRangeToUpdate)
        {
            // Delegate to the cancellable overload with a no-op token.
            return this.UpdateAsync(workbookRangeToUpdate, CancellationToken.None);
        }

        /// <summary>
        /// Updates the specified WorkbookRange using PATCH.
        /// </summary>
        /// <param name="workbookRangeToUpdate">The WorkbookRange to update.</param>
        /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
        /// <returns>The updated WorkbookRange.</returns>
        public async System.Threading.Tasks.Task<WorkbookRange> UpdateAsync(WorkbookRange workbookRangeToUpdate, CancellationToken cancellationToken)
        {
            this.ContentType = "application/json";
            this.Method = "PATCH";
            var patched = await this.SendAsync<WorkbookRange>(workbookRangeToUpdate, cancellationToken).ConfigureAwait(false);
            this.InitializeCollectionProperties(patched);
            return patched;
        }

        /// <summary>
        /// Adds the specified expand value to the request.
        /// </summary>
        /// <param name="value">The expand value.</param>
        /// <returns>The request object to send.</returns>
        public IWorkbookRangeRequest Expand(string value)
        {
            this.QueryOptions.Add(new QueryOption("$expand", value));
            return this;
        }

        /// <summary>
        /// Adds the specified expand value to the request.
        /// </summary>
        /// <param name="expandExpression">The expression from which to calculate the expand value.</param>
        /// <returns>The request object to send.</returns>
        public IWorkbookRangeRequest Expand(Expression<Func<WorkbookRange, object>> expandExpression)
        {
            if (expandExpression == null)
            {
                throw new ArgumentNullException(nameof(expandExpression));
            }

            string error;
            var expandValue = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
            if (expandValue == null)
            {
                throw new ArgumentException(error, nameof(expandExpression));
            }

            this.QueryOptions.Add(new QueryOption("$expand", expandValue));
            return this;
        }

        /// <summary>
        /// Adds the specified select value to the request.
        /// </summary>
        /// <param name="value">The select value.</param>
        /// <returns>The request object to send.</returns>
        public IWorkbookRangeRequest Select(string value)
        {
            this.QueryOptions.Add(new QueryOption("$select", value));
            return this;
        }

        /// <summary>
        /// Adds the specified select value to the request.
        /// </summary>
        /// <param name="selectExpression">The expression from which to calculate the select value.</param>
        /// <returns>The request object to send.</returns>
        public IWorkbookRangeRequest Select(Expression<Func<WorkbookRange, object>> selectExpression)
        {
            if (selectExpression == null)
            {
                throw new ArgumentNullException(nameof(selectExpression));
            }

            string error;
            var selectValue = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
            if (selectValue == null)
            {
                throw new ArgumentException(error, nameof(selectExpression));
            }

            this.QueryOptions.Add(new QueryOption("$select", selectValue));
            return this;
        }

        /// <summary>
        /// Initializes any collection properties after deserialization, like next requests for paging.
        /// </summary>
        /// <param name="workbookRangeToInitialize">The <see cref="WorkbookRange"/> with the collection properties to initialize.</param>
        private void InitializeCollectionProperties(WorkbookRange workbookRangeToInitialize)
        {
        }
    }
}
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // File System.Diagnostics.Eventing.Reader.EventLogRecord.cs // Automatically generated contract file. using System.Collections.Generic; using System.IO; using System.Text; using System.Diagnostics.Contracts; using System; // Disable the "this variable is not used" warning as every field would imply it. #pragma warning disable 0414 // Disable the "this variable is never assigned to". #pragma warning disable 0067 // Disable the "this event is never assigned to". #pragma warning disable 0649 // Disable the "this variable is never used". #pragma warning disable 0169 // Disable the "new keyword not required" warning. #pragma warning disable 0109 // Disable the "extern without DllImport" warning. #pragma warning disable 0626 // Disable the "could hide other member" warning, can happen on certain properties. 
#pragma warning disable 0108


namespace System.Diagnostics.Eventing.Reader
{
  // Contract reference stub: every member returns its type's default value;
  // only the Contract.Ensures postconditions carry information.
  public partial class EventLogRecord : EventRecord
  {
    #region Methods and constructors
    protected override void Dispose(bool disposing)
    {
    }

    internal EventLogRecord()
    {
    }

    public override string FormatDescription(IEnumerable<Object> values)
    {
      return null;
    }

    public override string FormatDescription()
    {
      return null;
    }

    public IList<Object> GetPropertyValues(EventLogPropertySelector propertySelector)
    {
      Contract.Ensures(Contract.Result<System.Collections.Generic.IList<System.Object>>() != null);

      return null;
    }

    public override string ToXml()
    {
      return null;
    }
    #endregion

    #region Properties and indexers
    public override Guid? ActivityId
    {
      get { return null; }
    }

    public override EventBookmark Bookmark
    {
      get { return null; }
    }

    public string ContainerLog
    {
      get { return null; }
    }

    public override int Id
    {
      get { return 0; }
    }

    public override long? Keywords
    {
      get { return null; }
    }

    public override IEnumerable<string> KeywordsDisplayNames
    {
      get { return null; }
    }

    public override byte? Level
    {
      get { return null; }
    }

    public override string LevelDisplayName
    {
      get { return null; }
    }

    public override string LogName
    {
      get { return null; }
    }

    public override string MachineName
    {
      get { return null; }
    }

    public IEnumerable<int> MatchedQueryIds
    {
      get { return null; }
    }

    public override short? Opcode
    {
      get { return null; }
    }

    public override string OpcodeDisplayName
    {
      get { return null; }
    }

    public override int? ProcessId
    {
      get { return null; }
    }

    public override IList<EventProperty> Properties
    {
      get { return null; }
    }

    public override Guid? ProviderId
    {
      get { return null; }
    }

    public override string ProviderName
    {
      get { return null; }
    }

    public override int? Qualifiers
    {
      get { return null; }
    }

    public override long? RecordId
    {
      get { return null; }
    }

    public override Guid? RelatedActivityId
    {
      get { return null; }
    }

    public override int? Task
    {
      get { return null; }
    }

    public override string TaskDisplayName
    {
      get { return null; }
    }

    public override int? ThreadId
    {
      get { return null; }
    }

    public override DateTime? TimeCreated
    {
      get { return null; }
    }

    public override System.Security.Principal.SecurityIdentifier UserId
    {
      get { return null; }
    }

    public override byte? Version
    {
      get { return null; }
    }
    #endregion
  }
}
using System;
using System.Collections.Generic;
using System.Linq;

using csscript;

namespace CSScripting
{
    /// <summary>
    /// Various string extensions
    /// </summary>
    public static class StringExtensions
    {
        /// <summary>
        /// Determines whether the string is empty (or null).
        /// </summary>
        /// <param name="text">The text.</param>
        /// <returns>
        /// <c>true</c> if the specified text is empty; otherwise, <c>false</c>.
        /// </returns>
        public static bool IsEmpty(this string text) => string.IsNullOrEmpty(text);

        /// <summary>
        /// Determines whether the string is not empty (or null).
        /// </summary>
        /// <param name="text">The text.</param>
        /// <returns>
        /// <c>true</c> if [is not empty] [the specified text]; otherwise, <c>false</c>.
        /// </returns>
        public static bool IsNotEmpty(this string text) => !string.IsNullOrEmpty(text);

        /// <summary>
        /// Determines whether this instance has text.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <returns>
        /// <c>true</c> if the specified text has text; otherwise, <c>false</c>.
        /// </returns>
        public static bool HasText(this string text) =>
            !string.IsNullOrEmpty(text) && !string.IsNullOrWhiteSpace(text);

        /// <summary>
        /// Trims a single character from the head and the end of the string.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <param name="trimChars">The trim chars.</param>
        /// <returns>The result of trimming.</returns>
        public static string TrimSingle(this string text, params char[] trimChars)
        {
            if (text.IsEmpty())
                return text;

            var startOffset = trimChars.Contains(text[0]) ? 1 : 0;

            // BUGFIX: only trim the tail when at least one character remains past
            // the (possibly trimmed) head. Previously a one-character input that
            // matched a trim char (e.g. "\"".TrimSingle('"')) counted the same
            // character twice and called Substring(1, -1), which throws
            // ArgumentOutOfRangeException. Now such input yields "".
            var endOffset = (text.Length - startOffset) > 0 && trimChars.Contains(text.Last()) ? 1 : 0;

            if (startOffset != 0 || endOffset != 0)
                return text.Substring(startOffset, (text.Length - startOffset) - endOffset);
            else
                return text;
        }

        /// <summary>
        /// Gets the lines.
        /// </summary>
        /// <param name="str">The string.</param>
        /// <returns>The method result.</returns>
        public static string[] GetLines(this string str) =>// too simplistic though adequate
            str.Replace("\r\n", "\n").Split('\n');

        /// <summary>
        /// Determines whether this string contains the substring defined by the pattern.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <param name="pattern">The pattern.</param>
        /// <param name="ignoreCase">if set to <c>true</c> [ignore case].</param>
        /// <returns>
        /// <c>true</c> if [contains] [the specified pattern]; otherwise, <c>false</c>.
        /// </returns>
        public static bool Contains(this string text, string pattern, bool ignoreCase) =>
            // NOTE: default(StringComparison) is StringComparison.CurrentCulture,
            // so the case-sensitive path is culture-sensitive (kept for
            // backward compatibility).
            text.IndexOf(pattern, ignoreCase ? StringComparison.OrdinalIgnoreCase : default(StringComparison)) != -1;

        /// <summary>
        /// Compares two strings.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <param name="pattern">The pattern.</param>
        /// <param name="ignoreCase">if set to <c>true</c> [ignore case].</param>
        /// <returns>The result of the test.</returns>
        public static bool SameAs(this string text, string pattern, bool ignoreCase = true) =>
            0 == string.Compare(text, pattern, ignoreCase);

        /// <summary>
        /// Checks if the given string matches any of the provided patterns.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <param name="patterns">The patterns</param>
        /// <returns>The method result.</returns>
        public static bool IsOneOf(this string text, params string[] patterns) =>
            patterns.Any(x => x == text);

        /// <summary>
        /// Joins strings the by the specified separator.
        /// </summary>
        /// <param name="values">The values.</param>
        /// <param name="separator">The separator.</param>
        /// <returns>The method result.</returns>
        public static string JoinBy(this IEnumerable<string> values, string separator) =>
            string.Join(separator, values);

        /// <summary>
        /// The custom implementation of the <see cref="string.GetHashCode"/> method.
        /// </summary>
        /// <param name="text">The text to generate the hash for.</param>
        /// <returns>The method result.</returns>
        public static int GetHashCodeEx(this string text)
        {
            // Hashing is cheap relative to script execution (profiled):
            //   Native:        100000 calls: 7 ms;  10 calls: 0.0007 ms
            //   Custom Safe:   100000 calls: 40 ms; 10 calls: 0.004 ms
            //   Custom Unsafe: 100000 calls: 13 ms; 10 calls: 0.0013 ms
            // GetHashCodeEx is called ~10 times during first compilation,
            // ~5 times for cached execution, and about half that when hosted.
#if !class_lib
            if (csscript.ExecuteOptions.options.customHashing)
            {
                // Deterministic GetHashCode; needed because x64 and x86 runtimes use
                // different hash algorithms, which would make cache-directory
                // calculation unreliable. Also used for integration with third-party
                // products (e.g. CS-Script.Npp).
                return text.GetHashCode32();
            }
            else
            {
                return text.GetHashCode();
            }
#else
            return text.GetHashCode();
#endif
        }
    }
}

namespace CSScripting
{
    internal static class CommandArgParser
    {
        /// <summary>
        /// Removes a matching pair of quotes, or an embedded closing quote,
        /// from a command-line token.
        /// </summary>
        public static string TrimMatchingQuotes(this string input, char quote)
        {
            if (input.Length >= 2)
            {
                //"-sconfig:My Script.cs.config"
                if (input.First() == quote && input.Last() == quote)
                {
                    return input.Substring(1, input.Length - 2);
                }
                //-sconfig:"My Script.cs.config"
                else if (input.Last() == quote)
                {
                    var firstQuote = input.IndexOf(quote);
                    if (firstQuote != input.Length - 1) //not the last one
                        return input.Substring(0, firstQuote) + input.Substring(firstQuote + 1, input.Length - 2 - firstQuote);
                }
            }
            return input;
        }

        /// <summary>
        /// Splits the string at every character for which <paramref name="controller"/>
        /// returns true; the separator characters themselves are not included.
        /// </summary>
        public static IEnumerable<string> Split(this string str, Func<char, bool> controller)
        {
            int nextPiece = 0;

            for (int c = 0; c < str.Length; c++)
            {
                if (controller(str[c]))
                {
                    yield return str.Substring(nextPiece, c - nextPiece);
                    nextPiece = c + 1;
                }
            }

            // Always emit the trailing piece (possibly empty).
            yield return str.Substring(nextPiece);
        }

        /// <summary>
        /// Splits a raw command line into arguments, honoring double quotes and
        /// backslash-escaped quotes; empty tokens are discarded.
        /// </summary>
        public static string[] SplitCommandLine(this string commandLine)
        {
            bool inQuotes = false;
            bool isEscaping = false;

            return commandLine.Split(c =>
                {
                    if (c == '\\' && !isEscaping) { isEscaping = true; return false; }

                    if (c == '\"' && !isEscaping)
                        inQuotes = !inQuotes;

                    isEscaping = false;

                    // Whitespace outside of quotes separates arguments.
                    return !inQuotes && char.IsWhiteSpace(c)/*c == ' '*/;
                })
                .Select(arg => arg.Trim().TrimMatchingQuotes('\"').Replace("\\\"", "\""))
                .Where(arg => !string.IsNullOrEmpty(arg))
                .ToArray();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using RedBox.Web.Areas.HelpPage.ModelDescriptions;
using RedBox.Web.Areas.HelpPage.Models;
using RedBox.Web.Areas.HelpPage.SampleGeneration;

namespace RedBox.Web.Areas.HelpPage
{
    public static class HelpPageConfigurationExtensions
    {
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            var sampleGenerator = config.GetHelpPageSampleGenerator();
            sampleGenerator.SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" matches the action regardless of its parameter names.
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" matches the action regardless of its parameter names.
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            // "*" matches the action regardless of its parameter names.
            var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            // "*" matches the action regardless of its parameter names.
            var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily created and cached on the configuration's property bag.
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
    // One generator is created lazily per configuration and cached in config.Properties.
    return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
        typeof(ModelDescriptionGenerator),
        k => InitializeModelDescriptionGenerator(config));
}

/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
    object model;
    // ApiModelPrefix (declared elsewhere in this class) namespaces the cache key so the
    // model entry cannot collide with other values stored in config.Properties.
    string modelId = ApiModelPrefix + apiDescriptionId;
    if (!config.Properties.TryGetValue(modelId, out model))
    {
        Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
        // Friendly IDs are matched case-insensitively.
        ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
        if (apiDescription != null)
        {
            model = GenerateApiModel(apiDescription, config);
            config.Properties.TryAdd(modelId, model);
        }
    }
    // NOTE: when no ApiDescription matches the id, model remains null and null is returned.
    return (HelpPageApiModel)model;
}

// Builds the complete help-page model for one API: URI parameters, request body
// description, response resource description, and request/response samples.
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
    HelpPageApiModel apiModel = new HelpPageApiModel()
    {
        ApiDescription = apiDescription,
    };
    ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
    HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
    GenerateUriParameters(apiModel, modelGenerator);
    GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
    GenerateResourceDescription(apiModel, modelGenerator);
    GenerateSamples(apiModel, sampleGenerator);
    return apiModel;
}

// Populates apiModel.UriParameters from every FromUri parameter of the action.
// Complex types without a string TypeConverter are flattened into their properties;
// everything else is listed as a single parameter.
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source == ApiParameterSource.FromUri)
        {
            HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
            Type parameterType = null;
            ModelDescription typeDescription = null;
            ComplexTypeModelDescription complexTypeDescription = null;
            if (parameterDescriptor != null)
            {
                parameterType = parameterDescriptor.ParameterType;
                typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                complexTypeDescription = typeDescription as ComplexTypeModelDescription;
            }

            // Example:
            // [TypeConverter(typeof(PointConverter))]
            // public class Point
            // {
            //     public Point(int x, int y)
            //     {
            //         X = x;
            //         Y = y;
            //     }
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
            //
            // public class Point
            // {
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Regular complex class Point will have properties X and Y added to UriParameters collection.
            if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType))
            {
                // Flatten: each property of the complex type becomes its own URI parameter.
                foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                {
                    apiModel.UriParameters.Add(uriParameter);
                }
            }
            else if (parameterDescriptor != null)
            {
                ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription);
                if (!parameterDescriptor.IsOptional)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                }
                object defaultValue = parameterDescriptor.DefaultValue;
                if (defaultValue != null)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                }
            }
            else
            {
                Debug.Assert(parameterDescriptor == null);
                // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                // when source is FromUri. Ignored in request model and among resource parameters but listed
                // as a simple string here.
                ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                AddParameterDescription(apiModel, apiParameter, modelDescription);
            }
        }
    }
}

// True when the type can be converted from a string by its TypeConverter
// (and is therefore URI-bindable as a single value).
private static bool IsBindableWithTypeConverter(Type parameterType)
{
    if (parameterType == null)
    {
        return false;
    }
    return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}

// Appends a single ParameterDescription to apiModel.UriParameters and returns it
// so the caller can attach annotations.
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
    ParameterDescription parameterDescription = new ParameterDescription
    {
        Name = apiParameter.Name,
        Documentation = apiParameter.Documentation,
        TypeDescription = typeDescription,
    };
    apiModel.UriParameters.Add(parameterDescription);
    return parameterDescription;
}

// Fills apiModel.RequestModelDescription from the FromBody parameter, or — for actions
// taking a raw HttpRequestMessage — from the type registered via SetActualRequestType.
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source == ApiParameterSource.FromBody)
        {
            Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
            apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            apiModel.RequestDocumentation = apiParameter.Documentation;
        }
        else if (apiParameter.ParameterDescriptor != null &&
            apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
        {
            // The real payload type is whatever was registered for this action (may be null).
            Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            if (parameterType != null)
            {
                apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            }
        }
    }
}

// Describes the response resource; prefers the documented ResponseType over the
// declared return type and skips void actions.
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
    Type responseType = response.ResponseType ?? response.DeclaredType;
    if (responseType != null && responseType != typeof(void))
    {
        apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
    }
}

// Generates request/response samples; any exception is unwrapped and recorded as an
// error message on the model instead of failing the whole help page.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
    try
    {
        foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
        {
            apiModel.SampleRequests.Add(item.Key, item.Value);
            LogInvalidSampleAsError(apiModel, item.Value);
        }

        foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
        {
            apiModel.SampleResponses.Add(item.Key, item.Value);
            LogInvalidSampleAsError(apiModel, item.Value);
        }
    }
    catch (Exception e)
    {
        apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception message: {0}", HelpPageSampleGenerator.UnwrapException(e).Message));
    }
}

// Finds the parameter that carries the request body (FromBody, or a raw HttpRequestMessage
// whose actual payload type was registered). Returns false when no such parameter/type exists.
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
    parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
        p => p.Source == ApiParameterSource.FromBody ||
            (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

    if (parameterDescription == null)
    {
        resourceType = null;
        return false;
    }

    resourceType = parameterDescription.ParameterDescriptor.ParameterType;

    if (resourceType == typeof(HttpRequestMessage))
    {
        HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
        resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
    }

    if (resourceType == null)
    {
        parameterDescription = null;
        return false;
    }

    return true;
}

// Pre-warms the description generator with every resource (body) type exposed by the
// API explorer, so the help index can render type links.
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
    ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
    Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
    foreach (ApiDescription api in apis)
    {
        ApiParameterDescription parameterDescription;
        Type parameterType;
        if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
        {
            modelGenerator.GetOrCreateModelDescription(parameterType);
        }
    }
    return modelGenerator;
}

// Surfaces an InvalidSample's error message on the model; non-error samples are ignored.
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    InvalidSample invalidSample = sample as InvalidSample;
    if (invalidSample != null)
    {
        apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
    }
}
}
}
namespace GitVersion
{
    using Configuration.Init.Wizard;
    using GitVersion.Helpers;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Text;
    using WarningException = GitTools.WarningException;

    /// <summary>
    /// Locates, reads and normalizes the GitVersion configuration file
    /// (GitVersion.yml, or the obsolete GitVersionConfig.yaml), filling in
    /// documented defaults for every branch-configuration value left unset.
    /// </summary>
    public class ConfigurationProvider
    {
        internal const string DefaultTagPrefix = "[vV]";

        public const string DefaultConfigFileName = "GitVersion.yml";
        public const string ObsoleteConfigFileName = "GitVersionConfig.yaml";

        // Default regexes used to recognize well-known branch kinds by name.
        public const string ReleaseBranchRegex = "releases?[/-]";
        public const string FeatureBranchRegex = "features?[/-]";
        public const string PullRequestRegex = @"(pull|pull\-requests|pr)[/-]";
        public const string HotfixBranchRegex = "hotfix(es)?[/-]";
        public const string SupportBranchRegex = "support[/-]";
        public const string DevelopBranchRegex = "dev(elop)?(ment)?$";
        public const string MasterBranchRegex = "master$";

        // Keys under which the well-known branch configurations live in Config.Branches.
        public const string MasterBranchKey = "master";
        public const string ReleaseBranchKey = "release";
        public const string FeatureBranchKey = "feature";
        public const string PullRequestBranchKey = "pull-request";
        public const string HotfixBranchKey = "hotfix";
        public const string SupportBranchKey = "support";
        public const string DevelopBranchKey = "develop";

        private const IncrementStrategy DefaultIncrementStrategy = IncrementStrategy.Inherit;

        /// <summary>
        /// Provides the effective configuration for a repository, preferring a config
        /// file in the working directory over one at the project root.
        /// </summary>
        public static Config Provide(GitPreparer gitPreparer, IFileSystem fileSystem, bool applyDefaults = true, Config overrideConfig = null)
        {
            var workingDirectory = gitPreparer.WorkingDirectory;
            var projectRootDirectory = gitPreparer.GetProjectRootDirectory();

            if (HasConfigFileAt(workingDirectory, fileSystem))
            {
                return Provide(workingDirectory, fileSystem, applyDefaults, overrideConfig);
            }
            return Provide(projectRootDirectory, fileSystem, applyDefaults, overrideConfig);
        }

        /// <summary>
        /// Returns the path of the config file that <see cref="Provide(GitPreparer, IFileSystem, bool, Config)"/>
        /// would use (same working-directory-first preference).
        /// </summary>
        public static string SelectConfigFilePath(GitPreparer gitPreparer, IFileSystem fileSystem)
        {
            var workingDirectory = gitPreparer.WorkingDirectory;
            var projectRootDirectory = gitPreparer.GetProjectRootDirectory();

            if (HasConfigFileAt(workingDirectory, fileSystem))
            {
                return GetConfigFilePath(workingDirectory, fileSystem);
            }
            return GetConfigFilePath(projectRootDirectory, fileSystem);
        }

        /// <summary>
        /// Reads the config file from a specific directory, verifies it, and optionally
        /// applies defaults and an in-memory override (e.g. from command-line arguments).
        /// </summary>
        public static Config Provide(string workingDirectory, IFileSystem fileSystem, bool applyDefaults = true, Config overrideConfig = null)
        {
            var readConfig = ReadConfig(workingDirectory, fileSystem);
            VerifyConfiguration(readConfig);

            if (applyDefaults) ApplyDefaultsTo(readConfig);
            if (null != overrideConfig) ApplyOverridesTo(readConfig, overrideConfig);
            return readConfig;
        }

        // Rejects configurations that set mainline mode per-branch; mainline is only
        // valid as the repository-wide versioning mode.
        static void VerifyConfiguration(Config readConfig)
        {
            // Verify no branches are set to mainline mode
            if (readConfig.Branches.Any(b => b.Value.VersioningMode == VersioningMode.Mainline))
            {
                throw new GitVersionConfigurationException(@"Mainline mode only works at the repository level, a single branch cannot be put into mainline mode
This is because mainline mode treats your entire git repository as an event source with each merge into the 'mainline' incrementing the version.
If the docs do not help you decide on the mode open an issue to discuss what you are trying to do.");
            }
        }

        /// <summary>
        /// Fills every unset value of <paramref name="config"/> with its documented default,
        /// creates/completes the seven well-known branch configurations, then defaults any
        /// user-defined branches. Order matters: known branches are filled in first so users
        /// can still override any individual value.
        /// </summary>
        public static void ApplyDefaultsTo(Config config)
        {
            config.AssemblyVersioningScheme = config.AssemblyVersioningScheme ?? AssemblyVersioningScheme.MajorMinorPatch;
            config.AssemblyFileVersioningScheme = config.AssemblyFileVersioningScheme ?? AssemblyFileVersioningScheme.MajorMinorPatch;
            // NOTE(review): the next three assignments are self-assignments (no default is
            // applied); they look like no-ops unless the Config setters have side effects — confirm.
            config.AssemblyInformationalFormat = config.AssemblyInformationalFormat;
            config.AssemblyVersioningFormat = config.AssemblyVersioningFormat;
            config.AssemblyFileVersioningFormat = config.AssemblyFileVersioningFormat;
            config.TagPrefix = config.TagPrefix ?? DefaultTagPrefix;
            config.VersioningMode = config.VersioningMode ?? VersioningMode.ContinuousDelivery;
            config.ContinuousDeploymentFallbackTag = config.ContinuousDeploymentFallbackTag ?? "ci";
            config.MajorVersionBumpMessage = config.MajorVersionBumpMessage ?? IncrementStrategyFinder.DefaultMajorPattern;
            config.MinorVersionBumpMessage = config.MinorVersionBumpMessage ?? IncrementStrategyFinder.DefaultMinorPattern;
            config.PatchVersionBumpMessage = config.PatchVersionBumpMessage ?? IncrementStrategyFinder.DefaultPatchPattern;
            config.NoBumpMessage = config.NoBumpMessage ?? IncrementStrategyFinder.DefaultNoBumpPattern;
            config.CommitMessageIncrementing = config.CommitMessageIncrementing ?? CommitMessageIncrementMode.Enabled;
            config.LegacySemVerPadding = config.LegacySemVerPadding ?? 4;
            config.BuildMetaDataPadding = config.BuildMetaDataPadding ?? 4;
            config.CommitsSinceVersionSourcePadding = config.CommitsSinceVersionSourcePadding ?? 4;
            config.CommitDateFormat = config.CommitDateFormat ?? "yyyy-MM-dd";

            // Snapshot taken BEFORE the well-known branches are created below, so the
            // user-defined loop further down only sees branches present in the file.
            var configBranches = config.Branches.ToList();

            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, DevelopBranchKey),
                DevelopBranchRegex,
                new List<string>(),
                defaultTag: "alpha",
                defaultIncrementStrategy: IncrementStrategy.Minor,
                // Mainline repositories keep develop in mainline mode; otherwise develop
                // defaults to continuous deployment.
                defaultVersioningMode: config.VersioningMode == VersioningMode.Mainline ? VersioningMode.Mainline : VersioningMode.ContinuousDeployment,
                defaultTrackMergeTarget: true,
                tracksReleaseBranches: true);
            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, MasterBranchKey),
                MasterBranchRegex,
                new List<string> { "develop", "release" },
                defaultTag: string.Empty,
                defaultPreventIncrement: true,
                defaultIncrementStrategy: IncrementStrategy.Patch,
                isMainline: true);
            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, ReleaseBranchKey),
                ReleaseBranchRegex,
                new List<string> { "develop", "master", "support", "release" },
                defaultTag: "beta",
                defaultPreventIncrement: true,
                defaultIncrementStrategy: IncrementStrategy.Patch,
                isReleaseBranch: true);
            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, FeatureBranchKey),
                FeatureBranchRegex,
                new List<string> { "develop", "master", "release", "feature", "support", "hotfix" },
                defaultIncrementStrategy: IncrementStrategy.Inherit);
            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, PullRequestBranchKey),
                PullRequestRegex,
                new List<string> { "develop", "master", "release", "feature", "support", "hotfix" },
                defaultTag: "PullRequest",
                defaultTagNumberPattern: @"[/-](?<number>\d+)",
                defaultIncrementStrategy: IncrementStrategy.Inherit);
            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, HotfixBranchKey),
                HotfixBranchRegex,
                new List<string> { "develop", "master", "support" },
                defaultTag: "beta",
                defaultIncrementStrategy: IncrementStrategy.Patch);
            ApplyBranchDefaults(config,
                GetOrCreateBranchDefaults(config, SupportBranchKey),
                SupportBranchRegex,
                new List<string> { "master" },
                defaultTag: string.Empty,
                defaultPreventIncrement: true,
                defaultIncrementStrategy: IncrementStrategy.Patch,
                isMainline: true);

            // Any user defined branches should have other values defaulted after known branches filled in.
            // This allows users to override any of the value.
            foreach (var branchConfig in configBranches)
            {
                var regex = branchConfig.Value.Regex;
                if (regex == null)
                {
                    throw new GitVersionConfigurationException($"Branch configuration '{branchConfig.Key}' is missing required configuration 'regex'\n\n" +
                        "See http://gitversion.readthedocs.io/en/latest/configuration/ for more info");
                }

                var sourceBranches = branchConfig.Value.SourceBranches;
                if (sourceBranches == null)
                {
                    throw new GitVersionConfigurationException($"Branch configuration '{branchConfig.Key}' is missing required configuration 'source-branches'\n\n" +
                        "See http://gitversion.readthedocs.io/en/latest/configuration/ for more info");
                }

                ApplyBranchDefaults(config, branchConfig.Value, regex, sourceBranches);
            }

            // This is a second pass to add additional sources, it has to be another pass to prevent ordering issues
            foreach (var branchConfig in configBranches)
            {
                if (branchConfig.Value.IsSourceBranchFor == null) continue;
                foreach (var isSourceBranch in branchConfig.Value.IsSourceBranchFor)
                {
                    // NOTE(review): throws KeyNotFoundException if isSourceBranch names a
                    // branch that does not exist in config.Branches — confirm intended.
                    config.Branches[isSourceBranch].SourceBranches.Add(branchConfig.Key);
                }
            }
        }

        // Only the tag prefix can currently be overridden in-memory.
        static void ApplyOverridesTo(Config config, Config overrideConfig)
        {
            config.TagPrefix = string.IsNullOrWhiteSpace(overrideConfig.TagPrefix) ? config.TagPrefix : overrideConfig.TagPrefix;
        }

        // Returns the existing branch configuration for the key, creating and
        // registering an empty one when absent.
        static BranchConfig GetOrCreateBranchDefaults(Config config, string branchKey)
        {
            if (!config.Branches.ContainsKey(branchKey))
            {
                var branchConfig = new BranchConfig { Name = branchKey };
                config.Branches.Add(branchKey, branchConfig);
                return branchConfig;
            }

            return config.Branches[branchKey];
        }

        /// <summary>
        /// Fills every unset value of a single branch configuration with the supplied
        /// defaults. Values already present (from the user's file) always win; increment
        /// and versioning mode additionally fall back to the repository-level setting.
        /// </summary>
        public static void ApplyBranchDefaults(Config config,
            BranchConfig branchConfig,
            string branchRegex,
            List<string> sourceBranches,
            string defaultTag = "useBranchName",
            IncrementStrategy? defaultIncrementStrategy = null, // Looked up from main config
            bool defaultPreventIncrement = false,
            VersioningMode? defaultVersioningMode = null, // Looked up from main config
            bool defaultTrackMergeTarget = false,
            string defaultTagNumberPattern = null,
            bool tracksReleaseBranches = false,
            bool isReleaseBranch = false,
            bool isMainline = false)
        {
            branchConfig.Regex = string.IsNullOrEmpty(branchConfig.Regex) ? branchRegex : branchConfig.Regex;
            // NOTE(review): SourceBranches is assigned unconditionally (unlike the other
            // values, which only default when unset) — confirm intended.
            branchConfig.SourceBranches = sourceBranches;
            branchConfig.Tag = branchConfig.Tag ?? defaultTag;
            branchConfig.TagNumberPattern = branchConfig.TagNumberPattern ?? defaultTagNumberPattern;
            branchConfig.Increment = branchConfig.Increment ?? defaultIncrementStrategy ?? config.Increment ?? DefaultIncrementStrategy;
            branchConfig.PreventIncrementOfMergedBranchVersion = branchConfig.PreventIncrementOfMergedBranchVersion ?? defaultPreventIncrement;
            branchConfig.TrackMergeTarget = branchConfig.TrackMergeTarget ?? defaultTrackMergeTarget;
            branchConfig.VersioningMode = branchConfig.VersioningMode ?? defaultVersioningMode ?? config.VersioningMode;
            branchConfig.TracksReleaseBranches = branchConfig.TracksReleaseBranches ?? tracksReleaseBranches;
            branchConfig.IsReleaseBranch = branchConfig.IsReleaseBranch ?? isReleaseBranch;
            branchConfig.IsMainline = branchConfig.IsMainline ?? isMainline;
        }

        // Reads and deserializes the config file; missing file yields an empty Config.
        // The file is read once and fed to both the legacy-format notifier and the parser.
        static Config ReadConfig(string workingDirectory, IFileSystem fileSystem)
        {
            var configFilePath = GetConfigFilePath(workingDirectory, fileSystem);

            if (fileSystem.Exists(configFilePath))
            {
                var readAllText = fileSystem.ReadAllText(configFilePath);
                LegacyConfigNotifier.Notify(new StringReader(readAllText));
                return ConfigSerialiser.Read(new StringReader(readAllText));
            }
            return new Config();
        }

        /// <summary>
        /// Serializes the fully-defaulted effective configuration back to YAML text
        /// (useful for diagnostics / the "show config" command).
        /// </summary>
        public static string GetEffectiveConfigAsString(string workingDirectory, IFileSystem fileSystem)
        {
            var config = Provide(workingDirectory, fileSystem);
            var stringBuilder = new StringBuilder();
            using (var stream = new StringWriter(stringBuilder))
            {
                ConfigSerialiser.Write(config, stream);
                stream.Flush();
            }
            return stringBuilder.ToString();
        }

        /// <summary>
        /// Verifies config-file placement for the repository; skipped for dynamic
        /// repositories (TargetUrl set) where no local .git info may exist yet.
        /// </summary>
        public static void Verify(GitPreparer gitPreparer, IFileSystem fileSystem)
        {
            if (!string.IsNullOrWhiteSpace(gitPreparer.TargetUrl))
            {
                // Assuming this is a dynamic repository. At this stage it's unsure whether we have
                // any .git info so we need to skip verification
                return;
            }

            var workingDirectory = gitPreparer.WorkingDirectory;
            var projectRootDirectory = gitPreparer.GetProjectRootDirectory();

            Verify(workingDirectory, projectRootDirectory, fileSystem);
        }

        /// <summary>
        /// Warns about obsolete config files and throws when both the working directory
        /// and the project root contain a config file (ambiguous selection).
        /// </summary>
        public static void Verify(string workingDirectory, string projectRootDirectory, IFileSystem fileSystem)
        {
            if (fileSystem.PathsEqual(workingDirectory, projectRootDirectory))
            {
                WarnAboutObsoleteConfigFile(workingDirectory, fileSystem);
                return;
            }

            WarnAboutObsoleteConfigFile(workingDirectory, fileSystem);
            WarnAboutObsoleteConfigFile(projectRootDirectory, fileSystem);

            WarnAboutAmbiguousConfigFileSelection(workingDirectory, projectRootDirectory, fileSystem);
        }

        // Throws when a config file exists in BOTH candidate locations.
        static void WarnAboutAmbiguousConfigFileSelection(string workingDirectory, string projectRootDirectory, IFileSystem fileSystem)
        {
            var workingConfigFile = GetConfigFilePath(workingDirectory, fileSystem);
            var projectRootConfigFile = GetConfigFilePath(projectRootDirectory, fileSystem);

            bool hasConfigInWorkingDirectory = fileSystem.Exists(workingConfigFile);
            bool hasConfigInProjectRootDirectory = fileSystem.Exists(projectRootConfigFile);
            if (hasConfigInProjectRootDirectory && hasConfigInWorkingDirectory)
            {
                throw new WarningException(string.Format("Ambiguous config file selection from '{0}' and '{1}'", workingConfigFile, projectRootConfigFile));
            }
        }

        // Prefers GitVersion.yml; falls back to the obsolete name only if it exists.
        // When neither exists, the (non-existent) GitVersion.yml path is returned.
        static string GetConfigFilePath(string workingDirectory, IFileSystem fileSystem)
        {
            var ymlPath = Path.Combine(workingDirectory, DefaultConfigFileName);
            if (fileSystem.Exists(ymlPath))
            {
                return ymlPath;
            }

            var deprecatedPath = Path.Combine(workingDirectory, ObsoleteConfigFileName);
            if (fileSystem.Exists(deprecatedPath))
            {
                return deprecatedPath;
            }

            return ymlPath;
        }

        // True when either the current or the obsolete config file exists in the directory.
        static bool HasConfigFileAt(string workingDirectory, IFileSystem fileSystem)
        {
            var defaultConfigFilePath = Path.Combine(workingDirectory, DefaultConfigFileName);
            if (fileSystem.Exists(defaultConfigFilePath))
            {
                return true;
            }

            var deprecatedConfigFilePath = Path.Combine(workingDirectory, ObsoleteConfigFileName);
            if (fileSystem.Exists(deprecatedConfigFilePath))
            {
                return true;
            }

            return false;
        }

        // Warns when the obsolete GitVersionConfig.yaml is present; if the new file also
        // exists the new one wins and both are reported.
        static void WarnAboutObsoleteConfigFile(string workingDirectory, IFileSystem fileSystem)
        {
            var deprecatedConfigFilePath = Path.Combine(workingDirectory, ObsoleteConfigFileName);
            if (!fileSystem.Exists(deprecatedConfigFilePath))
            {
                return;
            }

            var defaultConfigFilePath = Path.Combine(workingDirectory, DefaultConfigFileName);
            if (fileSystem.Exists(defaultConfigFilePath))
            {
                // NOTE(review): '{2}' is used twice in this format string; the second use looks
                // intentional ("Will be used <new file>") but the wording is awkward — confirm.
                Logger.WriteWarning(string.Format("Ambiguous config files at '{0}': '{1}' (deprecated) and '{2}'. Will be used '{2}'", workingDirectory, ObsoleteConfigFileName, DefaultConfigFileName));
                return;
            }

            Logger.WriteWarning(string.Format("'{0}' is deprecated, use '{1}' instead.", deprecatedConfigFilePath, DefaultConfigFileName));
        }

        /// <summary>
        /// Runs the interactive configuration wizard and, unless cancelled (null result),
        /// writes the chosen configuration to the config file in the working directory.
        /// </summary>
        public static void Init(string workingDirectory, IFileSystem fileSystem, IConsole console)
        {
            var configFilePath = GetConfigFilePath(workingDirectory, fileSystem);
            // Defaults are NOT applied here so the wizard edits only user-specified values.
            var currentConfiguration = Provide(workingDirectory, fileSystem, applyDefaults: false);
            var config = new ConfigInitWizard(console, fileSystem).Run(currentConfiguration, workingDirectory);
            if (config == null) return;

            using (var stream = fileSystem.OpenWrite(configFilePath))
            using (var writer = new StreamWriter(stream))
            {
                Logger.WriteInfo("Saving config file");
                ConfigSerialiser.Write(config, writer);
                stream.Flush();
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;

using Juice.Framework;
using Juice.Framework.TypeConverters;

namespace Juice {

    /// <summary>
    /// Extend a TextBox with jQuery UI Autocomplete http://api.jqueryui.com/autocomplete/
    /// </summary>
    [TargetControlType(typeof(TextBox))]
    [WidgetEvent("create")]
    [WidgetEvent("search")]
    [WidgetEvent("open")]
    [WidgetEvent("focus")]
    [WidgetEvent("close")]
    [WidgetEvent("response")]
    public class Autocomplete : JuiceExtender {

        // "autocomplete" is the jQuery UI widget name this extender binds to.
        public Autocomplete() : base("autocomplete") { }

        #region Widget Options

        /// <summary>
        /// Which element the menu should be appended to.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-appendTo
        /// </summary>
        [WidgetOption("appendTo", "body")]
        [Category("Behavior")]
        [DefaultValue("body")]
        [Description("Which element the menu should be appended to.")]
        public string AppendTo { get; set; }

        /// <summary>
        /// If set to true the first item will be automatically focused.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-autoFocus
        /// </summary>
        [WidgetOption("autoFocus", false)]
        [Category("Behavior")]
        [DefaultValue(false)]
        [Description("If set to true the first item will be automatically focused.")]
        public bool AutoFocus { get; set; }

        /// <summary>
        /// The delay in milliseconds the Autocomplete waits after a keystroke to activate itself. A zero-delay makes sense for local data (more responsive), but can produce a lot of load for remote data, while being less responsive.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-delay
        /// </summary>
        [WidgetOption("delay", 300)]
        [Category("Behavior")]
        [DefaultValue(300)]
        [Description("The delay in milliseconds the Autocomplete waits after a keystroke to activate itself. A zero-delay makes sense for local data (more responsive), but can produce a lot of load for remote data, while being less responsive.")]
        public int Delay { get; set; }

        /// <summary>
        /// The minimum number of characters a user has to type before the Autocomplete activates. Zero is useful for local data with just a few items. Should be increased when there are a lot of items, where a single character would match a few thousand items.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-minLength
        /// </summary>
        // NOTE(review): Category "Key" is inconsistent with the other options ("Behavior") —
        // looks like a typo, confirm before changing (it is designer metadata).
        [WidgetOption("minLength", 1)]
        [Category("Key")]
        [DefaultValue(1)]
        [Description("The minimum number of characters a user has to type before the Autocomplete activates. Zero is useful for local data with just a few items. Should be increased when there are a lot of items, where a single character would match a few thousand items.")]
        public int MinLength { get; set; }

        /// <summary>
        /// Identifies the position of the Autocomplete widget in relation to the associated input element. The "of" option defaults to the input element, but you can specify another element to position against. You can refer to the jQuery UI Position utility for more details about the various options.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-position
        /// </summary>
        // Eval = true: the value is emitted as a raw JavaScript object literal, not a string.
        [WidgetOption("position", "{}", Eval = true)]
        [TypeConverter(typeof(JsonObjectConverter))]
        [Category("Layout")]
        [DefaultValue("{}")]
        [Description("Identifies the position of the Autocomplete widget in relation to the associated input element. The \"of\" option defaults to the input element, but you can specify another element to position against. You can refer to the jQuery UI Position utility for more details about the various options.")]
        public string Position { get; set; }

        // Backing fields for the three mutually-prioritized "source" representations;
        // Widget_Source below decides which one is rendered to the widget.
        private String _sourceUrl = null;
        private String[] _source = null;
        private List<AutocompleteItem> _sourceList = null;

        /// <summary>
        /// Defines a data source url for the data to use. Source, Source List or SourceUrl must be specified.
        /// If SourceUrl, SourceList and Source are specified, Source or SourceList will take priority.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-source
        /// </summary>
        [WidgetDocument("source", null)]
        [Category("Data")]
        [DefaultValue(null)]
        [Description("Defines a data source url for the data to use. Source, Source List or SourceUrl must be specified. If SourceUrl, SourceList and Source are specified, Source or SourceList will take priority.")]
        public String SourceUrl {
            get { return _sourceUrl; }
            set { this._sourceUrl = value; }
        }

        /// <summary>
        /// Defines the data to use. Source, Source List or SourceUrl must be specified.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-source
        /// </summary>
        [WidgetDocument("source", null)]
        [TypeConverter(typeof(Framework.TypeConverters.StringArrayConverter))]
        [Category("Data")]
        [DefaultValue(null)]
        [Description("Defines the data to use. Source, Source List or SourceUrl must be specified.")]
        public String[] Source {
            get { return this._source; }
            set { this._source = value; }
        }

        /// <summary>
        /// Defines an array of label/value pairs to use as source data. Source, Source List or SourceUrl must be specified.
        /// If both SourceList and Source are specified, Source will take priority.
        /// Reference: http://api.jqueryui.com/autocomplete/#option-source
        /// </summary>
        [WidgetDocument("source", null)]
        [TypeConverter(typeof(AutocompleteListConverter))]
        [Category("Data")]
        [DefaultValue(null)]
        [Description("Defines an array of label/value pairs to use as source data. If both SourceList and Source are specified, Source will take priority.")]
        public List<AutocompleteItem> SourceList {
            get { return this._sourceList; }
            set { this._sourceList = value; }
        }

        /// <summary>
        /// Internal container for the source option.
        /// </summary>
        /// <remarks>
        /// Yes, this is ugly. It's really ugly.
        /// This is (so far) the only instance where we have to do something like this.
        /// There's no way to differentiate between a string and an array of length(1) in control attributes.
        /// If we run across this scenario again, we should write a TypeDescriptorProvider that will pull Internal/Private properties
        /// or switch back to using PropertyInfo instead of PropertyDescriptors.
        /// </remarks>
        [WidgetOption("source", null)]
        [TypeConverter(typeof(AutocompleteSourceConverter))]
        [EditorBrowsable(EditorBrowsableState.Never)]
        [Browsable(false)]
        [PropertyLink("SourceUrl", typeof(String))]
        [PropertyLink("Source", typeof(String[]))]
        [PropertyLink("Source", typeof(ArrayList))]
        [PropertyLink("SourceList", typeof(List<AutocompleteItem>))]
        public object Widget_Source {
            get {
                // Priority order: Source (string array) wins, then SourceList, then SourceUrl.
                if(this._source != null) {
                    return this._source;
                }
                else if(this._sourceList != null) {
                    // we need to perform some hackery here so that this will render properly to the widget init/options script.
                    // Each item is projected to an anonymous { label, value } pair.
                    var result = new List<object>();

                    foreach(AutocompleteItem item in _sourceList) {
                        result.Add(new { label = item.Label, value = item.Value });
                    }

                    return result;
                }
                return this._sourceUrl;
            }
            internal set {
                // Intentionally a no-op: incoming values are routed to the
                // SourceUrl/Source/SourceList properties via the PropertyLink
                // attributes above, so nothing needs to be stored here.
            }
        }

        #endregion

        #region Widget Events

        /// <summary>
        /// Triggered when an item is selected from the menu; ui.item refers to the selected item. The default action of select is to replace the text field's value with the value of the selected item. Canceling this event prevents the value from being updated, but does not prevent the menu from closing.
        /// Reference: http://api.jqueryui.com/autocomplete/#event-select
        /// </summary>
        [WidgetEvent("select")]
        [Category("Action")]
        [Description("Triggered when an item is selected from the menu; ui.item refers to the selected item. The default action of select is to replace the text field's value with the value of the selected item. Canceling this event prevents the value from being updated, but does not prevent the menu from closing.")]
        public event EventHandler Select;

        /// <summary>
        /// Triggered when the field is blurred, if the value has changed; ui.item refers to the selected item.
        /// Reference: http://api.jqueryui.com/autocomplete/#event-change
        /// </summary>
        [WidgetEvent("change")]
        [Category("Action")]
        [Description("Triggered when the field is blurred, if the value has changed; ui.item refers to the selected item.")]
        public event EventHandler Change;

        #endregion

    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*=============================================================================
**
**
**
** Purpose: Domains represent an application within the runtime. Objects can
**          not be shared between domains and each domain can be configured
**          independently.
**
** =============================================================================*/

namespace System
{
    using System;
    using System.Reflection;
    using System.Runtime;
    using System.Runtime.CompilerServices;
    using System.Security;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Runtime.InteropServices;
    using System.Reflection.Emit;
    using CultureInfo = System.Globalization.CultureInfo;
    using System.IO;
    using AssemblyHashAlgorithm = System.Configuration.Assemblies.AssemblyHashAlgorithm;
    using System.Text;
    using System.Runtime.ConstrainedExecution;
    using System.Runtime.Versioning;
    using System.Diagnostics;
    using System.Diagnostics.Contracts;
    using System.Runtime.ExceptionServices;

    internal sealed class AppDomain
    {
        // Domain security information
        // These fields initialized from the other side only. (NOTE: order
        // of these fields cannot be changed without changing the layout in
        // the EE - AppDomainBaseObject in this case)
        private AppDomainManager _domainManager;
        private Dictionary<String, Object> _LocalStore;   // per-domain key/value store backing SetData/GetData
        private AppDomainSetup _FusionStore;               // set exactly once by SetupFusionStore

        public event AssemblyLoadEventHandler AssemblyLoad;

        // Type-resolution fallback raised by the VM when a type lookup fails; see OnTypeResolveEvent.
        private ResolveEventHandler _TypeResolve;

        public event ResolveEventHandler TypeResolve
        {
            add
            {
                lock (this)
                {
                    _TypeResolve += value;
                }
            }

            remove
            {
                lock (this)
                {
                    _TypeResolve -= value;
                }
            }
        }

        // Resource-resolution fallback raised by the VM; see OnResourceResolveEvent.
        private ResolveEventHandler _ResourceResolve;

        public event ResolveEventHandler ResourceResolve
        {
            add
            {
                lock (this)
                {
                    _ResourceResolve += value;
                }
            }

            remove
            {
                lock (this)
                {
                    _ResourceResolve -= value;
                }
            }
        }

        // Assembly-resolution fallback raised by the VM; see OnAssemblyResolveEvent.
        private ResolveEventHandler _AssemblyResolve;

        public event ResolveEventHandler AssemblyResolve
        {
            add
            {
                lock (this)
                {
                    _AssemblyResolve += value;
                }
            }

            remove
            {
                lock (this)
                {
                    _AssemblyResolve -= value;
                }
            }
        }

        private EventHandler _processExit;
        private EventHandler _domainUnload;
        private UnhandledExceptionEventHandler _unhandledException;

        // The compat flags are set at domain creation time to indicate that the given breaking
        // changes (named in the strings) should not be used in this domain. We only use the
        // keys, the values are ignored.
        private Dictionary<String, object> _compatFlags;

        // Delegate that will hold references to FirstChance exception notifications
        private EventHandler<FirstChanceExceptionEventArgs> _firstChanceException;

        private IntPtr _pDomain; // this is an unmanaged pointer (AppDomain * m_pDomain) used from the VM.
private bool _compatFlagsInitialized; internal const String TargetFrameworkNameAppCompatSetting = "TargetFrameworkName"; #if FEATURE_APPX private static APPX_FLAGS s_flags; // // Keep in async with vm\appdomainnative.cpp // [Flags] private enum APPX_FLAGS { APPX_FLAGS_INITIALIZED = 0x01, APPX_FLAGS_APPX_MODEL = 0x02, APPX_FLAGS_APPX_DESIGN_MODE = 0x04, APPX_FLAGS_APPX_MASK = APPX_FLAGS_APPX_MODEL | APPX_FLAGS_APPX_DESIGN_MODE, } private static APPX_FLAGS Flags { get { if (s_flags == 0) s_flags = nGetAppXFlags(); Debug.Assert(s_flags != 0); return s_flags; } } #endif // FEATURE_APPX #if FEATURE_APPX [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [SuppressUnmanagedCodeSecurity] [return: MarshalAs(UnmanagedType.I4)] private static extern APPX_FLAGS nGetAppXFlags(); #endif /// <summary> /// Get a handle used to make a call into the VM pointing to this domain /// </summary> internal AppDomainHandle GetNativeHandle() { // This should never happen under normal circumstances. However, there ar ways to create an // uninitialized object through remoting, etc. if (_pDomain.IsNull()) { throw new InvalidOperationException(SR.Argument_InvalidHandle); } return new AppDomainHandle(_pDomain); } /// <summary> /// If this AppDomain is configured to have an AppDomain manager then create the instance of it. /// This method is also called from the VM to create the domain manager in the default domain. 
/// </summary> private void CreateAppDomainManager() { Debug.Assert(_domainManager == null, "_domainManager == null"); AppDomainSetup adSetup = FusionStore; String trustedPlatformAssemblies = (String)(GetData("TRUSTED_PLATFORM_ASSEMBLIES")); if (trustedPlatformAssemblies != null) { String platformResourceRoots = (String)(GetData("PLATFORM_RESOURCE_ROOTS")); if (platformResourceRoots == null) { platformResourceRoots = String.Empty; } String appPaths = (String)(GetData("APP_PATHS")); if (appPaths == null) { appPaths = String.Empty; } String appNiPaths = (String)(GetData("APP_NI_PATHS")); if (appNiPaths == null) { appNiPaths = String.Empty; } String appLocalWinMD = (String)(GetData("APP_LOCAL_WINMETADATA")); if (appLocalWinMD == null) { appLocalWinMD = String.Empty; } SetupBindingPaths(trustedPlatformAssemblies, platformResourceRoots, appPaths, appNiPaths, appLocalWinMD); } InitializeCompatibilityFlags(); } /// <summary> /// Initialize the compatibility flags to non-NULL values. /// This method is also called from the VM when the default domain dosen't have a domain manager. /// </summary> private void InitializeCompatibilityFlags() { AppDomainSetup adSetup = FusionStore; // set up shim flags regardless of whether we create a DomainManager in this method. if (adSetup.GetCompatibilityFlags() != null) { _compatFlags = new Dictionary<String, object>(adSetup.GetCompatibilityFlags(), StringComparer.OrdinalIgnoreCase); } // for perf, we don't intialize the _compatFlags dictionary when we don't need to. However, we do need to make a // note that we've run this method, because IsCompatibilityFlagsSet needs to return different values for the // case where the compat flags have been setup. Debug.Assert(!_compatFlagsInitialized); _compatFlagsInitialized = true; CompatibilitySwitches.InitializeSwitches(); } /// <summary> /// Returns whether the current AppDomain follows the AppX rules. 
/// </summary> [Pure] internal static bool IsAppXModel() { #if FEATURE_APPX return (Flags & APPX_FLAGS.APPX_FLAGS_APPX_MODEL) != 0; #else return false; #endif } /// <summary> /// Returns the setting of the AppXDevMode config switch. /// </summary> [Pure] internal static bool IsAppXDesignMode() { #if FEATURE_APPX return (Flags & APPX_FLAGS.APPX_FLAGS_APPX_MASK) == (APPX_FLAGS.APPX_FLAGS_APPX_MODEL | APPX_FLAGS.APPX_FLAGS_APPX_DESIGN_MODE); #else return false; #endif } /// <summary> /// Checks (and throws on failure) if the domain supports Assembly.LoadFrom. /// </summary> [Pure] internal static void CheckLoadFromSupported() { #if FEATURE_APPX if (IsAppXModel()) throw new NotSupportedException(SR.Format(SR.NotSupported_AppX, "Assembly.LoadFrom")); #endif } /// <summary> /// Checks (and throws on failure) if the domain supports Assembly.LoadFile. /// </summary> [Pure] internal static void CheckLoadFileSupported() { #if FEATURE_APPX if (IsAppXModel()) throw new NotSupportedException(SR.Format(SR.NotSupported_AppX, "Assembly.LoadFile")); #endif } /// <summary> /// Checks (and throws on failure) if the domain supports Assembly.Load(byte[] ...). 
/// </summary>
[Pure]
internal static void CheckLoadByteArraySupported()
{
#if FEATURE_APPX
    if (IsAppXModel())
        throw new NotSupportedException(SR.Format(SR.NotSupported_AppX, "Assembly.Load(byte[], ...)"));
#endif
}

// The domain manager instance, if any; created by CreateAppDomainManager.
public AppDomainManager DomainManager
{
    get
    {
        return _domainManager;
    }
}

public static AppDomain CurrentDomain
{
    get
    {
        Contract.Ensures(Contract.Result<AppDomain>() != null);
        return Thread.GetDomain();
    }
}

public String BaseDirectory
{
    get
    {
        return FusionStore.ApplicationBase;
    }
}

public override String ToString()
{
    StringBuilder sb = StringBuilderCache.Acquire();

    String fn = nGetFriendlyName();
    if (fn != null)
    {
        sb.Append(SR.Loader_Name + fn);
        sb.Append(Environment.NewLine);
    }

    return StringBuilderCache.GetStringAndRelease(sb);
}

[MethodImpl(MethodImplOptions.InternalCall)]
private extern Assembly[] nGetAssemblies(bool forIntrospection);

internal Assembly[] GetAssemblies(bool forIntrospection)
{
    return nGetAssemblies(forIntrospection);
}

// this is true when we've removed the handles etc so really can't do anything
[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern bool IsUnloadingForcedFinalize();

// this is true when we've just started going through the finalizers and are forcing objects to finalize
// so must be aware that certain infrastructure may have gone away
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern bool IsFinalizingForUnload();

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal static extern void PublishAnonymouslyHostedDynamicMethodsAssembly(RuntimeAssembly assemblyHandle);

/// <summary>
/// Stores (or overwrites) a named value in this domain's local property store.
/// </summary>
public void SetData(string name, object data)
{
    if (name == null)
        throw new ArgumentNullException(nameof(name));
    Contract.EndContractBlock();

    lock (((ICollection)LocalStore).SyncRoot)
    {
        LocalStore[name] = data;
    }
}

/// <summary>
/// Returns the named value from this domain's local property store, or null if absent.
/// </summary>
[Pure]
public Object GetData(string name)
{
    if (name == null)
        throw new ArgumentNullException(nameof(name));
    Contract.EndContractBlock();

    object data;
    lock (((ICollection)LocalStore).SyncRoot)
    {
        LocalStore.TryGetValue(name, out data);
    }
    // NOTE(review): the null branch is redundant (data is already null here) but kept verbatim.
    if (data == null)
        return null;
    return data;
}

[Obsolete("AppDomain.GetCurrentThreadId has been deprecated because it does not provide a stable Id when managed threads are running on fibers (aka lightweight threads). To get a stable identifier for a managed thread, use the ManagedThreadId property on Thread. http://go.microsoft.com/fwlink/?linkid=14202", false)]
[DllImport(Microsoft.Win32.Win32Native.KERNEL32)]
public static extern int GetCurrentThreadId();

// Instances are only ever created by the VM; managed construction is disallowed.
private AppDomain()
{
    throw new NotSupportedException(SR.GetResourceString(ResId.NotSupported_Constructor));
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern void nCreateContext();

[DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
[SuppressUnmanagedCodeSecurity]
private static extern void nSetupBindingPaths(String trustedPlatformAssemblies, String platformResourceRoots, String appPath, String appNiPaths, String appLocalWinMD);

internal void SetupBindingPaths(String trustedPlatformAssemblies, String platformResourceRoots, String appPath, String appNiPaths, String appLocalWinMD)
{
    nSetupBindingPaths(trustedPlatformAssemblies, platformResourceRoots, appPath, appNiPaths, appLocalWinMD);
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern String nGetFriendlyName();

// support reliability for certain event handlers, if the target
// methods also participate in this discipline. If caller passes
// an existing MulticastDelegate, then we could use a MDA to indicate
// that reliability is not guaranteed. But if it is a single cast
// scenario, we can make it work.
public event EventHandler ProcessExit { add { if (value != null) { RuntimeHelpers.PrepareContractedDelegate(value); lock (this) _processExit += value; } } remove { lock (this) _processExit -= value; } } public event EventHandler DomainUnload { add { if (value != null) { RuntimeHelpers.PrepareContractedDelegate(value); lock (this) _domainUnload += value; } } remove { lock (this) _domainUnload -= value; } } public event UnhandledExceptionEventHandler UnhandledException { add { if (value != null) { RuntimeHelpers.PrepareContractedDelegate(value); lock (this) _unhandledException += value; } } remove { lock (this) _unhandledException -= value; } } // This is the event managed code can wireup against to be notified // about first chance exceptions. // // To register/unregister the callback, the code must be SecurityCritical. public event EventHandler<FirstChanceExceptionEventArgs> FirstChanceException { add { if (value != null) { RuntimeHelpers.PrepareContractedDelegate(value); lock (this) _firstChanceException += value; } } remove { lock (this) _firstChanceException -= value; } } private void OnAssemblyLoadEvent(RuntimeAssembly LoadedAssembly) { AssemblyLoadEventHandler eventHandler = AssemblyLoad; if (eventHandler != null) { AssemblyLoadEventArgs ea = new AssemblyLoadEventArgs(LoadedAssembly); eventHandler(this, ea); } } // This method is called by the VM. 
private RuntimeAssembly OnResourceResolveEvent(RuntimeAssembly assembly, String resourceName) { ResolveEventHandler eventHandler = _ResourceResolve; if (eventHandler == null) return null; Delegate[] ds = eventHandler.GetInvocationList(); int len = ds.Length; for (int i = 0; i < len; i++) { Assembly asm = ((ResolveEventHandler)ds[i])(this, new ResolveEventArgs(resourceName, assembly)); RuntimeAssembly ret = GetRuntimeAssembly(asm); if (ret != null) return ret; } return null; } // This method is called by the VM private RuntimeAssembly OnTypeResolveEvent(RuntimeAssembly assembly, String typeName) { ResolveEventHandler eventHandler = _TypeResolve; if (eventHandler == null) return null; Delegate[] ds = eventHandler.GetInvocationList(); int len = ds.Length; for (int i = 0; i < len; i++) { Assembly asm = ((ResolveEventHandler)ds[i])(this, new ResolveEventArgs(typeName, assembly)); RuntimeAssembly ret = GetRuntimeAssembly(asm); if (ret != null) return ret; } return null; } // This method is called by the VM. 
private RuntimeAssembly OnAssemblyResolveEvent(RuntimeAssembly assembly, String assemblyFullName)
{
    ResolveEventHandler eventHandler = _AssemblyResolve;

    if (eventHandler == null)
    {
        return null;
    }

    // Walk every subscriber in registration order; the first non-null runtime assembly wins.
    Delegate[] ds = eventHandler.GetInvocationList();
    int len = ds.Length;
    for (int i = 0; i < len; i++)
    {
        Assembly asm = ((ResolveEventHandler)ds[i])(this, new ResolveEventArgs(assemblyFullName, assembly));
        RuntimeAssembly ret = GetRuntimeAssembly(asm);
        if (ret != null)
            return ret;
    }

    return null;
}

#if FEATURE_COMINTEROP
// Called by VM - code:CLRPrivTypeCacheWinRT::RaiseDesignerNamespaceResolveEvent
private string[] OnDesignerNamespaceResolveEvent(string namespaceName)
{
    return System.Runtime.InteropServices.WindowsRuntime.WindowsRuntimeMetadata.OnDesignerNamespaceResolveEvent(this, namespaceName);
}
#endif // FEATURE_COMINTEROP

internal AppDomainSetup FusionStore
{
    get
    {
        Debug.Assert(_FusionStore != null,
                     "Fusion store has not been correctly setup in this domain");
        return _FusionStore;
    }
}

// Unwraps an Assembly to its RuntimeAssembly (directly, or via an AssemblyBuilder's
// underlying assembly); returns null for null or unknown Assembly subclasses.
internal static RuntimeAssembly GetRuntimeAssembly(Assembly asm)
{
    if (asm == null)
        return null;

    RuntimeAssembly rtAssembly = asm as RuntimeAssembly;
    if (rtAssembly != null)
        return rtAssembly;

    AssemblyBuilder ab = asm as AssemblyBuilder;
    if (ab != null)
        return ab.InternalAssembly;

    return null;
}

// Lazily created backing store for SetData/GetData.
private Dictionary<String, Object> LocalStore
{
    get
    {
        if (_LocalStore != null)
            return _LocalStore;
        else
        {
            _LocalStore = new Dictionary<String, Object>();
            return _LocalStore;
        }
    }
}

[DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
[SuppressUnmanagedCodeSecurity]
private static extern void nSetNativeDllSearchDirectories(string paths);

private void SetupFusionStore(AppDomainSetup info, AppDomainSetup oldInfo)
{
    Contract.Requires(info != null);

    if (info.ApplicationBase == null)
    {
        info.SetupDefaults(RuntimeEnvironment.GetModuleFileName(), imageLocationAlreadyNormalized: true);
    }

    nCreateContext();

    // This must be the last action taken
    _FusionStore = info;
}

// Used to switch into other AppDomain and call SetupRemoteDomain.
//   We cannot simply call through the proxy, because if there
//   are any remoting sinks registered, they can add non-mscorlib
//   objects to the message (causing an assembly load exception when
//   we try to deserialize it on the other side)
private static object PrepareDataForSetup(String friendlyName, AppDomainSetup setup, string[] propertyNames, string[] propertyValues)
{
    AppDomainSetup newSetup = new AppDomainSetup(setup, false);

    // Remove the special AppDomainCompatSwitch entries from the set of name value pairs
    // And add them to the AppDomainSetup
    //
    // This is only supported on CoreCLR through ICLRRuntimeHost2.CreateAppDomainWithManager
    // Desktop code should use System.AppDomain.CreateDomain() or
    // System.AppDomainManager.CreateDomain() and add the flags to the AppDomainSetup
    List<String> compatList = new List<String>();

    if (propertyNames != null && propertyValues != null)
    {
        for (int i = 0; i < propertyNames.Length; i++)
        {
            if (String.Compare(propertyNames[i], "AppDomainCompatSwitch", StringComparison.OrdinalIgnoreCase) == 0)
            {
                compatList.Add(propertyValues[i]);
                // Null out consumed entries so Setup() won't propagate them as ordinary data.
                propertyNames[i] = null;
                propertyValues[i] = null;
            }
        }

        if (compatList.Count > 0)
        {
            newSetup.SetCompatibilitySwitches(compatList);
        }
    }

    return new Object[] { friendlyName, newSetup, propertyNames, propertyValues };
} // PrepareDataForSetup

private static Object Setup(Object arg)
{
    Contract.Requires(arg != null && arg is Object[]);
    Contract.Requires(((Object[])arg).Length >= 8);

    Object[] args = (Object[])arg;
    String friendlyName = (String)args[0];
    AppDomainSetup setup = (AppDomainSetup)args[1];
    string[] propertyNames = (string[])args[2]; // can contain null elements
    string[] propertyValues = (string[])args[3]; // can contain null elements

    AppDomain ad = AppDomain.CurrentDomain;
    AppDomainSetup newSetup = new AppDomainSetup(setup, false);

    if (propertyNames != null && propertyValues != null)
    {
        for (int i = 0; i < propertyNames.Length; i++)
        {
            // We want to set native dll probing directories before any P/Invokes have a
            // chance to fire. The Path class, for one, has P/Invokes.
            if (propertyNames[i] == "NATIVE_DLL_SEARCH_DIRECTORIES")
            {
                if (propertyValues[i] == null)
                    throw new ArgumentNullException("NATIVE_DLL_SEARCH_DIRECTORIES");

                string paths = propertyValues[i];
                if (paths.Length == 0)
                    break;

                nSetNativeDllSearchDirectories(paths);
            }
        }

        for (int i = 0; i < propertyNames.Length; i++)
        {
            if (propertyNames[i] == "APPBASE") // make sure in sync with Fusion
            {
                if (propertyValues[i] == null)
                    throw new ArgumentNullException("APPBASE");

                if (PathInternal.IsPartiallyQualified(propertyValues[i]))
                    throw new ArgumentException(SR.Argument_AbsolutePathRequired);

                newSetup.ApplicationBase = NormalizePath(propertyValues[i], fullCheck: true);
            }
            else if (propertyNames[i] == "TRUSTED_PLATFORM_ASSEMBLIES" ||
                     propertyNames[i] == "PLATFORM_RESOURCE_ROOTS" ||
                     propertyNames[i] == "APP_PATHS" ||
                     propertyNames[i] == "APP_NI_PATHS")
            {
                string values = propertyValues[i];
                if (values == null)
                    throw new ArgumentNullException(propertyNames[i]);

                ad.SetData(propertyNames[i], NormalizeAppPaths(values));
            }
            else if (propertyNames[i] != null)
            {
                ad.SetData(propertyNames[i], propertyValues[i]); // just propagate
            }
        }
    }

    ad.SetupFusionStore(newSetup, null); // makes FusionStore a ref to newSetup

    // technically, we don't need this, newSetup refers to the same object as FusionStore
    // but it's confusing since it isn't immediately obvious whether we have a ref or a copy
    AppDomainSetup adSetup = ad.FusionStore;

    // set up the friendly name
    ad.nSetupFriendlyName(friendlyName);

    ad.CreateAppDomainManager(); // could modify FusionStore's object

    return null;
}

// Splits a Path.PathSeparator-delimited path list, fully qualifies each entry,
// drops empty entries, and rejoins the result with the same separator.
private static string NormalizeAppPaths(string values)
{
    int estimatedLength = values.Length + 1; // +1 for extra separator temporarily added at end
    StringBuilder sb = StringBuilderCache.Acquire(estimatedLength);

    for (int pos = 0; pos < values.Length; pos++)
    {
        string path;

        int nextPos = values.IndexOf(Path.PathSeparator, pos);
        if (nextPos == -1)
        {
            path = values.Substring(pos);
            pos = values.Length - 1;
        }
        else
        {
            path = values.Substring(pos, nextPos - pos);
            pos = nextPos;
        }

        // Skip empty directories
        if (path.Length == 0)
            continue;

        if (PathInternal.IsPartiallyQualified(path))
            throw new ArgumentException(SR.Argument_AbsolutePathRequired);

        string appPath = NormalizePath(path, fullCheck: true);
        sb.Append(appPath);
        sb.Append(Path.PathSeparator);
    }

    // Strip the last separator
    if (sb.Length > 0)
    {
        sb.Remove(sb.Length - 1, 1);
    }

    return StringBuilderCache.GetStringAndRelease(sb);
}

internal static string NormalizePath(string path, bool fullCheck)
{
    return Path.GetFullPath(path);
}

// This routine is called from unmanaged code to
// set the default fusion context.
private void SetupDomain(bool allowRedirects, String path, String configFile, String[] propertyNames, String[] propertyValues)
{
    // It is possible that we could have multiple threads initializing
    // the default domain. We will just take the winner of these two.
    // (eg. one thread doing a com call and another doing attach for IJW)
    lock (this)
    {
        if (_FusionStore == null)
        {
            AppDomainSetup setup = new AppDomainSetup();

            // always use internet permission set
            SetupFusionStore(setup, null);
        }
    }
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void nSetupFriendlyName(string friendlyName);

// Returns a copy so callers cannot mutate the domain's live AppDomainSetup.
public AppDomainSetup SetupInformation
{
    get
    {
        return new AppDomainSetup(FusionStore, true);
    }
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern String IsStringInterned(String str);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern String GetOrInternString(String str);

// CoreCLR domains are always fully trusted.
public bool IsFullyTrusted
{
    get
    {
        return true;
    }
}

public Int32 Id
{
    get
    {
        return GetId();
    }
}

[MethodImplAttribute(MethodImplOptions.InternalCall)]
internal extern Int32 GetId();
}

/// <summary>
/// Handle used to marshal an AppDomain to the VM (eg QCall). When marshaled via a QCall, the target
/// method in the VM will receive a QCall::AppDomainHandle parameter.
/// </summary>
internal struct AppDomainHandle
{
    private IntPtr m_appDomainHandle;

    // Note: generally an AppDomainHandle should not be directly constructed, instead the
    // code:System.AppDomain.GetNativeHandle method should be called to get the handle for a specific
    // AppDomain.
    internal AppDomainHandle(IntPtr domainHandle)
    {
        m_appDomainHandle = domainHandle;
    }
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
**
**
** Purpose: This class will encapsulate a byte and provide an
**          Object representation of it.
**
** ===========================================================*/

using System.Diagnostics.Contracts;
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;

namespace System
{
    [Serializable]
    [StructLayout(LayoutKind.Sequential)]
    [TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
    public struct Byte : IComparable, IConvertible, IFormattable, IComparable<Byte>, IEquatable<Byte>
    {
        private byte m_value; // Do not rename (binary serialization)

        // The maximum value that a Byte may represent: 255.
        public const byte MaxValue = (byte)0xFF;

        // The minimum value that a Byte may represent: 0.
        public const byte MinValue = 0;

        // Compares this object to another object, returning an integer that
        // indicates the relationship. Returns a value less than zero if this
        // instance is less than the argument, zero if equal, greater than zero
        // otherwise; null is considered to be less than any instance.
        // If object is not of type byte, this method throws an ArgumentException.
        public int CompareTo(Object value)
        {
            if (value == null)
            {
                return 1;
            }
            if (!(value is Byte))
            {
                throw new ArgumentException(SR.Arg_MustBeByte);
            }

            return m_value - (((Byte)value).m_value);
        }

        public int CompareTo(Byte value)
        {
            // Subtraction cannot overflow: both operands are in [0, 255].
            return m_value - value;
        }

        // Determines whether two Byte objects are equal.
        public override bool Equals(Object obj)
        {
            if (!(obj is Byte))
            {
                return false;
            }
            return m_value == ((Byte)obj).m_value;
        }

        [NonVersionable]
        public bool Equals(Byte obj)
        {
            return m_value == obj;
        }

        // Gets a hash code for this instance. The byte value itself is a perfect hash.
        public override int GetHashCode()
        {
            return m_value;
        }

        [Pure]
        public static byte Parse(String s)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse(s.AsReadOnlySpan(), NumberStyles.Integer, NumberFormatInfo.CurrentInfo);
        }

        [Pure]
        public static byte Parse(String s, NumberStyles style)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse(s.AsReadOnlySpan(), style, NumberFormatInfo.CurrentInfo);
        }

        [Pure]
        public static byte Parse(String s, IFormatProvider provider)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse(s.AsReadOnlySpan(), NumberStyles.Integer, NumberFormatInfo.GetInstance(provider));
        }

        // Parses an unsigned byte from a String in the given style. If
        // a NumberFormatInfo isn't specified, the current culture's
        // NumberFormatInfo is assumed.
        [Pure]
        public static byte Parse(String s, NumberStyles style, IFormatProvider provider)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Parse(s.AsReadOnlySpan(), style, NumberFormatInfo.GetInstance(provider));
        }

        public static byte Parse(ReadOnlySpan<char> s, NumberStyles style = NumberStyles.Integer, IFormatProvider provider = null)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            return Parse(s, style, NumberFormatInfo.GetInstance(provider));
        }

        private static byte Parse(ReadOnlySpan<char> s, NumberStyles style, NumberFormatInfo info)
        {
            int i = 0;
            try
            {
                // Parse as Int32 first, then range-check down to a byte.
                i = Number.ParseInt32(s, style, info);
            }
            catch (OverflowException e)
            {
                // Re-throw with a Byte-specific message, preserving the original as InnerException.
                throw new OverflowException(SR.Overflow_Byte, e);
            }

            if (i < MinValue || i > MaxValue) throw new OverflowException(SR.Overflow_Byte);
            return (byte)i;
        }

        public static bool TryParse(String s, out Byte result)
        {
            if (s == null)
            {
                result = 0;
                return false;
            }

            return TryParse(s.AsReadOnlySpan(), NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result);
        }

        public static bool TryParse(String s, NumberStyles style, IFormatProvider provider, out Byte result)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);

            if (s == null)
            {
                result = 0;
                return false;
            }

            return TryParse(s.AsReadOnlySpan(), style, NumberFormatInfo.GetInstance(provider), out result);
        }

        public static bool TryParse(ReadOnlySpan<char> s, out byte result, NumberStyles style = NumberStyles.Integer, IFormatProvider provider = null)
        {
            NumberFormatInfo.ValidateParseStyleInteger(style);
            return TryParse(s, style, NumberFormatInfo.GetInstance(provider), out result);
        }

        private static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, NumberFormatInfo info, out Byte result)
        {
            result = 0;
            int i;
            if (!Number.TryParseInt32(s, style, info, out i))
            {
                return false;
            }
            // Reject values that parse as Int32 but don't fit in a byte.
            if (i < MinValue || i > MaxValue)
            {
                return false;
            }
            result = (byte)i;
            return true;
        }

        [Pure]
        public override String ToString()
        {
            return Number.FormatInt32(m_value, null, NumberFormatInfo.CurrentInfo);
        }

        [Pure]
        public String ToString(String format)
        {
            return Number.FormatInt32(m_value, format, NumberFormatInfo.CurrentInfo);
        }

        [Pure]
        public String ToString(IFormatProvider provider)
        {
            return Number.FormatInt32(m_value, null, NumberFormatInfo.GetInstance(provider));
        }

        [Pure]
        public String ToString(String format, IFormatProvider provider)
        {
            return Number.FormatInt32(m_value, format, NumberFormatInfo.GetInstance(provider));
        }

        //
        // IConvertible implementation
        //
        [Pure]
        public TypeCode GetTypeCode()
        {
            return TypeCode.Byte;
        }

        bool IConvertible.ToBoolean(IFormatProvider provider)
        {
            return Convert.ToBoolean(m_value);
        }

        char IConvertible.ToChar(IFormatProvider provider)
        {
            return Convert.ToChar(m_value);
        }

        sbyte IConvertible.ToSByte(IFormatProvider provider)
        {
            return Convert.ToSByte(m_value);
        }

        byte IConvertible.ToByte(IFormatProvider provider)
        {
            return m_value;
        }

        short IConvertible.ToInt16(IFormatProvider provider)
        {
            return Convert.ToInt16(m_value);
        }

        ushort IConvertible.ToUInt16(IFormatProvider provider)
        {
            return Convert.ToUInt16(m_value);
        }

        int IConvertible.ToInt32(IFormatProvider provider)
        {
            return Convert.ToInt32(m_value);
        }

        uint IConvertible.ToUInt32(IFormatProvider provider)
        {
            return Convert.ToUInt32(m_value);
        }

        long IConvertible.ToInt64(IFormatProvider provider)
        {
            return Convert.ToInt64(m_value);
        }

        ulong IConvertible.ToUInt64(IFormatProvider provider)
        {
            return Convert.ToUInt64(m_value);
        }

        float IConvertible.ToSingle(IFormatProvider provider)
        {
            return Convert.ToSingle(m_value);
        }

        double IConvertible.ToDouble(IFormatProvider provider)
        {
            return Convert.ToDouble(m_value);
        }

        Decimal IConvertible.ToDecimal(IFormatProvider provider)
        {
            return Convert.ToDecimal(m_value);
        }

        DateTime IConvertible.ToDateTime(IFormatProvider provider)
        {
            // Byte-to-DateTime is not a meaningful conversion.
            throw new InvalidCastException(SR.Format(SR.InvalidCast_FromTo, "Byte", "DateTime"));
        }

        Object IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return Convert.DefaultToType((IConvertible)this, type, provider);
        }
    }
}
using System;
using Csla;
using ParentLoad.DataAccess;
using ParentLoad.DataAccess.ERLevel;

namespace ParentLoad.Business.ERLevel
{
    /// <summary>
    /// A11_City_Child (editable child object).<br/>
    /// Generated base class of the <see cref="A11_City_Child"/> business object.
    /// </summary>
    /// <remarks>
    /// Instances of this class are items of the <see cref="A10_City"/> collection.
    /// </remarks>
    [Serializable]
    public partial class A11_City_Child : BusinessBase<A11_City_Child>
    {
        #region State Fields

        // Key of the owning city, captured from the DTO during Fetch.
        // Kept outside the CSLA property system (not undoable, not serialized)
        // because the parent object owns this value.
        [NotUndoable]
        [NonSerialized]
        internal int city_ID1 = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="City_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> City_Child_NameProperty = RegisterProperty<string>(p => p.City_Child_Name, "City Child Name");
        /// <summary>
        /// Gets or sets the City Child Name.
        /// </summary>
        /// <value>The City Child Name.</value>
        public string City_Child_Name
        {
            get
            {
                return GetProperty(City_Child_NameProperty);
            }
            set
            {
                SetProperty(City_Child_NameProperty, value);
            }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="A11_City_Child"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="A11_City_Child"/> object.</returns>
        internal static A11_City_Child NewA11_City_Child()
        {
            return DataPortal.CreateChild<A11_City_Child>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="A11_City_Child"/> object from the given A11_City_ChildDto.
        /// </summary>
        /// <param name="data">The <see cref="A11_City_ChildDto"/>.</param>
        /// <returns>A reference to the fetched <see cref="A11_City_Child"/> object.</returns>
        internal static A11_City_Child GetA11_City_Child(A11_City_ChildDto data)
        {
            var child = new A11_City_Child();
            // Mark as child before loading so the framework treats it correctly,
            // then mark old because the data came from the database.
            child.MarkAsChild();
            child.Fetch(data);
            child.MarkOld();
            return child;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="A11_City_Child"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public A11_City_Child()
        {
            // Use factory methods and do not use direct creation.
            // Show the framework that this is a child object.
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="A11_City_Child"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            var hookArgs = new DataPortalHookArgs();
            OnCreate(hookArgs);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="A11_City_Child"/> object from the given <see cref="A11_City_ChildDto"/>.
        /// </summary>
        /// <param name="data">The A11_City_ChildDto to use.</param>
        private void Fetch(A11_City_ChildDto data)
        {
            // Value properties.
            LoadProperty(City_Child_NameProperty, data.City_Child_Name);
            // Parent properties.
            city_ID1 = data.Parent_City_ID;
            var hookArgs = new DataPortalHookArgs(data);
            OnFetchRead(hookArgs);
        }

        /// <summary>
        /// Inserts a new <see cref="A11_City_Child"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(A10_City parent)
        {
            var itemDto = new A11_City_ChildDto
            {
                Parent_City_ID = parent.City_ID,
                City_Child_Name = City_Child_Name
            };
            using (var mgr = DalFactoryParentLoad.GetManager())
            {
                var hookArgs = new DataPortalHookArgs(itemDto);
                OnInsertPre(hookArgs);
                var dal = mgr.GetProvider<IA11_City_ChildDal>();
                using (BypassPropertyChecks)
                {
                    var savedDto = dal.Insert(itemDto);
                    hookArgs = new DataPortalHookArgs(savedDto);
                }
                OnInsertPost(hookArgs);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="A11_City_Child"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(A10_City parent)
        {
            // Nothing to persist when the object is unchanged.
            if (!IsDirty)
                return;

            var itemDto = new A11_City_ChildDto
            {
                Parent_City_ID = parent.City_ID,
                City_Child_Name = City_Child_Name
            };
            using (var mgr = DalFactoryParentLoad.GetManager())
            {
                var hookArgs = new DataPortalHookArgs(itemDto);
                OnUpdatePre(hookArgs);
                var dal = mgr.GetProvider<IA11_City_ChildDal>();
                using (BypassPropertyChecks)
                {
                    var savedDto = dal.Update(itemDto);
                    hookArgs = new DataPortalHookArgs(savedDto);
                }
                OnUpdatePost(hookArgs);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="A11_City_Child"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(A10_City parent)
        {
            using (var mgr = DalFactoryParentLoad.GetManager())
            {
                var hookArgs = new DataPortalHookArgs();
                OnDeletePre(hookArgs);
                var dal = mgr.GetProvider<IA11_City_ChildDal>();
                using (BypassPropertyChecks)
                {
                    // Row is identified by the parent city's key.
                    dal.Delete(parent.City_ID);
                }
                OnDeletePost(hookArgs);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion
    }
}
/*
 * Copyright (c) Microsoft. All rights reserved. Licensed under the MIT license. See full license at the bottom of this file.
 */

using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.Tokens;
using Microsoft.SharePoint.Client;
using System;
using System.Net;
using System.Security.Principal;
using System.Web;
using System.Web.Configuration;

namespace Core.ODataBatchWeb
{
    /// <summary>
    /// Encapsulates all the information from SharePoint.
    /// </summary>
    public abstract class SharePointContext
    {
        // Well-known query string parameter names that SharePoint supplies via {StandardTokens}.
        public const string SPHostUrlKey = "SPHostUrl";
        public const string SPAppWebUrlKey = "SPAppWebUrl";
        public const string SPLanguageKey = "SPLanguage";
        public const string SPClientTagKey = "SPClientTag";
        public const string SPProductNumberKey = "SPProductNumber";

        // Cached tokens are considered expired this much earlier than their real expiry,
        // so in-flight SharePoint calls have time to finish before the token lapses.
        protected static readonly TimeSpan AccessTokenLifetimeTolerance = TimeSpan.FromMinutes(5.0);

        private readonly Uri spHostUrl;
        private readonly Uri spAppWebUrl;
        private readonly string spLanguage;
        private readonly string spClientTag;
        private readonly string spProductNumber;

        // <AccessTokenString, UtcExpiresOn>
        protected Tuple<string, DateTime> userAccessTokenForSPHost;
        protected Tuple<string, DateTime> userAccessTokenForSPAppWeb;
        protected Tuple<string, DateTime> appOnlyAccessTokenForSPHost;
        protected Tuple<string, DateTime> appOnlyAccessTokenForSPAppWeb;

        /// <summary>
        /// Gets the SharePoint host url from QueryString of the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The specified HTTP request.</param>
        /// <returns>The SharePoint host url.
        /// Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
        public static Uri GetSPHostUrl(HttpRequestBase httpRequest)
        {
            if (httpRequest == null)
            {
                throw new ArgumentNullException("httpRequest");
            }

            string spHostUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SPHostUrlKey]);
            Uri spHostUrl;
            // Only absolute http/https urls are accepted; anything else is treated as absent.
            if (Uri.TryCreate(spHostUrlString, UriKind.Absolute, out spHostUrl) &&
                (spHostUrl.Scheme == Uri.UriSchemeHttp || spHostUrl.Scheme == Uri.UriSchemeHttps))
            {
                return spHostUrl;
            }

            return null;
        }

        /// <summary>
        /// Gets the SharePoint host url from QueryString of the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The specified HTTP request.</param>
        /// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
        public static Uri GetSPHostUrl(HttpRequest httpRequest)
        {
            return GetSPHostUrl(new HttpRequestWrapper(httpRequest));
        }

        /// <summary>
        /// The SharePoint host url.
        /// </summary>
        public Uri SPHostUrl
        {
            get { return this.spHostUrl; }
        }

        /// <summary>
        /// The SharePoint app web url.
        /// </summary>
        public Uri SPAppWebUrl
        {
            get { return this.spAppWebUrl; }
        }

        /// <summary>
        /// The SharePoint language.
        /// </summary>
        public string SPLanguage
        {
            get { return this.spLanguage; }
        }

        /// <summary>
        /// The SharePoint client tag.
        /// </summary>
        public string SPClientTag
        {
            get { return this.spClientTag; }
        }

        /// <summary>
        /// The SharePoint product number.
        /// </summary>
        public string SPProductNumber
        {
            get { return this.spProductNumber; }
        }

        /// <summary>
        /// The user access token for the SharePoint host.
        /// </summary>
        public abstract string UserAccessTokenForSPHost
        {
            get;
        }

        /// <summary>
        /// The user access token for the SharePoint app web.
        /// </summary>
        public abstract string UserAccessTokenForSPAppWeb
        {
            get;
        }

        /// <summary>
        /// The app only access token for the SharePoint host.
        /// </summary>
        public abstract string AppOnlyAccessTokenForSPHost
        {
            get;
        }

        /// <summary>
        /// The app only access token for the SharePoint app web.
        /// </summary>
        public abstract string AppOnlyAccessTokenForSPAppWeb
        {
            get;
        }

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="spHostUrl">The SharePoint host url.</param>
        /// <param name="spAppWebUrl">The SharePoint app web url.</param>
        /// <param name="spLanguage">The SharePoint language.</param>
        /// <param name="spClientTag">The SharePoint client tag.</param>
        /// <param name="spProductNumber">The SharePoint product number.</param>
        protected SharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber)
        {
            if (spHostUrl == null)
            {
                throw new ArgumentNullException("spHostUrl");
            }

            if (string.IsNullOrEmpty(spLanguage))
            {
                throw new ArgumentNullException("spLanguage");
            }

            if (string.IsNullOrEmpty(spClientTag))
            {
                throw new ArgumentNullException("spClientTag");
            }

            if (string.IsNullOrEmpty(spProductNumber))
            {
                throw new ArgumentNullException("spProductNumber");
            }

            // spAppWebUrl is deliberately not validated here; the provider passes null
            // when the request carries no (or an invalid) SPAppWebUrl.
            this.spHostUrl = spHostUrl;
            this.spAppWebUrl = spAppWebUrl;
            this.spLanguage = spLanguage;
            this.spClientTag = spClientTag;
            this.spProductNumber = spProductNumber;
        }

        /// <summary>
        /// Creates a user ClientContext for the SharePoint host.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateUserClientContextForSPHost()
        {
            return CreateClientContext(this.SPHostUrl, this.UserAccessTokenForSPHost);
        }

        /// <summary>
        /// Creates a user ClientContext for the SharePoint app web.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateUserClientContextForSPAppWeb()
        {
            return CreateClientContext(this.SPAppWebUrl, this.UserAccessTokenForSPAppWeb);
        }

        /// <summary>
        /// Creates app only ClientContext for the SharePoint host.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateAppOnlyClientContextForSPHost()
        {
            return CreateClientContext(this.SPHostUrl, this.AppOnlyAccessTokenForSPHost);
        }

        /// <summary>
        /// Creates an app only ClientContext for the SharePoint app web.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateAppOnlyClientContextForSPAppWeb()
        {
            return CreateClientContext(this.SPAppWebUrl, this.AppOnlyAccessTokenForSPAppWeb);
        }

        /// <summary>
        /// Gets the database connection string from SharePoint for autohosted app.
        /// </summary>
        /// <returns>The database connection string. Returns <c>null</c> if the app is not autohosted or there is no database.</returns>
        public string GetDatabaseConnectionString()
        {
            string dbConnectionString = null;

            using (ClientContext clientContext = CreateAppOnlyClientContextForSPHost())
            {
                if (clientContext != null)
                {
                    var result = AppInstance.RetrieveAppDatabaseConnectionString(clientContext);

                    clientContext.ExecuteQuery();

                    dbConnectionString = result.Value;
                }
            }

            if (dbConnectionString == null)
            {
                // Fall back to a local debugging connection string from web.config, if configured.
                const string LocalDBInstanceForDebuggingKey = "LocalDBInstanceForDebugging";

                var dbConnectionStringSettings = WebConfigurationManager.ConnectionStrings[LocalDBInstanceForDebuggingKey];

                dbConnectionString = dbConnectionStringSettings != null ? dbConnectionStringSettings.ConnectionString : null;
            }

            return dbConnectionString;
        }

        /// <summary>
        /// Determines if the specified access token is valid.
        /// It considers an access token as not valid if it is null, or it has expired.
        /// </summary>
        /// <param name="accessToken">The access token to verify.</param>
        /// <returns>True if the access token is valid.</returns>
        protected static bool IsAccessTokenValid(Tuple<string, DateTime> accessToken)
        {
            return accessToken != null &&
                   !string.IsNullOrEmpty(accessToken.Item1) &&
                   accessToken.Item2 > DateTime.UtcNow;
        }

        /// <summary>
        /// Creates a ClientContext with the specified SharePoint site url and the access token.
        /// </summary>
        /// <param name="spSiteUrl">The site url.</param>
        /// <param name="accessToken">The access token.</param>
        /// <returns>A ClientContext instance.</returns>
        private static ClientContext CreateClientContext(Uri spSiteUrl, string accessToken)
        {
            if (spSiteUrl != null && !string.IsNullOrEmpty(accessToken))
            {
                return TokenHelper.GetClientContextWithAccessToken(spSiteUrl.AbsoluteUri, accessToken);
            }

            return null;
        }
    }

    /// <summary>
    /// Redirection status.
    /// </summary>
    public enum RedirectionStatus
    {
        Ok,
        ShouldRedirect,
        CanNotRedirect
    }

    /// <summary>
    /// Provides SharePointContext instances.
    /// </summary>
    public abstract class SharePointContextProvider
    {
        private static SharePointContextProvider current;

        /// <summary>
        /// The current SharePointContextProvider instance.
        /// </summary>
        public static SharePointContextProvider Current
        {
            get { return SharePointContextProvider.current; }
        }

        /// <summary>
        /// Initializes the default SharePointContextProvider instance.
        /// </summary>
        static SharePointContextProvider()
        {
            // Choose ACS (low-trust) or S2S (high-trust) mode based on the app configuration.
            if (!TokenHelper.IsHighTrustApp())
            {
                SharePointContextProvider.current = new SharePointAcsContextProvider();
            }
            else
            {
                SharePointContextProvider.current = new SharePointHighTrustContextProvider();
            }
        }

        /// <summary>
        /// Registers the specified SharePointContextProvider instance as current.
        /// It should be called by Application_Start() in Global.asax.
        /// </summary>
        /// <param name="provider">The SharePointContextProvider to be set as current.</param>
        public static void Register(SharePointContextProvider provider)
        {
            if (provider == null)
            {
                throw new ArgumentNullException("provider");
            }

            SharePointContextProvider.current = provider;
        }

        /// <summary>
        /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
        /// <returns>Redirection status.</returns>
        public static RedirectionStatus CheckRedirectionStatus(HttpContextBase httpContext, out Uri redirectUrl)
        {
            if (httpContext == null)
            {
                throw new ArgumentNullException("httpContext");
            }

            redirectUrl = null;

            if (SharePointContextProvider.Current.GetSharePointContext(httpContext) != null)
            {
                return RedirectionStatus.Ok;
            }

            const string SPHasRedirectedToSharePointKey = "SPHasRedirectedToSharePoint";

            // Guard against a redirect loop: if we already bounced through SharePoint once
            // and still have no valid context, give up instead of redirecting again.
            if (!string.IsNullOrEmpty(httpContext.Request.QueryString[SPHasRedirectedToSharePointKey]))
            {
                return RedirectionStatus.CanNotRedirect;
            }

            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);

            if (spHostUrl == null)
            {
                return RedirectionStatus.CanNotRedirect;
            }

            // A POST body would be lost across the redirect round-trip, so don't redirect POSTs.
            if (StringComparer.OrdinalIgnoreCase.Equals(httpContext.Request.HttpMethod, "POST"))
            {
                return RedirectionStatus.CanNotRedirect;
            }

            Uri requestUrl = httpContext.Request.Url;

            var queryNameValueCollection = HttpUtility.ParseQueryString(requestUrl.Query);

            // Removes the values that are included in {StandardTokens}, as {StandardTokens} will be inserted at the beginning of the query string.
            queryNameValueCollection.Remove(SharePointContext.SPHostUrlKey);
            queryNameValueCollection.Remove(SharePointContext.SPAppWebUrlKey);
            queryNameValueCollection.Remove(SharePointContext.SPLanguageKey);
            queryNameValueCollection.Remove(SharePointContext.SPClientTagKey);
            queryNameValueCollection.Remove(SharePointContext.SPProductNumberKey);

            // Adds SPHasRedirectedToSharePoint=1.
            queryNameValueCollection.Add(SPHasRedirectedToSharePointKey, "1");

            UriBuilder returnUrlBuilder = new UriBuilder(requestUrl);
            returnUrlBuilder.Query = queryNameValueCollection.ToString();

            // Inserts StandardTokens.
            const string StandardTokens = "{StandardTokens}";
            string returnUrlString = returnUrlBuilder.Uri.AbsoluteUri;
            returnUrlString = returnUrlString.Insert(returnUrlString.IndexOf("?") + 1, StandardTokens + "&");

            // Constructs redirect url.
            string redirectUrlString = TokenHelper.GetAppContextTokenRequestUrl(spHostUrl.AbsoluteUri, Uri.EscapeDataString(returnUrlString));

            redirectUrl = new Uri(redirectUrlString, UriKind.Absolute);

            return RedirectionStatus.ShouldRedirect;
        }

        /// <summary>
        /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
        /// <returns>Redirection status.</returns>
        public static RedirectionStatus CheckRedirectionStatus(HttpContext httpContext, out Uri redirectUrl)
        {
            return CheckRedirectionStatus(new HttpContextWrapper(httpContext), out redirectUrl);
        }

        /// <summary>
        /// Creates a SharePointContext instance with the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The HTTP request.</param>
        /// <returns>The SharePointContext instance.
        /// Returns <c>null</c> if errors occur.</returns>
        public SharePointContext CreateSharePointContext(HttpRequestBase httpRequest)
        {
            if (httpRequest == null)
            {
                throw new ArgumentNullException("httpRequest");
            }

            // SPHostUrl
            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpRequest);

            if (spHostUrl == null)
            {
                return null;
            }

            // SPAppWebUrl
            // Unlike the host url, a missing or invalid app web url is tolerated (treated as null).
            string spAppWebUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SharePointContext.SPAppWebUrlKey]);
            Uri spAppWebUrl;
            if (!Uri.TryCreate(spAppWebUrlString, UriKind.Absolute, out spAppWebUrl) ||
                !(spAppWebUrl.Scheme == Uri.UriSchemeHttp || spAppWebUrl.Scheme == Uri.UriSchemeHttps))
            {
                spAppWebUrl = null;
            }

            // SPLanguage
            string spLanguage = httpRequest.QueryString[SharePointContext.SPLanguageKey];

            if (string.IsNullOrEmpty(spLanguage))
            {
                return null;
            }

            // SPClientTag
            string spClientTag = httpRequest.QueryString[SharePointContext.SPClientTagKey];

            if (string.IsNullOrEmpty(spClientTag))
            {
                return null;
            }

            // SPProductNumber
            string spProductNumber = httpRequest.QueryString[SharePointContext.SPProductNumberKey];

            if (string.IsNullOrEmpty(spProductNumber))
            {
                return null;
            }

            return CreateSharePointContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, httpRequest);
        }

        /// <summary>
        /// Creates a SharePointContext instance with the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The HTTP request.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
        public SharePointContext CreateSharePointContext(HttpRequest httpRequest)
        {
            return CreateSharePointContext(new HttpRequestWrapper(httpRequest));
        }

        /// <summary>
        /// Gets a SharePointContext instance associated with the specified HTTP context.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>The SharePointContext instance.
        /// Returns <c>null</c> if not found and a new instance can't be created.</returns>
        public SharePointContext GetSharePointContext(HttpContextBase httpContext)
        {
            if (httpContext == null)
            {
                throw new ArgumentNullException("httpContext");
            }

            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);

            if (spHostUrl == null)
            {
                return null;
            }

            // Prefer the previously saved context; if it is missing or no longer valid
            // for this request, build a fresh one and save it for subsequent requests.
            SharePointContext spContext = LoadSharePointContext(httpContext);

            if (spContext == null || !ValidateSharePointContext(spContext, httpContext))
            {
                spContext = CreateSharePointContext(httpContext.Request);

                if (spContext != null)
                {
                    SaveSharePointContext(spContext, httpContext);
                }
            }

            return spContext;
        }

        /// <summary>
        /// Gets a SharePointContext instance associated with the specified HTTP context.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
        public SharePointContext GetSharePointContext(HttpContext httpContext)
        {
            return GetSharePointContext(new HttpContextWrapper(httpContext));
        }

        /// <summary>
        /// Creates a SharePointContext instance.
        /// </summary>
        /// <param name="spHostUrl">The SharePoint host url.</param>
        /// <param name="spAppWebUrl">The SharePoint app web url.</param>
        /// <param name="spLanguage">The SharePoint language.</param>
        /// <param name="spClientTag">The SharePoint client tag.</param>
        /// <param name="spProductNumber">The SharePoint product number.</param>
        /// <param name="httpRequest">The HTTP request.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
        protected abstract SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest);

        /// <summary>
        /// Validates if the given SharePointContext can be used with the specified HTTP context.
        /// </summary>
        /// <param name="spContext">The SharePointContext.</param>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>True if the given SharePointContext can be used with the specified HTTP context.</returns>
        protected abstract bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext);

        /// <summary>
        /// Loads the SharePointContext instance associated with the specified HTTP context.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if not found.</returns>
        protected abstract SharePointContext LoadSharePointContext(HttpContextBase httpContext);

        /// <summary>
        /// Saves the specified SharePointContext instance associated with the specified HTTP context.
        /// <c>null</c> is accepted for clearing the SharePointContext instance associated with the HTTP context.
        /// </summary>
        /// <param name="spContext">The SharePointContext instance to be saved, or <c>null</c>.</param>
        /// <param name="httpContext">The HTTP context.</param>
        protected abstract void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
    }

    #region ACS

    /// <summary>
    /// Encapsulates all the information from SharePoint in ACS mode.
    /// </summary>
    public class SharePointAcsContext : SharePointContext
    {
        private readonly string contextToken;
        private readonly SharePointContextToken contextTokenObj;

        /// <summary>
        /// The context token.
        /// </summary>
        public string ContextToken
        {
            // An expired context token is reported as absent (null).
            get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextToken : null; }
        }

        /// <summary>
        /// The context token's "CacheKey" claim.
        /// </summary>
        public string CacheKey
        {
            get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.CacheKey : null; }
        }

        /// <summary>
        /// The context token's "refreshtoken" claim.
        /// </summary>
        public string RefreshToken
        {
            get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.RefreshToken : null; }
        }

        /// <summary>
        /// The user access token for the SharePoint host, obtained from ACS via the context token.
        /// </summary>
        public override string UserAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                                            () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPHostUrl.Authority));
            }
        }

        /// <summary>
        /// The user access token for the SharePoint app web, or <c>null</c> if there is no app web.
        /// </summary>
        public override string UserAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPAppWebUrl.Authority));
            }
        }

        /// <summary>
        /// The app only access token for the SharePoint host.
        /// </summary>
        public override string AppOnlyAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                                            () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPHostUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPHostUrl)));
            }
        }

        /// <summary>
        /// The app only access token for the SharePoint app web, or <c>null</c> if there is no app web.
        /// </summary>
        public override string AppOnlyAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPAppWebUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPAppWebUrl)));
            }
        }

        /// <summary>
        /// Constructor. Requires both the raw context token string and its parsed form.
        /// </summary>
        public SharePointAcsContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, string contextToken, SharePointContextToken contextTokenObj)
            : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
        {
            if (string.IsNullOrEmpty(contextToken))
            {
                throw new ArgumentNullException("contextToken");
            }

            if (contextTokenObj == null)
            {
                throw new ArgumentNullException("contextTokenObj");
            }

            this.contextToken = contextToken;
            this.contextTokenObj = contextTokenObj;
        }

        /// <summary>
        /// Ensures the access token is valid and returns it.
        /// </summary>
        /// <param name="accessToken">The access token to verify.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        /// <returns>The access token string.</returns>
        private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
        {
            RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);

            return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
        }

        /// <summary>
        /// Renews the access token if it is not valid.
        /// </summary>
        /// <param name="accessToken">The access token to renew.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
        {
            if (IsAccessTokenValid(accessToken))
            {
                return;
            }

            try
            {
                OAuth2AccessTokenResponse oAuth2AccessTokenResponse = tokenRenewalHandler();

                DateTime expiresOn = oAuth2AccessTokenResponse.ExpiresOn;

                if ((expiresOn - oAuth2AccessTokenResponse.NotBefore) > AccessTokenLifetimeTolerance)
                {
                    // Make the access token get renewed a bit earlier than the time when it expires
                    // so that the calls to SharePoint with it will have enough time to complete successfully.
                    expiresOn -= AccessTokenLifetimeTolerance;
                }

                accessToken = Tuple.Create(oAuth2AccessTokenResponse.AccessToken, expiresOn);
            }
            catch (WebException)
            {
                // Renewal failures are swallowed here; callers observe them as a null token.
            }
        }
    }

    /// <summary>
    /// Default provider for SharePointAcsContext.
    /// </summary>
    public class SharePointAcsContextProvider : SharePointContextProvider
    {
        // Session key holding the saved context, and the cookie name carrying the token cache key.
        private const string SPContextKey = "SPContext";
        private const string SPCacheKeyKey = "SPCacheKey";

        protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
        {
            string contextTokenString = TokenHelper.GetContextTokenFromRequest(httpRequest);

            if (string.IsNullOrEmpty(contextTokenString))
            {
                return null;
            }

            SharePointContextToken contextToken = null;
            try
            {
                // Validates signature and audience against this web app's authority.
                contextToken = TokenHelper.ReadAndValidateContextToken(contextTokenString, httpRequest.Url.Authority);
            }
            catch (WebException)
            {
                return null;
            }
            catch (AudienceUriValidationFailedException)
            {
                return null;
            }

            return new SharePointAcsContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, contextTokenString, contextToken);
        }

        protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
        {
            SharePointAcsContext spAcsContext = spContext as SharePointAcsContext;

            if (spAcsContext != null)
            {
                Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
                string contextToken = TokenHelper.GetContextTokenFromRequest(httpContext.Request);
                HttpCookie spCacheKeyCookie = httpContext.Request.Cookies[SPCacheKeyKey];
                string spCacheKey = spCacheKeyCookie != null ? spCacheKeyCookie.Value : null;

                // The saved context is reusable only when the host, cache key cookie and
                // (if present on this request) context token all match the saved values.
                return spHostUrl == spAcsContext.SPHostUrl &&
                       !string.IsNullOrEmpty(spAcsContext.CacheKey) &&
                       spCacheKey == spAcsContext.CacheKey &&
                       !string.IsNullOrEmpty(spAcsContext.ContextToken) &&
                       (string.IsNullOrEmpty(contextToken) || contextToken == spAcsContext.ContextToken);
            }

            return false;
        }

        protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
        {
            return httpContext.Session[SPContextKey] as SharePointAcsContext;
        }

        protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
        {
            SharePointAcsContext spAcsContext = spContext as SharePointAcsContext;

            if (spAcsContext != null)
            {
                HttpCookie spCacheKeyCookie = new HttpCookie(SPCacheKeyKey)
                {
                    Value = spAcsContext.CacheKey,
                    Secure = true,
                    HttpOnly = true
                };

                httpContext.Response.AppendCookie(spCacheKeyCookie);
            }

            // Saving null clears the stored context.
            httpContext.Session[SPContextKey] = spAcsContext;
        }
    }

    #endregion ACS

    #region HighTrust

    /// <summary>
    /// Encapsulates all the information from SharePoint in HighTrust mode.
    /// </summary>
    public class SharePointHighTrustContext : SharePointContext
    {
        private readonly WindowsIdentity logonUserIdentity;

        /// <summary>
        /// The Windows identity for the current user.
        /// </summary>
        public WindowsIdentity LogonUserIdentity
        {
            get { return this.logonUserIdentity; }
        }

        /// <summary>
        /// The user access token for the SharePoint host, built via S2S from the Windows identity.
        /// </summary>
        public override string UserAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, this.LogonUserIdentity));
            }
        }

        /// <summary>
        /// The user access token for the SharePoint app web, or <c>null</c> if there is no app web.
        /// </summary>
        public override string UserAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, this.LogonUserIdentity));
            }
        }

        /// <summary>
        /// The app only access token for the SharePoint host (a null identity yields an app-only token).
        /// </summary>
        public override string AppOnlyAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, null));
            }
        }

        /// <summary>
        /// The app only access token for the SharePoint app web, or <c>null</c> if there is no app web.
        /// </summary>
        public override string AppOnlyAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, null));
            }
        }

        /// <summary>
        /// Constructor. Requires the authenticated Windows identity of the current user.
        /// </summary>
        public SharePointHighTrustContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, WindowsIdentity logonUserIdentity)
            : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
        {
            if (logonUserIdentity == null)
            {
                throw new ArgumentNullException("logonUserIdentity");
            }

            this.logonUserIdentity = logonUserIdentity;
        }

        /// <summary>
        /// Ensures the access token is valid and returns it.
        /// </summary>
        /// <param name="accessToken">The access token to verify.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        /// <returns>The access token string.</returns>
        private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
        {
            RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);

            return IsAccessTokenValid(accessToken) ?
accessToken.Item1 : null; } /// <summary> /// Renews the access token if it is not valid. /// </summary> /// <param name="accessToken">The access token to renew.</param> /// <param name="tokenRenewalHandler">The token renewal handler.</param> private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler) { if (IsAccessTokenValid(accessToken)) { return; } DateTime expiresOn = DateTime.UtcNow.Add(TokenHelper.HighTrustAccessTokenLifetime); if (TokenHelper.HighTrustAccessTokenLifetime > AccessTokenLifetimeTolerance) { // Make the access token get renewed a bit earlier than the time when it expires // so that the calls to SharePoint with it will have enough time to complete successfully. expiresOn -= AccessTokenLifetimeTolerance; } accessToken = Tuple.Create(tokenRenewalHandler(), expiresOn); } } /// <summary> /// Default provider for SharePointHighTrustContext. /// </summary> public class SharePointHighTrustContextProvider : SharePointContextProvider { private const string SPContextKey = "SPContext"; protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest) { WindowsIdentity logonUserIdentity = httpRequest.LogonUserIdentity; if (logonUserIdentity == null || !logonUserIdentity.IsAuthenticated || logonUserIdentity.IsGuest || logonUserIdentity.User == null) { return null; } return new SharePointHighTrustContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, logonUserIdentity); } protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext) { SharePointHighTrustContext spHighTrustContext = spContext as SharePointHighTrustContext; if (spHighTrustContext != null) { Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request); WindowsIdentity logonUserIdentity = httpContext.Request.LogonUserIdentity; return spHostUrl == 
spHighTrustContext.SPHostUrl && logonUserIdentity != null && logonUserIdentity.IsAuthenticated && !logonUserIdentity.IsGuest && logonUserIdentity.User == spHighTrustContext.LogonUserIdentity.User; } return false; } protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext) { return httpContext.Session[SPContextKey] as SharePointHighTrustContext; } protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext) { httpContext.Session[SPContextKey] = spContext as SharePointHighTrustContext; } } #endregion HighTrust }
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ''AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.IO;
using System.Reflection;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using RegionFlags = OpenSim.Framework.RegionFlags;

namespace OpenSim.Data.MSSQL
{
    /// <summary>
    /// A MSSQL Interface for the Region Server.
    /// </summary>
    public class MSSQLRegionData : IRegionData
    {
        // Table name this store reads/writes. It is configuration-supplied (not user input)
        // and is always emitted inside [brackets] in the generated SQL.
        private string m_Realm;

        // Lazily-initialized cache of the realm table's column names, used by RunCommand
        // to copy "extra" columns into RegionData.Data.
        private List<string> m_ColumnNames = null;

        private string m_ConnectionString;
        private MSSQLManager m_database;

        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// Opens the store against <paramref name="connectionString"/> and runs any pending
        /// "GridStore" schema migrations for the <paramref name="realm"/> table.
        /// </summary>
        public MSSQLRegionData(string connectionString, string realm)
        {
            m_Realm = realm;
            m_ConnectionString = connectionString;
            m_database = new MSSQLManager(connectionString);

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            {
                conn.Open();
                Migration m = new Migration(conn, GetType().Assembly, "GridStore");
                m.Update();
            }
        }

        /// <summary>
        /// Returns the regions whose name matches the given LIKE pattern, ordered by name,
        /// optionally restricted to one scope.
        /// </summary>
        public List<RegionData> Get(string regionName, UUID scopeID)
        {
            string sql = "select * from [" + m_Realm + "] where regionName like @regionName";
            if (scopeID != UUID.Zero)
                sql += " and ScopeID = @scopeID";
            sql += " order by regionName";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                cmd.Parameters.Add(m_database.CreateParameter("@regionName", regionName));
                cmd.Parameters.Add(m_database.CreateParameter("@scopeID", scopeID));
                conn.Open();
                return RunCommand(cmd);
            }
        }

        /// <summary>
        /// Returns the single region at grid position (<paramref name="posX"/>, <paramref name="posY"/>),
        /// or null when no region exists there.
        /// </summary>
        public RegionData Get(int posX, int posY, UUID scopeID)
        {
            string sql = "select * from [" + m_Realm + "] where locX = @posX and locY = @posY";
            if (scopeID != UUID.Zero)
                sql += " and ScopeID = @scopeID";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                // Pass the coordinates as ints, consistent with the range overload below
                // (previously they were converted to strings, forcing a server-side convert).
                cmd.Parameters.Add(m_database.CreateParameter("@posX", posX));
                cmd.Parameters.Add(m_database.CreateParameter("@posY", posY));
                cmd.Parameters.Add(m_database.CreateParameter("@scopeID", scopeID));
                conn.Open();

                List<RegionData> ret = RunCommand(cmd);
                if (ret.Count == 0)
                    return null;

                return ret[0];
            }
        }

        /// <summary>
        /// Returns the region with the given UUID, or null when not found.
        /// </summary>
        public RegionData Get(UUID regionID, UUID scopeID)
        {
            string sql = "select * from [" + m_Realm + "] where uuid = @regionID";
            if (scopeID != UUID.Zero)
                sql += " and ScopeID = @scopeID";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                cmd.Parameters.Add(m_database.CreateParameter("@regionID", regionID));
                cmd.Parameters.Add(m_database.CreateParameter("@scopeID", scopeID));
                conn.Open();

                List<RegionData> ret = RunCommand(cmd);
                if (ret.Count == 0)
                    return null;

                return ret[0];
            }
        }

        /// <summary>
        /// Returns all regions whose location falls inside the inclusive rectangle
        /// [startX..endX] x [startY..endY], optionally restricted to one scope.
        /// </summary>
        public List<RegionData> Get(int startX, int startY, int endX, int endY, UUID scopeID)
        {
            string sql = "select * from [" + m_Realm + "] where locX between @startX and @endX and locY between @startY and @endY";
            if (scopeID != UUID.Zero)
                sql += " and ScopeID = @scopeID";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                cmd.Parameters.Add(m_database.CreateParameter("@startX", startX));
                cmd.Parameters.Add(m_database.CreateParameter("@startY", startY));
                cmd.Parameters.Add(m_database.CreateParameter("@endX", endX));
                cmd.Parameters.Add(m_database.CreateParameter("@endY", endY));
                cmd.Parameters.Add(m_database.CreateParameter("@scopeID", scopeID));
                conn.Open();
                return RunCommand(cmd);
            }
        }

        /// <summary>
        /// Executes <paramref name="cmd"/> (whose connection must already be open) and
        /// materializes each row into a RegionData. Well-known columns are mapped to the
        /// typed fields; every other column is copied into RegionData.Data as a string.
        /// </summary>
        public List<RegionData> RunCommand(SqlCommand cmd)
        {
            List<RegionData> retList = new List<RegionData>();

            // Dispose the reader when done; the previous implementation leaked it,
            // leaving the command's connection blocked until finalization.
            using (SqlDataReader result = cmd.ExecuteReader())
            {
                while (result.Read())
                {
                    RegionData ret = new RegionData();
                    ret.Data = new Dictionary<string, object>();

                    UUID regionID;
                    UUID.TryParse(result["uuid"].ToString(), out regionID);
                    ret.RegionID = regionID;

                    UUID scope;
                    UUID.TryParse(result["ScopeID"].ToString(), out scope);
                    ret.ScopeID = scope;

                    ret.RegionName = result["regionName"].ToString();
                    ret.posX = Convert.ToInt32(result["locX"]);
                    ret.posY = Convert.ToInt32(result["locY"]);
                    ret.sizeX = Convert.ToInt32(result["sizeX"]);
                    ret.sizeY = Convert.ToInt32(result["sizeY"]);

                    // Discover the table's column list once and cache it for later calls.
                    if (m_ColumnNames == null)
                    {
                        m_ColumnNames = new List<string>();

                        DataTable schemaTable = result.GetSchemaTable();
                        foreach (DataRow row in schemaTable.Rows)
                            m_ColumnNames.Add(row["ColumnName"].ToString());
                    }

                    // Copy the remaining (non-typed) columns into the generic Data bag.
                    foreach (string s in m_ColumnNames)
                    {
                        if (s == "uuid")
                            continue;
                        if (s == "ScopeID")
                            continue;
                        if (s == "regionName")
                            continue;
                        if (s == "locX")
                            continue;
                        if (s == "locY")
                            continue;

                        ret.Data[s] = result[s].ToString();
                    }

                    retList.Add(ret);
                }
            }

            return retList;
        }

        /// <summary>
        /// Upserts a region row: tries an UPDATE first and falls back to an INSERT when no
        /// row was affected. Returns true unless the INSERT ran and affected no rows.
        /// Well-known keys are stripped from data.Data because they are written through
        /// dedicated parameters.
        /// </summary>
        public bool Store(RegionData data)
        {
            if (data.Data.ContainsKey("uuid"))
                data.Data.Remove("uuid");
            if (data.Data.ContainsKey("ScopeID"))
                data.Data.Remove("ScopeID");
            if (data.Data.ContainsKey("regionName"))
                data.Data.Remove("regionName");
            if (data.Data.ContainsKey("posX"))
                data.Data.Remove("posX");
            if (data.Data.ContainsKey("posY"))
                data.Data.Remove("posY");
            if (data.Data.ContainsKey("sizeX"))
                data.Data.Remove("sizeX");
            if (data.Data.ContainsKey("sizeY"))
                data.Data.Remove("sizeY");
            if (data.Data.ContainsKey("locX"))
                data.Data.Remove("locX");
            if (data.Data.ContainsKey("locY"))
                data.Data.Remove("locY");

            string[] fields = new List<string>(data.Data.Keys).ToArray();

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand())
            {
                // NOTE: field names originate from the region's own data dictionary and are
                // emitted inside [brackets]; the values themselves are always parameterized.
                string update = "update [" + m_Realm + "] set locX=@posX, locY=@posY, sizeX=@sizeX, sizeY=@sizeY ";

                foreach (string field in fields)
                {
                    update += ", ";
                    update += "[" + field + "] = @" + field;

                    cmd.Parameters.Add(m_database.CreateParameter("@" + field, data.Data[field]));
                }

                update += " where uuid = @regionID";

                if (data.ScopeID != UUID.Zero)
                    update += " and ScopeID = @scopeID";

                cmd.CommandText = update;
                cmd.Connection = conn;
                cmd.Parameters.Add(m_database.CreateParameter("@regionID", data.RegionID));
                cmd.Parameters.Add(m_database.CreateParameter("@regionName", data.RegionName));
                cmd.Parameters.Add(m_database.CreateParameter("@scopeID", data.ScopeID));
                cmd.Parameters.Add(m_database.CreateParameter("@posX", data.posX));
                cmd.Parameters.Add(m_database.CreateParameter("@posY", data.posY));
                cmd.Parameters.Add(m_database.CreateParameter("@sizeX", data.sizeX));
                cmd.Parameters.Add(m_database.CreateParameter("@sizeY", data.sizeY));
                conn.Open();

                try
                {
                    if (cmd.ExecuteNonQuery() < 1)
                    {
                        // No row updated: the region does not exist yet, so insert it,
                        // reusing the already-populated parameter collection.
                        string insert = "insert into [" + m_Realm + "] ([uuid], [ScopeID], [locX], [locY], [sizeX], [sizeY], [regionName], [" +
                                        String.Join("], [", fields) +
                                        "]) values (@regionID, @scopeID, @posX, @posY, @sizeX, @sizeY, @regionName, @" + String.Join(", @", fields) + ")";

                        cmd.CommandText = insert;

                        try
                        {
                            if (cmd.ExecuteNonQuery() < 1)
                            {
                                return false;
                            }
                        }
                        catch (Exception ex)
                        {
                            m_log.Warn("[MSSQL Grid]: Error inserting into Regions table: " + ex.Message + ", INSERT sql: " + insert);
                        }
                    }
                }
                catch (Exception ex)
                {
                    m_log.Warn("[MSSQL Grid]: Error updating Regions table: " + ex.Message + ", UPDATE sql: " + update);
                }
            }

            return true;
        }

        /// <summary>
        /// Sets a single column of a region row. Returns true when a row was updated.
        /// NOTE(review): <paramref name="item"/> is spliced into the SQL as a column name;
        /// callers are expected to pass trusted column identifiers only.
        /// </summary>
        public bool SetDataItem(UUID regionID, string item, string value)
        {
            string sql = "update [" + m_Realm + "] set [" + item + "] = @" + item + " where uuid = @UUID";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                cmd.Parameters.Add(m_database.CreateParameter("@" + item, value));
                cmd.Parameters.Add(m_database.CreateParameter("@UUID", regionID));
                conn.Open();

                if (cmd.ExecuteNonQuery() > 0)
                    return true;
            }

            return false;
        }

        /// <summary>
        /// Deletes the region with the given UUID. Returns true when a row was removed.
        /// </summary>
        public bool Delete(UUID regionID)
        {
            string sql = "delete from [" + m_Realm + "] where uuid = @UUID";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                cmd.Parameters.Add(m_database.CreateParameter("@UUID", regionID));
                conn.Open();

                if (cmd.ExecuteNonQuery() > 0)
                    return true;
            }

            return false;
        }

        /// <summary>
        /// Returns all regions flagged as default regions in the given scope.
        /// </summary>
        public List<RegionData> GetDefaultRegions(UUID scopeID)
        {
            return Get((int)RegionFlags.DefaultRegion, scopeID);
        }

        /// <summary>
        /// Returns all fallback regions in the given scope, nearest to (x, y) first.
        /// </summary>
        public List<RegionData> GetFallbackRegions(UUID scopeID, int x, int y)
        {
            List<RegionData> regions = Get((int)RegionFlags.FallbackRegion, scopeID);
            RegionDataDistanceCompare distanceComparer = new RegionDataDistanceCompare(x, y);
            regions.Sort(distanceComparer);
            return regions;
        }

        /// <summary>
        /// Returns all hyperlink regions in the given scope.
        /// </summary>
        public List<RegionData> GetHyperlinks(UUID scopeID)
        {
            return Get((int)RegionFlags.Hyperlink, scopeID);
        }

        /// <summary>
        /// Returns all regions having any of the given flag bits set, optionally
        /// restricted to one scope.
        /// </summary>
        private List<RegionData> Get(int regionFlags, UUID scopeID)
        {
            // Parameterize the flags value instead of concatenating it into the SQL text.
            string sql = "SELECT * FROM [" + m_Realm + "] WHERE (flags & @flags) <> 0";
            if (scopeID != UUID.Zero)
                sql += " AND ScopeID = @scopeID";

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(sql, conn))
            {
                cmd.Parameters.Add(m_database.CreateParameter("@flags", regionFlags));
                cmd.Parameters.Add(m_database.CreateParameter("@scopeID", scopeID));
                conn.Open();
                return RunCommand(cmd);
            }
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="SkinBuilder.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation.  All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

/*
 * Classes related to templated control support
 *
 * Copyright (c) 1999 Microsoft Corporation
 */

namespace System.Web.UI {

    using System;
    using System.Collections;
    using System.Collections.Specialized;
    using System.ComponentModel;
    using System.ComponentModel.Design;
    using System.Globalization;
    using System.IO;
    using System.Reflection;
    using System.Web.Compilation;
    using System.Web.UI.WebControls;
    using System.Web.Util;
#if !FEATURE_PAL
    using System.Web.UI.Design;
#endif // !FEATURE_PAL

    /// <devdoc>
    /// Applies a parsed skin (a ControlBuilder produced from a .skin file) to a live
    /// control instance: simple, complex, bound and template properties are copied
    /// from the skin onto the control, with relative URL properties rebased onto the
    /// theme path.
    /// </devdoc>
    public sealed class SkinBuilder : ControlBuilder {

        // Theme that owns this skin (kept for context; not read directly in this class).
        private ThemeProvider _provider;
        // The control instance being themed.
        private Control _control;
        // Parsed builder for the skin definition to apply.
        private ControlBuilder _skinBuilder;
        // Virtual path of the theme; prepended to relative URL property values.
        private string _themePath;

        internal static readonly Object[] EmptyParams = new Object[0];

        public SkinBuilder(ThemeProvider provider, Control control, ControlBuilder skinBuilder, string themePath) {
            _provider = provider;
            _control = control;
            _skinBuilder = skinBuilder;
            _themePath = themePath;
        }

        // Applies template properties from the skin, but only where the control does
        // not already have a template set (skins must not clobber developer templates).
        private void ApplyTemplateProperties(Control control) {
            object[] parameters = new object[1];
            ICollection entries = GetFilteredPropertyEntrySet(_skinBuilder.TemplatePropertyEntries);

            foreach (TemplatePropertyEntry entry in entries) {
                try {
                    object originalValue = FastPropertyAccessor.GetProperty(control, entry.Name, InDesigner);

                    // Only apply the skin's template when none was set on the control.
                    if (originalValue == null) {
                        ControlBuilder controlBuilder = ((TemplatePropertyEntry)entry).Builder;
                        controlBuilder.SetServiceProvider(ServiceProvider);
                        try {
                            object objectValue = controlBuilder.BuildObject(true);
                            parameters[0] = objectValue;
                        }
                        finally {
                            // Always detach the service provider, even when building throws.
                            controlBuilder.SetServiceProvider(null);
                        }

                        MethodInfo methodInfo = entry.PropertyInfo.GetSetMethod();
                        Util.InvokeMethod(methodInfo, control, parameters);
                    }
                }
                catch (Exception e) {
                    // Theming is best-effort: a bad skin entry must not break the page.
                    Debug.Fail(e.Message);
                }
#pragma warning disable 1058
                // Also swallow non-CLS exceptions thrown from user code.
                catch {
                }
#pragma warning restore 1058
            }
        }

        // Applies complex (sub-object) properties: read-only ones are initialized in
        // place, writable ones are built from the skin and assigned.
        private void ApplyComplexProperties(Control control) {
            ICollection entries = GetFilteredPropertyEntrySet(_skinBuilder.ComplexPropertyEntries);

            foreach (ComplexPropertyEntry entry in entries) {
                ControlBuilder builder = entry.Builder;
                if (builder != null) {
                    string propertyName = entry.Name;

                    if (entry.ReadOnly) {
                        // Read-only property: fill in the existing instance rather than replacing it.
                        object objectValue = FastPropertyAccessor.GetProperty(control, propertyName, InDesigner);
                        if (objectValue == null) continue;

                        entry.Builder.SetServiceProvider(ServiceProvider);
                        try {
                            entry.Builder.InitObject(objectValue);
                        }
                        finally {
                            entry.Builder.SetServiceProvider(null);
                        }
                    }
                    else {
                        object childObj;
                        string actualPropName;
                        object value = entry.Builder.BuildObject(true);

                        // Make the UrlProperty based on theme path for control themes(Must be a string)
                        PropertyDescriptor desc = PropertyMapper.GetMappedPropertyDescriptor(control,
                            PropertyMapper.MapNameToPropertyName(propertyName), out childObj, out actualPropName, InDesigner);
                        if (desc != null) {
                            string str = value as string;
                            if (value != null && desc.Attributes[typeof(UrlPropertyAttribute)] != null && UrlPath.IsRelativeUrl(str)) {
                                // Rebase relative URL values onto the theme's own path.
                                value = _themePath + str;
                            }
                        }

                        FastPropertyAccessor.SetProperty(childObj, propertyName, value, InDesigner);
                    }
                }
            }
        }

        // Applies scalar properties from the skin. A themed value is only applied when
        // the control still has the property at its default value (a developer-set
        // value wins over the theme).
        private void ApplySimpleProperties(Control control) {
            ICollection entries = GetFilteredPropertyEntrySet(_skinBuilder.SimplePropertyEntries);

            foreach (SimplePropertyEntry entry in entries) {
                try {
                    if (entry.UseSetAttribute) {
                        // Expando/attribute-style property: set through IAttributeAccessor.
                        SetSimpleProperty(entry, control);
                        continue;
                    }

                    string propertyName = PropertyMapper.MapNameToPropertyName(entry.Name);
                    object childObj;
                    string actualPropName;
                    PropertyDescriptor desc = PropertyMapper.GetMappedPropertyDescriptor(control, propertyName,
                        out childObj, out actualPropName, InDesigner);

                    if (desc != null) {
                        DefaultValueAttribute defValAttr = (DefaultValueAttribute)desc.Attributes[typeof(DefaultValueAttribute)];
                        object currentValue = desc.GetValue(childObj);

                        // Only apply the themed value if different from default value.
                        if (defValAttr != null && !object.Equals(defValAttr.Value, currentValue)) {
                            continue;
                        }

                        object value = entry.Value;

                        // Make the UrlProperty based on theme path for control themes.
                        string str = value as string;
                        if (value != null && desc.Attributes[typeof(UrlPropertyAttribute)] != null && UrlPath.IsRelativeUrl(str)) {
                            value = _themePath + str;
                        }

                        // NOTE(review): the theme-path-adjusted 'value' computed above is not
                        // passed anywhere — SetSimpleProperty uses entry.Value, so the URL
                        // rebasing for simple properties appears to have no effect; confirm
                        // against SetSimpleProperty's implementation.
                        SetSimpleProperty(entry, control);
                    }
                }
                catch (Exception e) {
                    // Theming is best-effort: a bad skin entry must not break the page.
                    Debug.Fail(e.Message);
                }
#pragma warning disable 1058
                // Also swallow non-CLS exceptions thrown from user code.
                catch {
                }
#pragma warning restore 1058
            }
        }

        // Applies data-binding entries declared on the skin.
        private void ApplyBoundProperties(Control control) {
            DataBindingCollection dataBindings = null;
            IAttributeAccessor attributeAccessor = null;

            // If there are no filters in the picture, use the entries as is
            ICollection entries = GetFilteredPropertyEntrySet(_skinBuilder.BoundPropertyEntries);
            foreach (BoundPropertyEntry entry in entries) {
                InitBoundProperty(control, entry, ref dataBindings, ref attributeAccessor);
            }
        }

        // Registers one bound-property entry. Only plain data bindings (empty
        // expression prefix) are legal in a theme; expression builders throw.
        private void InitBoundProperty(Control control, BoundPropertyEntry entry,
            ref DataBindingCollection dataBindings, ref IAttributeAccessor attributeAccessor) {

            string expressionPrefix = entry.ExpressionPrefix;
            // If we're in the designer, add the bound properties to the collections
            if (expressionPrefix.Length == 0) {
                if (dataBindings == null && control is IDataBindingsAccessor) {
                    dataBindings = ((IDataBindingsAccessor)control).DataBindings;
                }
                // NOTE(review): if 'control' does not implement IDataBindingsAccessor,
                // dataBindings stays null here and the Add below throws a
                // NullReferenceException — presumably all themable controls implement
                // the interface; confirm.
                dataBindings.Add(new DataBinding(entry.Name, entry.Type, entry.Expression.Trim()));
            }
            else {
                throw new InvalidOperationException(SR.GetString(SR.ControlBuilder_ExpressionsNotAllowedInThemes));
            }
        }

        /// <devdoc>
        /// Applies the skin to the control passed at construction time and returns it.
        /// Property categories are applied in order: simple, complex, bound, template.
        /// </devdoc>
        public Control ApplyTheme() {
            if (_skinBuilder != null) {
                ApplySimpleProperties(_control);
                ApplyComplexProperties(_control);
                ApplyBoundProperties(_control);
                ApplyTemplateProperties(_control);
            }
            return _control;
        }
    }

    /// <devdoc>
    /// Design-time catalog of the skins defined by one theme: parses a theme
    /// definition and indexes the resulting ControlBuilders by control type and SkinID.
    /// </devdoc>
    public sealed class ThemeProvider {

        // Map: control Type -> (IDictionary: SkinID -> ControlBuilder).
        private IDictionary _skinBuilders;
        private string[] _cssFiles;
        private string _themeName;
        private string _themePath;
        // Hash of the theme definition text, used to detect content changes.
        private int _contentHashCode;
        private IDesignerHost _host;

        public ThemeProvider(IDesignerHost host, string name, string themeDefinition, string[] cssFiles, string themePath) {
            _themeName = name;
            _themePath = themePath;
            _cssFiles = cssFiles;
            _host = host;

            ControlBuilder themeBuilder = DesignTimeTemplateParser.ParseTheme(host, themeDefinition, themePath);
            _contentHashCode = themeDefinition.GetHashCode();

            ArrayList subBuilders = themeBuilder.SubBuilders;
            _skinBuilders = new Hashtable();

            // Index every skin builder by control type, then by SkinID
            // (case-insensitive, so skin lookup ignores SkinID casing).
            for (int i=0; i<subBuilders.Count; ++i) {
                ControlBuilder builder = subBuilders[i] as ControlBuilder;
                if (builder != null) {
                    IDictionary skins = _skinBuilders[builder.ControlType] as IDictionary;
                    if (skins == null) {
                        skins = new SortedList(StringComparer.OrdinalIgnoreCase);
                        _skinBuilders[builder.ControlType] = skins;
                    }

                    // Build a throwaway instance only to read the SkinID the builder declares.
                    Control builtControl = builder.BuildObject() as Control;
                    if (builtControl != null) {
                        skins[builtControl.SkinID] = builder;
                    }
                }
            }
        }

        // Hash of the theme definition string this provider was built from.
        public int ContentHashCode {
            get {
                return _contentHashCode;
            }
        }

        // CSS files that ship with the theme.
        public ICollection CssFiles {
            get {
                return _cssFiles;
            }
        }

        public IDesignerHost DesignerHost {
            get {
                return _host;
            }
        }

        public string ThemeName {
            get {
                return _themeName;
            }
        }

        // Returns the SkinIDs available for the given control type (empty when none).
        public ICollection GetSkinsForControl(Type type) {
            IDictionary skins = _skinBuilders[type] as IDictionary;
            if (skins == null) {
                return new ArrayList();
            }

            return skins.Keys;
        }

        // Returns a SkinBuilder that applies this theme's skin for the control's
        // runtime type and SkinID, or null when no matching skin exists.
        public SkinBuilder GetSkinBuilder(Control control) {
            IDictionary skins = _skinBuilders[control.GetType()] as IDictionary;
            if (skins == null) {
                return null;
            }

            ControlBuilder builder = skins[control.SkinID] as ControlBuilder;
            if (builder == null) {
                return null;
            }

            return new SkinBuilder(this, control, builder, _themePath);
        }

        // Returns the raw SkinID -> ControlBuilder map for a control type (may be null).
        public IDictionary GetSkinControlBuildersForControlType(Type type) {
            IDictionary skins = _skinBuilders[type] as IDictionary;
            return skins;
        }
    }
}
// ==++== // // Copyright (c) Microsoft Corporation. All rights reserved. // // ==--== /*============================================================ ** ** File: RealProxy.cs ** ** ** Purpose: Defines the base class from which proxy should ** derive ** ** ===========================================================*/ namespace System.Runtime.Remoting.Proxies { using System; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Remoting; using System.Runtime.Remoting.Messaging; using System.Runtime.Remoting.Metadata; using System.Runtime.Remoting.Channels; using System.Runtime.Remoting.Activation; using System.Runtime.Remoting.Services; using System.Runtime.Serialization; using System.Runtime.Versioning; using System.Security; using System.Security.Permissions; using System.Security.Principal; using System.Threading; using System.Runtime.ConstrainedExecution; using System.Globalization; using System.Diagnostics.Contracts; // NOTE: Keep this in [....] with unmanaged enum definition in Remoting.h [Serializable] internal enum CallType { InvalidCall = 0x0, MethodCall = 0x1, ConstructorCall = 0x2 }; [Flags] internal enum RealProxyFlags { None = 0x0, RemotingProxy = 0x1, Initialized = 0x2 }; // NOTE: Keep this in [....] 
with unmanaged struct "messageData" in Remoting.h [System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)] internal struct MessageData { internal IntPtr pFrame; internal IntPtr pMethodDesc; internal IntPtr pDelegateMD; internal IntPtr pSig; internal IntPtr thGoverningType; internal int iFlags; }; [System.Security.SecurityCritical] // auto-generated_required [SecurityPermissionAttribute(SecurityAction.InheritanceDemand, Flags=SecurityPermissionFlag.Infrastructure)] [System.Runtime.InteropServices.ComVisible(true)] abstract public class RealProxy { // ************* NOTE ****** // Object.h has unmanaged structure which maps this layout // if you add/remove/change fields make sure to update the structure // in object.h also // Private members private Object _tp; private Object _identity; private MarshalByRefObject _serverObject; private RealProxyFlags _flags; internal GCHandle _srvIdentity; internal int _optFlags; internal int _domainID; // Static members private static IntPtr _defaultStub = GetDefaultStub(); private static IntPtr _defaultStubValue = new IntPtr(-1); private static Object _defaultStubData = _defaultStubValue; [System.Security.SecuritySafeCritical] // static constructors should be safe to call static RealProxy() { } // Constructor [System.Security.SecurityCritical] // auto-generated protected RealProxy(Type classToProxy) : this(classToProxy, (IntPtr)0, null) { } [System.Security.SecurityCritical] // auto-generated_required protected RealProxy(Type classToProxy, IntPtr stub, Object stubData) { if(!classToProxy.IsMarshalByRef && !classToProxy.IsInterface) { throw new ArgumentException( Environment.GetResourceString("Remoting_Proxy_ProxyTypeIsNotMBR")); } Contract.EndContractBlock(); if((IntPtr)0 == stub) { Contract.Assert((IntPtr)0 != _defaultStub, "Default stub not set up"); // The default stub checks for match of contexts defined by us stub = _defaultStub; // Start with a value of -1 because 0 is reserved for the default context stubData = 
_defaultStubData; } _tp = null; if (stubData == null) { throw new ArgumentNullException("stubdata"); } _tp = RemotingServices.CreateTransparentProxy(this, classToProxy, stub, stubData); RemotingProxy rp = this as RemotingProxy; if (rp != null) { _flags |= RealProxyFlags.RemotingProxy; } } // This is used (along the frequent path) of Invoke to avoid // casting to RemotingProxy internal bool IsRemotingProxy() { return (_flags & RealProxyFlags.RemotingProxy) == RealProxyFlags.RemotingProxy; } // This is mainly used for RemotingProxy case. It may be worthwhile // to make this virtual so extensible proxies can make use of this // (and other flags) as well. internal bool Initialized { get { return (_flags & RealProxyFlags.Initialized) == RealProxyFlags.Initialized; } set { if (value) { _flags |= RealProxyFlags.Initialized; } else { _flags &= ~RealProxyFlags.Initialized; } } } // Method to initialize the server object for x-context scenarios // in an extensible way [System.Security.SecurityCritical] // auto-generated_required [System.Runtime.InteropServices.ComVisible(true)] public IConstructionReturnMessage InitializeServerObject(IConstructionCallMessage ctorMsg) { IConstructionReturnMessage retMsg = null; if (_serverObject == null) { Type svrType = GetProxiedType(); if((ctorMsg != null) && (ctorMsg.ActivationType != svrType)) { throw new RemotingException( String.Format( CultureInfo.CurrentCulture, Environment.GetResourceString("Remoting_Proxy_BadTypeForActivation"), svrType.FullName, ctorMsg.ActivationType)); } // Create a blank object _serverObject = RemotingServices.AllocateUninitializedObject(svrType); // If the stub is the default stub, then set the server context // to be the current context. SetContextForDefaultStub(); // OK... 
we are all set to run the constructor call on the uninitialized object MarshalByRefObject proxy = (MarshalByRefObject)GetTransparentProxy(); IMethodReturnMessage msg = null; Exception e = null; if(null != ctorMsg) { msg = RemotingServices.ExecuteMessage(proxy, ctorMsg); e = msg.Exception; } else { try { RemotingServices.CallDefaultCtor(proxy); } catch(Exception excep) { e = excep; } } // Construct a return message if(null == e) { Object[] outArgs = (msg == null ? null : msg.OutArgs); int outLength = (null == outArgs ? 0 : outArgs.Length); LogicalCallContext callCtx = (msg == null ? null : msg.LogicalCallContext); retMsg = new ConstructorReturnMessage(proxy, outArgs, outLength, callCtx, ctorMsg); // setup identity SetupIdentity(); if (IsRemotingProxy()) { ((RemotingProxy) this).Initialized = true; } } else { // Exception occurred retMsg = new ConstructorReturnMessage(e, ctorMsg); } } return retMsg; } [System.Security.SecurityCritical] // auto-generated_required protected MarshalByRefObject GetUnwrappedServer() { return UnwrappedServerObject; } [System.Security.SecurityCritical] // auto-generated_required protected MarshalByRefObject DetachServer() { Object tp = GetTransparentProxy(); if (tp != null) RemotingServices.ResetInterfaceCache(tp); MarshalByRefObject server = _serverObject; _serverObject = null; server.__ResetServerIdentity(); return server; } [System.Security.SecurityCritical] // auto-generated_required protected void AttachServer(MarshalByRefObject s) { Object tp = GetTransparentProxy(); if (tp != null) RemotingServices.ResetInterfaceCache(tp); AttachServerHelper(s); } [System.Security.SecurityCritical] // auto-generated private void SetupIdentity() { if (_identity == null) { _identity = IdentityHolder.FindOrCreateServerIdentity( (MarshalByRefObject)_serverObject, null, IdOps.None); // Set the reference to the proxy in the identity object ((Identity)_identity).RaceSetTransparentProxy(GetTransparentProxy()); } } [System.Security.SecurityCritical] // 
auto-generated private void SetContextForDefaultStub() { // Check whether the stub is ours or not... if(GetStub() == _defaultStub) { // Yes.. setup the context in the TP so that // contexts can be matched correctly... Object oVal = GetStubData(this); if(oVal is IntPtr) { IntPtr iVal = (IntPtr)oVal; // Set the stub data only if it has been set to our default value, // otherwise, the user has already indicated a preference for the // stub data. if(iVal.Equals(_defaultStubValue)) { SetStubData(this, Thread.CurrentContext.InternalContextID); } } } } // Check whether the current context is the same as the // server context [System.Security.SecurityCritical] // auto-generated internal bool DoContextsMatch() { bool fMatch = false; // Check whether the stub is ours or not... if(GetStub() == _defaultStub) { // Yes.. setup the context in the TP so that // contexts can be matched correctly... Object oVal = GetStubData(this); if(oVal is IntPtr) { IntPtr iVal = (IntPtr)oVal; // Match the internal context ids... if(iVal.Equals(Thread.CurrentContext.InternalContextID)) { fMatch = true; } } } return fMatch; } // This is directly called by RemotingServices::Wrap() when it needs // to bind a proxy with an uninitialized contextBound server object [System.Security.SecurityCritical] // auto-generated internal void AttachServerHelper(MarshalByRefObject s) { if (s == null || _serverObject != null) { throw new ArgumentException(Environment.GetResourceString("ArgumentNull_Generic"), "s"); } _serverObject = s; // setup identity SetupIdentity(); } // Gets the stub pointer stashed away in the transparent proxy. 
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern IntPtr GetStub();

// Sets the stub data
[System.Security.SecurityCritical]  // auto-generated_required
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public static extern void SetStubData(RealProxy rp, Object stubData);

// Records the server identity GC handle and app-domain id for this proxy.
internal void SetSrvInfo(GCHandle srvIdentity, int domainID)
{
    _srvIdentity = srvIdentity;
    _domainID = domainID;
}

// Gets the stub data
[System.Security.SecurityCritical]  // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public static extern Object GetStubData(RealProxy rp);

// Gets the default stub implemented by us
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private static extern IntPtr GetDefaultStub();

// Accessor to obtain the type being proxied
[System.Security.SecurityCritical]  // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern Type GetProxiedType();

// Method to which transparent proxy delegates when
// it gets called
public abstract IMessage Invoke(IMessage msg);

// Creates an ObjRef for the proxied object; requires that an identity has
// already been associated with this proxy, otherwise throws RemotingException.
[System.Security.SecurityCritical]  // auto-generated
public virtual ObjRef CreateObjRef(Type requestedType)
{
    if(_identity == null)
    {
        throw new RemotingException(Environment.GetResourceString(
            "Remoting_NoIdentityEntry"));
    }
    return new ObjRef((MarshalByRefObject)GetTransparentProxy(), requestedType);
}

// Serializes the transparent proxy by delegating to RemotingServices.
[System.Security.SecurityCritical]  // auto-generated
public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
{
    Object obj = GetTransparentProxy();
    RemotingServices.GetObjectData(obj, info, context);
}

// Rethrows a remoted exception (preserving the server-side stack trace), or
// propagates out/byref parameters and the return value back to the caller.
[System.Security.SecurityCritical]  // auto-generated
private static void HandleReturnMessage(IMessage reqMsg, IMessage retMsg)
{
    IMethodReturnMessage mrm = retMsg as IMethodReturnMessage;
    if (retMsg==null || (mrm == null))
    {
        throw new RemotingException(Environment.GetResourceString(
            "Remoting_Message_BadType"));
    }

    Exception e = mrm.Exception;
    if (e != null)
    {
        // PrepForRemoting keeps the original (server-side) stack trace intact.
        throw e.PrepForRemoting();
    }
    else
    {
        // StackBasedReturnMessage has already written results to the stack.
        if (!(retMsg is StackBasedReturnMessage))
        {
            if (reqMsg is Message)
            {
                PropagateOutParameters(reqMsg, mrm.Args, mrm.ReturnValue);
            }
            else if (reqMsg is ConstructorCallMessage)
            {
                // NOTE: We do not extract the return value as
                // the process of returning a value from a ConstructorCallMessage
                // results in marshaling.
                PropagateOutParameters(reqMsg, mrm.Args, null);
            }
        }
    }
}

// Propagate the out parameters to the stack. This should be called once
// the call has finished. The input message parameter should be the same
// as the one which was passed to the first sink to start the call.
[System.Security.SecurityCritical]  // auto-generated
internal static void PropagateOutParameters(IMessage msg, Object[] outArgs, Object returnValue)
{
    // Check for method call
    Message m = msg as Message;

    // Check for constructor call
    if(null == m)
    {
        ConstructorCallMessage ccm = msg as ConstructorCallMessage;
        if(null != ccm)
        {
            m = ccm.GetMessage();
        }
    }

    if(null == m)
    {
        throw new ArgumentException(
            Environment.GetResourceString("Remoting_Proxy_ExpectedOriginalMessage"));
    }

    MethodBase mb = m.GetMethodBase();
    RemotingMethodCachedData cache = InternalRemotingServices.GetReflectionCachedData(mb);

    if (outArgs != null && outArgs.Length > 0)
    {
        Object[] args = m.Args; // original arguments

        // If a byref parameter is marked only with [In], we need to copy the
        // original value from the request message into outargs, so that the
        // value won't be bashed by CMessage::PropagateOutParameters below.
        ParameterInfo[] parameters = cache.Parameters;
        foreach (int index in cache.MarshalRequestArgMap)
        {
            ParameterInfo param = parameters[index];
            if (param.IsIn && param.ParameterType.IsByRef)
            {
                if (!param.IsOut)
                    outArgs[index] = args[index];
            }
        }

        // copy non-byref arrays back into the same instance
        if (cache.NonRefOutArgMap.Length > 0)
        {
            foreach (int index in cache.NonRefOutArgMap)
            {
                Array arg = args[index] as Array;
                if (arg != null)
                {
                    Array.Copy((Array)outArgs[index], arg, arg.Length);
                }
            }
        }

        // validate by-ref args (This must be done last)
        int[] outRefArgMap = cache.OutRefArgMap;
        if (outRefArgMap.Length > 0)
        {
            foreach (int index in outRefArgMap)
            {
                ValidateReturnArg(outArgs[index], parameters[index].ParameterType);
            }
        }
    }

    // validate return value
    // (We don't validate Message.BeginAsync because the return value
    //  is always an IAsyncResult and the method base is the one that
    //  represents the underlying synchronous method).
    int callType = m.GetCallType();
    if ((callType & Message.CallMask ) != Message.BeginAsync)
    {
        Type returnType = cache.ReturnType;
        if (returnType != null)
        {
            ValidateReturnArg(returnValue, returnType);
        }
    }

    m.PropagateOutParameters(outArgs, returnValue);
} // PropagateOutParameters

// Verifies that a returned (or byref) value is assignable to the declared
// parameter/return type. Null is rejected for value types unless the type is
// Nullable<T>; for reference types null is always accepted.
private static void ValidateReturnArg(Object arg, Type paramType)
{
    if (paramType.IsByRef)
        paramType = paramType.GetElementType();

    if (paramType.IsValueType)
    {
        if (arg == null)
        {
            if (!(paramType.IsGenericType && paramType.GetGenericTypeDefinition() == typeof(Nullable<>)))
                throw new RemotingException(
                    Environment.GetResourceString("Remoting_Proxy_ReturnValueTypeCannotBeNull"));
        }
        else if (!paramType.IsInstanceOfType(arg))
        {
            throw new InvalidCastException(
                Environment.GetResourceString("Remoting_Proxy_BadReturnType"));
        }
    }
    else
    {
        if (arg != null)
        {
            if (!paramType.IsInstanceOfType(arg))
            {
                throw new InvalidCastException(
                    Environment.GetResourceString("Remoting_Proxy_BadReturnType"));
            }
        }
    }
} // ValidateReturnArg

// This is shared code path that executes when an EndInvoke is called
//
either on a delegate on a proxy
// OR a regular delegate (called asynchronously).
[System.Security.SecurityCritical]  // auto-generated
// Blocks until the async call completes, guards against EndInvoke being
// called twice, then either propagates the reply onto the caller's stack
// (plain delegate case) or returns the reply message (proxy case,
// bProxyCase == true). Also merges the reply's logical call context back
// into the calling thread.
internal static IMessage EndInvokeHelper(Message reqMsg, bool bProxyCase)
{
    AsyncResult ar = reqMsg.GetAsyncResult() as AsyncResult;

    IMessage retMsg = null;  // used for proxy case only!

    if (ar == null)
    {
        throw new RemotingException(
            Environment.GetResourceString(
                "Remoting_Message_BadAsyncResult"));
    }
    if (ar.AsyncDelegate != reqMsg.GetThisPtr())
    {
        throw new InvalidOperationException(Environment.GetResourceString(
            "InvalidOperation_MismatchedAsyncResult"));
    }
    if (!ar.IsCompleted)
    {
        // Note: using ThreadPoolAware to detect if this is a
        // ThreadAffinity or Synchronization context.
        ar.AsyncWaitHandle.WaitOne(
            Timeout.Infinite,
            Thread.CurrentContext.IsThreadPoolAware);
    }

    lock (ar)
    {
        if (ar.EndInvokeCalled)
            throw new InvalidOperationException(
                Environment.GetResourceString(
                    "InvalidOperation_EndInvokeCalledMultiple"));

        ar.EndInvokeCalled = true;

        IMethodReturnMessage mrm =
            (IMethodReturnMessage) ar.GetReplyMessage();

        Contract.Assert(
            mrm != null,
            "Reply sink should ensure we have a reply message before signalling");

        // For the proxy case this is handled by RealProxy
        if (!bProxyCase)
        {
            Exception e = mrm.Exception;

            if (e != null)
            {
                // Rethrow while preserving the remote stack trace.
                throw e.PrepForRemoting();
            }
            else
            {
                reqMsg.PropagateOutParameters(
                    mrm.Args,
                    mrm.ReturnValue);
            }
        }
        else
        {
            retMsg = mrm;
        }

        // Merge the call context back into the thread that
        // called EndInvoke
        Thread.CurrentThread.GetMutableExecutionContext().LogicalCallContext.Merge(
            mrm.LogicalCallContext);
    }

    // Will be non-null only for proxy case!
    return retMsg;
} // EndInvokeHelper

// interop methods
[System.Security.SecurityCritical]  // auto-generated
public virtual IntPtr GetCOMIUnknown(bool fIsMarshalled)
{
    // sub-class should override
    return MarshalByRefObject.GetComIUnknown((MarshalByRefObject)GetTransparentProxy());
}

public virtual void SetCOMIUnknown(IntPtr i)
{
    // don't care
}

public virtual IntPtr SupportsInterface(ref Guid iid)
{
    return IntPtr.Zero;
}

// Method used for traversing back to the TP
public virtual Object GetTransparentProxy()
{
    return _tp;
}

// The real (unwrapped) server object, if one has been attached.
internal MarshalByRefObject UnwrappedServerObject
{
    get { return (MarshalByRefObject) _serverObject; }
}

// Remoting identity associated with this proxy.
internal virtual Identity IdentityObject
{
    [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
    get { return (Identity) _identity; }
    set { _identity = value; }
}

// Private method invoked by the transparent proxy
// Entry point for every call that comes through the transparent proxy:
// builds the request message (method call or constructor call), runs it
// through Invoke(), handles call-context flow and async bookkeeping, and
// finally propagates out-parameters/exceptions back onto the caller's stack.
[System.Security.SecurityCritical]  // auto-generated
private void PrivateInvoke(ref MessageData msgData, int type)
{
    IMessage reqMsg = null;
    CallType callType = (CallType)type;
    IMessage retMsg = null;
    int msgFlags = -1;

    // Used only for Construction case
    RemotingProxy rp = null;

    // Create a message object based on the type of call
    if(CallType.MethodCall == callType)
    {
        Message msg = new Message();
        msg.InitFields(msgData);
        reqMsg = msg;
        msgFlags = msg.GetCallType();
    }
    else if (CallType.ConstructorCall == (CallType)callType)
    {
        // We use msgFlags to handle CallContext around
        // the virtual call to Invoke()
        msgFlags = Message.Sync;

        rp = this as RemotingProxy;
        ConstructorCallMessage ctorMsg = null;
        bool bIsWellKnown = false;

        if(!IsRemotingProxy())
        {
            // Create a new constructor call message
            ctorMsg = new ConstructorCallMessage(null, null, null, (RuntimeType)GetProxiedType());
        }
        else
        {
            // Extract the constructor message set in the first step of activation.
            ctorMsg = rp.ConstructorMessage;
            // If the proxy is a wellknown client proxy, we don't
            // need to run the c'tor.
            Identity id = rp.IdentityObject;
            if (id != null)
                bIsWellKnown = id.IsWellKnown();
        }

        if ((null == ctorMsg) || bIsWellKnown)
        {
            // This is also used to short-circuit the activation path
            // when we have a well known proxy that has already been
            // initialized (there's a race condition if we don't do this).
            //
            // This is a special case, where we have a remoting proxy
            // but the constructormessage hasn't been setup.
            // so let us just bail out..
            // this is currently used by ServicedComponent's for cross appdomain
            // pooling (contact reference scrubbed from the original source).
            ctorMsg = new ConstructorCallMessage(null, null, null, (RuntimeType)GetProxiedType());

            // Set the constructor frame info in the CCM
            ctorMsg.SetFrame(msgData);
            reqMsg = ctorMsg;

            // If this was the default ctor, check that default .ctor was called.
            if (bIsWellKnown)
            {
                Contract.Assert(rp!=null, "RemotingProxy expected here!");
                // Clear any cached ctorMsg on the RemotingProxy
                rp.ConstructorMessage = null;

                // We did execute a Connect. Throw if the client
                // code is also trying to use a non-default constructor at
                // the same time.
                if (ctorMsg.ArgCount != 0)
                {
                    throw new RemotingException(
                        Environment.GetResourceString(
                            "Remoting_Activation_WellKnownCTOR"));
                }
            }

            // Create a constructor return message
            retMsg = new ConstructorReturnMessage((MarshalByRefObject)GetTransparentProxy(), null, 0, null, ctorMsg);
        }
        else
        {
            // Set the constructor frame info in the CCM
            ctorMsg.SetFrame(msgData);
            reqMsg = ctorMsg;
        }
    }
    else
    {
        Contract.Assert(false, "Unknown call type");
    }

    // Make sure that outgoing remote calls are counted.
    ChannelServices.IncrementRemoteCalls();

    // For non-remoting proxies, EndAsync should not call Invoke()
    // because the proxy cannot support Async and the call has already
    // finished executing in BeginAsync
    if (!IsRemotingProxy() && ((msgFlags&Message.EndAsync)==Message.EndAsync))
    {
        Message msg = reqMsg as Message;
        retMsg = EndInvokeHelper(msg, true);
        Contract.Assert(null != retMsg, "null != retMsg");
    }

    // Invoke
    Contract.Assert(null != reqMsg, "null != reqMsg");
    if (null == retMsg)
    {
        // NOTE: there are cases where we setup a return message
        // and we don't want the activation call to go through
        // refer to the note above for ServicedComponents and Cross Appdomain
        // pooling
        LogicalCallContext cctx = null;
        Thread currentThread = Thread.CurrentThread;

        // Pick up or clone the call context from the thread
        // and install it in the reqMsg as appropriate
        cctx = currentThread.GetMutableExecutionContext().LogicalCallContext;
        SetCallContextInMessage(reqMsg, msgFlags, cctx);

        // Add the outgoing "Header"'s to the message.
        cctx.PropagateOutgoingHeadersToMessage(reqMsg);

        retMsg = Invoke(reqMsg);

        // Get the call context returned and set it on the thread
        ReturnCallContextToThread(currentThread, retMsg, msgFlags, cctx);

        // Pull response "Header"'s out of the message
        Thread.CurrentThread.GetMutableExecutionContext().LogicalCallContext.PropagateIncomingHeadersToCallContext(retMsg);
    }

    if (!IsRemotingProxy() && ((msgFlags&Message.BeginAsync) == Message.BeginAsync))
    {
        // This was a begin-async on a non-Remoting Proxy. For V-1 they
        // cannot support Async and end up doing a sync call. We need
        // to fill up here to make the call look like async to
        // the caller.

        // Create the async result to return
        Message msg = reqMsg as Message;
        AsyncResult ar = new AsyncResult(msg);

        // Tell the async result that the call has actually completed
        // so it can hold on to the return message.
        ar.SyncProcessMessage(retMsg);

        // create a returnMessage to propagate just the asyncResult back
        // to the caller's stack.
        retMsg = new ReturnMessage(ar, null, 0, null/*cctx*/, msg);
    }

    // Propagate out parameters
    HandleReturnMessage(reqMsg, retMsg);

    // For constructor calls do some extra bookkeeping
    if(CallType.ConstructorCall == callType)
    {
        // NOTE: It is the responsiblity of the callee to propagate
        // the out parameters

        // Everything went well, we are ready to return
        // a proxy to the caller

        // Extract the return value
        MarshalByRefObject retObj = null;
        IConstructionReturnMessage ctorRetMsg = retMsg as IConstructionReturnMessage;
        if(null == ctorRetMsg)
        {
            throw new RemotingException(
                Environment.GetResourceString("Remoting_Proxy_BadReturnTypeForActivation"));
        }
        ConstructorReturnMessage crm = ctorRetMsg as ConstructorReturnMessage;
        if (null != crm)
        {
            // If return message is of type ConstructorReturnMessage
            // this is an in-appDomain activation. So no unmarshaling
            // needed.
            retObj = (MarshalByRefObject)crm.GetObject();
            if (retObj == null)
            {
                throw new RemotingException(
                    Environment.GetResourceString("Remoting_Activation_NullReturnValue"));
            }
        }
        else
        {
            // Fetch the objRef out of the returned message and unmarshal it
            retObj = (MarshalByRefObject)RemotingServices.InternalUnmarshal(
                (ObjRef)ctorRetMsg.ReturnValue,
                GetTransparentProxy(),
                true /*fRefine*/);

            if (retObj == null)
            {
                throw new RemotingException(
                    Environment.GetResourceString("Remoting_Activation_NullFromInternalUnmarshal"));
            }
        }

        if (retObj != (MarshalByRefObject)GetTransparentProxy())
        {
            throw new RemotingException(
                Environment.GetResourceString(
                    "Remoting_Activation_InconsistentState"));
        }

        if (IsRemotingProxy())
        {
            // Clear any cached ctorMsg on the RemotingProxy
            rp.ConstructorMessage = null;
        }
    }
}

// Installs the thread's logical call context into a synchronous request
// message before it is dispatched through Invoke(). Only the Sync flag is
// handled; async flavors flow their context elsewhere.
void SetCallContextInMessage(
    IMessage reqMsg, int msgFlags, LogicalCallContext cctx)
{
    Contract.Assert(msgFlags != -1, "Unexpected msgFlags?");
    Message msg = reqMsg as Message;
    switch (msgFlags)
    {
        case Message.Sync:
            if (msg != null)
            {
                msg.SetLogicalCallContext(cctx);
            }
            else
            {
                ((ConstructorCallMessage)reqMsg).SetLogicalCallContext(cctx);
            }
            break;
    }
}

// After a synchronous call returns, installs the call context carried in the
// reply message back onto the calling thread, preserving the original
// principal when the call went remote (the remote side must not override it).
[System.Security.SecurityCritical]  // auto-generated
void ReturnCallContextToThread(
    Thread currentThread, IMessage retMsg,
    int msgFlags, LogicalCallContext currCtx)
{
    if (msgFlags == Message.Sync)
    {
        if (retMsg == null)
            return;

        IMethodReturnMessage mrm = retMsg as IMethodReturnMessage;
        if (mrm == null)
            return;

        LogicalCallContext retCtx = mrm.LogicalCallContext;
        if (retCtx == null)
        {
            // No context came back — restore the one we started with.
            currentThread.GetMutableExecutionContext().LogicalCallContext = currCtx;
            return;
        }

        if (!(mrm is StackBasedReturnMessage))
        {
            ExecutionContext ec = currentThread.GetMutableExecutionContext();
            LogicalCallContext oldCtx = ec.LogicalCallContext;
            ec.LogicalCallContext = retCtx;

            if ((Object)oldCtx != (Object)retCtx)
            {
                // If the new call context does not match the old call context,
                // we must have gone remote. We need to keep the preserve
                // the principal from the original call context.
                IPrincipal principal = oldCtx.Principal;
                if (principal != null)
                    retCtx.Principal = principal;
            }
        }
        // for other types (async/one-way etc) there is nothing to be
        // done as we have just finished processing BeginInvoke or EndInvoke
    }
}

// For a context-bound server with a RemotingProxy, stash the server
// context's internal ID in the stub data so cross-context calls can be
// intercepted correctly.
[System.Security.SecurityCritical]  // auto-generated
internal virtual void Wrap()
{
    ServerIdentity serverID = _identity as ServerIdentity;
    if((null != serverID) && (this is RemotingProxy))
    {
        Contract.Assert(null != serverID.ServerContext, "null != serverID.ServerContext");
        SetStubData(this, serverID.ServerContext.InternalContextID);
    }
}

protected RealProxy()
{
}
}
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Drawing;
using System.Drawing.Imaging;
using System.Reflection;
using System.IO;
using System.Web;
using log4net;
using Nini.Config;
using Mono.Addins;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using OpenMetaverse.Imaging;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Framework.Servers;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using Caps = OpenSim.Framework.Capabilities.Caps;
using OpenSim.Capabilities.Handlers;

namespace OpenSim.Region.ClientStack.Linden
{
    /// <summary>
    /// Region module that exposes the simulator console to estate
    /// managers/owners and gods through the "SimConsoleAsync" capability.
    /// Console output is pushed back to the viewer via the event queue.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "RegionConsoleModule")]
    public class RegionConsoleModule : INonSharedRegionModule, IRegionConsole
    {
        // private static readonly ILog m_log =
        //     LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private Scene m_scene;
        private IEventQueue m_eventQueue;
        // Commands registered for use over the viewer console (separate from
        // the main simulator command set).
        private Commands m_commands = new Commands();
        public ICommands Commands { get { return m_commands; } }

        public event ConsoleMessage OnConsoleMessage;

        // Registers the built-in "help" command for the viewer console.
        public void Initialise(IConfigSource source)
        {
            m_commands.AddCommand( "Help", false, "help", "help [<item>]", "Display help on a particular command or on a list of commands in a category", Help);
        }

        public void AddRegion(Scene s)
        {
            m_scene = s;
            m_scene.RegisterModuleInterface<IRegionConsole>(this);
        }

        public void RemoveRegion(Scene s)
        {
            // Undoes the subscription made in RegionLoaded.
            m_scene.EventManager.OnRegisterCaps -= RegisterCaps;
            m_scene = null;
        }

        public void RegionLoaded(Scene s)
        {
            m_scene.EventManager.OnRegisterCaps += RegisterCaps;
            m_eventQueue = m_scene.RequestModuleInterface<IEventQueue>();
        }

        public void PostInitialise()
        {
        }

        public void Close() { }

        public string Name { get { return "RegionConsoleModule"; } }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        // Grants the SimConsoleAsync capability only to estate managers/owners
        // and gods; everyone else gets no console cap at all.
        public void RegisterCaps(UUID agentID, Caps caps)
        {
            if (!m_scene.RegionInfo.EstateSettings.IsEstateManagerOrOwner(agentID) && !m_scene.Permissions.IsGod(agentID))
                return;

            UUID capID = UUID.Random();

            // m_log.DebugFormat("[REGION CONSOLE]: /CAPS/{0} in region {1}", capID, m_scene.RegionInfo.RegionName);
            caps.RegisterHandler(
                "SimConsoleAsync",
                new ConsoleHandler("/CAPS/" + capID + "/", "SimConsoleAsync", agentID, this, m_scene));
        }

        // Sends one line of console output to the given agent via the event
        // queue and raises OnConsoleMessage for local listeners.
        public void SendConsoleOutput(UUID agentID, string message)
        {
            OSD osd = OSD.FromString(message);

            m_eventQueue.Enqueue(EventQueueHelper.BuildEvent("SimConsoleResponse", osd), agentID);

            ConsoleMessage handlerConsoleMessage = OnConsoleMessage;

            if (handlerConsoleMessage != null)
                handlerConsoleMessage( agentID, message);
        }

        // Attempts to resolve 'command' against the module's command set.
        // The invoker's UUID is appended as an extra trailing argument so
        // handlers (e.g. Help) can identify who issued the command.
        // Returns false when no registered command matched.
        public bool RunCommand(string command, UUID invokerID)
        {
            string[] parts = Parser.Parse(command);
            Array.Resize(ref parts, parts.Length + 1);
            parts[parts.Length - 1] = invokerID.ToString();

            if (m_commands.Resolve(parts).Length == 0)
                return false;

            return true;
        }

        // Handler for the "help" command; the last element of 'cmd' is the
        // invoker UUID appended by RunCommand and is stripped before lookup.
        private void Help(string module, string[] cmd)
        {
            UUID agentID = new UUID(cmd[cmd.Length - 1]);
            Array.Resize(ref cmd, cmd.Length - 1);

            List<string> help = Commands.GetHelp(cmd);

            string reply = String.Empty;

            foreach (string s in help)
            {
                reply += s + "\n";
            }

            SendConsoleOutput(agentID, reply);
        }

        public void AddCommand(string module, bool shared, string command, string help, string longhelp, CommandDelegate fn)
        {
            m_commands.AddCommand(module, shared, command, help, longhelp, fn);
        }
    }

    /// <summary>
    /// Per-agent CAPS handler for SimConsoleAsync POSTs. Understands the
    /// pseudo-commands "set console on"/"set console off" (gods only for
    /// "on") and otherwise dispatches to the region console command set or,
    /// when the live console is on, to the main simulator console.
    /// </summary>
    public class ConsoleHandler : BaseStreamHandler
    {
        // private static readonly ILog m_log =
        //     LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private RegionConsoleModule m_consoleModule;
        private UUID m_agentID;
        private bool m_isGod;
        private Scene m_scene;
        // True while this agent has the live main-console output mirrored
        // to them (i.e. ConsoleSender is subscribed to MainConsole.OnOutput).
        private bool m_consoleIsOn = false;

        public ConsoleHandler(string path, string name, UUID agentID, RegionConsoleModule module, Scene scene)
                :base("POST", path, name, agentID.ToString())
        {
            m_agentID = agentID;
            m_consoleModule = module;
            m_scene = scene;
            m_isGod = m_scene.Permissions.IsGod(agentID);
        }

        protected override byte[] ProcessRequest(string path, Stream request, IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
        {
            StreamReader reader = new StreamReader(request);
            string message = reader.ReadToEnd();

            OSD osd = OSDParser.DeserializeLLSDXml(message);

            string cmd = osd.AsString();
            if (cmd == "set console on")
            {
                if (m_isGod)
                {
                    MainConsole.Instance.OnOutput += ConsoleSender;
                    m_consoleIsOn = true;
                    m_consoleModule.SendConsoleOutput(m_agentID, "Console is now on");
                }
                return new byte[0];
            }
            else if (cmd == "set console off")
            {
                // Unsubscribing is a safe no-op if ConsoleSender was never added.
                MainConsole.Instance.OnOutput -= ConsoleSender;
                m_consoleIsOn = false;
                m_consoleModule.SendConsoleOutput(m_agentID, "Console is now off");
                return new byte[0];
            }

            // With the live console off, try the module's own command set first.
            if (m_consoleIsOn == false && m_consoleModule.RunCommand(osd.AsString().Trim(), m_agentID))
                return new byte[0];

            if (m_isGod && m_consoleIsOn)
            {
                MainConsole.Instance.RunCommand(osd.AsString().Trim());
            }
            else
            {
                m_consoleModule.SendConsoleOutput(m_agentID, "Unknown command");
            }

            return new byte[0];
        }

        private void ConsoleSender(string text)
        {
            m_consoleModule.SendConsoleOutput(m_agentID, text);
        }

        // NOTE(review): this handler is never subscribed to any event in the
        // visible code (nothing wires it to Scene.EventManager.OnMakeChildAgent),
        // so if the agent leaves while the console is on, ConsoleSender stays
        // subscribed to MainConsole.Instance.OnOutput — a likely event-handler
        // leak. Confirm whether subscription happens elsewhere or is missing.
        private void OnMakeChildAgent(ScenePresence presence)
        {
            if (presence.UUID == m_agentID)
            {
                MainConsole.Instance.OnOutput -= ConsoleSender;
                m_consoleIsOn = false;
            }
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections.Generic; using System.Collections.Specialized; using System.IO; using System.Net; using System.Reflection; using log4net; using Mono.Addins; using Nini.Config; using OpenSim.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Services.Interfaces; using OpenMetaverse; using OpenMetaverse.StructuredData; using GridRegion = OpenSim.Services.Interfaces.GridRegion; namespace OpenSim.Services.Connectors.SimianGrid { /// <summary> /// Connects region registration and neighbor lookups to the SimianGrid /// backend /// </summary> public class SimianGridServiceConnector : IGridService { private static readonly ILog m_log = LogManager.GetLogger( MethodBase.GetCurrentMethod().DeclaringType); private string m_ServerURI = String.Empty; // private bool m_Enabled = false; public SimianGridServiceConnector() { } public SimianGridServiceConnector(string serverURI) { m_ServerURI = serverURI.TrimEnd('/'); } public SimianGridServiceConnector(IConfigSource source) { CommonInit(source); } public void Initialise(IConfigSource source) { CommonInit(source); } private void CommonInit(IConfigSource source) { IConfig gridConfig = source.Configs["GridService"]; if (gridConfig == null) { m_log.Error("[SIMIAN GRID CONNECTOR]: GridService missing from OpenSim.ini"); throw new Exception("Grid connector init error"); } string serviceUrl = gridConfig.GetString("GridServerURI"); if (String.IsNullOrEmpty(serviceUrl)) { m_log.Error("[SIMIAN GRID CONNECTOR]: No Server URI named in section GridService"); throw new Exception("Grid connector init error"); } if (!serviceUrl.EndsWith("/") && !serviceUrl.EndsWith("=")) serviceUrl = serviceUrl + '/'; m_ServerURI = serviceUrl; // m_Enabled = true; } #region IGridService public string RegisterRegion(UUID scopeID, GridRegion regionInfo) { Vector3d minPosition = new Vector3d(regionInfo.RegionLocX, regionInfo.RegionLocY, 0.0); Vector3d maxPosition = minPosition + new 
Vector3d(regionInfo.RegionSizeX, regionInfo.RegionSizeY, Constants.RegionHeight); OSDMap extraData = new OSDMap { { "ServerURI", OSD.FromString(regionInfo.ServerURI) }, { "InternalAddress", OSD.FromString(regionInfo.InternalEndPoint.Address.ToString()) }, { "InternalPort", OSD.FromInteger(regionInfo.InternalEndPoint.Port) }, { "ExternalAddress", OSD.FromString(regionInfo.ExternalEndPoint.Address.ToString()) }, { "ExternalPort", OSD.FromInteger(regionInfo.ExternalEndPoint.Port) }, { "MapTexture", OSD.FromUUID(regionInfo.TerrainImage) }, { "Access", OSD.FromInteger(regionInfo.Access) }, { "RegionSecret", OSD.FromString(regionInfo.RegionSecret) }, { "EstateOwner", OSD.FromUUID(regionInfo.EstateOwner) }, { "Token", OSD.FromString(regionInfo.Token) } }; NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "AddScene" }, { "SceneID", regionInfo.RegionID.ToString() }, { "Name", regionInfo.RegionName }, { "MinPosition", minPosition.ToString() }, { "MaxPosition", maxPosition.ToString() }, { "Address", regionInfo.ServerURI }, { "Enabled", "1" }, { "ExtraData", OSDParser.SerializeJsonString(extraData) } }; OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) return String.Empty; else return "Region registration for " + regionInfo.RegionName + " failed: " + response["Message"].AsString(); } public bool DeregisterRegion(UUID regionID) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "AddScene" }, { "SceneID", regionID.ToString() }, { "Enabled", "0" } }; OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); bool success = response["Success"].AsBoolean(); if (!success) m_log.Warn("[SIMIAN GRID CONNECTOR]: Region deregistration for " + regionID + " failed: " + response["Message"].AsString()); return success; } public List<GridRegion> GetNeighbours(UUID scopeID, UUID regionID) { GridRegion region = GetRegionByUUID(scopeID, regionID); int NEIGHBOR_RADIUS = 
Math.Max(region.RegionSizeX, region.RegionSizeY) / 2; if (region != null) { List<GridRegion> regions = GetRegionRange(scopeID, region.RegionLocX - NEIGHBOR_RADIUS, region.RegionLocX + region.RegionSizeX + NEIGHBOR_RADIUS, region.RegionLocY - NEIGHBOR_RADIUS, region.RegionLocY + region.RegionSizeY + NEIGHBOR_RADIUS); for (int i = 0; i < regions.Count; i++) { if (regions[i].RegionID == regionID) { regions.RemoveAt(i); break; } } // m_log.Debug("[SIMIAN GRID CONNECTOR]: Found " + regions.Count + " neighbors for region " + regionID); return regions; } return new List<GridRegion>(0); } public GridRegion GetRegionByUUID(UUID scopeID, UUID regionID) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScene" }, { "SceneID", regionID.ToString() } }; // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] request region with uuid {0}",regionID.ToString()); OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] uuid request successful {0}",response["Name"].AsString()); return ResponseToGridRegion(response); } else { m_log.Warn("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region " + regionID); return null; } } public GridRegion GetRegionByPosition(UUID scopeID, int x, int y) { // Go one meter in from the requested x/y coords to avoid requesting a position // that falls on the border of two sims Vector3d position = new Vector3d(x + 1, y + 1, 0.0); NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScene" }, { "Position", position.ToString() }, { "Enabled", "1" } }; // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] request grid at {0}",position.ToString()); OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] position request successful {0}",response["Name"].AsString()); return ResponseToGridRegion(response); } 
else { // m_log.InfoFormat("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region at {0},{1}", // Util.WorldToRegionLoc(x), Util.WorldToRegionLoc(y)); return null; } } public GridRegion GetRegionByName(UUID scopeID, string regionName) { List<GridRegion> regions = GetRegionsByName(scopeID, regionName, 1); m_log.Debug("[SIMIAN GRID CONNECTOR]: Got " + regions.Count + " matches for region name " + regionName); if (regions.Count > 0) return regions[0]; return null; } public List<GridRegion> GetRegionsByName(UUID scopeID, string name, int maxNumber) { List<GridRegion> foundRegions = new List<GridRegion>(); NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScenes" }, { "NameQuery", name }, { "Enabled", "1" } }; if (maxNumber > 0) requestArgs["MaxNumber"] = maxNumber.ToString(); // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] request regions with name {0}",name); OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] found regions with name {0}",name); OSDArray array = response["Scenes"] as OSDArray; if (array != null) { for (int i = 0; i < array.Count; i++) { GridRegion region = ResponseToGridRegion(array[i] as OSDMap); if (region != null) foundRegions.Add(region); } } } return foundRegions; } public List<GridRegion> GetRegionRange(UUID scopeID, int xmin, int xmax, int ymin, int ymax) { List<GridRegion> foundRegions = new List<GridRegion>(); Vector3d minPosition = new Vector3d(xmin, ymin, 0.0); Vector3d maxPosition = new Vector3d(xmax, ymax, Constants.RegionHeight); NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScenes" }, { "MinPosition", minPosition.ToString() }, { "MaxPosition", maxPosition.ToString() }, { "Enabled", "1" } }; //m_log.DebugFormat("[SIMIAN GRID CONNECTOR] request regions by range {0} to {1}",minPosition.ToString(),maxPosition.ToString()); OSDMap response = 
SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { OSDArray array = response["Scenes"] as OSDArray; if (array != null) { for (int i = 0; i < array.Count; i++) { GridRegion region = ResponseToGridRegion(array[i] as OSDMap); if (region != null) foundRegions.Add(region); } } } return foundRegions; } public List<GridRegion> GetDefaultRegions(UUID scopeID) { // TODO: Allow specifying the default grid location const int DEFAULT_X = 1000 * 256; const int DEFAULT_Y = 1000 * 256; GridRegion defRegion = GetNearestRegion(new Vector3d(DEFAULT_X, DEFAULT_Y, 0.0), true); if (defRegion != null) return new List<GridRegion>(1) { defRegion }; else return new List<GridRegion>(0); } public List<GridRegion> GetDefaultHypergridRegions(UUID scopeID) { // TODO: Allow specifying the default grid location return GetDefaultRegions(scopeID); } public List<GridRegion> GetFallbackRegions(UUID scopeID, int x, int y) { GridRegion defRegion = GetNearestRegion(new Vector3d(x, y, 0.0), true); if (defRegion != null) return new List<GridRegion>(1) { defRegion }; else return new List<GridRegion>(0); } public List<GridRegion> GetHyperlinks(UUID scopeID) { List<GridRegion> foundRegions = new List<GridRegion>(); NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScenes" }, { "HyperGrid", "true" }, { "Enabled", "1" } }; OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { // m_log.DebugFormat("[SIMIAN GRID CONNECTOR] found regions with name {0}",name); OSDArray array = response["Scenes"] as OSDArray; if (array != null) { for (int i = 0; i < array.Count; i++) { GridRegion region = ResponseToGridRegion(array[i] as OSDMap); if (region != null) foundRegions.Add(region); } } } return foundRegions; } public int GetRegionFlags(UUID scopeID, UUID regionID) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScene" }, { "SceneID", regionID.ToString() } }; 
m_log.DebugFormat("[SIMIAN GRID CONNECTOR] request region flags for {0}",regionID.ToString()); OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { OSDMap extraData = response["ExtraData"] as OSDMap; int enabled = response["Enabled"].AsBoolean() ? (int)OpenSim.Framework.RegionFlags.RegionOnline : 0; int hypergrid = extraData["HyperGrid"].AsBoolean() ? (int)OpenSim.Framework.RegionFlags.Hyperlink : 0; int flags = enabled | hypergrid; m_log.DebugFormat("[SGGC] enabled - {0} hg - {1} flags - {2}", enabled, hypergrid, flags); return flags; } else { m_log.Warn("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region " + regionID + " during region flags check"); return -1; } } public Dictionary<string, object> GetExtraFeatures() { /// See SimulatorFeaturesModule - Need to get map, search and destination guide Dictionary<string, object> extraFeatures = new Dictionary<string, object>(); return extraFeatures; } #endregion IGridService private GridRegion GetNearestRegion(Vector3d position, bool onlyEnabled) { NameValueCollection requestArgs = new NameValueCollection { { "RequestMethod", "GetScene" }, { "Position", position.ToString() }, { "FindClosest", "1" } }; if (onlyEnabled) requestArgs["Enabled"] = "1"; OSDMap response = SimianGrid.PostToService(m_ServerURI, requestArgs); if (response["Success"].AsBoolean()) { return ResponseToGridRegion(response); } else { m_log.Warn("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region at " + position); return null; } } private GridRegion ResponseToGridRegion(OSDMap response) { if (response == null) return null; OSDMap extraData = response["ExtraData"] as OSDMap; if (extraData == null) return null; GridRegion region = new GridRegion(); region.RegionID = response["SceneID"].AsUUID(); region.RegionName = response["Name"].AsString(); Vector3d minPosition = response["MinPosition"].AsVector3d(); Vector3d maxPosition = 
response["MaxPosition"].AsVector3d(); region.RegionLocX = (int)minPosition.X; region.RegionLocY = (int)minPosition.Y; region.RegionSizeX = (int)maxPosition.X - (int)minPosition.X; region.RegionSizeY = (int)maxPosition.Y - (int)minPosition.Y; if ( ! extraData["HyperGrid"] ) { Uri httpAddress = response["Address"].AsUri(); region.ExternalHostName = httpAddress.Host; region.HttpPort = (uint)httpAddress.Port; IPAddress internalAddress; IPAddress.TryParse(extraData["InternalAddress"].AsString(), out internalAddress); if (internalAddress == null) internalAddress = IPAddress.Any; region.InternalEndPoint = new IPEndPoint(internalAddress, extraData["InternalPort"].AsInteger()); region.TerrainImage = extraData["MapTexture"].AsUUID(); region.Access = (byte)extraData["Access"].AsInteger(); region.RegionSecret = extraData["RegionSecret"].AsString(); region.EstateOwner = extraData["EstateOwner"].AsUUID(); region.Token = extraData["Token"].AsString(); region.ServerURI = extraData["ServerURI"].AsString(); } else { region.ServerURI = response["Address"]; } return region; } } }
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace MysteryRiddles.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements created for arrays, collections, dictionaries and queryables.
        internal const int DefaultCollectionSize = 2;
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type, or null when the type cannot be instantiated.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // Dispatches to the appropriate generator. The order of the checks matters:
        // simple types first, then arrays, then generic types (which handles Nullable,
        // KeyValuePair, Tuple and generic collections/dictionaries), then the
        // non-generic collection interfaces, and finally arbitrary public POCOs.
        // createdObjectReferences memoizes complex objects to break circular references.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Handles any generic type: Nullable<T>, KeyValuePair<,>, Tuple<...>,
        // generic collections/dictionaries, and finally generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Interface requested: materialize as List<T>.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    // Interface requested: materialize as Dictionary<TKey, TValue>.
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Generates a Tuple<...> by generating each generic argument; returns null
        // only when every component failed to generate.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for the Tuple<...> family with 1 through 8 type parameters.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Generates a KeyValuePair<TKey, TValue>; returns null only when both the
        // key and the value failed to generate.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Generates an array with 'size' generated elements; returns null when no
        // element could be generated at all.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Populates a dictionary via reflection, skipping duplicate keys.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            if (addMethod == null || containsMethod == null)
            {
                // GetMethod does not return explicitly-implemented interface methods, so a
                // dictionary type implementing IDictionary explicitly would previously cause
                // a NullReferenceException here. Bail out cleanly instead.
                return null;
            }
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Returns the first declared enum value, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Generates an IQueryable / IQueryable<T> by materializing a backing
        // List<T> (or object[]) and wrapping it with Queryable.AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Must pick the generic AsQueryable overload explicitly, otherwise the
                // non-generic IEnumerable overload would be invoked.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Populates a collection via its public Add method; returns null when no
        // element could be generated at all.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Nullable<T> is generated as a boxed T (boxing a T produces a valid
        // Nullable<T> value when unboxed).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // Creates a POCO via its default constructor and fills its public settable
        // properties and public fields. The instance is registered in
        // createdObjectReferences BEFORE member population so circular references
        // terminate instead of recursing forever.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Fills every public writable instance property with a generated value.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Fills every public instance field with a generated value.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Produces deterministic-looking sample values for primitive/framework types.
        // The per-instance _index makes successive values distinct ("sample string 1",
        // "sample string 2", ...).
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Orleans.Runtime;
using Orleans.TestingHost.Utils;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Configuration.Memory;
using Orleans.Configuration;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Hosting;

namespace Orleans.TestingHost
{
    /// <summary>
    /// A host class for local testing with Orleans using in-process silos.
    /// Runs a Primary and optionally secondary silos in separate app domains, and client in the main app domain.
    /// Additional silos can also be started in-process on demand if required for particular test cases.
    /// </summary>
    /// <remarks>
    /// Make sure that your test project references your test grains and test grain interfaces
    /// projects, and has CopyLocal=True set on those references [which should be the default].
    /// </remarks>
    public class TestCluster : IDisposable, IAsyncDisposable
    {
        // Guarded by lock(additionalSilos); all public views hand out copies.
        private readonly List<SiloHandle> additionalSilos = new List<SiloHandle>();
        private readonly TestClusterOptions options;
        private readonly StringBuilder log = new StringBuilder();
        private bool _disposed;
        private int startedInstances;

        /// <summary>
        /// Primary silo handle, if applicable.
        /// </summary>
        /// <remarks>This handle is valid only when using Grain-based membership.</remarks>
        public SiloHandle Primary { get; private set; }

        /// <summary>
        /// List of handles to the secondary silos.
        /// </summary>
        public IReadOnlyList<SiloHandle> SecondarySilos
        {
            get
            {
                lock (this.additionalSilos)
                {
                    return new List<SiloHandle>(this.additionalSilos);
                }
            }
        }

        /// <summary>
        /// Collection of all known silos.
        /// </summary>
        public ReadOnlyCollection<SiloHandle> Silos
        {
            get
            {
                var result = new List<SiloHandle>();
                if (this.Primary != null)
                {
                    result.Add(this.Primary);
                }

                lock (this.additionalSilos)
                {
                    result.AddRange(this.additionalSilos);
                }

                return result.AsReadOnly();
            }
        }

        /// <summary>
        /// Options used to configure the test cluster.
        /// </summary>
        /// <remarks>This is the options you configured your test cluster with, or the default one.
        /// If the cluster is being configured via ClusterConfiguration, then this object may not reflect the true settings.
        /// </remarks>
        public TestClusterOptions Options => this.options;

        /// <summary>
        /// The internal client interface.
        /// </summary>
        internal IHost ClientHost { get; private set; }

        /// <summary>
        /// The internal client interface.
        /// </summary>
        internal IInternalClusterClient InternalClient => ClientHost?.Services.GetRequiredService<IInternalClusterClient>();

        /// <summary>
        /// The client.
        /// </summary>
        public IClusterClient Client => this.InternalClient;

        /// <summary>
        /// GrainFactory to use in the tests
        /// </summary>
        public IGrainFactory GrainFactory => this.Client;

        /// <summary>
        /// GrainFactory to use in the tests
        /// </summary>
        internal IInternalGrainFactory InternalGrainFactory => this.InternalClient;

        /// <summary>
        /// Client-side <see cref="IServiceProvider"/> to use in the tests.
        /// </summary>
        public IServiceProvider ServiceProvider => this.Client.ServiceProvider;

        /// <summary>
        /// Delegate used to create and start an individual silo.
        /// </summary>
        public Func<string, IConfiguration, Task<SiloHandle>> CreateSiloAsync { private get; set; } = InProcessSiloHandle.CreateAsync;

        /// <summary>
        /// The port allocator.
        /// </summary>
        public ITestClusterPortAllocator PortAllocator { get; }

        /// <summary>
        /// Configures the test cluster plus client in-process.
        /// </summary>
        public TestCluster(
            TestClusterOptions options,
            IReadOnlyList<IConfigurationSource> configurationSources,
            ITestClusterPortAllocator portAllocator)
        {
            this.options = options;
            this.ConfigurationSources = configurationSources.ToArray();
            this.PortAllocator = portAllocator;
        }

        /// <summary>
        /// Deploys the cluster using the specified configuration and starts the client in-process.
        /// It will start the number of silos defined in <see cref="TestClusterOptions.InitialSilosCount"/>.
        /// </summary>
        public void Deploy()
        {
            this.DeployAsync().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Deploys the cluster using the specified configuration and starts the client in-process.
        /// </summary>
        public async Task DeployAsync()
        {
            if (this.Primary != null || this.additionalSilos.Count > 0) throw new InvalidOperationException("Cluster host already deployed.");

            AppDomain.CurrentDomain.UnhandledException += ReportUnobservedException;

            try
            {
                string startMsg = "----------------------------- STARTING NEW UNIT TEST SILO HOST: " + GetType().FullName + " -------------------------------------";
                WriteLog(startMsg);
                await InitializeAsync();

                if (this.options.InitializeClientOnDeploy)
                {
                    await WaitForInitialStabilization();
                }
            }
            catch (TimeoutException te)
            {
                FlushLogToConsole();
                throw new TimeoutException("Timeout during test initialization", te);
            }
            catch (Exception ex)
            {
                await StopAllSilosAsync();
                Exception baseExc = ex.GetBaseException();
                FlushLogToConsole();

                if (baseExc is TimeoutException)
                {
                    throw new TimeoutException("Timeout during test initialization", ex);
                }

                // IMPORTANT:
                // Do NOT re-throw the original exception here, also not as an internal exception inside AggregateException
                // Due to the way MS tests works, if the original exception is an Orleans exception,
                // it's assembly might not be loaded yet in this phase of the test.
                // As a result, we will get "MSTest: Unit Test Adapter threw exception: Type is not resolved for member XXX"
                // and will lose the original exception. This makes debugging tests super hard!
                // The root cause has to do with us initializing our tests from Test constructor and not from TestInitialize method.
                // More details: http://dobrzanski.net/2010/09/20/mstest-unit-test-adapter-threw-exception-type-is-not-resolved-for-member/
                //throw new Exception(
                //    string.Format("Exception during test initialization: {0}",
                //        LogFormatter.PrintException(baseExc)));
                throw;
            }
        }

        private async Task WaitForInitialStabilization()
        {
            // Poll each silo to check that it knows the expected number of active silos.
            // If any silo does not have the expected number of active silos in its cluster membership oracle, try again.
            // If the cluster membership has not stabilized after a certain period of time, give up and continue anyway.
            var totalWait = Stopwatch.StartNew();
            while (true)
            {
                var silos = this.Silos;
                var expectedCount = silos.Count;

                var remainingSilos = expectedCount;

                foreach (var silo in silos)
                {
                    var hooks = this.InternalClient.GetTestHooks(silo);
                    var statuses = await hooks.GetApproximateSiloStatuses();
                    var activeCount = statuses.Count(s => s.Value == SiloStatus.Active);
                    if (activeCount != expectedCount) break;
                    remainingSilos--;
                }

                if (remainingSilos == 0)
                {
                    totalWait.Stop();
                    break;
                }

                WriteLog($"{remainingSilos} silos do not have a consistent cluster view, waiting until stabilization.");
                await Task.Delay(TimeSpan.FromMilliseconds(100));

                // BUGFIX: the comparison was inverted ('<'), which made the loop give up
                // and log the "continuing without stabilization" warning right after the
                // very first 100ms delay. Give the cluster up to 60 seconds instead.
                if (totalWait.Elapsed > TimeSpan.FromSeconds(60))
                {
                    WriteLog($"Warning! {remainingSilos} silos do not have a consistent cluster view after {totalWait.ElapsedMilliseconds}ms, continuing without stabilization.");
                    break;
                }
            }
        }

        /// <summary>
        /// Get the list of current active silos.
        /// </summary>
        /// <returns>List of current silos.</returns>
        public IEnumerable<SiloHandle> GetActiveSilos()
        {
            var additional = new List<SiloHandle>();
            lock (additionalSilos)
            {
                additional.AddRange(additionalSilos);
            }

            WriteLog("GetActiveSilos: Primary={0} + {1} Additional={2}",
                Primary, additional.Count, Runtime.Utils.EnumerableToString(additional));

            if (Primary?.IsActive == true) yield return Primary;
            if (additional.Count > 0)
                foreach (var s in additional)
                    if (s?.IsActive == true)
                        yield return s;
        }

        /// <summary>
        /// Find the silo handle for the specified silo address.
        /// </summary>
        /// <param name="siloAddress">Silo address to be found.</param>
        /// <returns>SiloHandle of the appropriate silo, or <c>null</c> if not found.</returns>
        public SiloHandle GetSiloForAddress(SiloAddress siloAddress)
        {
            var activeSilos = GetActiveSilos().ToList();
            var ret = activeSilos.Find(s => s.SiloAddress.Equals(siloAddress));
            return ret;
        }

        /// <summary>
        /// Wait for the silo liveness sub-system to detect and act on any recent cluster membership changes.
        /// </summary>
        /// <param name="didKill">Whether recent membership changes we done by graceful Stop.</param>
        public async Task WaitForLivenessToStabilizeAsync(bool didKill = false)
        {
            var clusterMembershipOptions = this.ServiceProvider.GetRequiredService<IOptions<ClusterMembershipOptions>>().Value;
            TimeSpan stabilizationTime = GetLivenessStabilizationTime(clusterMembershipOptions, didKill);
            WriteLog(Environment.NewLine + Environment.NewLine + "WaitForLivenessToStabilize is about to sleep for {0}", stabilizationTime);
            await Task.Delay(stabilizationTime);
            WriteLog("WaitForLivenessToStabilize is done sleeping");
        }

        /// <summary>
        /// Get the timeout value to use to wait for the silo liveness sub-system to detect and act on any recent cluster membership changes.
        /// <seealso cref="WaitForLivenessToStabilizeAsync"/>
        /// </summary>
        public static TimeSpan GetLivenessStabilizationTime(ClusterMembershipOptions clusterMembershipOptions, bool didKill = false)
        {
            TimeSpan stabilizationTime = TimeSpan.Zero;
            if (didKill)
            {
                // in case of hard kill (kill and not Stop), we should give silos time to detect failures first.
                stabilizationTime = TestingUtils.Multiply(clusterMembershipOptions.ProbeTimeout, clusterMembershipOptions.NumMissedProbesLimit);
            }
            if (clusterMembershipOptions.UseLivenessGossip)
            {
                stabilizationTime += TimeSpan.FromSeconds(5);
            }
            else
            {
                stabilizationTime += TestingUtils.Multiply(clusterMembershipOptions.TableRefreshTimeout, 2);
            }
            return stabilizationTime;
        }

        /// <summary>
        /// Start an additional silo, so that it joins the existing cluster.
        /// </summary>
        /// <returns>SiloHandle for the newly started silo.</returns>
        public SiloHandle StartAdditionalSilo(bool startAdditionalSiloOnNewPort = false)
        {
            return StartAdditionalSiloAsync(startAdditionalSiloOnNewPort).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Start an additional silo, so that it joins the existing cluster.
        /// </summary>
        /// <returns>SiloHandle for the newly started silo.</returns>
        public async Task<SiloHandle> StartAdditionalSiloAsync(bool startAdditionalSiloOnNewPort = false)
        {
            return (await this.StartAdditionalSilosAsync(1, startAdditionalSiloOnNewPort)).Single();
        }

        /// <summary>
        /// Start a number of additional silo, so that they join the existing cluster.
        /// </summary>
        /// <param name="silosToStart">Number of silos to start.</param>
        /// <param name="startAdditionalSiloOnNewPort"></param>
        /// <returns>List of SiloHandles for the newly started silos.</returns>
        public async Task<List<SiloHandle>> StartAdditionalSilosAsync(int silosToStart, bool startAdditionalSiloOnNewPort = false)
        {
            var instances = new List<SiloHandle>();
            if (silosToStart > 0)
            {
                var siloStartTasks = Enumerable.Range(this.startedInstances, silosToStart)
                    .Select(instanceNumber => Task.Run(() => StartSiloAsync((short)instanceNumber, this.options, startSiloOnNewPort: startAdditionalSiloOnNewPort))).ToArray();

                try
                {
                    await Task.WhenAll(siloStartTasks);
                }
                catch (Exception)
                {
                    // Track the silos that did start so they can be stopped/disposed later,
                    // then surface the failure.
                    lock (additionalSilos)
                    {
                        this.additionalSilos.AddRange(siloStartTasks.Where(t => t.Exception == null).Select(t => t.Result));
                    }

                    throw;
                }

                instances.AddRange(siloStartTasks.Select(t => t.Result));
                lock (additionalSilos)
                {
                    this.additionalSilos.AddRange(instances);
                }
            }

            return instances;
        }

        /// <summary>
        /// Stop any additional silos, not including the default Primary silo.
        /// </summary>
        public async Task StopSecondarySilosAsync()
        {
            foreach (var instance in this.additionalSilos.ToList())
            {
                await StopSiloAsync(instance);
            }
        }

        /// <summary>
        /// Stops the default Primary silo.
        /// </summary>
        public async Task StopPrimarySiloAsync()
        {
            if (Primary == null) throw new InvalidOperationException("There is no primary silo");
            await StopClusterClientAsync();
            await StopSiloAsync(Primary);
        }

        /// <summary>
        /// Stop cluster client as an asynchronous operation.
        /// </summary>
        /// <returns>A <see cref="Task"/> representing the asynchronous operation.</returns>
        public async Task StopClusterClientAsync()
        {
            var client = this.ClientHost;
            try
            {
                if (client is not null)
                {
                    await client.StopAsync().ConfigureAwait(false);
                }
            }
            catch (Exception exc)
            {
                WriteLog("Exception stopping client: {0}", exc);
            }
            finally
            {
                await DisposeAsync(client).ConfigureAwait(false);
                ClientHost = null;
            }
        }

        /// <summary>
        /// Stop all current silos.
        /// </summary>
        public void StopAllSilos()
        {
            StopAllSilosAsync().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Stop all current silos.
        /// </summary>
        public async Task StopAllSilosAsync()
        {
            await StopClusterClientAsync();
            await StopSecondarySilosAsync();
            if (Primary != null)
            {
                await StopPrimarySiloAsync();
            }
            AppDomain.CurrentDomain.UnhandledException -= ReportUnobservedException;
        }

        /// <summary>
        /// Do a semi-graceful Stop of the specified silo.
        /// </summary>
        /// <param name="instance">Silo to be stopped.</param>
        public async Task StopSiloAsync(SiloHandle instance)
        {
            if (instance != null)
            {
                await StopSiloAsync(instance, true);
                if (Primary == instance)
                {
                    Primary = null;
                }
                else
                {
                    lock (additionalSilos)
                    {
                        additionalSilos.Remove(instance);
                    }
                }
            }
        }

        /// <summary>
        /// Do an immediate Kill of the specified silo.
        /// </summary>
        /// <param name="instance">Silo to be killed.</param>
        public async Task KillSiloAsync(SiloHandle instance)
        {
            if (instance != null)
            {
                // do NOT stop, just kill directly, to simulate crash.
                await StopSiloAsync(instance, false);
                if (Primary == instance)
                {
                    Primary = null;
                }
                else
                {
                    lock (additionalSilos)
                    {
                        additionalSilos.Remove(instance);
                    }
                }
            }
        }

        /// <summary>
        /// Performs a hard kill on client. Client will not cleanup resources.
        /// </summary>
        public async Task KillClientAsync()
        {
            var client = ClientHost;
            if (client != null)
            {
                // Pass an already-cancelled token so StopAsync aborts immediately.
                var cancelled = new CancellationTokenSource();
                cancelled.Cancel();

                try
                {
                    await client.StopAsync(cancelled.Token).ConfigureAwait(false);
                }
                finally
                {
                    await DisposeAsync(client);
                    ClientHost = null;
                }
            }
        }

        /// <summary>
        /// Do a Stop or Kill of the specified silo, followed by a restart.
        /// </summary>
        /// <param name="instance">Silo to be restarted.</param>
        public async Task<SiloHandle> RestartSiloAsync(SiloHandle instance)
        {
            if (instance != null)
            {
                var instanceNumber = instance.InstanceNumber;
                var siloName = instance.Name;
                await StopSiloAsync(instance);
                var newInstance = await StartSiloAsync(instanceNumber, this.options);

                if (siloName == Silo.PrimarySiloName)
                {
                    Primary = newInstance;
                }
                else
                {
                    lock (additionalSilos)
                    {
                        additionalSilos.Add(newInstance);
                    }
                }

                return newInstance;
            }

            return null;
        }

        /// <summary>
        /// Restart a previously stopped silo.
        /// </summary>
        /// <param name="siloName">Silo to be restarted.</param>
        public async Task<SiloHandle> RestartStoppedSecondarySiloAsync(string siloName)
        {
            if (siloName == null) throw new ArgumentNullException(nameof(siloName));
            var siloHandle = this.Silos.Single(s => s.Name.Equals(siloName, StringComparison.Ordinal));
            var newInstance = await this.StartSiloAsync(this.Silos.IndexOf(siloHandle), this.options);
            lock (additionalSilos)
            {
                additionalSilos.Add(newInstance);
            }
            return newInstance;
        }

        /// <summary>
        /// Initialize the grain client. This should be already done by <see cref="Deploy()"/> or <see cref="DeployAsync"/>
        /// </summary>
        public async Task InitializeClientAsync()
        {
            WriteLog("Initializing Cluster Client");

            if (ClientHost is not null)
            {
                await StopClusterClientAsync();
            }

            this.ClientHost = TestClusterHostFactory.CreateClusterClient("MainClient", this.ConfigurationSources);
            await this.ClientHost.StartAsync();
        }

        /// <summary>
        /// Gets the configuration sources.
        /// </summary>
        /// <value>The configuration sources.</value>
        public IReadOnlyList<IConfigurationSource> ConfigurationSources { get; }

        private async Task InitializeAsync()
        {
            short silosToStart = this.options.InitialSilosCount;

            if (this.options.UseTestClusterMembership)
            {
                this.Primary = await StartSiloAsync(this.startedInstances, this.options);
                silosToStart--;
            }

            if (silosToStart > 0)
            {
                await this.StartAdditionalSilosAsync(silosToStart);
            }

            WriteLog("Done initializing cluster");

            if (this.options.InitializeClientOnDeploy)
            {
                await InitializeClientAsync();
            }
        }

        /// <summary>
        /// Start a new silo in the target cluster
        /// </summary>
        /// <param name="cluster">The TestCluster in which the silo should be deployed</param>
        /// <param name="instanceNumber">The instance number to deploy</param>
        /// <param name="clusterOptions">The options to use.</param>
        /// <param name="configurationOverrides">Configuration overrides.</param>
        /// <param name="startSiloOnNewPort">Whether we start this silo on a new port, instead of the default one</param>
        /// <returns>A handle to the silo deployed</returns>
        public static async Task<SiloHandle> StartSiloAsync(TestCluster cluster, int instanceNumber, TestClusterOptions clusterOptions, IReadOnlyList<IConfigurationSource> configurationOverrides = null, bool startSiloOnNewPort = false)
        {
            if (cluster == null) throw new ArgumentNullException(nameof(cluster));
            return await cluster.StartSiloAsync(instanceNumber, clusterOptions, configurationOverrides, startSiloOnNewPort);
        }

        /// <summary>
        /// Starts a new silo.
        /// </summary>
        /// <param name="instanceNumber">The instance number to deploy</param>
        /// <param name="clusterOptions">The options to use.</param>
        /// <param name="configurationOverrides">Configuration overrides.</param>
        /// <param name="startSiloOnNewPort">Whether we start this silo on a new port, instead of the default one</param>
        /// <returns>A handle to the deployed silo.</returns>
        public async Task<SiloHandle> StartSiloAsync(int instanceNumber, TestClusterOptions clusterOptions, IReadOnlyList<IConfigurationSource> configurationOverrides = null, bool startSiloOnNewPort = false)
        {
            var configurationSources = this.ConfigurationSources.ToList();

            // Add overrides.
            if (configurationOverrides != null) configurationSources.AddRange(configurationOverrides);
            var siloSpecificOptions = TestSiloSpecificOptions.Create(this, clusterOptions, instanceNumber, startSiloOnNewPort);
            configurationSources.Add(new MemoryConfigurationSource
            {
                InitialData = siloSpecificOptions.ToDictionary()
            });

            var configurationBuilder = new ConfigurationBuilder();
            foreach (var source in configurationSources)
            {
                configurationBuilder.Add(source);
            }
            var configuration = configurationBuilder.Build();

            var handle = await this.CreateSiloAsync(siloSpecificOptions.SiloName, configuration);
            handle.InstanceNumber = (short)instanceNumber;
            Interlocked.Increment(ref this.startedInstances);
            return handle;
        }

        private async Task StopSiloAsync(SiloHandle instance, bool stopGracefully)
        {
            try
            {
                await instance.StopSiloAsync(stopGracefully).ConfigureAwait(false);
            }
            finally
            {
                await DisposeAsync(instance).ConfigureAwait(false);
                Interlocked.Decrement(ref this.startedInstances);
            }
        }

        /// <summary>
        /// Gets the log.
        /// </summary>
        /// <returns>The log contents.</returns>
        public string GetLog()
        {
            return this.log.ToString();
        }

        private void ReportUnobservedException(object sender, UnhandledExceptionEventArgs eventArgs)
        {
            Exception exception = (Exception)eventArgs.ExceptionObject;
            this.WriteLog("Unobserved exception: {0}", exception);
        }

        private void WriteLog(string format, params object[] args)
        {
            log.AppendFormat(format + Environment.NewLine, args);
        }

        private void FlushLogToConsole()
        {
            Console.WriteLine(GetLog());
        }

        /// <inheritdoc/>
        public async ValueTask DisposeAsync()
        {
            if (_disposed)
            {
                return;
            }

            await Task.Run(async () =>
            {
                foreach (var handle in this.SecondarySilos)
                {
                    await DisposeAsync(handle).ConfigureAwait(false);
                }

                if (this.Primary is object)
                {
                    await DisposeAsync(Primary).ConfigureAwait(false);
                }

                await DisposeAsync(ClientHost).ConfigureAwait(false);
                ClientHost = null;
                this.PortAllocator?.Dispose();
            });

            _disposed = true;
        }

        /// <inheritdoc/>
        public void Dispose()
        {
            if (_disposed)
            {
                return;
            }

            foreach (var handle in this.SecondarySilos)
            {
                handle.Dispose();
            }

            this.Primary?.Dispose();
            this.ClientHost?.Dispose();
            this.PortAllocator?.Dispose();
            _disposed = true;
        }

        // Prefers async disposal when the instance supports it; falls back to Dispose.
        private static async Task DisposeAsync(IDisposable value)
        {
            if (value is IAsyncDisposable asyncDisposable)
            {
                await asyncDisposable.DisposeAsync().ConfigureAwait(false);
            }
            else if (value is IDisposable disposable)
            {
                disposable.Dispose();
            }
        }
    }
}
// The MIT License // // Copyright (c) 2012-2015 Jordan E. Terrell // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NUnit.Framework;

namespace iSynaptic.Commons.Runtime.Serialization
{
    /// <summary>
    /// Tests for <c>Cloneable&lt;T&gt;</c> over collection and array types:
    /// lists, single/multi-dimensional/jagged arrays of primitives, classes and
    /// structs, plus the CloneTo/ShallowCloneTo in-place variants.
    /// Deep clones must produce new element instances; shallow clones share them.
    /// </summary>
    public partial class CloneableTests
    {
        [Test]
        public void CloneListOfValueType()
        {
            List<int> source = new List<int> { 1, 2, 3, 4, 5 };

            Assert.IsTrue(Cloneable<List<int>>.CanClone());
            Assert.IsTrue(Cloneable<List<int>>.CanShallowClone());

            List<int> clone = source.Clone();
            List<int> shallowClone = source.ShallowClone();

            Assert.IsFalse(ReferenceEquals(source, clone));
            Assert.IsFalse(ReferenceEquals(source, shallowClone));

            Assert.IsTrue(clone.SequenceEqual(new int[] { 1, 2, 3, 4, 5 }));
            Assert.IsTrue(shallowClone.SequenceEqual(new int[] { 1, 2, 3, 4, 5 }));
        }

        [Test]
        public void CloneListOfReferenceType()
        {
            List<CloneTestClass> source = new List<CloneTestClass>
            {
                new CloneTestClass { FirstName = "John", LastName = "Doe" }
            };

            Assert.IsTrue(Cloneable<List<CloneTestClass>>.CanClone());
            Assert.IsTrue(Cloneable<List<CloneTestClass>>.CanShallowClone());

            List<CloneTestClass> clone = Cloneable<List<CloneTestClass>>.Clone(source);
            List<CloneTestClass> shallowClone = Cloneable<List<CloneTestClass>>.ShallowClone(source);

            Assert.IsFalse(ReferenceEquals(source, clone));
            Assert.IsFalse(ReferenceEquals(source, shallowClone));

            Assert.AreEqual(1, clone.Count);
            Assert.AreEqual(1, shallowClone.Count);

            // Deep clone copies the element; shallow clone shares the same instance.
            Assert.IsFalse(ReferenceEquals(source[0], clone[0]));
            Assert.IsTrue(ReferenceEquals(source[0], shallowClone[0]));

            Assert.AreEqual("John", clone[0].FirstName);
            Assert.AreEqual("Doe", clone[0].LastName);
        }

        [Test]
        public void ClonePrimitiveArray()
        {
            int[] ints = new int[] { 1, 2, 3, 4, 5 };

            Assert.IsTrue(Cloneable<int[]>.CanClone());
            Assert.IsTrue(Cloneable<int[]>.CanShallowClone());

            int[] clonedInts = Cloneable<int[]>.Clone(ints);
            int[] shallowClonedInts = Cloneable<int[]>.ShallowClone(ints);

            Assert.IsFalse(ReferenceEquals(ints, clonedInts));
            Assert.IsFalse(ReferenceEquals(ints, shallowClonedInts));
            Assert.IsTrue(clonedInts.SequenceEqual(new int[] { 1, 2, 3, 4, 5 }));
            Assert.IsTrue(shallowClonedInts.SequenceEqual(new int[] { 1, 2, 3, 4, 5 }));
        }

        [Test]
        public void CloneClassArray()
        {
            var source = new CloneTestClass[]
            {
                new CloneTestClass { FirstName = "John", LastName = "Doe" },
                new CloneTestClass { FirstName = "Jane", LastName = "Smith"}
            };

            Assert.IsTrue(Cloneable<CloneTestClass[]>.CanClone());
            Assert.IsTrue(Cloneable<CloneTestClass[]>.CanShallowClone());

            var clones = Cloneable<CloneTestClass[]>.Clone(source);
            var shallowClones = Cloneable<CloneTestClass[]>.ShallowClone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clones));
            Assert.IsFalse(object.ReferenceEquals(source, shallowClones));

            Assert.AreEqual(source.Length, clones.Length);
            Assert.AreEqual(source.Length, shallowClones.Length);

            Assert.IsFalse(object.ReferenceEquals(source[0], clones[0]));
            Assert.IsFalse(object.ReferenceEquals(source[1], clones[1]));

            Assert.IsTrue(object.ReferenceEquals(source[0], shallowClones[0]));
            Assert.IsTrue(object.ReferenceEquals(source[1], shallowClones[1]));

            Assert.AreEqual("John", clones[0].FirstName);
            Assert.AreEqual("Doe", clones[0].LastName);

            Assert.AreEqual("Jane", clones[1].FirstName);
            Assert.AreEqual("Smith", clones[1].LastName);
        }

        [Test]
        public void CloneStructArray()
        {
            var source = new CloneTestStruct[]
            {
                new CloneTestStruct { FirstName = "John", LastName = "Doe" },
                new CloneTestStruct { FirstName = "Jane", LastName = "Smith"}
            };

            Assert.IsTrue(Cloneable<CloneTestStruct[]>.CanClone());
            Assert.IsTrue(Cloneable<CloneTestStruct[]>.CanShallowClone());

            var clones = Cloneable<CloneTestStruct[]>.Clone(source);
            var shallowClones = Cloneable<CloneTestStruct[]>.ShallowClone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clones));
            Assert.IsFalse(object.ReferenceEquals(source, shallowClones));

            Assert.AreEqual(source.Length, clones.Length);
            Assert.AreEqual(source.Length, shallowClones.Length);

            // Structs are value types, so only the copied field values are checked.
            Assert.AreEqual("John", clones[0].FirstName);
            Assert.AreEqual("Doe", clones[0].LastName);
            Assert.AreEqual("Jane", clones[1].FirstName);
            Assert.AreEqual("Smith", clones[1].LastName);
        }

        [Test]
        public void CloneMultidimensionalPrimitiveArray()
        {
            int[,] source = new int[,] { { 1, 2, 3 } };

            Assert.IsTrue(Cloneable<int[,]>.CanClone());
            Assert.IsTrue(Cloneable<int[,]>.CanShallowClone());

            var clone = Cloneable<int[,]>.Clone(source);
            var shallowClone = Cloneable<int[,]>.ShallowClone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.IsFalse(object.ReferenceEquals(source, shallowClone));

            Assert.AreEqual(source.Length, clone.Length);
            Assert.AreEqual(source.Length, shallowClone.Length);

            Assert.IsTrue(source[0, 0].Equals(clone[0, 0]));
            Assert.IsTrue(source[0, 1].Equals(clone[0, 1]));
            Assert.IsTrue(source[0, 2].Equals(clone[0, 2]));

            Assert.IsTrue(source[0, 0].Equals(shallowClone[0, 0]));
            Assert.IsTrue(source[0, 1].Equals(shallowClone[0, 1]));
            Assert.IsTrue(source[0, 2].Equals(shallowClone[0, 2]));
        }

        [Test]
        public void CloneMultidimensionalClassArray()
        {
            CloneTestClass[,] source = new CloneTestClass[,]
            {
                {
                    new CloneTestClass { FirstName = "John", LastName = "Doe" },
                    new CloneTestClass { FirstName = "Jane", LastName = "Smith"}
                }
            };

            // Self-reference: deep clone must remap it onto the cloned element,
            // not back to the source element.
            source[0, 1].InnerClass = source[0, 1];

            Assert.IsTrue(Cloneable<CloneTestClass[,]>.CanClone());
            Assert.IsTrue(Cloneable<CloneTestClass[,]>.CanShallowClone());

            var clone = Cloneable<CloneTestClass[,]>.Clone(source);
            var shallowClone = Cloneable<CloneTestClass[,]>.ShallowClone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.IsFalse(object.ReferenceEquals(source, shallowClone));

            Assert.AreEqual(source.Length, clone.Length);
            Assert.AreEqual(source.Length, shallowClone.Length);

            Assert.IsFalse(object.ReferenceEquals(source[0, 0], clone[0, 0]));
            Assert.IsFalse(object.ReferenceEquals(source[0, 1], clone[0, 1]));
            Assert.IsFalse(object.ReferenceEquals(source[0, 1], clone[0, 1].InnerClass));
            Assert.IsTrue(object.ReferenceEquals(clone[0, 1], clone[0, 1].InnerClass));
            Assert.IsTrue(object.ReferenceEquals(source[0, 0], shallowClone[0, 0]));
            Assert.IsTrue(object.ReferenceEquals(source[0, 1], shallowClone[0, 1]));
            Assert.IsTrue(object.ReferenceEquals(source[0, 1], shallowClone[0, 1].InnerClass));
            Assert.IsFalse(object.ReferenceEquals(clone[0, 1], shallowClone[0, 1].InnerClass));

            Assert.AreEqual("John", clone[0, 0].FirstName);
            Assert.AreEqual("Doe", clone[0, 0].LastName);
            Assert.AreEqual("Jane", clone[0, 1].FirstName);
            Assert.AreEqual("Smith", clone[0, 1].LastName);

            Assert.AreEqual("John", shallowClone[0, 0].FirstName);
            Assert.AreEqual("Doe", shallowClone[0, 0].LastName);
            Assert.AreEqual("Jane", shallowClone[0, 1].FirstName);
            Assert.AreEqual("Smith", shallowClone[0, 1].LastName);
        }

        [Test]
        public void CloneMultidimensionalStructArray()
        {
            CloneTestStruct[,] source = new CloneTestStruct[,]
            {
                {
                    new CloneTestStruct { FirstName = "John", LastName = "Doe" },
                    new CloneTestStruct { FirstName = "Jane", LastName = "Smith"}
                }
            };

            Assert.IsTrue(Cloneable<CloneTestStruct[,]>.CanClone());
            Assert.IsTrue(Cloneable<CloneTestStruct[,]>.CanShallowClone());

            var clone = Cloneable<CloneTestStruct[,]>.Clone(source);
            var shallowClone = Cloneable<CloneTestStruct[,]>.ShallowClone(source);

            Assert.AreEqual(source.Length, clone.Length);
            Assert.AreEqual(source.Length, shallowClone.Length);

            Assert.AreEqual("John", clone[0, 0].FirstName);
            Assert.AreEqual("Doe", clone[0, 0].LastName);
            Assert.AreEqual("Jane", clone[0, 1].FirstName);
            Assert.AreEqual("Smith", clone[0, 1].LastName);

            Assert.AreEqual("John", shallowClone[0, 0].FirstName);
            Assert.AreEqual("Doe", shallowClone[0, 0].LastName);
            Assert.AreEqual("Jane", shallowClone[0, 1].FirstName);
            Assert.AreEqual("Smith", shallowClone[0, 1].LastName);
        }

        [Test]
        public void CloneJaggedPrimitiveArray()
        {
            var source = new int[][] { new int[]{1,2,3,4,5} };

            Assert.IsTrue(Cloneable<int[][]>.CanClone());
            Assert.IsTrue(Cloneable<int[][]>.CanShallowClone());

            var clone = Cloneable<int[][]>.Clone(source);
            var shallowClone = Cloneable<int[][]>.ShallowClone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.IsFalse(object.ReferenceEquals(source, shallowClone));

            Assert.AreEqual(source.Length, clone.Length);
            Assert.AreEqual(source.Length, shallowClone.Length);

            // NOTE(review): only the first three of five elements are verified here.
            Assert.AreEqual(source[0][0], clone[0][0]);
            Assert.AreEqual(source[0][1], clone[0][1]);
            Assert.AreEqual(source[0][2], clone[0][2]);

            Assert.AreEqual(source[0][0], shallowClone[0][0]);
            Assert.AreEqual(source[0][1], shallowClone[0][1]);
            Assert.AreEqual(source[0][2], shallowClone[0][2]);
        }

        [Test]
        public void CloneJaggedClassArray()
        {
            var source = new CloneTestClass[][]
            {
                new CloneTestClass[]
                {
                    new CloneTestClass { FirstName = "John", LastName = "Doe" },
                    new CloneTestClass { FirstName = "Jane", LastName = "Smith"}
                }
            };

            Assert.IsTrue(Cloneable<CloneTestClass[][]>.CanClone());
            Assert.IsTrue(Cloneable<CloneTestClass[][]>.CanShallowClone());

            var clone = Cloneable<CloneTestClass[][]>.Clone(source);
            var shallowClone = Cloneable<CloneTestClass[][]>.ShallowClone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.IsFalse(object.ReferenceEquals(source, shallowClone));

            // Deep clone copies the inner array and its elements; shallow shares both.
            Assert.IsFalse(object.ReferenceEquals(source[0], clone[0]));
            Assert.IsFalse(object.ReferenceEquals(source[0][0], clone[0][0]));
            Assert.IsFalse(object.ReferenceEquals(source[0][1], clone[0][1]));

            Assert.IsTrue(object.ReferenceEquals(source[0], shallowClone[0]));
            Assert.IsTrue(object.ReferenceEquals(source[0][0], shallowClone[0][0]));
            Assert.IsTrue(object.ReferenceEquals(source[0][1], shallowClone[0][1]));

            Assert.AreEqual(source.Length, clone.Length);
            Assert.AreEqual(source.Length, shallowClone.Length);

            Assert.AreEqual("John", clone[0][0].FirstName);
            Assert.AreEqual("Doe", clone[0][0].LastName);
            Assert.AreEqual("Jane", clone[0][1].FirstName);
            Assert.AreEqual("Smith", clone[0][1].LastName);
        }

        [Test]
        public void CloneJaggedStructArray()
        {
            var source = new CloneTestStruct[][]
            {
                new CloneTestStruct[]
                {
                    new CloneTestStruct { FirstName = "John", LastName = "Doe" },
                    new CloneTestStruct { FirstName = "Jane", LastName = "Smith"}
                }
            };

            Assert.IsTrue(Cloneable<CloneTestStruct[][]>.CanClone());
            Assert.IsTrue(Cloneable<CloneTestStruct[][]>.CanShallowClone());

            var clone = Cloneable<CloneTestStruct[][]>.Clone(source);
            var shallowClone = Cloneable<CloneTestStruct[][]>.ShallowClone(source);

            Assert.AreEqual(source.Length, clone.Length);
            Assert.AreEqual(source.Length, shallowClone.Length);

            Assert.AreEqual("John", clone[0][0].FirstName);
            Assert.AreEqual("Doe", clone[0][0].LastName);
            Assert.AreEqual("Jane", clone[0][1].FirstName);
            Assert.AreEqual("Smith", clone[0][1].LastName);

            Assert.AreEqual("John", shallowClone[0][0].FirstName);
            Assert.AreEqual("Doe", shallowClone[0][0].LastName);
            Assert.AreEqual("Jane", shallowClone[0][1].FirstName);
            Assert.AreEqual("Smith", shallowClone[0][1].LastName);
        }

        [Test]
        public void CloneMultipleReferencesToSameArray()
        {
            // Both outer slots alias the same inner array; the deep clone must
            // preserve that aliasing within the clone while not sharing with source.
            var child = new CloneTestClass[] { new CloneTestClass(), new CloneTestClass() };
            var source = new CloneTestClass[][] { child, child };

            var clone = Cloneable<CloneTestClass[][]>.Clone(source);

            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.IsFalse(object.ReferenceEquals(clone[0], child));
            Assert.IsFalse(object.ReferenceEquals(clone[1], child));
            Assert.IsTrue(object.ReferenceEquals(clone[0], clone[1]));
        }

        [Test]
        public void CloneNullArray()
        {
            // Cloning null yields null rather than throwing.
            Assert.IsNull(Cloneable<int[]>.Clone(null));
            Assert.IsNull(Cloneable<int[]>.ShallowClone(null));
        }

        [Test]
        public void CloneEmptyArray()
        {
            var source = new int[] { };
            var clone = Cloneable<int[]>.Clone(source);

            // Even an empty array must be copied to a fresh instance.
            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.AreEqual(0, clone.Length);

            clone = Cloneable<int[]>.ShallowClone(source);
            Assert.IsFalse(object.ReferenceEquals(source, clone));
            Assert.AreEqual(0, clone.Length);
        }

        [Test]
        public void CloneToClassArray()
        {
            var destinationClass = new CloneTestClass();
            var source = new[] { new CloneTestClass { FirstName = "John", LastName = "Doe" } };
            var destination = new[] { destinationClass };

            Cloneable<CloneTestClass[]>.CloneTo(source, destination);

            // CloneTo fills the existing destination instance rather than replacing it.
            Assert.IsTrue(ReferenceEquals(destinationClass, destination[0]));
            Assert.AreEqual("John", destinationClass.FirstName);
            Assert.AreEqual("Doe", destinationClass.LastName);
        }

        [Test]
        public void ShallowCloneToClassArray()
        {
            var destinationClass = new CloneTestClass();
            var source = new[] { new CloneTestClass { FirstName = "John", LastName = "Doe" } };
            var destination = new[] { destinationClass };

            Cloneable<CloneTestClass[]>.ShallowCloneTo(source, destination);

            // ShallowCloneTo overwrites the slot with the source reference itself.
            Assert.IsTrue(ReferenceEquals(source[0], destination[0]));
        }

        [Test]
        public void CloneToStructArray()
        {
            var source = new[] { new CloneTestStruct { FirstName = "John", LastName = "Doe" } };
            var destination = new CloneTestStruct[1];

            source.CloneTo(destination);

            Assert.AreEqual("John", destination[0].FirstName);
            Assert.AreEqual("Doe", destination[0].LastName);
        }

        [Test]
        public void ShallowCloneToStructArray()
        {
            var source = new[] { new CloneTestStruct { FirstName = "John", LastName = "Doe" } };
            var destination = new CloneTestStruct[1];

            source.ShallowCloneTo(destination);

            Assert.AreEqual("John", destination[0].FirstName);
            Assert.AreEqual("Doe", destination[0].LastName);
        }

        [Test]
        public void CloneToMultidimentionalClassArray()
        {
            CloneTestClass[,] source = new[,]
            {
                {
                    new CloneTestClass { FirstName = "John", LastName = "Doe" },
                    new CloneTestClass { FirstName = "Jane", LastName = "Smith"}
                }
            };

            var dest1 = new CloneTestClass();
            var dest2 = new CloneTestClass();
            var destination = new[,] { { dest1, dest2 } };

            source.CloneTo(destination);

            Assert.IsTrue(ReferenceEquals(dest1, destination[0, 0]));
            Assert.IsTrue(ReferenceEquals(dest2, destination[0, 1]));

            Assert.AreEqual("John", destination[0, 0].FirstName);
            Assert.AreEqual("Doe", destination[0, 0].LastName);
            Assert.AreEqual("Jane", destination[0, 1].FirstName);
            Assert.AreEqual("Smith", destination[0, 1].LastName);
        }

        [Test]
        public void ShallowCloneToMultidimentionalClassArray()
        {
            var source1 = new CloneTestClass { FirstName = "John", LastName = "Doe" };
            var source2 = new CloneTestClass { FirstName = "Jane", LastName = "Smith" };

            CloneTestClass[,] source = new[,] { { source1, source2 } };
            var destination = new[,] { { new CloneTestClass(), new CloneTestClass() } };

            source.ShallowCloneTo(destination);

            Assert.IsTrue(ReferenceEquals(source1, destination[0, 0]));
            Assert.IsTrue(ReferenceEquals(source2, destination[0, 1]));
        }

        [Test]
        public void CloneToMultidimentionalStructArray()
        {
            var dest1 = new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass() };
            var dest2 = new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass() };

            var source = new[,]
            {
                {
                    new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass { FirstName = "John", LastName = "Doe" } },
                    new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass { FirstName = "Jane", LastName = "Smith" } }
                }
            };

            var dest = new[,] { { dest1, dest2 } };

            var clone = source.CloneTo(dest);

            // The pre-existing TestClass instances in the destination are reused.
            Assert.IsTrue(ReferenceEquals(clone[0, 0].TestClass, dest1.TestClass));
            Assert.IsTrue(ReferenceEquals(clone[0, 1].TestClass, dest2.TestClass));

            Assert.AreEqual("John", clone[0, 0].TestClass.FirstName);
            Assert.AreEqual("Doe", clone[0, 0].TestClass.LastName);
            Assert.AreEqual("Jane", clone[0, 1].TestClass.FirstName);
            Assert.AreEqual("Smith", clone[0, 1].TestClass.LastName);
        }

        [Test]
        public void ShallowCloneToMultidimentionalStructArray()
        {
            var source1 = new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass { FirstName = "John", LastName = "Doe" } };
            var source2 = new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass { FirstName = "Jane", LastName = "Smith" } };

            var source = new[,] { { source1, source2 } };
            var dest = new CloneTestStructWithClonableClassField[1, 2];

            var clone = source.ShallowCloneTo(dest);

            Assert.IsTrue(ReferenceEquals(clone[0, 0].TestClass, source1.TestClass));
            Assert.IsTrue(ReferenceEquals(clone[0, 1].TestClass, source2.TestClass));
        }

        [Test]
        public void CloneToJaggedClassArray()
        {
            var source = new[]
            {
                new[] {new CloneTestClass { FirstName = "John", LastName = "Doe" }},
                new[] {new CloneTestClass { FirstName = "Jane", LastName = "Smith"}}
            };

            var dest1 = new CloneTestClass();
            var dest2 = new CloneTestClass();

            var destination = new[] { new[] {dest1}, new[] {dest2} };

            source.CloneTo(destination);

            Assert.IsTrue(ReferenceEquals(dest1, destination[0][0]));
            Assert.IsTrue(ReferenceEquals(dest2, destination[1][0]));

            Assert.AreEqual("John", destination[0][0].FirstName);
            Assert.AreEqual("Doe", destination[0][0].LastName);
            Assert.AreEqual("Jane", destination[1][0].FirstName);
            Assert.AreEqual("Smith", destination[1][0].LastName);
        }

        [Test]
        public void ShallowCloneToJaggedClassArray()
        {
            var source1 = new CloneTestClass { FirstName = "John", LastName = "Doe" };
            var source2 = new CloneTestClass { FirstName = "Jane", LastName = "Smith" };

            var source = new[] { new[] {source1}, new[] {source2} };
            var destination = new[] { new[] {new CloneTestClass()}, new[] {new CloneTestClass()} };

            source.ShallowCloneTo(destination);

            Assert.IsTrue(ReferenceEquals(source1, destination[0][0]));
            Assert.IsTrue(ReferenceEquals(source2, destination[1][0]));
        }

        [Test]
        public void CloneToJaggedStructArray()
        {
            var source = new[]
            {
                new[] {new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass { FirstName = "John", LastName = "Doe" }}},
                new[] {new CloneTestStructWithClonableClassField { TestClass = new CloneTestClass { FirstName = "Jane", LastName = "Smith"}}}
            };

            var dest1 = new CloneTestClass();
            var dest2 = new CloneTestClass();

            var destination = new[]
            {
                new[] {new CloneTestStructWithClonableClassField { TestClass = dest1}},
                new[] {new CloneTestStructWithClonableClassField { TestClass = dest2}}
            };

            var clone = source.CloneTo(destination);

            Assert.IsTrue(ReferenceEquals(dest1, clone[0][0].TestClass));
            Assert.IsTrue(ReferenceEquals(dest2, clone[1][0].TestClass));

            Assert.AreEqual("John", clone[0][0].TestClass.FirstName);
            Assert.AreEqual("Doe", clone[0][0].TestClass.LastName);
            Assert.AreEqual("Jane", clone[1][0].TestClass.FirstName);
            Assert.AreEqual("Smith", clone[1][0].TestClass.LastName);
        }

        [Test]
        public void ShallowCloneToJaggedStructArray()
        {
            var source1 = new CloneTestClass { FirstName = "John", LastName = "Doe" };
            var source2 = new CloneTestClass { FirstName = "Jane", LastName = "Smith" };

            var source = new[]
            {
                new[] {new CloneTestStructWithClonableClassField { TestClass = source1}},
                new[] {new CloneTestStructWithClonableClassField { TestClass = source2}}
            };

            var destination = new[]
            {
                new[] {new CloneTestStructWithClonableClassField()},
                new[] {new CloneTestStructWithClonableClassField()}
            };

            var clone = source.ShallowCloneTo(destination);

            Assert.IsTrue(ReferenceEquals(source1, destination[0][0].TestClass));
            Assert.IsTrue(ReferenceEquals(source2, destination[1][0].TestClass));
        }

        [Test]
        public void CloneToClassArrayOfDifferingLengthsWillNotWork()
        {
            var source = new CloneTestClass[2];
            var destination = new CloneTestClass[3];

            Assert.Throws<InvalidOperationException>(() => Cloneable<CloneTestClass[]>.CloneTo(source, destination));
        }

        [Test]
        public void ShallowCloneToClassArrayOfDifferingLengthsWillNotWork()
        {
            var source = new CloneTestClass[2];
            var destination = new CloneTestClass[3];

            Assert.Throws<InvalidOperationException>(() => Cloneable<CloneTestClass[]>.ShallowCloneTo(source, destination));
        }

        [Test]
        public void CloneToStructArrayOfDifferingLengthsWillNotWork()
        {
            var source = new CloneTestStruct[2];
            var destination = new CloneTestStruct[3];

            Assert.Throws<InvalidOperationException>(() => Cloneable<CloneTestStruct[]>.CloneTo(source, destination));
        }

        [Test]
        public void ShallowCloneToStructArrayOfDifferingLengthsWillNotWork()
        {
            var source = new CloneTestStruct[2];
            var destination = new CloneTestStruct[3];

            Assert.Throws<InvalidOperationException>(() => Cloneable<CloneTestStruct[]>.ShallowCloneTo(source, destination));
        }
    }
}
using System; using System.Text; using System.Data; using System.Data.SqlClient; using System.Data.Common; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Configuration; using System.Xml; using System.Xml.Serialization; using SubSonic; using SubSonic.Utilities; // <auto-generated /> namespace Northwind{ /// <summary> /// Strongly-typed collection for the AlphabeticalListOfProduct class. /// </summary> [Serializable] public partial class AlphabeticalListOfProductCollection : ReadOnlyList<AlphabeticalListOfProduct, AlphabeticalListOfProductCollection> { public AlphabeticalListOfProductCollection() {} } /// <summary> /// This is Read-only wrapper class for the Alphabetical list of products view. /// </summary> [Serializable] public partial class AlphabeticalListOfProduct : ReadOnlyRecord<AlphabeticalListOfProduct>, IReadOnlyRecord { #region Default Settings protected static void SetSQLProps() { GetTableSchema(); } #endregion #region Schema Accessor public static TableSchema.Table Schema { get { if (BaseSchema == null) { SetSQLProps(); } return BaseSchema; } } private static void GetTableSchema() { if(!IsSchemaInitialized) { //Schema declaration TableSchema.Table schema = new TableSchema.Table("Alphabetical list of products", TableType.View, DataService.GetInstance("Northwind")); schema.Columns = new TableSchema.TableColumnCollection(); schema.SchemaName = @"dbo"; //columns TableSchema.TableColumn colvarProductID = new TableSchema.TableColumn(schema); colvarProductID.ColumnName = "ProductID"; colvarProductID.DataType = DbType.Int32; colvarProductID.MaxLength = 0; colvarProductID.AutoIncrement = false; colvarProductID.IsNullable = false; colvarProductID.IsPrimaryKey = false; colvarProductID.IsForeignKey = false; colvarProductID.IsReadOnly = false; schema.Columns.Add(colvarProductID); TableSchema.TableColumn colvarProductName = new TableSchema.TableColumn(schema); colvarProductName.ColumnName = "ProductName"; 
colvarProductName.DataType = DbType.String; colvarProductName.MaxLength = 40; colvarProductName.AutoIncrement = false; colvarProductName.IsNullable = false; colvarProductName.IsPrimaryKey = false; colvarProductName.IsForeignKey = false; colvarProductName.IsReadOnly = false; schema.Columns.Add(colvarProductName); TableSchema.TableColumn colvarSupplierID = new TableSchema.TableColumn(schema); colvarSupplierID.ColumnName = "SupplierID"; colvarSupplierID.DataType = DbType.Int32; colvarSupplierID.MaxLength = 0; colvarSupplierID.AutoIncrement = false; colvarSupplierID.IsNullable = true; colvarSupplierID.IsPrimaryKey = false; colvarSupplierID.IsForeignKey = false; colvarSupplierID.IsReadOnly = false; schema.Columns.Add(colvarSupplierID); TableSchema.TableColumn colvarCategoryID = new TableSchema.TableColumn(schema); colvarCategoryID.ColumnName = "CategoryID"; colvarCategoryID.DataType = DbType.Int32; colvarCategoryID.MaxLength = 0; colvarCategoryID.AutoIncrement = false; colvarCategoryID.IsNullable = true; colvarCategoryID.IsPrimaryKey = false; colvarCategoryID.IsForeignKey = false; colvarCategoryID.IsReadOnly = false; schema.Columns.Add(colvarCategoryID); TableSchema.TableColumn colvarQuantityPerUnit = new TableSchema.TableColumn(schema); colvarQuantityPerUnit.ColumnName = "QuantityPerUnit"; colvarQuantityPerUnit.DataType = DbType.String; colvarQuantityPerUnit.MaxLength = 20; colvarQuantityPerUnit.AutoIncrement = false; colvarQuantityPerUnit.IsNullable = true; colvarQuantityPerUnit.IsPrimaryKey = false; colvarQuantityPerUnit.IsForeignKey = false; colvarQuantityPerUnit.IsReadOnly = false; schema.Columns.Add(colvarQuantityPerUnit); TableSchema.TableColumn colvarUnitPrice = new TableSchema.TableColumn(schema); colvarUnitPrice.ColumnName = "UnitPrice"; colvarUnitPrice.DataType = DbType.Currency; colvarUnitPrice.MaxLength = 0; colvarUnitPrice.AutoIncrement = false; colvarUnitPrice.IsNullable = true; colvarUnitPrice.IsPrimaryKey = false; colvarUnitPrice.IsForeignKey = false; 
colvarUnitPrice.IsReadOnly = false; schema.Columns.Add(colvarUnitPrice); TableSchema.TableColumn colvarUnitsInStock = new TableSchema.TableColumn(schema); colvarUnitsInStock.ColumnName = "UnitsInStock"; colvarUnitsInStock.DataType = DbType.Int16; colvarUnitsInStock.MaxLength = 0; colvarUnitsInStock.AutoIncrement = false; colvarUnitsInStock.IsNullable = true; colvarUnitsInStock.IsPrimaryKey = false; colvarUnitsInStock.IsForeignKey = false; colvarUnitsInStock.IsReadOnly = false; schema.Columns.Add(colvarUnitsInStock); TableSchema.TableColumn colvarUnitsOnOrder = new TableSchema.TableColumn(schema); colvarUnitsOnOrder.ColumnName = "UnitsOnOrder"; colvarUnitsOnOrder.DataType = DbType.Int16; colvarUnitsOnOrder.MaxLength = 0; colvarUnitsOnOrder.AutoIncrement = false; colvarUnitsOnOrder.IsNullable = true; colvarUnitsOnOrder.IsPrimaryKey = false; colvarUnitsOnOrder.IsForeignKey = false; colvarUnitsOnOrder.IsReadOnly = false; schema.Columns.Add(colvarUnitsOnOrder); TableSchema.TableColumn colvarReorderLevel = new TableSchema.TableColumn(schema); colvarReorderLevel.ColumnName = "ReorderLevel"; colvarReorderLevel.DataType = DbType.Int16; colvarReorderLevel.MaxLength = 0; colvarReorderLevel.AutoIncrement = false; colvarReorderLevel.IsNullable = true; colvarReorderLevel.IsPrimaryKey = false; colvarReorderLevel.IsForeignKey = false; colvarReorderLevel.IsReadOnly = false; schema.Columns.Add(colvarReorderLevel); TableSchema.TableColumn colvarDiscontinued = new TableSchema.TableColumn(schema); colvarDiscontinued.ColumnName = "Discontinued"; colvarDiscontinued.DataType = DbType.Boolean; colvarDiscontinued.MaxLength = 0; colvarDiscontinued.AutoIncrement = false; colvarDiscontinued.IsNullable = false; colvarDiscontinued.IsPrimaryKey = false; colvarDiscontinued.IsForeignKey = false; colvarDiscontinued.IsReadOnly = false; schema.Columns.Add(colvarDiscontinued); TableSchema.TableColumn colvarAttributeXML = new TableSchema.TableColumn(schema); colvarAttributeXML.ColumnName = 
"AttributeXML"; colvarAttributeXML.DataType = DbType.AnsiString; colvarAttributeXML.MaxLength = -1; colvarAttributeXML.AutoIncrement = false; colvarAttributeXML.IsNullable = true; colvarAttributeXML.IsPrimaryKey = false; colvarAttributeXML.IsForeignKey = false; colvarAttributeXML.IsReadOnly = false; schema.Columns.Add(colvarAttributeXML); TableSchema.TableColumn colvarDateCreated = new TableSchema.TableColumn(schema); colvarDateCreated.ColumnName = "DateCreated"; colvarDateCreated.DataType = DbType.DateTime; colvarDateCreated.MaxLength = 0; colvarDateCreated.AutoIncrement = false; colvarDateCreated.IsNullable = true; colvarDateCreated.IsPrimaryKey = false; colvarDateCreated.IsForeignKey = false; colvarDateCreated.IsReadOnly = false; schema.Columns.Add(colvarDateCreated); TableSchema.TableColumn colvarProductGUID = new TableSchema.TableColumn(schema); colvarProductGUID.ColumnName = "ProductGUID"; colvarProductGUID.DataType = DbType.Guid; colvarProductGUID.MaxLength = 0; colvarProductGUID.AutoIncrement = false; colvarProductGUID.IsNullable = true; colvarProductGUID.IsPrimaryKey = false; colvarProductGUID.IsForeignKey = false; colvarProductGUID.IsReadOnly = false; schema.Columns.Add(colvarProductGUID); TableSchema.TableColumn colvarCreatedOn = new TableSchema.TableColumn(schema); colvarCreatedOn.ColumnName = "CreatedOn"; colvarCreatedOn.DataType = DbType.DateTime; colvarCreatedOn.MaxLength = 0; colvarCreatedOn.AutoIncrement = false; colvarCreatedOn.IsNullable = false; colvarCreatedOn.IsPrimaryKey = false; colvarCreatedOn.IsForeignKey = false; colvarCreatedOn.IsReadOnly = false; schema.Columns.Add(colvarCreatedOn); TableSchema.TableColumn colvarCreatedBy = new TableSchema.TableColumn(schema); colvarCreatedBy.ColumnName = "CreatedBy"; colvarCreatedBy.DataType = DbType.String; colvarCreatedBy.MaxLength = 50; colvarCreatedBy.AutoIncrement = false; colvarCreatedBy.IsNullable = true; colvarCreatedBy.IsPrimaryKey = false; colvarCreatedBy.IsForeignKey = false; 
colvarCreatedBy.IsReadOnly = false; schema.Columns.Add(colvarCreatedBy); TableSchema.TableColumn colvarModifiedOn = new TableSchema.TableColumn(schema); colvarModifiedOn.ColumnName = "ModifiedOn"; colvarModifiedOn.DataType = DbType.DateTime; colvarModifiedOn.MaxLength = 0; colvarModifiedOn.AutoIncrement = false; colvarModifiedOn.IsNullable = false; colvarModifiedOn.IsPrimaryKey = false; colvarModifiedOn.IsForeignKey = false; colvarModifiedOn.IsReadOnly = false; schema.Columns.Add(colvarModifiedOn); TableSchema.TableColumn colvarModifiedBy = new TableSchema.TableColumn(schema); colvarModifiedBy.ColumnName = "ModifiedBy"; colvarModifiedBy.DataType = DbType.String; colvarModifiedBy.MaxLength = 50; colvarModifiedBy.AutoIncrement = false; colvarModifiedBy.IsNullable = true; colvarModifiedBy.IsPrimaryKey = false; colvarModifiedBy.IsForeignKey = false; colvarModifiedBy.IsReadOnly = false; schema.Columns.Add(colvarModifiedBy); TableSchema.TableColumn colvarDeleted = new TableSchema.TableColumn(schema); colvarDeleted.ColumnName = "Deleted"; colvarDeleted.DataType = DbType.Boolean; colvarDeleted.MaxLength = 0; colvarDeleted.AutoIncrement = false; colvarDeleted.IsNullable = false; colvarDeleted.IsPrimaryKey = false; colvarDeleted.IsForeignKey = false; colvarDeleted.IsReadOnly = false; schema.Columns.Add(colvarDeleted); TableSchema.TableColumn colvarCategoryName = new TableSchema.TableColumn(schema); colvarCategoryName.ColumnName = "CategoryName"; colvarCategoryName.DataType = DbType.String; colvarCategoryName.MaxLength = 15; colvarCategoryName.AutoIncrement = false; colvarCategoryName.IsNullable = false; colvarCategoryName.IsPrimaryKey = false; colvarCategoryName.IsForeignKey = false; colvarCategoryName.IsReadOnly = false; schema.Columns.Add(colvarCategoryName); BaseSchema = schema; //add this schema to the provider //so we can query it later DataService.Providers["Northwind"].AddSchema("Alphabetical list of products",schema); } } #endregion #region Query Accessor public 
static Query CreateQuery() { return new Query(Schema); }
#endregion

#region .ctors
// Default constructor: wires up the schema metadata, applies in-memory defaults,
// and marks the record as new (unsaved).
public AlphabeticalListOfProduct() { SetSQLProps(); SetDefaults(); MarkNew(); }
// Same as the default constructor, but optionally forces database-side column
// defaults instead of in-memory defaults.
public AlphabeticalListOfProduct(bool useDatabaseDefaults) { SetSQLProps(); if(useDatabaseDefaults) { ForceDefaults(); } MarkNew(); }
// Loads an existing record by its primary key value.
public AlphabeticalListOfProduct(object keyID) { SetSQLProps(); LoadByKey(keyID); }
// Loads an existing record by an arbitrary column/value pair.
public AlphabeticalListOfProduct(string columnName, object columnValue) { SetSQLProps(); LoadByParam(columnName,columnValue); }
#endregion

#region Props
// Typed accessors over the untyped column store; each property maps 1:1 to a
// column of the "Alphabetical list of products" view via Get/SetColumnValue.
[XmlAttribute("ProductID")]
[Bindable(true)]
public int ProductID
{ get { return GetColumnValue<int>("ProductID"); } set { SetColumnValue("ProductID", value); } }

[XmlAttribute("ProductName")]
[Bindable(true)]
public string ProductName
{ get { return GetColumnValue<string>("ProductName"); } set { SetColumnValue("ProductName", value); } }

[XmlAttribute("SupplierID")]
[Bindable(true)]
public int? SupplierID
{ get { return GetColumnValue<int?>("SupplierID"); } set { SetColumnValue("SupplierID", value); } }

[XmlAttribute("CategoryID")]
[Bindable(true)]
public int? CategoryID
{ get { return GetColumnValue<int?>("CategoryID"); } set { SetColumnValue("CategoryID", value); } }

[XmlAttribute("QuantityPerUnit")]
[Bindable(true)]
public string QuantityPerUnit
{ get { return GetColumnValue<string>("QuantityPerUnit"); } set { SetColumnValue("QuantityPerUnit", value); } }

[XmlAttribute("UnitPrice")]
[Bindable(true)]
public decimal? UnitPrice
{ get { return GetColumnValue<decimal?>("UnitPrice"); } set { SetColumnValue("UnitPrice", value); } }

[XmlAttribute("UnitsInStock")]
[Bindable(true)]
public short? UnitsInStock
{ get { return GetColumnValue<short?>("UnitsInStock"); } set { SetColumnValue("UnitsInStock", value); } }

[XmlAttribute("UnitsOnOrder")]
[Bindable(true)]
public short? UnitsOnOrder
{ get { return GetColumnValue<short?>("UnitsOnOrder"); } set { SetColumnValue("UnitsOnOrder", value); } }

[XmlAttribute("ReorderLevel")]
[Bindable(true)]
public short? ReorderLevel
{ get { return GetColumnValue<short?>("ReorderLevel"); } set { SetColumnValue("ReorderLevel", value); } }

[XmlAttribute("Discontinued")]
[Bindable(true)]
public bool Discontinued
{ get { return GetColumnValue<bool>("Discontinued"); } set { SetColumnValue("Discontinued", value); } }

[XmlAttribute("AttributeXML")]
[Bindable(true)]
public string AttributeXML
{ get { return GetColumnValue<string>("AttributeXML"); } set { SetColumnValue("AttributeXML", value); } }

[XmlAttribute("DateCreated")]
[Bindable(true)]
public DateTime? DateCreated
{ get { return GetColumnValue<DateTime?>("DateCreated"); } set { SetColumnValue("DateCreated", value); } }

[XmlAttribute("ProductGUID")]
[Bindable(true)]
public Guid? ProductGUID
{ get { return GetColumnValue<Guid?>("ProductGUID"); } set { SetColumnValue("ProductGUID", value); } }

[XmlAttribute("CreatedOn")]
[Bindable(true)]
public DateTime CreatedOn
{ get { return GetColumnValue<DateTime>("CreatedOn"); } set { SetColumnValue("CreatedOn", value); } }

[XmlAttribute("CreatedBy")]
[Bindable(true)]
public string CreatedBy
{ get { return GetColumnValue<string>("CreatedBy"); } set { SetColumnValue("CreatedBy", value); } }

[XmlAttribute("ModifiedOn")]
[Bindable(true)]
public DateTime ModifiedOn
{ get { return GetColumnValue<DateTime>("ModifiedOn"); } set { SetColumnValue("ModifiedOn", value); } }

[XmlAttribute("ModifiedBy")]
[Bindable(true)]
public string ModifiedBy
{ get { return GetColumnValue<string>("ModifiedBy"); } set { SetColumnValue("ModifiedBy", value); } }

[XmlAttribute("Deleted")]
[Bindable(true)]
public bool Deleted
{ get { return GetColumnValue<bool>("Deleted"); } set { SetColumnValue("Deleted", value); } }

[XmlAttribute("CategoryName")]
[Bindable(true)]
public string CategoryName
{ get { return GetColumnValue<string>("CategoryName"); } set { SetColumnValue("CategoryName", value); } }
#endregion

#region Columns Struct
// Column-name constants so callers can build queries without magic strings.
public struct Columns
{
    public static string ProductID = @"ProductID";
    public static string ProductName = @"ProductName";
    public static string SupplierID = @"SupplierID";
    public static string CategoryID = @"CategoryID";
    public static string QuantityPerUnit = @"QuantityPerUnit";
    public static string UnitPrice = @"UnitPrice";
    public static string UnitsInStock = @"UnitsInStock";
    public static string UnitsOnOrder = @"UnitsOnOrder";
    public static string ReorderLevel = @"ReorderLevel";
    public static string Discontinued = @"Discontinued";
    public static string AttributeXML = @"AttributeXML";
    public static string DateCreated = @"DateCreated";
    public static string ProductGUID = @"ProductGUID";
    public static string CreatedOn = @"CreatedOn";
    public static string CreatedBy = @"CreatedBy";
    public static string ModifiedOn = @"ModifiedOn";
    public static string ModifiedBy = @"ModifiedBy";
    public static string Deleted = @"Deleted";
    public static string CategoryName = @"CategoryName";
}
#endregion

#region IAbstractRecord Members
// Re-exposes the protected base accessors so the record can be read generically
// through the IAbstractRecord interface.
public new CT GetColumnValue<CT>(string columnName) { return base.GetColumnValue<CT>(columnName); }
public object GetColumnValue(string columnName) { return base.GetColumnValue<object>(columnName); }
#endregion
} }
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Description;
using sep02v1.Areas.HelpPage.Models;

namespace sep02v1.Areas.HelpPage
{
    /// <summary>
    /// Extension methods over <see cref="HttpConfiguration"/> that let the help page
    /// register documentation providers, canned sample requests/responses, and
    /// cached per-API models.
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Prefix for the keys under which generated HelpPageApiModel instances are
        // cached in config.Properties (one entry per API description).
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Registers the documentation provider the help page should use.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Registers the objects the formatters use to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            HelpPageSampleGenerator generator = config.GetHelpPageSampleGenerator();
            generator.SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Registers a sample request for the given media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" is the wildcard parameter list: the sample applies regardless of parameters.
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Registers a sample request for the given media type and action, restricted
        /// to a specific parameter list.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Registers a sample response for the given media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Registers a sample response for the given media type and action, restricted
        /// to a specific parameter list.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Registers a sample used for all actions whose parameter or return type
        /// matches <paramref name="type"/> under the given media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, type);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Declares the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the
        /// <see cref="System.Net.Http.HttpRequestMessage"/> in an action, so request samples are more accurate.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Declares the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the
        /// <see cref="System.Net.Http.HttpRequestMessage"/> in an action, for a specific parameter list.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Declares the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the
        /// <see cref="System.Net.Http.HttpRequestMessage"/> in an action, so response samples are more accurate.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Declares the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the
        /// <see cref="System.Net.Http.HttpRequestMessage"/> in an action, for a specific parameter list.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Gets the help page sample generator, creating and caching one on first use.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                key => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Replaces the cached help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                key => sampleGenerator,
                (key, existing) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is
        /// built on the first call and cached in config.Properties for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            string cacheKey = ApiModelPrefix + apiDescriptionId;

            object cached;
            if (config.Properties.TryGetValue(cacheKey, out cached))
            {
                return (HelpPageApiModel)cached;
            }

            Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
            ApiDescription match = apiDescriptions.FirstOrDefault(
                api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
            if (match == null)
            {
                // No such API: nothing is cached and null is returned, exactly as before.
                return null;
            }

            object model = GenerateApiModel(match, config.GetHelpPageSampleGenerator());
            config.Properties.TryAdd(cacheKey, model);
            return (HelpPageApiModel)model;
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel
            {
                ApiDescription = apiDescription
            };

            try
            {
                foreach (var entry in sampleGenerator.GetSampleRequests(apiDescription))
                {
                    apiModel.SampleRequests.Add(entry.Key, entry.Value);
                    LogInvalidSampleAsError(apiModel, entry.Value);
                }

                foreach (var entry in sampleGenerator.GetSampleResponses(apiDescription))
                {
                    apiModel.SampleResponses.Add(entry.Key, entry.Value);
                    LogInvalidSampleAsError(apiModel, entry.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception Message: {0}", e.Message));
            }

            return apiModel;
        }

        // Records the error message of an InvalidSample on the model; other samples are ignored.
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalid = sample as InvalidSample;
            if (invalid == null)
            {
                return;
            }

            apiModel.ErrorMessages.Add(invalid.ErrorMessage);
        }
    }
}
namespace Microsoft.Protocols.TestSuites.SharedAdapter
{
    using Microsoft.Protocols.TestSuites.Common;
    using System;
    using System.Collections.Generic;

    /// <summary>
    /// This class specifies the base class for 16-bit or 32-bit stream object header start.
    /// </summary>
    public abstract class StreamObjectHeaderStart : BasicObject
    {
        /// <summary>
        /// Specify for 16-bit stream object header start.
        /// </summary>
        public const int StreamObjectHeaderStart16bit = 0x0;

        /// <summary>
        /// Specify for 32-bit stream object header start.
        /// </summary>
        public const int StreamObjectHeaderStart32bit = 0x02;

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart class.
        /// </summary>
        protected StreamObjectHeaderStart()
        {
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart class with specified header type.
        /// </summary>
        /// <param name="streamObjectTypeHeaderStart">Specify the value of the StreamObjectHeaderStart Type.</param>
        protected StreamObjectHeaderStart(StreamObjectTypeHeaderStart streamObjectTypeHeaderStart)
        {
            this.Type = streamObjectTypeHeaderStart;
        }

        /// <summary>
        /// Gets or sets the type of the stream object.
        /// value 0 for 16-bit stream object header start,
        /// value 2 for 32-bit stream object header start.
        /// </summary>
        public int HeaderType { get; set; }

        /// <summary>
        /// Gets or sets a value that specifies if set a compound parse type is needed and
        /// MUST be ended with either an 8-bit stream object header end or a 16-bit stream object header end.
        /// If the bit is zero, it specifies a single object. Otherwise it specifies a compound object.
        /// </summary>
        public int Compound { get; set; }

        /// <summary>
        /// Gets or sets a value that specifies the stream object type.
        /// </summary>
        public StreamObjectTypeHeaderStart Type { get; set; }

        /// <summary>
        /// Gets or sets a 15-bit unsigned integer that specifies the length in bytes for additional data (if any).
        /// </summary>
        public int Length { get; set; }

        /// <summary>
        /// This method is used to parse the actual 16bit or 32bit stream header.
        /// </summary>
        /// <param name="byteArray">Specify the Byte array.</param>
        /// <param name="startIndex">Specify the start position.</param>
        /// <param name="streamObjectHeader">Specify the out value for the parse result; null on failure.</param>
        /// <returns>Return the number of bytes consumed by the header on success, otherwise returns 0.</returns>
        public static int TryParse(byte[] byteArray, int startIndex, out StreamObjectHeaderStart streamObjectHeader)
        {
            // The low 2 bits of the first byte select the header flavor (0 => 16-bit, 2 => 32-bit).
            uint headerType = (uint)(byteArray[startIndex] & 0x03);
            if (headerType == StreamObjectHeaderStart.StreamObjectHeaderStart16bit)
            {
                streamObjectHeader = new StreamObjectHeaderStart16bit();
            }
            else if (headerType == StreamObjectHeaderStart.StreamObjectHeaderStart32bit)
            {
                streamObjectHeader = new StreamObjectHeaderStart32bit();
            }
            else
            {
                streamObjectHeader = null;
                return 0;
            }

            try
            {
                return streamObjectHeader.DeserializeFromByteArray(byteArray, startIndex);
            }
            catch (InvalidOperationException)
            {
                // Malformed header content: report failure rather than propagating.
                streamObjectHeader = null;
                return 0;
            }
        }
    }

    /// <summary>
    /// An 16-bit header for a compound object would indicate the start of a stream object
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.StyleCop.CSharp.MaintainabilityRules", "SA1402:FileMayOnlyContainASingleClass", Justification = "Easy to maintain one group of classes in one .cs file.")]
    public class StreamObjectHeaderStart16bit : StreamObjectHeaderStart
    {
        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart16bit class with specified type and length.
        /// </summary>
        /// <param name="type">Specify the type of the StreamObjectHeaderStart16bit.</param>
        /// <param name="length">Specify the length of the StreamObjectHeaderStart16bit.</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when the length does not fit in 7 bits.</exception>
        public StreamObjectHeaderStart16bit(StreamObjectTypeHeaderStart type, int length)
        {
            // BUG FIX: the guard previously tested this.Length (still 0 at this point)
            // instead of the incoming parameter, so oversized lengths were never rejected.
            if (length > 127)
            {
                throw new ArgumentOutOfRangeException("length", "16-bit Stream Object Header Start, Length (7-bits): A 7-bit unsigned integer that specifies the length in bytes for additional data (if any). If the length is more than 127 bytes, a 32-bit stream object header start MUST be used.");
            }

            this.HeaderType = StreamObjectHeaderStart.StreamObjectHeaderStart16bit;
            this.Type = type;
            // Compound bit is fixed by the type: 1 only for types that require an end header.
            this.Compound = StreamObject.CompoundTypes.Contains(this.Type) ? 1 : 0;
            this.Length = length;
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart16bit class with specified type.
        /// </summary>
        /// <param name="type">Specify the type of the StreamObjectHeaderStart16bit.</param>
        public StreamObjectHeaderStart16bit(StreamObjectTypeHeaderStart type)
            : this(type, 0)
        {
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart16bit class, this is the default constructor.
        /// </summary>
        public StreamObjectHeaderStart16bit()
        {
        }

        /// <summary>
        /// This method is used to convert the element of StreamObjectHeaderStart16bit basic object into a byte List.
        /// </summary>
        /// <returns>Return the byte list which store the byte information of StreamObjectHeaderStart16bit.</returns>
        public override List<byte> SerializeToByteList()
        {
            // Bit layout (LSB first): header type (2) | compound (1) | type (6) | length (7) = 16 bits.
            BitWriter bitField = new BitWriter(2);
            bitField.AppendInit32(this.HeaderType, 2);
            bitField.AppendInit32(this.Compound, 1);
            bitField.AppendUInit32(Convert.ToUInt32(this.Type), 6);
            bitField.AppendInit32(this.Length, 7);

            return new List<byte>(bitField.Bytes);
        }

        /// <summary>
        /// This method is used to get the Uint16 value of the 16bit stream object header.
        /// </summary>
        /// <returns>Return the ushort value.</returns>
        public ushort ToUint16()
        {
            List<byte> bytes = this.SerializeToByteList();
            return LittleEndianBitConverter.ToUInt16(bytes.ToArray(), 0);
        }

        /// <summary>
        /// This method is used to deserialize the StreamObjectHeaderStart16bit basic object from the specified byte array and start index.
        /// </summary>
        /// <param name="byteArray">Specify the byte array.</param>
        /// <param name="startIndex">Specify the start index from the byte array.</param>
        /// <returns>Return the length in byte of the StreamObjectHeaderStart16bit basic object.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the bytes do not form a valid 16-bit start header.</exception>
        protected override int DoDeserializeFromByteArray(byte[] byteArray, int startIndex)
        {
            using (BitReader bitReader = new BitReader(byteArray, startIndex))
            {
                this.HeaderType = bitReader.ReadInt32(2);
                if (this.HeaderType != StreamObjectHeaderStart.StreamObjectHeaderStart16bit)
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderStart16bit header type value, expect value {0}, but actual value is {1}", StreamObjectHeaderStart.StreamObjectHeaderStart16bit, this.HeaderType));
                }

                this.Compound = bitReader.ReadInt32(1);

                int typeValue = bitReader.ReadInt32(6);
                if (!Enum.IsDefined(typeof(StreamObjectTypeHeaderStart), typeValue))
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderStart16bit type value, the value {0} is not defined", typeValue));
                }

                this.Type = (StreamObjectTypeHeaderStart)typeValue;
                if (StreamObject.CompoundTypes.Contains(this.Type) && this.Compound != 1)
                {
                    throw new InvalidOperationException(string.Format("Failed to parse the StreamObjectHeaderStart16bit header. If the type value is {0} then the compound value should 1, but actual value is 0", typeValue));
                }

                this.Length = bitReader.ReadInt32(7);
                if (this.Length > 127)
                {
                    throw new InvalidOperationException("16-bit Stream Object Header Start, Length (7-bits): A 7-bit unsigned integer that specifies the length in bytes for additional data (if any). If the length is more than 127 bytes, a 32-bit stream object header start MUST be used.");
                }

                // A 16-bit start header always occupies exactly 2 bytes.
                return 2;
            }
        }
    }

    /// <summary>
    /// An 32-bit header for a compound object would indicate the start of a stream object
    /// </summary>
    public class StreamObjectHeaderStart32bit : StreamObjectHeaderStart
    {
        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart32bit class with specified type and length.
        /// </summary>
        /// <param name="type">Specify the type of the StreamObjectHeaderStart32bit.</param>
        /// <param name="length">Specify the length of the StreamObjectHeaderStart32bit.</param>
        public StreamObjectHeaderStart32bit(StreamObjectTypeHeaderStart type, int length)
        {
            this.HeaderType = StreamObjectHeaderStart.StreamObjectHeaderStart32bit;
            this.Type = type;
            this.Compound = StreamObject.CompoundTypes.Contains(this.Type) ? 1 : 0;

            // Lengths of 32767 or more overflow the 15-bit Length field and spill into
            // the optional LargeLength compact uint64.
            if (length >= 32767)
            {
                this.Length = 32767;
                this.LargeLength = new Compact64bitInt((ulong)length);
            }
            else
            {
                this.Length = length;
                this.LargeLength = null;
            }
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart32bit class, this is the default constructor.
        /// </summary>
        public StreamObjectHeaderStart32bit()
        {
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderStart32bit class with specified type.
        /// </summary>
        /// <param name="streamObjectTypeHeaderStart">Specify the type of the StreamObjectHeaderStart32bit.</param>
        public StreamObjectHeaderStart32bit(StreamObjectTypeHeaderStart streamObjectTypeHeaderStart)
        {
            // NOTE(review): unlike the (type, length) overload, this constructor leaves
            // HeaderType at its default 0 — preserved as-is; confirm callers set it before serializing.
            this.Type = streamObjectTypeHeaderStart;
        }

        /// <summary>
        /// Gets or sets an optional compact uint64 that specifies the length in bytes for additional data (if any).
        /// This field MUST be specified if the Length field contains 32767 and MUST NOT be specified if the Length field
        /// contains any other value than 32767.
        /// </summary>
        public Compact64bitInt LargeLength { get; set; }

        /// <summary>
        /// This method is used to convert the element of StreamObjectHeaderStart32bit basic object into a byte List.
        /// </summary>
        /// <returns>Return the byte list which store the byte information of StreamObjectHeaderStart32bit.</returns>
        public override List<byte> SerializeToByteList()
        {
            // Bit layout (LSB first): header type (2) | compound (1) | type (14) | length (15) = 32 bits,
            // optionally followed by the LargeLength compact uint64.
            BitWriter bitFieldWriter = new BitWriter(4);
            bitFieldWriter.AppendInit32(this.HeaderType, 2);
            bitFieldWriter.AppendInit32(this.Compound, 1);
            bitFieldWriter.AppendUInit32(Convert.ToUInt32(this.Type), 14);
            bitFieldWriter.AppendInit32(this.Length, 15);

            List<byte> listByte = new List<byte>(bitFieldWriter.Bytes);
            if (this.LargeLength != null)
            {
                listByte.AddRange(this.LargeLength.SerializeToByteList().ToArray());
            }

            return listByte;
        }

        /// <summary>
        /// This method is used to deserialize the StreamObjectHeaderStart32bit basic object from the specified byte array and start index.
        /// </summary>
        /// <param name="byteArray">Specify the byte array.</param>
        /// <param name="startIndex">Specify the start index from the byte array.</param>
        /// <returns>Return the length in byte of the StreamObjectHeaderStart32bit basic object.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the bytes do not form a valid 32-bit start header.</exception>
        protected override int DoDeserializeFromByteArray(byte[] byteArray, int startIndex)
        {
            using (BitReader bitReader = new BitReader(byteArray, startIndex))
            {
                this.HeaderType = bitReader.ReadInt32(2);
                if (this.HeaderType != StreamObjectHeaderStart.StreamObjectHeaderStart32bit)
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderStart32bit header type value, expect value {0}, but actual value is {1}", StreamObjectHeaderStart.StreamObjectHeaderStart32bit, this.HeaderType));
                }

                this.Compound = bitReader.ReadInt32(1);

                int typeValue = bitReader.ReadInt32(14);
                if (!Enum.IsDefined(typeof(StreamObjectTypeHeaderStart), typeValue))
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderStart32bit type value, the value {0} is not defined", typeValue));
                }

                this.Type = (StreamObjectTypeHeaderStart)typeValue;
                if (StreamObject.CompoundTypes.Contains(this.Type) && this.Compound != 1)
                {
                    throw new InvalidOperationException(string.Format("Failed to parse the StreamObjectHeaderStart32bit header. If the type value is {0} then the compound value should 1, but actual value is 0", typeValue));
                }

                this.Length = bitReader.ReadInt32(15);

                // Fixed part is 4 bytes; a sentinel Length of 32767 means the real length
                // follows as a compact uint64.
                int index = startIndex;
                index += 4;
                if (this.Length == 32767)
                {
                    this.LargeLength = BasicObject.Parse<Compact64bitInt>(byteArray, ref index);
                }

                return index - startIndex;
            }
        }
    }

    /// <summary>
    /// This class specifies the base class for 8-bit or 16-bit stream object header end.
    /// </summary>
    public abstract class StreamObjectHeaderEnd : BasicObject
    {
        /// <summary>
        /// Gets or sets the type of the stream object.
        /// value 1 for 8-bit stream object header start,
        /// value 3 for 16-bit stream object header start.
        /// </summary>
        public StreamObjectTypeHeaderEnd Type { get; set; }
    }

    /// <summary>
    /// An 8-bit header for a compound object would indicate the end of a stream object
    /// </summary>
    public class StreamObjectHeaderEnd8bit : StreamObjectHeaderEnd
    {
        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderEnd8bit class with the specified type value.
        /// </summary>
        /// <param name="type">Specify the integer value of the type.</param>
        /// <exception cref="InvalidOperationException">Thrown when the type value is not a defined StreamObjectTypeHeaderEnd.</exception>
        public StreamObjectHeaderEnd8bit(int type)
        {
            if (!Enum.IsDefined(typeof(StreamObjectTypeHeaderEnd), type))
            {
                throw new InvalidOperationException(string.Format("The type value {0} is not defined for the stream object end 8 bit header", type));
            }

            this.Type = (StreamObjectTypeHeaderEnd)type;
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderEnd8bit class, this is the default constructor.
        /// </summary>
        public StreamObjectHeaderEnd8bit()
        {
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderEnd8bit class with the specified type value.
        /// </summary>
        /// <param name="type">Specify the value of the type.</param>
        public StreamObjectHeaderEnd8bit(StreamObjectTypeHeaderEnd type)
            : this((int)type)
        {
        }

        /// <summary>
        /// This method is used to convert the element of StreamObjectHeaderEnd8bit basic object into a byte List.
        /// </summary>
        /// <returns>Return the byte list which store the byte information of StreamObjectHeaderEnd8bit.</returns>
        public override List<byte> SerializeToByteList()
        {
            // Bit layout (LSB first): header type 0x1 (2) | type (6) = 8 bits.
            BitWriter bitFieldWriter = new BitWriter(1);
            bitFieldWriter.AppendInit32(0x1, 2);
            bitFieldWriter.AppendUInit32(Convert.ToUInt32(this.Type), 6);
            return new List<byte>(bitFieldWriter.Bytes);
        }

        /// <summary>
        /// This method is used to get the byte value of the 8bit stream object header End.
        /// </summary>
        /// <returns>Return StreamObjectHeaderEnd8bit value represented by byte.</returns>
        /// <exception cref="InvalidOperationException">Thrown when serialization does not produce exactly one byte.</exception>
        public byte ToByte()
        {
            List<byte> bytes = this.SerializeToByteList();
            if (bytes.Count != 1)
            {
                throw new InvalidOperationException("The unexpected StreamObjectHeaderEnd8bit length");
            }

            return bytes[0];
        }

        /// <summary>
        /// This method is used to deserialize the StreamObjectHeaderEnd8bit basic object from the specified byte array and start index.
        /// </summary>
        /// <param name="byteArray">Specify the byte array.</param>
        /// <param name="startIndex">Specify the start index from the byte array.</param>
        /// <returns>Return the length in byte of the StreamObjectHeaderEnd8bit basic object.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the bytes do not form a valid 8-bit end header.</exception>
        protected override int DoDeserializeFromByteArray(byte[] byteArray, int startIndex)
        {
            using (BitReader reader = new BitReader(byteArray, startIndex))
            {
                int headerType = reader.ReadInt32(2);
                if (headerType != 0x1)
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderEnd8bit header type value, expect value {0}, but actual value is {1}", 0x1, headerType));
                }

                uint typeValue = reader.ReadUInt32(6);
                if (!Enum.IsDefined(typeof(StreamObjectTypeHeaderEnd), (int)typeValue))
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderEnd8bit type value, the value {0} is not defined", typeValue));
                }

                this.Type = (StreamObjectTypeHeaderEnd)typeValue;

                // An 8-bit end header always occupies exactly 1 byte.
                return 1;
            }
        }
    }

    /// <summary>
    /// An 16-bit header for a compound object would indicate the end of a stream object
    /// </summary>
    public class StreamObjectHeaderEnd16bit : StreamObjectHeaderEnd
    {
        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderEnd16bit class with the specified type value.
        /// </summary>
        /// <param name="type">Specify the integer value of the type.</param>
        /// <exception cref="InvalidOperationException">Thrown when the type value is not a defined StreamObjectTypeHeaderEnd.</exception>
        public StreamObjectHeaderEnd16bit(int type)
        {
            if (!Enum.IsDefined(typeof(StreamObjectTypeHeaderEnd), type))
            {
                throw new InvalidOperationException(string.Format("The type value {0} is not defined for the stream object end 16-bit header", type));
            }

            this.Type = (StreamObjectTypeHeaderEnd)type;
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderEnd16bit class with the specified type value.
        /// </summary>
        /// <param name="headerType">Specify the value of the type.</param>
        public StreamObjectHeaderEnd16bit(StreamObjectTypeHeaderEnd headerType)
            : this((int)headerType)
        {
        }

        /// <summary>
        /// Initializes a new instance of the StreamObjectHeaderEnd16bit class, this is the default constructor.
        /// </summary>
        public StreamObjectHeaderEnd16bit()
        {
        }

        /// <summary>
        /// This method is used to convert the element of StreamObjectHeaderEnd16bit basic object into a byte List.
        /// </summary>
        /// <returns>Return the byte list which store the byte information of StreamObjectHeaderEnd16bit.</returns>
        public override List<byte> SerializeToByteList()
        {
            // Bit layout (LSB first): header type 0x3 (2) | type (14) = 16 bits.
            BitWriter bitFieldWriter = new BitWriter(2);
            bitFieldWriter.AppendInit32(0x3, 2);
            bitFieldWriter.AppendUInit32(Convert.ToUInt32(this.Type), 14);
            return new List<byte>(bitFieldWriter.Bytes);
        }

        /// <summary>
        /// This method is used to get the byte value of the 16-bit stream object header End.
        /// </summary>
        /// <returns>Return StreamObjectHeaderEnd8bit value represented by unsigned short integer.</returns>
        public ushort ToUint16()
        {
            List<byte> bytes = this.SerializeToByteList();
            return LittleEndianBitConverter.ToUInt16(bytes.ToArray(), 0);
        }

        /// <summary>
        /// This method is used to deserialize the StreamObjectHeaderEnd16bit basic object from the specified byte array and start index.
        /// </summary>
        /// <param name="byteArray">Specify the byte array.</param>
        /// <param name="startIndex">Specify the start index from the byte array.</param>
        /// <returns>Return the length in byte of the StreamObjectHeaderEnd16bit basic object.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the bytes do not form a valid 16-bit end header.</exception>
        protected override int DoDeserializeFromByteArray(byte[] byteArray, int startIndex)
        {
            using (BitReader reader = new BitReader(byteArray, startIndex))
            {
                int headerType = reader.ReadInt32(2);
                if (headerType != 0x3)
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderEnd16bit header type value, expect value {0}, but actual value is {1}", 0x3, headerType));
                }

                uint typeValue = reader.ReadUInt32(14);
                if (!Enum.IsDefined(typeof(StreamObjectTypeHeaderEnd), (int)typeValue))
                {
                    throw new InvalidOperationException(string.Format("Failed to get the StreamObjectHeaderEnd16bit type value, the value {0} is not defined", typeValue));
                }

                this.Type = (StreamObjectTypeHeaderEnd)typeValue;

                // A 16-bit end header always occupies exactly 2 bytes.
                return 2;
            }
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Util;

namespace QuantConnect.Data.Market
{
    /// <summary>
    /// Represents an entire chain of option contracts for a single underlying security.
    /// This type is <see cref="IEnumerable{OptionContract}"/>
    /// </summary>
    public class OptionChain : BaseData, IEnumerable<OptionContract>
    {
        // Auxiliary data (dividends, splits, ...) bucketed first by data type, then by symbol.
        private readonly Dictionary<Type, Dictionary<Symbol, List<BaseData>>> _auxiliaryData = new Dictionary<Type, Dictionary<Symbol, List<BaseData>>>();

        /// <summary>
        /// Gets the most recent trade information for the underlying. This may
        /// be a <see cref="Tick"/> or a <see cref="TradeBar"/>
        /// </summary>
        public BaseData Underlying
        {
            get; internal set;
        }

        /// <summary>
        /// Gets all ticks for every option contract in this chain, keyed by option symbol
        /// </summary>
        public Ticks Ticks
        {
            get; private set;
        }

        /// <summary>
        /// Gets all trade bars for every option contract in this chain, keyed by option symbol
        /// </summary>
        public TradeBars TradeBars
        {
            get; private set;
        }

        /// <summary>
        /// Gets all quote bars for every option contract in this chain, keyed by option symbol
        /// </summary>
        public QuoteBars QuoteBars
        {
            get; private set;
        }

        /// <summary>
        /// Gets all contracts in the chain, keyed by option symbol
        /// </summary>
        public OptionContracts Contracts
        {
            get; private set;
        }

        /// <summary>
        /// Gets the set of symbols that passed the <see cref="Option.ContractFilter"/>
        /// </summary>
        public HashSet<Symbol> FilteredContracts
        {
            get; private set;
        }

        /// <summary>
        /// Initializes a new default instance of the <see cref="OptionChain"/> class
        /// </summary>
        private OptionChain()
        {
            DataType = MarketDataType.OptionChain;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="OptionChain"/> class
        /// </summary>
        /// <param name="canonicalOptionSymbol">The symbol for this chain.</param>
        /// <param name="time">The time of this chain</param>
        public OptionChain(Symbol canonicalOptionSymbol, DateTime time)
        {
            Time = time;
            Symbol = canonicalOptionSymbol;
            DataType = MarketDataType.OptionChain;
            Ticks = new Ticks(time);
            TradeBars = new TradeBars(time);
            QuoteBars = new QuoteBars(time);
            Contracts = new OptionContracts(time);
            FilteredContracts = new HashSet<Symbol>();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="OptionChain"/> class
        /// </summary>
        /// <param name="canonicalOptionSymbol">The symbol for this chain.</param>
        /// <param name="time">The time of this chain</param>
        /// <param name="underlying">The most recent underlying trade data</param>
        /// <param name="trades">All trade data for the entire option chain</param>
        /// <param name="quotes">All quote data for the entire option chain</param>
        /// <param name="contracts">All contracts for this option chain</param>
        /// <param name="filteredContracts">The contract symbols that passed the contract filter</param>
        public OptionChain(Symbol canonicalOptionSymbol, DateTime time, BaseData underlying, IEnumerable<BaseData> trades, IEnumerable<BaseData> quotes, IEnumerable<OptionContract> contracts, IEnumerable<Symbol> filteredContracts)
        {
            Time = time;
            Underlying = underlying;
            Symbol = canonicalOptionSymbol;
            DataType = MarketDataType.OptionChain;
            FilteredContracts = filteredContracts.ToHashSet();

            Ticks = new Ticks(time);
            TradeBars = new TradeBars(time);
            QuoteBars = new QuoteBars(time);
            Contracts = new OptionContracts(time);

            // Trade data is either ticks (accumulated per symbol) or trade bars (last one wins).
            foreach (var trade in trades)
            {
                var tradeTick = trade as Tick;
                if (tradeTick != null)
                {
                    StoreTick(tradeTick);
                }
                else
                {
                    var tradeBar = trade as TradeBar;
                    if (tradeBar != null)
                    {
                        TradeBars[trade.Symbol] = tradeBar;
                    }
                }
            }

            // Quote data is either ticks (accumulated per symbol) or quote bars (last one wins).
            foreach (var quote in quotes)
            {
                var quoteTick = quote as Tick;
                if (quoteTick != null)
                {
                    StoreTick(quoteTick);
                }
                else
                {
                    var quoteBar = quote as QuoteBar;
                    if (quoteBar != null)
                    {
                        QuoteBars[quote.Symbol] = quoteBar;
                    }
                }
            }

            foreach (var contract in contracts)
            {
                Contracts[contract.Symbol] = contract;
            }
        }

        /// <summary>
        /// Appends the tick to this chain's <see cref="Ticks"/> collection under its symbol
        /// </summary>
        /// <param name="tick">The tick to store</param>
        private void StoreTick(Tick tick)
        {
            List<Tick> tickList;
            if (!Ticks.TryGetValue(tick.Symbol, out tickList))
            {
                tickList = new List<Tick>();
                Ticks[tick.Symbol] = tickList;
            }
            tickList.Add(tick);
        }

        /// <summary>
        /// Gets the auxiliary data with the specified type and symbol
        /// </summary>
        /// <typeparam name="T">The type of auxiliary data</typeparam>
        /// <param name="symbol">The symbol of the auxiliary data</param>
        /// <returns>The last auxiliary data with the specified type and symbol</returns>
        public T GetAux<T>(Symbol symbol)
        {
            Dictionary<Symbol, List<BaseData>> bySymbol;
            List<BaseData> entries;
            if (!_auxiliaryData.TryGetValue(typeof(T), out bySymbol) || !bySymbol.TryGetValue(symbol, out entries))
            {
                // Nothing recorded for this type/symbol combination
                return default(T);
            }
            return entries.OfType<T>().LastOrDefault();
        }

        /// <summary>
        /// Gets all auxiliary data of the specified type as a dictionary keyed by symbol
        /// </summary>
        /// <typeparam name="T">The type of auxiliary data</typeparam>
        /// <returns>A dictionary containing all auxiliary data of the specified type</returns>
        public DataDictionary<T> GetAux<T>()
        {
            var result = new DataDictionary<T>();

            Dictionary<Symbol, List<BaseData>> bySymbol;
            if (_auxiliaryData.TryGetValue(typeof(T), out bySymbol))
            {
                foreach (var pair in bySymbol)
                {
                    // Only the most recent entry of the requested type is exposed per symbol
                    var latest = pair.Value.OfType<T>().LastOrDefault();
                    if (latest != null)
                    {
                        result.Add(pair.Key, latest);
                    }
                }
            }

            return result;
        }

        /// <summary>
        /// Gets all auxiliary data of the specified type as a dictionary keyed by symbol
        /// </summary>
        /// <typeparam name="T">The type of auxiliary data</typeparam>
        /// <returns>A dictionary containing all auxiliary data of the specified type</returns>
        public Dictionary<Symbol, List<BaseData>> GetAuxList<T>()
        {
            Dictionary<Symbol, List<BaseData>> bySymbol;
            if (_auxiliaryData.TryGetValue(typeof(T), out bySymbol))
            {
                return bySymbol;
            }
            return new Dictionary<Symbol, List<BaseData>>();
        }

        /// <summary>
        /// Gets a list of auxiliary data with the specified type and symbol
        /// </summary>
        /// <typeparam name="T">The type of auxiliary data</typeparam>
        /// <param name="symbol">The symbol of the auxiliary data</param>
        /// <returns>The list of auxiliary data with the specified type and symbol</returns>
        public List<T> GetAuxList<T>(Symbol symbol)
        {
            Dictionary<Symbol, List<BaseData>> bySymbol;
            List<BaseData> entries;
            if (!_auxiliaryData.TryGetValue(typeof(T), out bySymbol) || !bySymbol.TryGetValue(symbol, out entries))
            {
                return new List<T>();
            }
            return entries.OfType<T>().ToList();
        }

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>
        /// An enumerator that can be used to iterate through the collection.
        /// </returns>
        public IEnumerator<OptionContract> GetEnumerator()
        {
            return Contracts.Values.GetEnumerator();
        }

        /// <summary>
        /// Returns an enumerator that iterates through a collection.
        /// </summary>
        /// <returns>
        /// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection.
        /// </returns>
        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        /// <summary>
        /// Return a new instance clone of this object, used in fill forward
        /// </summary>
        /// <returns>A clone of the current object</returns>
        public override BaseData Clone()
        {
            // NOTE(review): this is a shallow clone — the collection instances are shared
            // with the original, matching the original implementation's behavior.
            return new OptionChain
            {
                Symbol = Symbol,
                Time = Time,
                Value = Value,
                DataType = DataType,
                Underlying = Underlying,
                Ticks = Ticks,
                TradeBars = TradeBars,
                QuoteBars = QuoteBars,
                Contracts = Contracts,
                FilteredContracts = FilteredContracts
            };
        }

        /// <summary>
        /// Adds the specified auxiliary data to this option chain
        /// </summary>
        /// <param name="baseData">The auxiliary data to be added</param>
        internal void AddAuxData(BaseData baseData)
        {
            var dataType = baseData.GetType();

            Dictionary<Symbol, List<BaseData>> bySymbol;
            if (!_auxiliaryData.TryGetValue(dataType, out bySymbol))
            {
                bySymbol = new Dictionary<Symbol, List<BaseData>>();
                _auxiliaryData[dataType] = bySymbol;
            }

            List<BaseData> entries;
            if (!bySymbol.TryGetValue(baseData.Symbol, out entries))
            {
                entries = new List<BaseData>();
                bySymbol[baseData.Symbol] = entries;
            }

            entries.Add(baseData);
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
//
// Idea got from and adapted to work in avalonia
// http://silverlight.codeplex.com/SourceControl/changeset/view/74775#Release/Silverlight4/Source/Controls.Layout.Toolkit/LayoutTransformer/LayoutTransformer.cs
//
using System;
using System.Diagnostics.CodeAnalysis;
using System.Reactive.Linq;
using Avalonia.Media;

namespace Avalonia.Controls
{
    /// <summary>
    /// Control that implements support for transformations as if applied by LayoutTransform.
    /// Unlike a RenderTransform, the transform participates in measure/arrange so layout
    /// allocates the correct amount of space for the transformed child.
    /// </summary>
    public class LayoutTransformControl : Decorator
    {
        /// <summary>
        /// Defines the <see cref="LayoutTransform"/> property.
        /// </summary>
        public static readonly AvaloniaProperty<Transform> LayoutTransformProperty =
            AvaloniaProperty.Register<LayoutTransformControl, Transform>(nameof(LayoutTransform));

        // Clips by default (transformed content can spill outside the layout slot) and
        // re-applies the transform whenever the transform or the child changes.
        static LayoutTransformControl()
        {
            ClipToBoundsProperty.OverrideDefaultValue<LayoutTransformControl>(true);
            LayoutTransformProperty.Changed
                .AddClassHandler<LayoutTransformControl>(x => x.OnLayoutTransformChanged);
            ChildProperty.Changed
                .AddClassHandler<LayoutTransformControl>(x => x.OnChildChanged);
        }

        /// <summary>
        /// Gets or sets a graphics transformation that should apply to this element when layout is performed.
        /// </summary>
        public Transform LayoutTransform
        {
            get { return GetValue(LayoutTransformProperty); }
            set { SetValue(LayoutTransformProperty, value); }
        }

        /// <summary>
        /// Gets the element whose layout is transformed (the decorated child).
        /// </summary>
        public IControl TransformRoot => Child;

        /// <summary>
        /// Provides the behavior for the "Arrange" pass of layout.
        /// </summary>
        /// <param name="finalSize">The final area within the parent that this element should use to arrange itself and its children.</param>
        /// <returns>The actual size used.</returns>
        protected override Size ArrangeOverride(Size finalSize)
        {
            if (TransformRoot == null || LayoutTransform == null)
            {
                return base.ArrangeOverride(finalSize);
            }

            // Determine the largest available size after the transformation
            Size finalSizeTransformed = ComputeLargestTransformedSize(finalSize);
            if (IsSizeSmaller(finalSizeTransformed, TransformRoot.DesiredSize))
            {
                // Some elements do not like being given less space than they asked for (ex: TextBlock)
                // Bump the working size up to do the right thing by them
                finalSizeTransformed = TransformRoot.DesiredSize;
            }

            // Transform the working size to find its width/height
            Rect transformedRect = new Rect(0, 0, finalSizeTransformed.Width, finalSizeTransformed.Height).TransformToAABB(_transformation);

            // Create the Arrange rect to center the transformed content
            Rect finalRect = new Rect(
                -transformedRect.X + ((finalSize.Width - transformedRect.Width) / 2),
                -transformedRect.Y + ((finalSize.Height - transformedRect.Height) / 2),
                finalSizeTransformed.Width,
                finalSizeTransformed.Height);

            // Perform an Arrange on TransformRoot (containing Child)
            Size arrangedsize;
            TransformRoot.Arrange(finalRect);
            arrangedsize = TransformRoot.Bounds.Size;

            // This is the first opportunity under Silverlight to find out the Child's true DesiredSize
            if (IsSizeSmaller(finalSizeTransformed, arrangedsize) && (Size.Empty == _childActualSize))
            {
                // NOTE(review): this re-measure workaround (from the original Silverlight port)
                // is intentionally disabled; the branch is deliberately empty.
                //// Unfortunately, all the work so far is invalid because the wrong DesiredSize was used
                //// Make a note of the actual DesiredSize
                //_childActualSize = arrangedsize;
                //// Force a new measure/arrange pass
                //InvalidateMeasure();
            }
            else
            {
                // Clear the "need to measure/arrange again" flag
                _childActualSize = Size.Empty;
            }

            // Return result to perform the transformation
            return finalSize;
        }

        /// <summary>
        /// Provides the behavior for the "Measure" pass of layout.
        /// </summary>
        /// <param name="availableSize">The available size that this element can give to child elements.</param>
        /// <returns>The size that this element determines it needs during layout, based on its calculations of child element sizes.</returns>
        protected override Size MeasureOverride(Size availableSize)
        {
            if (TransformRoot == null || LayoutTransform == null)
            {
                return base.MeasureOverride(availableSize);
            }

            Size measureSize;
            if (_childActualSize == Size.Empty)
            {
                // Determine the largest size after the transformation
                measureSize = ComputeLargestTransformedSize(availableSize);
            }
            else
            {
                // Previous measure/arrange pass determined that Child.DesiredSize was larger than believed
                measureSize = _childActualSize;
            }

            // Perform a measure on the TransformRoot (containing Child)
            TransformRoot.Measure(measureSize);
            var desiredSize = TransformRoot.DesiredSize;

            // Transform DesiredSize to find its width/height
            Rect transformedDesiredRect = new Rect(0, 0, desiredSize.Width, desiredSize.Height).TransformToAABB(_transformation);
            Size transformedDesiredSize = new Size(transformedDesiredRect.Width, transformedDesiredRect.Height);

            // Return result to allocate enough space for the transformation
            return transformedDesiredSize;
        }

        // Re-wires the RenderTransform onto the new child and re-applies the layout transform.
        private void OnChildChanged(AvaloniaPropertyChangedEventArgs e)
        {
            if (null != TransformRoot)
            {
                // The actual transformation is realized as a RenderTransform on the child,
                // anchored at the top-left corner.
                TransformRoot.RenderTransform = _matrixTransform;
                TransformRoot.RenderTransformOrigin = new RelativePoint(0, 0, RelativeUnit.Absolute);
            }

            ApplyLayoutTransform();
        }

        /// <summary>
        /// Acceptable difference between two doubles.
        /// </summary>
        private const double AcceptableDelta = 0.0001;

        /// <summary>
        /// Number of decimals to round the Matrix to.
        /// </summary>
        private const int DecimalsAfterRound = 4;

        /// <summary>
        /// Actual DesiredSize of Child element (the value it returned from its MeasureOverride method).
        /// </summary>
        private Size _childActualSize = Size.Empty;

        /// <summary>
        /// RenderTransform/MatrixTransform applied to TransformRoot.
        /// </summary>
        private MatrixTransform _matrixTransform = new MatrixTransform();

        /// <summary>
        /// Transformation matrix corresponding to _matrixTransform.
        /// </summary>
        private Matrix _transformation;

        // Subscription to the current LayoutTransform's Changed event; disposed on replacement.
        private IDisposable _transformChangedEvent = null;

        /// <summary>
        /// Returns true if Size a is smaller than Size b in either dimension.
        /// </summary>
        /// <param name="a">First Size.</param>
        /// <param name="b">Second Size.</param>
        /// <returns>True if Size a is smaller than Size b in either dimension.</returns>
        private static bool IsSizeSmaller(Size a, Size b)
        {
            return (a.Width + AcceptableDelta < b.Width) || (a.Height + AcceptableDelta < b.Height);
        }

        /// <summary>
        /// Rounds the non-offset elements of a Matrix to avoid issues due to floating point imprecision.
        /// </summary>
        /// <param name="matrix">Matrix to round.</param>
        /// <param name="decimals">Number of decimal places to round to.</param>
        /// <returns>Rounded Matrix.</returns>
        private static Matrix RoundMatrix(Matrix matrix, int decimals)
        {
            // Offsets (M31/M32) are left untouched; only the 2x2 linear part is rounded.
            return new Matrix(
                Math.Round(matrix.M11, decimals),
                Math.Round(matrix.M12, decimals),
                Math.Round(matrix.M21, decimals),
                Math.Round(matrix.M22, decimals),
                matrix.M31,
                matrix.M32);
        }

        /// <summary>
        /// Applies the layout transform on the LayoutTransformerControl content.
        /// </summary>
        /// <remarks>
        /// Only used in advanced scenarios (like animating the LayoutTransform).
        /// Should be used to notify the LayoutTransformer control that some aspect
        /// of its Transform property has changed.
        /// </remarks>
        private void ApplyLayoutTransform()
        {
            if (LayoutTransform == null)
                return;

            // Get the transform matrix and apply it
            _transformation = RoundMatrix(LayoutTransform.Value, DecimalsAfterRound);

            if (null != _matrixTransform)
            {
                _matrixTransform.Matrix = _transformation;
            }

            // New transform means re-layout is necessary
            InvalidateMeasure();
        }

        /// <summary>
        /// Compute the largest usable size (greatest area) after applying the transformation to the specified bounds.
        /// </summary>
        /// <param name="arrangeBounds">Arrange bounds.</param>
        /// <returns>Largest Size possible.</returns>
        [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "Closely corresponds to WPF's FrameworkElement.FindMaximalAreaLocalSpaceRect.")]
        private Size ComputeLargestTransformedSize(Size arrangeBounds)
        {
            // Computed largest transformed size
            Size computedSize = Size.Empty;

            // Detect infinite bounds and constrain the scenario
            bool infiniteWidth = double.IsInfinity(arrangeBounds.Width);
            if (infiniteWidth)
            {
                // arrangeBounds.Width = arrangeBounds.Height;
                arrangeBounds = arrangeBounds.WithWidth(arrangeBounds.Height);
            }

            bool infiniteHeight = double.IsInfinity(arrangeBounds.Height);
            if (infiniteHeight)
            {
                //arrangeBounds.Height = arrangeBounds.Width;
                arrangeBounds = arrangeBounds.WithHeight(arrangeBounds.Width);
            }

            // Capture the matrix parameters
            double a = _transformation.M11;
            double b = _transformation.M12;
            double c = _transformation.M21;
            double d = _transformation.M22;

            // Compute maximum possible transformed width/height based on starting width/height
            // These constraints define two lines in the positive x/y quadrant
            double maxWidthFromWidth = Math.Abs(arrangeBounds.Width / a);
            double maxHeightFromWidth = Math.Abs(arrangeBounds.Width / c);
            double maxWidthFromHeight = Math.Abs(arrangeBounds.Height / b);
            double maxHeightFromHeight = Math.Abs(arrangeBounds.Height / d);

            // The transformed width/height that maximize the area under each segment is its midpoint
            // At most one of the two midpoints will satisfy both constraints
            double idealWidthFromWidth = maxWidthFromWidth / 2;
            double idealHeightFromWidth = maxHeightFromWidth / 2;
            double idealWidthFromHeight = maxWidthFromHeight / 2;
            double idealHeightFromHeight = maxHeightFromHeight / 2;

            // Compute slope of both constraint lines
            double slopeFromWidth = -(maxHeightFromWidth / maxWidthFromWidth);
            double slopeFromHeight = -(maxHeightFromHeight / maxWidthFromHeight);

            if ((0 == arrangeBounds.Width) || (0 == arrangeBounds.Height))
            {
                // Check for empty bounds
                computedSize = new Size(arrangeBounds.Width, arrangeBounds.Height);
            }
            else if (infiniteWidth && infiniteHeight)
            {
                // Check for completely unbound scenario
                computedSize = new Size(double.PositiveInfinity, double.PositiveInfinity);
            }
            else if (!_transformation.HasInverse)
            {
                // Check for singular matrix
                computedSize = new Size(0, 0);
            }
            else if ((0 == b) || (0 == c))
            {
                // Check for 0/180 degree special cases
                double maxHeight = (infiniteHeight ? double.PositiveInfinity : maxHeightFromHeight);
                double maxWidth = (infiniteWidth ? double.PositiveInfinity : maxWidthFromWidth);

                if ((0 == b) && (0 == c))
                {
                    // No constraints
                    computedSize = new Size(maxWidth, maxHeight);
                }
                else if (0 == b)
                {
                    // Constrained by width
                    double computedHeight = Math.Min(idealHeightFromWidth, maxHeight);
                    computedSize = new Size(
                        maxWidth - Math.Abs((c * computedHeight) / a),
                        computedHeight);
                }
                else if (0 == c)
                {
                    // Constrained by height
                    double computedWidth = Math.Min(idealWidthFromHeight, maxWidth);
                    computedSize = new Size(
                        computedWidth,
                        maxHeight - Math.Abs((b * computedWidth) / d));
                }
            }
            else if ((0 == a) || (0 == d))
            {
                // Check for 90/270 degree special cases
                // (width/height sources are intentionally swapped relative to the 0/180 case)
                double maxWidth = (infiniteHeight ? double.PositiveInfinity : maxWidthFromHeight);
                double maxHeight = (infiniteWidth ? double.PositiveInfinity : maxHeightFromWidth);

                if ((0 == a) && (0 == d))
                {
                    // No constraints
                    computedSize = new Size(maxWidth, maxHeight);
                }
                else if (0 == a)
                {
                    // Constrained by width
                    double computedHeight = Math.Min(idealHeightFromHeight, maxHeight);
                    computedSize = new Size(
                        maxWidth - Math.Abs((d * computedHeight) / b),
                        computedHeight);
                }
                else if (0 == d)
                {
                    // Constrained by height
                    double computedWidth = Math.Min(idealWidthFromWidth, maxWidth);
                    computedSize = new Size(
                        computedWidth,
                        maxHeight - Math.Abs((a * computedWidth) / c));
                }
            }
            else if (idealHeightFromWidth <= ((slopeFromHeight * idealWidthFromWidth) + maxHeightFromHeight))
            {
                // Check the width midpoint for viability (by being below the height constraint line)
                computedSize = new Size(idealWidthFromWidth, idealHeightFromWidth);
            }
            else if (idealHeightFromHeight <= ((slopeFromWidth * idealWidthFromHeight) + maxHeightFromWidth))
            {
                // Check the height midpoint for viability (by being below the width constraint line)
                computedSize = new Size(idealWidthFromHeight, idealHeightFromHeight);
            }
            else
            {
                // Neither midpoint is viable; use the intersection of the two constraint lines instead
                // Compute width by setting heights equal (m1*x+c1=m2*x+c2)
                double computedWidth = (maxHeightFromHeight - maxHeightFromWidth) / (slopeFromWidth - slopeFromHeight);

                // Compute height from width constraint line (y=m*x+c; using height would give same result)
                computedSize = new Size(
                    computedWidth,
                    (slopeFromWidth * computedWidth) + maxHeightFromWidth);
            }

            // Return result
            return computedSize;
        }

        // Replaces the subscription to the transform's Changed event and re-applies the transform.
        private void OnLayoutTransformChanged(AvaloniaPropertyChangedEventArgs e)
        {
            var newTransform = e.NewValue as Transform;

            _transformChangedEvent?.Dispose();
            _transformChangedEvent = null;

            if (newTransform != null)
            {
                // Re-apply the layout transform whenever the transform object itself mutates
                // (e.g. an animated rotation angle).
                _transformChangedEvent = Observable.FromEventPattern<EventHandler, EventArgs>(
                    v => newTransform.Changed += v,
                    v => newTransform.Changed -= v)
                    .Subscribe(onNext: v => ApplyLayoutTransform());
            }

            ApplyLayoutTransform();
        }
    }
}
/* ====================================================================
   Licensed To the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file To You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed To in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

namespace NPOI.SS.Formula
{
    using System;
    using NPOI.SS.Formula.PTG;
    using NPOI.SS.Formula.Function;

    /**
     * Represents a syntactic element from a formula by encapsulating the corresponding <c>Ptg</c>
     * Token. Each <c>ParseNode</c> may have child <c>ParseNode</c>s in the case when the wrapped
     * <c>Ptg</c> is non-atomic.
     *
     * @author Josh Micich
     */
    class ParseNode
    {
        public static ParseNode[] EMPTY_ARRAY = { };

        // The token wrapped by this node.
        private Ptg _token;
        // Child nodes (the operands/arguments of _token); empty for atomic tokens.
        private ParseNode[] _children;
        // true when _token is the IF() function, which needs special tAttr encoding.
        private bool _isIf;
        // Total token count of the subtree rooted here, including the extra tAttr tokens for IF.
        private int _tokenCount;

        public ParseNode(Ptg token, ParseNode[] children)
        {
            _token = token;
            _children = children;
            _isIf = IsIf(token);
            int tokenCount = 1;
            for (int i = 0; i < children.Length; i++)
            {
                tokenCount += children[i].TokenCount;
            }
            if (_isIf)
            {
                // there will be 2 or 3 extra tAttr Tokens according To whether the false param is present
                tokenCount += children.Length;
            }
            _tokenCount = tokenCount;
        }

        public ParseNode(Ptg token)
            : this(token, EMPTY_ARRAY)
        {
        }

        public ParseNode(Ptg token, ParseNode child0)
            : this(token, new ParseNode[] { child0, })
        {
        }

        public ParseNode(Ptg token, ParseNode child0, ParseNode child1)
            : this(token, new ParseNode[] { child0, child1, })
        {
        }

        private int TokenCount
        {
            get { return _tokenCount; }
        }

        // Encoded size in bytes of this subtree. ArrayPtg contributes only its plain
        // token size here because its array data is written separately after all tokens.
        public int EncodedSize
        {
            get
            {
                int result = _token is ArrayPtg ? ArrayPtg.PLAIN_TOKEN_SIZE : _token.Size;
                for (int i = 0; i < _children.Length; i++)
                {
                    result += _children[i].EncodedSize;
                }
                return result;
            }
        }

        /**
         * Collects the array of <c>Ptg</c> Tokens for the specified tree.
         */
        public static Ptg[] ToTokenArray(ParseNode rootNode)
        {
            TokenCollector temp = new TokenCollector(rootNode.TokenCount);
            rootNode.CollectPtgs(temp);
            return temp.GetResult();
        }

        // Emits this subtree in RPN order: children first, then the operator token.
        // Mem tokens (MemFuncPtg/MemAreaPtg) are the exception and are written first.
        private void CollectPtgs(TokenCollector temp)
        {
            if (IsIf(_token))
            {
                CollectIfPtgs(temp);
                return;
            }
            bool isPreFixOperator = _token is MemFuncPtg || _token is MemAreaPtg;
            if (isPreFixOperator)
            {
                temp.Add(_token);
            }
            for (int i = 0; i < GetChildren().Length; i++)
            {
                GetChildren()[i].CollectPtgs(temp);
            }
            if (!isPreFixOperator)
            {
                temp.Add(_token);
            }
        }

        /**
         * The IF() function Gets marked up with two or three tAttr Tokens.
         * Similar logic will be required for CHOOSE() when it is supported
         *
         * See excelfileformat.pdf sec 3.10.5 "tAttr (19H)
         */
        private void CollectIfPtgs(TokenCollector temp)
        {
            // condition goes first
            GetChildren()[0].CollectPtgs(temp);

            // placeholder for tAttrIf (its skip distance isn't known until the
            // true parameter has been collected)
            int ifAttrIndex = temp.CreatePlaceholder();

            // true parameter
            GetChildren()[1].CollectPtgs(temp);

            // placeholder for first skip attr
            int skipAfterTrueParamIndex = temp.CreatePlaceholder();
            int trueParamSize = temp.sumTokenSizes(ifAttrIndex + 1, skipAfterTrueParamIndex);

            AttrPtg attrIf = AttrPtg.CreateIf(trueParamSize + 4);// distance to start of false parameter/tFuncVar. +4 for tAttrSkip after true

            if (GetChildren().Length > 2)
            {
                // false param present

                // false parameter
                GetChildren()[2].CollectPtgs(temp);

                int skipAfterFalseParamIndex = temp.CreatePlaceholder();
                int falseParamSize = temp.sumTokenSizes(skipAfterTrueParamIndex + 1, skipAfterFalseParamIndex);

                AttrPtg attrSkipAfterTrue = AttrPtg.CreateSkip(falseParamSize + 4 + 4 - 1); // 1 less than distance to end of if FuncVar(size=4). +4 for attr skip before
                AttrPtg attrSkipAfterFalse = AttrPtg.CreateSkip(4 - 1); // 1 less than distance to end of if FuncVar(size=4).

                temp.SetPlaceholder(ifAttrIndex, attrIf);
                temp.SetPlaceholder(skipAfterTrueParamIndex, attrSkipAfterTrue);
                temp.SetPlaceholder(skipAfterFalseParamIndex, attrSkipAfterFalse);
            }
            else
            {
                // false parameter not present
                AttrPtg attrSkipAfterTrue = AttrPtg.CreateSkip(4 - 1); // 1 less than distance to end of if FuncVar(size=4).

                temp.SetPlaceholder(ifAttrIndex, attrIf);
                temp.SetPlaceholder(skipAfterTrueParamIndex, attrSkipAfterTrue);
            }
            // finally the IF FuncVar token itself
            temp.Add(GetToken());
        }

        // Returns true if the token is the IF() function (FuncVarPtg named "IF").
        private static bool IsIf(Ptg token)
        {
            if (token is FuncVarPtg)
            {
                FuncVarPtg func = (FuncVarPtg)token;
                if (FunctionMetadataRegistry.FUNCTION_NAME_IF.Equals(func.Name))
                {
                    return true;
                }
            }
            return false;
        }

        public Ptg GetToken()
        {
            return _token;
        }

        public ParseNode[] GetChildren()
        {
            return _children;
        }

        // Accumulates tokens into a fixed-size array, with support for reserving
        // slots (placeholders) whose tokens are filled in later once their sizes
        // are known.
        private class TokenCollector
        {
            private Ptg[] _ptgs;
            private int _offset;

            public TokenCollector(int tokenCount)
            {
                _ptgs = new Ptg[tokenCount];
                _offset = 0;
            }

            // Sum of the encoded sizes of the tokens in the half-open range [fromIx, ToIx).
            public int sumTokenSizes(int fromIx, int ToIx)
            {
                int result = 0;
                for (int i = fromIx; i < ToIx; i++)
                {
                    result += _ptgs[i].Size;
                }
                return result;
            }

            // Reserves the current slot (leaving it null) and returns its index.
            public int CreatePlaceholder()
            {
                return _offset++;
            }

            public void Add(Ptg token)
            {
                if (token == null)
                {
                    throw new ArgumentException("token must not be null");
                }
                _ptgs[_offset] = token;
                _offset++;
            }

            // Fills a slot previously reserved by CreatePlaceholder; the slot must still be empty.
            public void SetPlaceholder(int index, Ptg token)
            {
                if (_ptgs[index] != null)
                {
                    throw new InvalidOperationException("Invalid placeholder index (" + index + ")");
                }
                _ptgs[index] = token;
            }

            public Ptg[] GetResult()
            {
                return _ptgs;
            }
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Reflection;
using System.Text;
using System.Threading;
using log4net;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenSim.Framework;
using OpenSim.Framework.Client;
using OpenSim.Region.Framework.Scenes;

namespace OpenSim.Region.OptionalModules.Agent.InternetRelayClientView.Server
{
    public delegate void OnIRCClientReadyDelegate(IRCClientView cv);

    /// <summary>
    /// Presents a connected IRC client to the scene as an IClientAPI: the
    /// region is exposed as an IRC channel, region chat is relayed to the
    /// client as PRIVMSGs and channel PRIVMSGs are injected as local chat.
    /// </summary>
    public class IRCClientView : IClientAPI, IClientCore, IClientIPEndpoint
    {
        /// <summary>Raised once the client has completed NICK/USER registration.</summary>
        public event OnIRCClientReadyDelegate OnIRCReady;

        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private readonly TcpClient m_client;
        private readonly Scene m_scene;

        private UUID m_agentID = UUID.Random();

        private string m_username;
        private string m_nick;

        private bool m_hasNick = false;
        private bool m_hasUser = false;
        private bool m_connected = true;

        public IRCClientView(TcpClient client, Scene scene)
        {
            m_client = client;
            m_scene = scene;

            Watchdog.StartThread(InternalLoop, "IRCClientView", ThreadPriority.Normal, false);
        }

        /// <summary>Sends a reply line prefixed with the server name.</summary>
        private void SendServerCommand(string command)
        {
            SendCommand(":opensimircd " + command);
        }

        /// <summary>Writes one raw, CRLF-terminated IRC line asynchronously.</summary>
        private void SendCommand(string command)
        {
            m_log.Info("[IRCd] Sending >>> " + command);

            byte[] buf = Util.UTF8.GetBytes(command + "\r\n");
            m_client.GetStream().BeginWrite(buf, 0, buf.Length, SendComplete, null);
        }

        private void SendComplete(IAsyncResult result)
        {
            m_log.Info("[IRCd] Send Complete.");
        }

        private string IrcRegionName
        {
            // I know &Channel is more technically correct, but people are used to seeing #Channel
            // Dont shoot me!
            get { return "#" + m_scene.RegionInfo.RegionName.Replace(" ", "-"); }
        }

        /// <summary>
        /// Reader thread body: accumulates socket bytes, slices out complete
        /// CRLF-terminated messages and dispatches them. Runs until the peer
        /// disconnects or the socket errors.
        /// </summary>
        private void InternalLoop()
        {
            try
            {
                string strbuf = String.Empty;

                while (m_connected && m_client.Connected)
                {
                    // RFC1459 defines max message size as 512 (was an 8-byte
                    // buffer, which forced up to 64 reads per message).
                    byte[] buf = new byte[512];

                    int count = m_client.GetStream().Read(buf, 0, buf.Length);
                    string line = Util.UTF8.GetString(buf, 0, count);

                    strbuf += line;

                    string message = ExtractMessage(strbuf);
                    if (message != null)
                    {
                        // Remove from buffer
                        strbuf = strbuf.Remove(0, message.Length);

                        m_log.Info("[IRCd] Receiving <<< " + message);
                        message = message.Trim();

                        // Extract command sequence
                        string command = ExtractCommand(message);
                        ProcessInMessage(message, command);
                    }
                    else
                    {
                        // A zero-byte read with nothing buffered means the
                        // remote end closed the connection.
                        if (strbuf.Length == 0)
                        {
                            m_connected = false;
                            m_log.Info("[IRCd] Buffer zero, closing...");
                            if (OnDisconnectUser != null)
                                OnDisconnectUser();
                        }
                    }

                    Thread.Sleep(0);
                    Watchdog.UpdateThread();
                }
            }
            catch (IOException)
            {
                if (OnDisconnectUser != null)
                    OnDisconnectUser();

                m_log.Warn("[IRCd] Disconnected client.");
            }
            catch (SocketException)
            {
                if (OnDisconnectUser != null)
                    OnDisconnectUser();

                m_log.Warn("[IRCd] Disconnected client.");
            }

            Watchdog.RemoveThread();
        }

        /// <summary>
        /// Dispatches a single parsed IRC message to the matching handler.
        /// Unknown/unimplemented commands are answered with numeric 421.
        /// </summary>
        private void ProcessInMessage(string message, string command)
        {
            m_log.Info("[IRCd] Processing [MSG:" + message + "] [COM:" + command + "]");

            if (command != null)
            {
                switch (command)
                {
                    // Recognised but deliberately unimplemented commands.
                    case "ADMIN":
                    case "AWAY":
                    case "CONNECT":
                    case "DIE":
                    case "ERROR":
                    case "INFO":
                    case "INVITE":
                    case "ISON":
                    case "KICK":
                    case "KILL":
                    case "LINKS":
                    case "LUSERS":
                    case "OPER":
                    case "PART":
                    case "REHASH":
                    case "SERVICE":
                    case "SERVLIST":
                    case "SERVER":
                    case "SQUERY":
                    case "SQUIT":
                    case "STATS":
                    case "SUMMON":
                    case "TIME":
                    case "TRACE":
                    case "VERSION":
                    case "WALLOPS":
                    case "WHOIS":
                    case "WHOWAS":
                        SendServerCommand("421 " + command + " :Command unimplemented");
                        break;

                    // Connection Commands
                    case "PASS":
                        break; // Ignore for now. I want to implement authentication later however.

                    case "JOIN":
                        IRC_SendReplyJoin();
                        break;

                    case "MODE":
                        IRC_SendReplyModeChannel();
                        break;

                    case "USER":
                        IRC_ProcessUser(message);
                        IRC_Ready();
                        break;

                    case "USERHOST":
                        string[] userhostArgs = ExtractParameters(message);
                        if (userhostArgs[0] == ":" + m_nick)
                        {
                            SendServerCommand("302 :" + m_nick + "=+" + m_nick + "@" +
                                              ((IPEndPoint)m_client.Client.RemoteEndPoint).Address);
                        }
                        break;

                    case "NICK":
                        IRC_ProcessNick(message);
                        IRC_Ready();
                        break;

                    case "TOPIC":
                        IRC_SendReplyTopic();
                        break;

                    case "USERS":
                        IRC_SendReplyUsers();
                        break;

                    case "LIST":
                        break; // TODO

                    case "MOTD":
                        IRC_SendMOTD();
                        break;

                    case "NOTICE": // TODO
                        break;

                    case "WHO": // TODO
                        IRC_SendNamesReply();
                        IRC_SendWhoReply();
                        break;

                    case "PING":
                        IRC_ProcessPing(message);
                        break;

                    // Special case, ignore this completely.
                    case "PONG":
                        break;

                    case "QUIT":
                        if (OnDisconnectUser != null)
                            OnDisconnectUser();
                        break;

                    case "NAMES":
                        IRC_SendNamesReply();
                        break;

                    case "PRIVMSG":
                        IRC_ProcessPrivmsg(message);
                        break;

                    default:
                        SendServerCommand("421 " + command + " :Unknown command");
                        break;
                }
            }
        }

        /// <summary>
        /// Once both NICK and USER have arrived, sends the standard welcome
        /// burst, forces the nick to the avatar name, auto-joins the region
        /// channel and fires <see cref="OnIRCReady"/>.
        /// </summary>
        private void IRC_Ready()
        {
            if (m_hasUser && m_hasNick)
            {
                SendServerCommand("001 " + m_nick + " :Welcome to OpenSimulator IRCd");
                SendServerCommand("002 " + m_nick + " :Running OpenSimVersion");
                SendServerCommand("003 " + m_nick + " :This server was created over 9000 years ago");
                SendServerCommand("004 " + m_nick + " :opensimirc r1 aoOirw abeiIklmnoOpqrstv");
                SendServerCommand("251 " + m_nick + " :There are 0 users and 0 services on 1 servers");
                SendServerCommand("252 " + m_nick + " 0 :operators online");
                SendServerCommand("253 " + m_nick + " 0 :unknown connections");
                SendServerCommand("254 " + m_nick + " 1 :channels formed");
                SendServerCommand("255 " + m_nick + " :I have 1 users, 0 services and 1 servers");
                SendCommand(":" + m_nick + " MODE " + m_nick + " :+i");
                SendCommand(":" + m_nick + " JOIN :" + IrcRegionName);

                // Rename to 'Real Name'
                SendCommand(":" + m_nick + " NICK :" + m_username.Replace(" ", ""));
                m_nick = m_username.Replace(" ", "");

                IRC_SendReplyJoin();

                IRC_SendChannelPrivmsg("System", "Welcome to OpenSimulator.");
                IRC_SendChannelPrivmsg("System", "You are in a maze of twisty little passages, all alike.");
                IRC_SendChannelPrivmsg("System", "It is pitch black. You are likely to be eaten by a grue.");

                if (OnIRCReady != null)
                    OnIRCReady(this);
            }
        }

        private void IRC_SendReplyJoin()
        {
            IRC_SendReplyTopic();
            IRC_SendNamesReply();
        }

        private void IRC_SendReplyModeChannel()
        {
            SendServerCommand("324 " + m_nick + " " + IrcRegionName + " +n");
        }

        /// <summary>Handles USER: the realname field becomes the OpenSim username.</summary>
        private void IRC_ProcessUser(string message)
        {
            string[] userArgs = ExtractParameters(message);
            // TODO: unused: string username = userArgs[0];
            // TODO: unused: string hostname = userArgs[1];
            // TODO: unused: string servername = userArgs[2];
            string realname = userArgs[3].Replace(":", "");

            m_username = realname;
            m_hasUser = true;
        }

        private void IRC_ProcessNick(string message)
        {
            string[] nickArgs = ExtractParameters(message);
            string nickname = nickArgs[0].Replace(":", "");
            m_nick = nickname;
            m_hasNick = true;
        }

        private void IRC_ProcessPing(string message)
        {
            string[] pingArgs = ExtractParameters(message);
            string pingHost = pingArgs[0];
            SendCommand("PONG " + pingHost);
        }

        /// <summary>
        /// Relays a PRIVMSG addressed to the region channel into the scene as
        /// local chat. Messages to other targets would be IMs and are not yet
        /// handled.
        /// </summary>
        private void IRC_ProcessPrivmsg(string message)
        {
            string[] privmsgArgs = ExtractParameters(message);
            if (privmsgArgs[0] == IrcRegionName)
            {
                if (OnChatFromClient != null)
                {
                    OSChatMessage msg = new OSChatMessage();
                    msg.Sender = this;
                    msg.Channel = 0;
                    msg.From = this.Name;
                    msg.Message = privmsgArgs[1].Replace(":", "");
                    msg.Position = Vector3.Zero;
                    msg.Scene = m_scene;
                    msg.SenderObject = null;
                    msg.SenderUUID = this.AgentId;
                    msg.Type = ChatTypeEnum.Say;

                    OnChatFromClient(this, msg);
                }
            }
            else
            {
                // Handle as an IM, later.
            }
        }

        /// <summary>Sends one RPL_NAMREPLY (353) per scene avatar, then 366.</summary>
        private void IRC_SendNamesReply()
        {
            EntityBase[] users = m_scene.Entities.GetAllByType<ScenePresence>();

            foreach (EntityBase user in users)
            {
                SendServerCommand("353 " + m_nick + " = " + IrcRegionName + " :" + user.Name.Replace(" ", ""));
            }
            SendServerCommand("366 " + IrcRegionName + " :End of /NAMES list");
        }

        /// <summary>Sends one RPL_WHOREPLY (352) per scene avatar, then 315.</summary>
        private void IRC_SendWhoReply()
        {
            EntityBase[] users = m_scene.Entities.GetAllByType<ScenePresence>();

            foreach (EntityBase user in users)
            {
                SendServerCommand("352 " + m_nick + " " + IrcRegionName + " n=" + user.Name.Replace(" ", "") +
                                  " fakehost.com " + user.Name.Replace(" ", "") + " H " + ":0 " + user.Name);
            }
            SendServerCommand("315 " + m_nick + " " + IrcRegionName + " :End of /WHO list");
        }

        private void IRC_SendMOTD()
        {
            SendServerCommand("375 :- OpenSimulator Message of the day -");
            SendServerCommand("372 :- Hiya!");
            SendServerCommand("376 :End of /MOTD command");
        }

        private void IRC_SendReplyTopic()
        {
            SendServerCommand("332 " + IrcRegionName + " :OpenSimulator IRC Server");
        }

        /// <summary>
        /// Replies to USERS: header 392, one 393 line per avatar (or 395 when
        /// the region is empty), trailer 394.
        /// </summary>
        private void IRC_SendReplyUsers()
        {
            EntityBase[] users = m_scene.Entities.GetAllByType<ScenePresence>();

            SendServerCommand("392 :UserID Terminal Host");

            if (users.Length == 0)
            {
                SendServerCommand("395 :Nobody logged in");
                return;
            }

            foreach (EntityBase user in users)
            {
                string userName = user.Name.Replace(" ", "");

                // Pad or truncate the name to the 8-column UserID field.
                // BUGFIX: the old code concatenated char[] arrays (rendered as
                // "System.Char[]" by Object.ToString) and its padding loop used
                // an inverted bound (userName.Length < i) that indexed past the
                // end of short names.
                string nom = userName.Length >= 8 ? userName.Substring(0, 8) : userName.PadRight(8);

                SendServerCommand("393 :" + nom + " terminal_ hostname");
            }
            SendServerCommand("394 :End of users");
        }

        /// <summary>
        /// Returns the first complete CRLF-terminated message in the buffer
        /// (CRLF included), or null if none has fully arrived yet.
        /// </summary>
        private static string ExtractMessage(string buffer)
        {
            int pos = buffer.IndexOf("\r\n");
            if (pos == -1)
                return null;

            string command = buffer.Substring(0, pos + 2);
            return command;
        }

        /// <summary>
        /// Returns the command token of an IRC message, skipping an optional
        /// leading ":" prefix token. Single-token messages are logged and
        /// dropped (returns null).
        /// </summary>
        private static string ExtractCommand(string msg)
        {
            string[] msgs = msg.Split(' ');
            if (msgs.Length < 2)
            {
                m_log.Warn("[IRCd] Dropped msg: " + msg);
                return null;
            }

            if (msgs[0].StartsWith(":"))
                return msgs[1];

            return msgs[0];
        }

        /// <summary>
        /// Returns the parameter tokens following the command; a trailing
        /// ":"-prefixed parameter absorbs the rest of the message as one item.
        /// </summary>
        private static string[] ExtractParameters(string msg)
        {
            string[] msgs = msg.Split(' ');
            List<string> parms = new List<string>(msgs.Length);

            bool foundCommand = false;
            string command = ExtractCommand(msg);

            for (int i = 0; i < msgs.Length; i++)
            {
                if (msgs[i] == command)
                {
                    foundCommand = true;
                    continue;
                }

                if (foundCommand != true)
                    continue;

                if (i != 0 && msgs[i].StartsWith(":"))
                {
                    // Trailing parameter: rejoin everything from here on.
                    List<string> tmp = new List<string>();
                    for (int j = i; j < msgs.Length; j++)
                    {
                        tmp.Add(msgs[j]);
                    }
                    parms.Add(string.Join(" ", tmp.ToArray()));
                    break;
                }

                parms.Add(msgs[i]);
            }

            return parms.ToArray();
        }

        #region Implementation of IClientAPI

        public Vector3 StartPos
        {
            get { return new Vector3(((int)Constants.RegionSize * 0.5f), ((int)Constants.RegionSize * 0.5f), 50); }
            set { }
        }

        public bool TryGet<T>(out T iface)
        {
            iface = default(T);
            return false;
        }

        public T Get<T>()
        {
            return default(T);
        }

        public UUID AgentId
        {
            get { return m_agentID; }
        }

        public void Disconnect(string reason)
        {
            IRC_SendChannelPrivmsg("System", "You have been eaten by a grue. (" + reason + ")");

            m_connected = false;
            m_client.Close();
        }

        public void Disconnect()
        {
            IRC_SendChannelPrivmsg("System", "You have been eaten by a grue.");

            m_connected = false;
            m_client.Close();
        }

        // NOTE(review): session/secure-session ids are aliased to the agent id
        // here — presumably acceptable for this non-LLUDP stack; confirm.
        public UUID SessionId
        {
            get { return m_agentID; }
        }

        public UUID SecureSessionId
        {
            get { return m_agentID; }
        }

        public UUID ActiveGroupId
        {
            get { return UUID.Zero; }
        }

        public string ActiveGroupName
        {
            get { return "IRCd User"; }
        }

        public ulong ActiveGroupPowers
        {
            get { return 0; }
        }

        public ulong GetGroupPowers(UUID groupID)
        {
            return 0;
        }

        public bool IsGroupMember(UUID GroupID)
        {
            return false;
        }

        public string FirstName
        {
            get
            {
                string[] names = m_username.Split(' ');
                return names[0];
            }
        }

        public string LastName
        {
            get
            {
                string[] names = m_username.Split(' ');
                if (names.Length > 1)
                    return names[1];
                return names[0];
            }
        }

        public IScene Scene
        {
            get { return m_scene; }
        }

        public int NextAnimationSequenceNumber
        {
            get { return 0; }
        }

        public string Name
        {
            get { return m_username; }
        }

        public bool IsActive
        {
            get { return true; }
            set
            {
                if (!value)
                    Disconnect("IsActive Disconnected?");
            }
        }

        public bool IsLoggingOut
        {
            get { return false; }
            set { }
        }

        public bool SendLogoutPacketWhenClosing
        {
            set { }
        }

        public uint CircuitCode
        {
            get { return (uint)Util.RandomClass.Next(0, int.MaxValue); }
        }

        public IPEndPoint RemoteEndPoint
        {
            get { return (IPEndPoint)m_client.Client.RemoteEndPoint; }
        }

#pragma warning disable 67
        public event GenericMessage OnGenericMessage;
        public event ImprovedInstantMessage OnInstantMessage;
        public event ChatMessage OnChatFromClient;
        public event TextureRequest OnRequestTexture;
        public event RezObject OnRezObject;
        public event ModifyTerrain OnModifyTerrain;
        public event BakeTerrain OnBakeTerrain;
        public event EstateChangeInfo OnEstateChangeInfo;
        public event SetAppearance OnSetAppearance;
        public event AvatarNowWearing OnAvatarNowWearing;
        public event RezSingleAttachmentFromInv OnRezSingleAttachmentFromInv;
        public event RezMultipleAttachmentsFromInv OnRezMultipleAttachmentsFromInv;
        // Events required by the IClientAPI contract. This IRC-backed view
        // never raises them (warning CS0067 is suppressed via the pragma 67
        // block they sit in); they exist only so the interface is satisfied.

        // --- Object, attachment and animation events ---
        public event UUIDNameRequest OnDetachAttachmentIntoInv;
        public event ObjectAttach OnObjectAttach;
        public event ObjectDeselect OnObjectDetach;
        public event ObjectDrop OnObjectDrop;
        public event StartAnim OnStartAnim;
        public event StopAnim OnStopAnim;
        public event LinkObjects OnLinkObjects;
        public event DelinkObjects OnDelinkObjects;

        // --- Map, teleport and avatar session events ---
        public event RequestMapBlocks OnRequestMapBlocks;
        public event RequestMapName OnMapNameRequest;
        public event TeleportLocationRequest OnTeleportLocationRequest;
        public event DisconnectUser OnDisconnectUser;
        public event RequestAvatarProperties OnRequestAvatarProperties;
        public event SetAlwaysRun OnSetAlwaysRun;
        public event TeleportLandmarkRequest OnTeleportLandmarkRequest;
        public event DeRezObject OnDeRezObject;
        public event Action<IClientAPI> OnRegionHandShakeReply;
        public event GenericCall1 OnRequestWearables;
        public event GenericCall1 OnCompleteMovementToRegion;
        public event UpdateAgent OnPreAgentUpdate;
        public event UpdateAgent OnAgentUpdate;
        public event AgentRequestSit OnAgentRequestSit;
        public event AgentSit OnAgentSit;
        public event AvatarPickerRequest OnAvatarPickerRequest;
        public event Action<IClientAPI> OnRequestAvatarsData;
        public event AddNewPrim OnAddPrim;
        public event FetchInventory OnAgentDataUpdateRequest;
        public event TeleportLocationRequest OnSetStartLocationRequest;
        public event RequestGodlikePowers OnRequestGodlikePowers;
        public event GodKickUser OnGodKickUser;

        // --- Prim manipulation events ---
        public event ObjectDuplicate OnObjectDuplicate;
        public event ObjectDuplicateOnRay OnObjectDuplicateOnRay;
        public event GrabObject OnGrabObject;
        public event DeGrabObject OnDeGrabObject;
        public event MoveObject OnGrabUpdate;
        public event SpinStart OnSpinStart;
        public event SpinObject OnSpinUpdate;
        public event SpinStop OnSpinStop;
        public event UpdateShape OnUpdatePrimShape;
        public event ObjectExtraParams OnUpdateExtraParams;
        public event ObjectRequest OnObjectRequest;
        public event ObjectSelect OnObjectSelect;
        public event ObjectDeselect OnObjectDeselect;
        public event GenericCall7 OnObjectDescription;
        public event GenericCall7 OnObjectName;
        public event GenericCall7 OnObjectClickAction;
        public event GenericCall7 OnObjectMaterial;
        public event RequestObjectPropertiesFamily OnRequestObjectPropertiesFamily;
        public event UpdatePrimFlags OnUpdatePrimFlags;
        public event UpdatePrimTexture OnUpdatePrimTexture;
        public event UpdateVector OnUpdatePrimGroupPosition;
        public event UpdateVector OnUpdatePrimSinglePosition;
        public event UpdatePrimRotation OnUpdatePrimGroupRotation;
        public event UpdatePrimSingleRotation OnUpdatePrimSingleRotation;
        public event UpdatePrimSingleRotationPosition OnUpdatePrimSingleRotationPosition;
        public event UpdatePrimGroupRotation OnUpdatePrimGroupMouseRotation;
        public event UpdateVector OnUpdatePrimScale;
        public event UpdateVector OnUpdatePrimGroupScale;
        public event StatusChange OnChildAgentStatus;
        public event GenericCall2 OnStopMovement;
        public event Action<UUID> OnRemoveAvatar;
        public event ObjectPermissions OnObjectPermissions;

        // --- Inventory events ---
        public event CreateNewInventoryItem OnCreateNewInventoryItem;
        public event LinkInventoryItem OnLinkInventoryItem;
        public event CreateInventoryFolder OnCreateNewInventoryFolder;
        public event UpdateInventoryFolder OnUpdateInventoryFolder;
        public event MoveInventoryFolder OnMoveInventoryFolder;
        public event FetchInventoryDescendents OnFetchInventoryDescendents;
        public event PurgeInventoryDescendents OnPurgeInventoryDescendents;
        public event FetchInventory OnFetchInventory;
        public event RequestTaskInventory OnRequestTaskInventory;
        public event UpdateInventoryItem OnUpdateInventoryItem;
        public event CopyInventoryItem OnCopyInventoryItem;
        public event MoveInventoryItem OnMoveInventoryItem;
        public event RemoveInventoryFolder OnRemoveInventoryFolder;
        public event RemoveInventoryItem OnRemoveInventoryItem;

        // --- Asset transfer (Xfer) and script events ---
        public event UDPAssetUploadRequest OnAssetUploadRequest;
        public event XferReceive OnXferReceive;
        public event RequestXfer OnRequestXfer;
        public event ConfirmXfer OnConfirmXfer;
        public event AbortXfer OnAbortXfer;
        public event RezScript OnRezScript;
        public event UpdateTaskInventory OnUpdateTaskInventory;
        public event MoveTaskInventory OnMoveTaskItem;
        public event RemoveTaskInventory OnRemoveTaskItem;
        public event RequestAsset OnRequestAsset;
        public event UUIDNameRequest OnNameFromUUIDRequest;

        // --- Parcel / land events ---
        public event ParcelAccessListRequest OnParcelAccessListRequest;
        public event ParcelAccessListUpdateRequest OnParcelAccessListUpdateRequest;
        public event ParcelPropertiesRequest OnParcelPropertiesRequest;
        public event ParcelDivideRequest OnParcelDivideRequest;
        public event ParcelJoinRequest OnParcelJoinRequest;
        public event ParcelPropertiesUpdateRequest OnParcelPropertiesUpdateRequest;
        public event ParcelSelectObjects OnParcelSelectObjects;
        public event ParcelObjectOwnerRequest OnParcelObjectOwnerRequest;
        public event ParcelAbandonRequest OnParcelAbandonRequest;
        public event ParcelGodForceOwner OnParcelGodForceOwner;
        public event ParcelReclaim OnParcelReclaim;
        public event ParcelReturnObjectsRequest OnParcelReturnObjectsRequest;
        public event ParcelDeedToGroup OnParcelDeedToGroup;
        public event RegionInfoRequest OnRegionInfoRequest;
        public event EstateCovenantRequest OnEstateCovenantRequest;

        // --- Friends and economy events ---
        public event FriendActionDelegate OnApproveFriendRequest;
        public event FriendActionDelegate OnDenyFriendRequest;
        public event FriendshipTermination OnTerminateFriendship;
        public event GrantUserFriendRights OnGrantUserRights;
        public event MoneyTransferRequest OnMoneyTransferRequest;
        public event EconomyDataRequest OnEconomyDataRequest;
        public event MoneyBalanceRequest OnMoneyBalanceRequest;
        public event UpdateAvatarProperties OnUpdateAvatarProperties;
        public event ParcelBuy OnParcelBuy;
        public event RequestPayPrice OnRequestPayPrice;
        public event ObjectSaleInfo OnObjectSaleInfo;
        public event ObjectBuy OnObjectBuy;
        public event BuyObjectInventory OnBuyObjectInventory;

        // --- Terrain, search and undo events ---
        public event RequestTerrain OnRequestTerrain;
        public event RequestTerrain OnUploadTerrain;
        public event ObjectIncludeInSearch OnObjectIncludeInSearch;
        public event UUIDNameRequest OnTeleportHomeRequest;
        public event ScriptAnswer OnScriptAnswer;
        public event AgentSit OnUndo;
        public event AgentSit OnRedo;
        public event LandUndo OnLandUndo;
        public event ForceReleaseControls OnForceReleaseControls;
        public event GodLandStatRequest OnLandStatRequest;

        // --- Estate management events ---
        public event DetailedEstateDataRequest OnDetailedEstateDataRequest;
        public event SetEstateFlagsRequest OnSetEstateFlagsRequest;
        public event SetEstateTerrainBaseTexture OnSetEstateTerrainBaseTexture;
        public event SetEstateTerrainDetailTexture OnSetEstateTerrainDetailTexture;
        public event SetEstateTerrainTextureHeights OnSetEstateTerrainTextureHeights;
        public event CommitEstateTerrainTextureRequest OnCommitEstateTerrainTextureRequest;
        public event SetRegionTerrainSettings OnSetRegionTerrainSettings;
        public event EstateRestartSimRequest OnEstateRestartSimRequest;
        public event EstateChangeCovenantRequest OnEstateChangeCovenantRequest;
        public event UpdateEstateAccessDeltaRequest OnUpdateEstateAccessDeltaRequest;
        public event SimulatorBlueBoxMessageRequest OnSimulatorBlueBoxMessageRequest;
        public event EstateBlueBoxMessageRequest OnEstateBlueBoxMessageRequest;
        public event EstateDebugRegionRequest OnEstateDebugRegionRequest;
        public event EstateTeleportOneUserHomeRequest OnEstateTeleportOneUserHomeRequest;
        public event EstateTeleportAllUsersHomeRequest OnEstateTeleportAllUsersHomeRequest;

        // --- Miscellaneous region / script state events ---
        public event UUIDNameRequest OnUUIDGroupNameRequest;
        public event RegionHandleRequest OnRegionHandleRequest;
        public event ParcelInfoRequest OnParcelInfoRequest;
        public event RequestObjectPropertiesFamily OnObjectGroupRequest;
        public event ScriptReset OnScriptReset;
        public event GetScriptRunning OnGetScriptRunning;
        public event SetScriptRunning OnSetScriptRunning;
        public event UpdateVector OnAutoPilotGo;
        public event TerrainUnacked OnUnackedTerrain;
        public event ActivateGesture OnActivateGesture;
        public event DeactivateGesture OnDeactivateGesture;
public event ObjectOwner OnObjectOwner; public event DirPlacesQuery OnDirPlacesQuery; public event DirFindQuery OnDirFindQuery; public event DirLandQuery OnDirLandQuery; public event DirPopularQuery OnDirPopularQuery; public event DirClassifiedQuery OnDirClassifiedQuery; public event EventInfoRequest OnEventInfoRequest; public event ParcelSetOtherCleanTime OnParcelSetOtherCleanTime; public event MapItemRequest OnMapItemRequest; public event OfferCallingCard OnOfferCallingCard; public event AcceptCallingCard OnAcceptCallingCard; public event DeclineCallingCard OnDeclineCallingCard; public event SoundTrigger OnSoundTrigger; public event StartLure OnStartLure; public event TeleportLureRequest OnTeleportLureRequest; public event NetworkStats OnNetworkStatsUpdate; public event ClassifiedInfoRequest OnClassifiedInfoRequest; public event ClassifiedInfoUpdate OnClassifiedInfoUpdate; public event ClassifiedDelete OnClassifiedDelete; public event ClassifiedDelete OnClassifiedGodDelete; public event EventNotificationAddRequest OnEventNotificationAddRequest; public event EventNotificationRemoveRequest OnEventNotificationRemoveRequest; public event EventGodDelete OnEventGodDelete; public event ParcelDwellRequest OnParcelDwellRequest; public event UserInfoRequest OnUserInfoRequest; public event UpdateUserInfo OnUpdateUserInfo; public event RetrieveInstantMessages OnRetrieveInstantMessages; public event PickDelete OnPickDelete; public event PickGodDelete OnPickGodDelete; public event PickInfoUpdate OnPickInfoUpdate; public event AvatarNotesUpdate OnAvatarNotesUpdate; public event MuteListRequest OnMuteListRequest; public event AvatarInterestUpdate OnAvatarInterestUpdate; public event PlacesQuery OnPlacesQuery; public event FindAgentUpdate OnFindAgent; public event TrackAgentUpdate OnTrackAgent; public event NewUserReport OnUserReport; public event SaveStateHandler OnSaveState; public event GroupAccountSummaryRequest OnGroupAccountSummaryRequest; public event 
GroupAccountDetailsRequest OnGroupAccountDetailsRequest; public event GroupAccountTransactionsRequest OnGroupAccountTransactionsRequest; public event FreezeUserUpdate OnParcelFreezeUser; public event EjectUserUpdate OnParcelEjectUser; public event ParcelBuyPass OnParcelBuyPass; public event ParcelGodMark OnParcelGodMark; public event GroupActiveProposalsRequest OnGroupActiveProposalsRequest; public event GroupVoteHistoryRequest OnGroupVoteHistoryRequest; public event SimWideDeletesDelegate OnSimWideDeletes; public event SendPostcard OnSendPostcard; public event MuteListEntryUpdate OnUpdateMuteListEntry; public event MuteListEntryRemove OnRemoveMuteListEntry; public event GodlikeMessage onGodlikeMessage; public event GodUpdateRegionInfoUpdate OnGodUpdateRegionInfoUpdate; #pragma warning restore 67 public void SetDebugPacketLevel(int newDebug) { } public void InPacket(object NewPack) { } public void ProcessInPacket(Packet NewPack) { } public void Close() { Disconnect(); } public void Kick(string message) { Disconnect(message); } public void Start() { Scene.AddNewClient(this); // Mimicking LLClientView which gets always set appearance from client. 
Scene scene = (Scene)Scene; AvatarAppearance appearance; scene.GetAvatarAppearance(this, out appearance); OnSetAppearance(this, appearance.Texture, (byte[])appearance.VisualParams.Clone()); } public void SendRegionHandshake(RegionInfo regionInfo, RegionHandshakeArgs args) { m_log.Info("[IRCd ClientStack] Completing Handshake to Region"); if (OnRegionHandShakeReply != null) { OnRegionHandShakeReply(this); } if (OnCompleteMovementToRegion != null) { OnCompleteMovementToRegion(this); } } public void Stop() { Disconnect(); } public void SendWearables(AvatarWearable[] wearables, int serial) { } public void SendAppearance(UUID agentID, byte[] visualParams, byte[] textureEntry) { } public void SendStartPingCheck(byte seq) { } public void SendKillObject(ulong regionHandle, uint localID) { } public void SendAnimations(UUID[] animID, int[] seqs, UUID sourceAgentId, UUID[] objectIDs) { } public void SendChatMessage(string message, byte type, Vector3 fromPos, string fromName, UUID fromAgentID, byte source, byte audible) { if (audible > 0 && message.Length > 0) IRC_SendChannelPrivmsg(fromName, message); } private void IRC_SendChannelPrivmsg(string fromName, string message) { SendCommand(":" + fromName.Replace(" ", "") + " PRIVMSG " + IrcRegionName + " :" + message); } public void SendInstantMessage(GridInstantMessage im) { // TODO } public void SendGenericMessage(string method, List<string> message) { } public void SendGenericMessage(string method, List<byte[]> message) { } public void SendLayerData(float[] map) { } public void SendLayerData(int px, int py, float[] map) { } public void SendWindData(Vector2[] windSpeeds) { } public void SendCloudData(float[] cloudCover) { } public void MoveAgentIntoRegion(RegionInfo regInfo, Vector3 pos, Vector3 look) { } public void InformClientOfNeighbour(ulong neighbourHandle, IPEndPoint neighbourExternalEndPoint) { } public AgentCircuitData RequestClientInfo() { return new AgentCircuitData(); } public void CrossRegion(ulong newRegionHandle, 
Vector3 pos, Vector3 lookAt, IPEndPoint newRegionExternalEndPoint, string capsURL) { } public void SendMapBlock(List<MapBlockData> mapBlocks, uint flag) { } public void SendLocalTeleport(Vector3 position, Vector3 lookAt, uint flags) { } public void SendRegionTeleport(ulong regionHandle, byte simAccess, IPEndPoint regionExternalEndPoint, uint locationID, uint flags, string capsURL) { } public void SendTeleportFailed(string reason) { } public void SendTeleportStart(uint flags) { } public void SendTeleportProgress(uint flags, string message) { } public void SendMoneyBalance(UUID transaction, bool success, byte[] description, int balance) { } public void SendPayPrice(UUID objectID, int[] payPrice) { } public void SendCoarseLocationUpdate(List<UUID> users, List<Vector3> CoarseLocations) { } public void SendAvatarDataImmediate(ISceneEntity avatar) { } public void SendPrimUpdate(ISceneEntity entity, PrimUpdateFlags updateFlags) { } public void ReprioritizeUpdates() { } public void FlushPrimUpdates() { } public void SendInventoryFolderDetails(UUID ownerID, UUID folderID, List<InventoryItemBase> items, List<InventoryFolderBase> folders, int version, bool fetchFolders, bool fetchItems) { } public void SendInventoryItemDetails(UUID ownerID, InventoryItemBase item) { } public void SendInventoryItemCreateUpdate(InventoryItemBase Item, uint callbackId) { } public void SendRemoveInventoryItem(UUID itemID) { } public void SendTakeControls(int controls, bool passToAgent, bool TakeControls) { } public void SendTaskInventory(UUID taskID, short serial, byte[] fileName) { } public void SendBulkUpdateInventory(InventoryNodeBase node) { } public void SendXferPacket(ulong xferID, uint packet, byte[] data) { } public void SendAbortXferPacket(ulong xferID) { } public void SendEconomyData(float EnergyEfficiency, int ObjectCapacity, int ObjectCount, int PriceEnergyUnit, int PriceGroupCreate, int PriceObjectClaim, float PriceObjectRent, float PriceObjectScaleFactor, int PriceParcelClaim, float 
PriceParcelClaimFactor, int PriceParcelRent, int PricePublicObjectDecay, int PricePublicObjectDelete, int PriceRentLight, int PriceUpload, int TeleportMinPrice, float TeleportPriceExponent) { } public void SendAvatarPickerReply(AvatarPickerReplyAgentDataArgs AgentData, List<AvatarPickerReplyDataArgs> Data) { } public void SendAgentDataUpdate(UUID agentid, UUID activegroupid, string firstname, string lastname, ulong grouppowers, string groupname, string grouptitle) { } public void SendPreLoadSound(UUID objectID, UUID ownerID, UUID soundID) { } public void SendPlayAttachedSound(UUID soundID, UUID objectID, UUID ownerID, float gain, byte flags) { } public void SendTriggeredSound(UUID soundID, UUID ownerID, UUID objectID, UUID parentID, ulong handle, Vector3 position, float gain) { } public void SendAttachedSoundGainChange(UUID objectID, float gain) { } public void SendNameReply(UUID profileId, string firstname, string lastname) { } public void SendAlertMessage(string message) { IRC_SendChannelPrivmsg("Alert",message); } public void SendAgentAlertMessage(string message, bool modal) { } public void SendLoadURL(string objectname, UUID objectID, UUID ownerID, bool groupOwned, string message, string url) { IRC_SendChannelPrivmsg(objectname,url); } public void SendDialog(string objectname, UUID objectID, string ownerFirstName, string ownerLastName, string msg, UUID textureID, int ch, string[] buttonlabels) { } public bool AddMoney(int debit) { return true; } public void SendSunPos(Vector3 sunPos, Vector3 sunVel, ulong CurrentTime, uint SecondsPerSunCycle, uint SecondsPerYear, float OrbitalPosition) { } public void SendViewerEffect(ViewerEffectPacket.EffectBlock[] effectBlocks) { } public void SendViewerTime(int phase) { } public UUID GetDefaultAnimation(string name) { return UUID.Zero; } public void SendAvatarProperties(UUID avatarID, string aboutText, string bornOn, byte[] charterMember, string flAbout, uint flags, UUID flImageID, UUID imageID, string profileURL, UUID 
partnerID) { } public void SendScriptQuestion(UUID taskID, string taskName, string ownerName, UUID itemID, int question) { } public void SendHealth(float health) { } public void SendEstateList(UUID invoice, int code, UUID[] Data, uint estateID) { } public void SendBannedUserList(UUID invoice, EstateBan[] banlist, uint estateID) { } public void SendRegionInfoToEstateMenu(RegionInfoForEstateMenuArgs args) { } public void SendEstateCovenantInformation(UUID covenant) { } public void SendDetailedEstateData(UUID invoice, string estateName, uint estateID, uint parentEstate, uint estateFlags, uint sunPosition, UUID covenant, string abuseEmail, UUID estateOwner) { } public void SendLandProperties(int sequence_id, bool snap_selection, int request_result, ILandObject lo, float simObjectBonusFactor, int parcelObjectCapacity, int simObjectCapacity, uint regionFlags) { } public void SendLandAccessListData(List<UUID> avatars, uint accessFlag, int localLandID) { } public void SendForceClientSelectObjects(List<uint> objectIDs) { } public void SendCameraConstraint(Vector4 ConstraintPlane) { } public void SendLandObjectOwners(LandData land, List<UUID> groups, Dictionary<UUID, int> ownersAndCount) { } public void SendLandParcelOverlay(byte[] data, int sequence_id) { } public void SendParcelMediaCommand(uint flags, ParcelMediaCommandEnum command, float time) { } public void SendParcelMediaUpdate(string mediaUrl, UUID mediaTextureID, byte autoScale, string mediaType, string mediaDesc, int mediaWidth, int mediaHeight, byte mediaLoop) { } public void SendAssetUploadCompleteMessage(sbyte AssetType, bool Success, UUID AssetFullID) { } public void SendConfirmXfer(ulong xferID, uint PacketID) { } public void SendXferRequest(ulong XferID, short AssetType, UUID vFileID, byte FilePath, byte[] FileName) { } public void SendInitiateDownload(string simFileName, string clientFileName) { } public void SendImageFirstPart(ushort numParts, UUID ImageUUID, uint ImageSize, byte[] ImageData, byte 
imageCodec) { } public void SendImageNextPart(ushort partNumber, UUID imageUuid, byte[] imageData) { } public void SendImageNotFound(UUID imageid) { } public void SendShutdownConnectionNotice() { // TODO } public void SendSimStats(SimStats stats) { } public void SendObjectPropertiesFamilyData(ISceneEntity Entity, uint RequestFlags) { } public void SendObjectPropertiesReply(ISceneEntity entity) { } public void SendAgentOffline(UUID[] agentIDs) { } public void SendAgentOnline(UUID[] agentIDs) { } public void SendSitResponse(UUID TargetID, Vector3 OffsetPos, Quaternion SitOrientation, bool autopilot, Vector3 CameraAtOffset, Vector3 CameraEyeOffset, bool ForceMouseLook) { } public void SendAdminResponse(UUID Token, uint AdminLevel) { } public void SendGroupMembership(GroupMembershipData[] GroupMembership) { } public void SendGroupNameReply(UUID groupLLUID, string GroupName) { } public void SendJoinGroupReply(UUID groupID, bool success) { } public void SendEjectGroupMemberReply(UUID agentID, UUID groupID, bool success) { } public void SendLeaveGroupReply(UUID groupID, bool success) { } public void SendCreateGroupReply(UUID groupID, bool success, string message) { } public void SendLandStatReply(uint reportType, uint requestFlags, uint resultCount, LandStatReportItem[] lsrpia) { } public void SendScriptRunningReply(UUID objectID, UUID itemID, bool running) { } public void SendAsset(AssetRequestToClient req) { } public void SendTexture(AssetBase TextureAsset) { } public virtual void SetChildAgentThrottle(byte[] throttle) { } public byte[] GetThrottlesPacked(float multiplier) { return new byte[0]; } public event ViewerEffectEventHandler OnViewerEffect; public event Action<IClientAPI> OnLogout; public event Action<IClientAPI> OnConnectionClosed; public void SendBlueBoxMessage(UUID FromAvatarID, string FromAvatarName, string Message) { IRC_SendChannelPrivmsg(FromAvatarName, Message); } public void SendLogoutPacket() { Disconnect(); } public EndPoint GetClientEP() { return 
null; } public ClientInfo GetClientInfo() { return new ClientInfo(); } public void SetClientInfo(ClientInfo info) { } public void SetClientOption(string option, string value) { } public string GetClientOption(string option) { return String.Empty; } public void Terminate() { Disconnect(); } public void SendSetFollowCamProperties(UUID objectID, SortedDictionary<int, float> parameters) { } public void SendClearFollowCamProperties(UUID objectID) { } public void SendRegionHandle(UUID regoinID, ulong handle) { } public void SendParcelInfo(RegionInfo info, LandData land, UUID parcelID, uint x, uint y) { } public void SendScriptTeleportRequest(string objName, string simName, Vector3 pos, Vector3 lookAt) { } public void SendDirPlacesReply(UUID queryID, DirPlacesReplyData[] data) { } public void SendDirPeopleReply(UUID queryID, DirPeopleReplyData[] data) { } public void SendDirEventsReply(UUID queryID, DirEventsReplyData[] data) { } public void SendDirGroupsReply(UUID queryID, DirGroupsReplyData[] data) { } public void SendDirClassifiedReply(UUID queryID, DirClassifiedReplyData[] data) { } public void SendDirLandReply(UUID queryID, DirLandReplyData[] data) { } public void SendDirPopularReply(UUID queryID, DirPopularReplyData[] data) { } public void SendEventInfoReply(EventData info) { } public void SendMapItemReply(mapItemReply[] replies, uint mapitemtype, uint flags) { } public void SendAvatarGroupsReply(UUID avatarID, GroupMembershipData[] data) { } public void SendOfferCallingCard(UUID srcID, UUID transactionID) { } public void SendAcceptCallingCard(UUID transactionID) { } public void SendDeclineCallingCard(UUID transactionID) { } public void SendTerminateFriend(UUID exFriendID) { } public void SendAvatarClassifiedReply(UUID targetID, UUID[] classifiedID, string[] name) { } public void SendClassifiedInfoReply(UUID classifiedID, UUID creatorID, uint creationDate, uint expirationDate, uint category, string name, string description, UUID parcelID, uint parentEstate, UUID 
snapshotID, string simName, Vector3 globalPos, string parcelName, byte classifiedFlags, int price) { } public void SendAgentDropGroup(UUID groupID) { } public void RefreshGroupMembership() { } public void SendAvatarNotesReply(UUID targetID, string text) { } public void SendAvatarPicksReply(UUID targetID, Dictionary<UUID, string> picks) { } public void SendPickInfoReply(UUID pickID, UUID creatorID, bool topPick, UUID parcelID, string name, string desc, UUID snapshotID, string user, string originalName, string simName, Vector3 posGlobal, int sortOrder, bool enabled) { } public void SendAvatarClassifiedReply(UUID targetID, Dictionary<UUID, string> classifieds) { } public void SendAvatarInterestUpdate(IClientAPI client, uint wantmask, string wanttext, uint skillsmask, string skillstext, string languages) { } public void SendParcelDwellReply(int localID, UUID parcelID, float dwell) { } public void SendUserInfoReply(bool imViaEmail, bool visible, string email) { } public void SendUseCachedMuteList() { } public void SendMuteListUpdate(string filename) { } public void KillEndDone() { } public bool AddGenericPacketHandler(string MethodName, GenericMessage handler) { return true; } #endregion #region Implementation of IClientIPEndpoint public IPAddress EndPoint { get { return ((IPEndPoint) m_client.Client.RemoteEndPoint).Address; } } #endregion public void SendRebakeAvatarTextures(UUID textureID) { } public void SendAvatarInterestsReply(UUID avatarID, uint wantMask, string wantText, uint skillsMask, string skillsText, string languages) { } public void SendGroupAccountingDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID, int amt) { } public void SendGroupAccountingSummary(IClientAPI sender,UUID groupID, uint moneyAmt, int totalTier, int usedTier) { } public void SendGroupTransactionsSummaryDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID,int amt) { } public void SendGroupVoteHistory(UUID groupID, UUID transactionID, 
GroupVoteHistory[] Votes) { } public void SendGroupActiveProposals(UUID groupID, UUID transactionID, GroupActiveProposals[] Proposals) { } public void SendChangeUserRights(UUID agentID, UUID friendID, int rights) { } public void SendTextBoxRequest(string message, int chatChannel, string objectname, string ownerFirstName, string ownerLastName, UUID objectId) { } public void StopFlying(ISceneEntity presence) { } public void SendPlacesReply(UUID queryID, UUID transactionID, PlacesReplyData[] data) { } } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Text;
using Roslyn.Test.EditorUtilities;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.UnitTests.Extensions
{
    /// <summary>
    /// Tests for the <see cref="ITextSnapshotLine"/> whitespace extension helpers:
    /// <c>GetFirstNonWhitespacePosition</c>, <c>GetLastNonWhitespacePosition</c> and
    /// <c>IsEmptyOrWhitespace</c>. Each test creates a single-line text buffer from a
    /// string and inspects line 0 of its current snapshot.
    /// </summary>
    public class ITextSnapshotLineExtensionsTests
    {
        [WpfFact]
        public void GetFirstNonWhitespacePosition_EmptyLineReturnsNull()
        {
            var position = GetFirstNonWhitespacePosition(string.Empty);
            Assert.Null(position);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_WhitespaceLineReturnsNull1()
        {
            var position = GetFirstNonWhitespacePosition(" ");
            Assert.Null(position);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_WhitespaceLineReturnsNull2()
        {
            var position = GetFirstNonWhitespacePosition(" \t ");
            Assert.Null(position);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_WhitespaceLineReturnsNull3()
        {
            var position = GetFirstNonWhitespacePosition("\t\t");
            Assert.Null(position);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_TextLine()
        {
            var position = GetFirstNonWhitespacePosition("Foo");
            Assert.Equal(0, position.Value);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_TextLineStartingWithWhitespace1()
        {
            // Exactly four leading spaces, so the first non-whitespace character is at position 4.
            var position = GetFirstNonWhitespacePosition("    Foo");
            Assert.Equal(4, position.Value);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_TextLineStartingWithWhitespace2()
        {
            // space, tab, space -> three whitespace characters before 'F'.
            var position = GetFirstNonWhitespacePosition(" \t Foo");
            Assert.Equal(3, position.Value);
        }

        [WpfFact]
        public void GetFirstNonWhitespacePosition_TextLineStartingWithWhitespace3()
        {
            var position = GetFirstNonWhitespacePosition("\t\tFoo");
            Assert.Equal(2, position.Value);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_EmptyLineReturnsNull()
        {
            var position = GetLastNonWhitespacePosition(string.Empty);
            Assert.Null(position);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_WhitespaceLineReturnsNull1()
        {
            var position = GetLastNonWhitespacePosition(" ");
            Assert.Null(position);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_WhitespaceLineReturnsNull2()
        {
            var position = GetLastNonWhitespacePosition(" \t ");
            Assert.Null(position);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_WhitespaceLineReturnsNull3()
        {
            var position = GetLastNonWhitespacePosition("\t\t");
            Assert.Null(position);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_TextLine()
        {
            var position = GetLastNonWhitespacePosition("Foo");
            Assert.Equal(2, position.Value);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_TextLineEndingWithWhitespace1()
        {
            // Trailing whitespace is ignored; 'o' at index 2 is the last non-whitespace char.
            var position = GetLastNonWhitespacePosition("Foo ");
            Assert.Equal(2, position.Value);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_TextLineEndingWithWhitespace2()
        {
            var position = GetLastNonWhitespacePosition("Foo \t ");
            Assert.Equal(2, position.Value);
        }

        [WpfFact]
        public void GetLastNonWhitespacePosition_TextLineEndingWithWhitespace3()
        {
            var position = GetLastNonWhitespacePosition("Foo\t\t");
            Assert.Equal(2, position.Value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_EmptyLineReturnsTrue()
        {
            var value = IsEmptyOrWhitespace(string.Empty);
            Assert.True(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_WhitespaceLineReturnsTrue1()
        {
            var value = IsEmptyOrWhitespace(" ");
            Assert.True(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_WhitespaceLineReturnsTrue2()
        {
            var value = IsEmptyOrWhitespace("\t\t");
            Assert.True(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_WhitespaceLineReturnsTrue3()
        {
            var value = IsEmptyOrWhitespace(" \t ");
            Assert.True(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_TextLineReturnsFalse()
        {
            var value = IsEmptyOrWhitespace("Foo");
            Assert.False(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_TextLineStartingWithWhitespaceReturnsFalse1()
        {
            var value = IsEmptyOrWhitespace(" Foo");
            Assert.False(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_TextLineStartingWithWhitespaceReturnsFalse2()
        {
            var value = IsEmptyOrWhitespace(" \t Foo");
            Assert.False(value);
        }

        [WpfFact]
        public void IsEmptyOrWhitespace_TextLineStartingWithWhitespaceReturnsFalse3()
        {
            var value = IsEmptyOrWhitespace("\t\tFoo");
            Assert.False(value);
        }

        /// <summary>
        /// Creates a one-line buffer containing <paramref name="codeLine"/> and returns
        /// its first (and only) snapshot line.
        /// </summary>
        private static ITextSnapshotLine GetLine(string codeLine)
        {
            var snapshot = EditorFactory.CreateBuffer(TestExportProvider.ExportProviderWithCSharpAndVisualBasic, codeLine).CurrentSnapshot;
            return snapshot.GetLineFromLineNumber(0);
        }

        // The three helpers below are static (they use no instance state, CA1822) and
        // simply forward to the extension methods under test.

        private static bool IsEmptyOrWhitespace(string codeLine)
            => GetLine(codeLine).IsEmptyOrWhitespace();

        private static int? GetFirstNonWhitespacePosition(string codeLine)
            => GetLine(codeLine).GetFirstNonWhitespacePosition();

        private static int? GetLastNonWhitespacePosition(string codeLine)
            => GetLine(codeLine).GetLastNonWhitespacePosition();
    }
}
/*
 * PicoParser.cs
 *
 * THIS FILE HAS BEEN GENERATED AUTOMATICALLY. DO NOT EDIT!
 * (Regenerate from the Grammatica grammar file instead of changing this code.)
 */

using System.IO;

using PerCederberg.Grammatica.Runtime;

namespace MessyLab.PicoComputer {

    /**
     * <remarks>A token stream parser for picoComputer assembly source.
     * Builds the production patterns for the grammar and delegates the
     * actual parsing to the Grammatica recursive-descent engine.</remarks>
     */
    internal class PicoParser : RecursiveDescentParser {

        /**
         * <summary>An enumeration with the generated production node
         * identity constants (for synthetic subproductions that have no
         * named rule in the grammar).</summary>
         */
        private enum SynteticPatterns {
            SUBPRODUCTION_1 = 3001,
            SUBPRODUCTION_2 = 3002,
            SUBPRODUCTION_3 = 3003,
            SUBPRODUCTION_4 = 3004,
            SUBPRODUCTION_5 = 3005,
            SUBPRODUCTION_6 = 3006,
            SUBPRODUCTION_7 = 3007,
            SUBPRODUCTION_8 = 3008,
            SUBPRODUCTION_9 = 3009,
            SUBPRODUCTION_10 = 3010,
            SUBPRODUCTION_11 = 3011
        }

        /**
         * <summary>Creates a new parser with a default analyzer.</summary>
         *
         * <param name='input'>the input stream to read from</param>
         *
         * <exception cref='ParserCreationException'>if the parser
         * couldn't be initialized correctly</exception>
         */
        public PicoParser(TextReader input)
            : base(input) {

            CreatePatterns();
        }

        /**
         * <summary>Creates a new parser.</summary>
         *
         * <param name='input'>the input stream to read from</param>
         *
         * <param name='analyzer'>the analyzer to parse with</param>
         *
         * <exception cref='ParserCreationException'>if the parser
         * couldn't be initialized correctly</exception>
         */
        public PicoParser(TextReader input, PicoAnalyzer analyzer)
            : base(input, analyzer) {

            CreatePatterns();
        }

        /**
         * <summary>Creates a new tokenizer for this parser. Can be overridden
         * by a subclass to provide a custom implementation.</summary>
         *
         * <param name='input'>the input stream to read from</param>
         *
         * <returns>the tokenizer created</returns>
         *
         * <exception cref='ParserCreationException'>if the tokenizer
         * couldn't be initialized correctly</exception>
         */
        protected override Tokenizer NewTokenizer(TextReader input) {
            return new PicoTokenizer(input);
        }

        /**
         * <summary>Initializes the parser by creating all the production
         * patterns. Each pattern below is annotated with the grammar rule
         * it encodes; [X] means optional, {X} means zero or more.</summary>
         *
         * <exception cref='ParserCreationException'>if the parser
         * couldn't be initialized correctly</exception>
         */
        private void CreatePatterns() {
            ProductionPattern pattern;
            ProductionPatternAlternative alt;

            // Program = [Separator] Symbols Origin Lines
            pattern = new ProductionPattern((int) PicoConstants.PROGRAM, "Program");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.SEPARATOR, 0, 1);
            alt.AddProduction((int) PicoConstants.SYMBOLS, 1, 1);
            alt.AddProduction((int) PicoConstants.ORIGIN, 1, 1);
            alt.AddProduction((int) PicoConstants.LINES, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Separator = ENTER {ENTER}  (one or more line breaks)
            pattern = new ProductionPattern((int) PicoConstants.SEPARATOR, "Separator");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.ENTER, 1, 1);
            alt.AddToken((int) PicoConstants.ENTER, 0, -1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Symbols = Symbol {Symbol}
            pattern = new ProductionPattern((int) PicoConstants.SYMBOLS, "Symbols");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.SYMBOL, 1, 1);
            alt.AddProduction((int) PicoConstants.SYMBOL, 0, -1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Symbol = IDENTIFIER EQUALS Integer Separator
            pattern = new ProductionPattern((int) PicoConstants.SYMBOL, "Symbol");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.IDENTIFIER, 1, 1);
            alt.AddToken((int) PicoConstants.EQUALS, 1, 1);
            alt.AddProduction((int) PicoConstants.INTEGER, 1, 1);
            alt.AddProduction((int) PicoConstants.SEPARATOR, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Integer = [SIGN] NUMBER
            pattern = new ProductionPattern((int) PicoConstants.INTEGER, "Integer");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.SIGN, 0, 1);
            alt.AddToken((int) PicoConstants.NUMBER, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Origin = ORG NUMBER Separator
            pattern = new ProductionPattern((int) PicoConstants.ORIGIN, "Origin");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.ORG, 1, 1);
            alt.AddToken((int) PicoConstants.NUMBER, 1, 1);
            alt.AddProduction((int) PicoConstants.SEPARATOR, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Lines = Line {Line}
            pattern = new ProductionPattern((int) PicoConstants.LINES, "Lines");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.LINE, 1, 1);
            alt.AddProduction((int) PicoConstants.LINE, 0, -1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Line = [Subproduction1] Instruction Separator   (Subproduction1 is an optional label)
            pattern = new ProductionPattern((int) PicoConstants.LINE, "Line");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_1, 0, 1);
            alt.AddProduction((int) PicoConstants.INSTRUCTION, 1, 1);
            alt.AddProduction((int) PicoConstants.SEPARATOR, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Instruction = Move | Arithmetic | Branch | IO | Call | Return | End
            pattern = new ProductionPattern((int) PicoConstants.INSTRUCTION, "Instruction");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.MOVE, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARITHMETIC, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.BRANCH, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.IO, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.CALL, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.RETURN, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.END, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Move = MOV MoveArgs
            pattern = new ProductionPattern((int) PicoConstants.MOVE, "Move");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.MOV, 1, 1);
            alt.AddProduction((int) PicoConstants.MOVE_ARGS, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arithmetic = Subproduction2 ArithmeticArgs   (Subproduction2 = ADD|SUB|MUL|DIV)
            pattern = new ProductionPattern((int) PicoConstants.ARITHMETIC, "Arithmetic");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_2, 1, 1);
            alt.AddProduction((int) PicoConstants.ARITHMETIC_ARGS, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Branch = Subproduction3 BranchArgs   (Subproduction3 = BEQ|BGT)
            pattern = new ProductionPattern((int) PicoConstants.BRANCH, "Branch");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_3, 1, 1);
            alt.AddProduction((int) PicoConstants.BRANCH_ARGS, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // IO = Subproduction4 IOArgs   (Subproduction4 = IN|OUT)
            pattern = new ProductionPattern((int) PicoConstants.IO, "IO");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_4, 1, 1);
            alt.AddProduction((int) PicoConstants.IOARGS, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Call = JSR Arg3
            pattern = new ProductionPattern((int) PicoConstants.CALL, "Call");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.JSR, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG3, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Return = RTS
            pattern = new ProductionPattern((int) PicoConstants.RETURN, "Return");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.RTS, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // End = STOP [EndArgs]
            pattern = new ProductionPattern((int) PicoConstants.END, "End");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.STOP, 1, 1);
            alt.AddProduction((int) PicoConstants.END_ARGS, 0, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // MoveArgs = Arg34 COMMA Subproduction5
            pattern = new ProductionPattern((int) PicoConstants.MOVE_ARGS, "MoveArgs");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_5, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // ArithmeticArgs = Arg34 COMMA Subproduction6
            pattern = new ProductionPattern((int) PicoConstants.ARITHMETIC_ARGS, "ArithmeticArgs");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_6, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // BranchArgs = Subproduction8 COMMA Arg34
            pattern = new ProductionPattern((int) PicoConstants.BRANCH_ARGS, "BranchArgs");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_8, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // IOArgs = Arg34 [Subproduction9]
            pattern = new ProductionPattern((int) PicoConstants.IOARGS, "IOArgs");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_9, 0, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // EndArgs = Arg34 [Subproduction11]
            pattern = new ProductionPattern((int) PicoConstants.END_ARGS, "EndArgs");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_11, 0, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg12 = Arg1 | Arg2
            pattern = new ProductionPattern((int) PicoConstants.ARG12, "Arg12");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG1, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG2, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg34 = Arg3 | Arg4
            pattern = new ProductionPattern((int) PicoConstants.ARG34, "Arg34");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG3, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG4, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg123 = Arg1 | Arg2 | Arg3
            pattern = new ProductionPattern((int) PicoConstants.ARG123, "Arg123");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG1, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG2, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG3, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg1234 = Arg1 | Arg2 | Arg3 | Arg4
            pattern = new ProductionPattern((int) PicoConstants.ARG1234, "Arg1234");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG1, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG2, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG3, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG4, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg1 = Integer
            pattern = new ProductionPattern((int) PicoConstants.ARG1, "Arg1");
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.INTEGER, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg2 = HASH IDENTIFIER
            pattern = new ProductionPattern((int) PicoConstants.ARG2, "Arg2");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.HASH, 1, 1);
            alt.AddToken((int) PicoConstants.IDENTIFIER, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg3 = IDENTIFIER
            pattern = new ProductionPattern((int) PicoConstants.ARG3, "Arg3");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.IDENTIFIER, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Arg4 = LEFT_PAREN IDENTIFIER RIGHT_PAREN
            pattern = new ProductionPattern((int) PicoConstants.ARG4, "Arg4");
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.LEFT_PAREN, 1, 1);
            alt.AddToken((int) PicoConstants.IDENTIFIER, 1, 1);
            alt.AddToken((int) PicoConstants.RIGHT_PAREN, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction1 = IDENTIFIER COLON
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_1, "Subproduction1");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.IDENTIFIER, 1, 1);
            alt.AddToken((int) PicoConstants.COLON, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction2 = ADD | SUB | MUL | DIV
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_2, "Subproduction2");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.ADD, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.SUB, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.MUL, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.DIV, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction3 = BEQ | BGT
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_3, "Subproduction3");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.BEQ, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.BGT, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction4 = IN | OUT
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_4, "Subproduction4");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.IN, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.OUT, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction5 = Arg1234 | Arg34 COMMA Arg123
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_5, "Subproduction5");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG1234, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG123, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction6 = Arg34 COMMA Arg1234 | Arg12 COMMA Arg34
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_6, "Subproduction6");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG1234, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG12, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction7 = Arg34 | Arg1
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_7, "Subproduction7");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG1, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction8 = Arg34 COMMA Subproduction7 | Arg1 COMMA Arg34
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_8, "Subproduction8");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_7, 1, 1);
            pattern.AddAlternative(alt);
            alt = new ProductionPatternAlternative();
            alt.AddProduction((int) PicoConstants.ARG1, 1, 1);
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction9 = COMMA Arg1234
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_9, "Subproduction9");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG1234, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction10 = COMMA Arg34
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_10, "Subproduction10");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);

            // Subproduction11 = COMMA Arg34 [Subproduction10]
            pattern = new ProductionPattern((int) SynteticPatterns.SUBPRODUCTION_11, "Subproduction11");
            pattern.Synthetic = true;
            alt = new ProductionPatternAlternative();
            alt.AddToken((int) PicoConstants.COMMA, 1, 1);
            alt.AddProduction((int) PicoConstants.ARG34, 1, 1);
            alt.AddProduction((int) SynteticPatterns.SUBPRODUCTION_10, 0, 1);
            pattern.AddAlternative(alt);
            AddPattern(pattern);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Win32.SafeHandles; // TODO: Once we upgrade to C# 6, remove all of these and simply import the Http class. using CURLAUTH = Interop.Http.CURLAUTH; using CURLcode = Interop.Http.CURLcode; using CURLINFO = Interop.Http.CURLINFO; using CURLMcode = Interop.Http.CURLMcode; using CURLMSG = Interop.Http.CURLMSG; using CURLoption = Interop.Http.CURLoption; using SafeCurlMultiHandle = Interop.Http.SafeCurlMultiHandle; using CurlSeekResult = Interop.Http.CurlSeekResult; namespace System.Net.Http { internal partial class CurlHandler : HttpMessageHandler { /// <summary>Provides a multi handle and the associated processing for all requests on the handle.</summary> private sealed class MultiAgent { private static readonly Interop.Http.ReadWriteCallback s_receiveHeadersCallback = CurlReceiveHeadersCallback; private static readonly Interop.Http.ReadWriteCallback s_sendCallback = CurlSendCallback; private static readonly Interop.Http.SeekCallback s_seekCallback = CurlSeekCallback; private static readonly Interop.Http.ReadWriteCallback s_receiveBodyCallback = CurlReceiveBodyCallback; /// <summary> /// A collection of not-yet-processed incoming requests for work to be done /// by this multi agent. This can include making new requests, canceling /// active requests, or unpausing active requests. /// Protected by a lock on <see cref="_incomingRequests"/>. 
/// </summary> private readonly Queue<IncomingRequest> _incomingRequests = new Queue<IncomingRequest>(); /// <summary>Map of activeOperations, indexed by a GCHandle ptr.</summary> private readonly Dictionary<IntPtr, ActiveRequest> _activeOperations = new Dictionary<IntPtr, ActiveRequest>(); /// <summary> /// Special file descriptor used to wake-up curl_multi_wait calls. This is the read /// end of a pipe, with the write end written to when work is queued or when cancellation /// is requested. This is only valid while the worker is executing. /// </summary> private SafeFileHandle _wakeupRequestedPipeFd; /// <summary> /// Write end of the pipe connected to <see cref="_wakeupRequestedPipeFd"/>. /// This is only valid while the worker is executing. /// </summary> private SafeFileHandle _requestWakeupPipeFd; /// <summary> /// Task for the currently running worker, or null if there is no current worker. /// Protected by a lock on <see cref="_incomingRequests"/>. /// </summary> private Task _runningWorker; /// <summary>Queues a request for the multi handle to process.</summary> public void Queue(IncomingRequest request) { lock (_incomingRequests) { // Add the request, then initiate processing. _incomingRequests.Enqueue(request); EnsureWorkerIsRunning(); } } /// <summary>Gets the ID of the currently running worker, or null if there isn't one.</summary> internal int? RunningWorkerId { get { return _runningWorker != null ? (int?)_runningWorker.Id : null; } } /// <summary>Schedules the processing worker if one hasn't already been scheduled.</summary> private void EnsureWorkerIsRunning() { Debug.Assert(Monitor.IsEntered(_incomingRequests), "Needs to be called under _incomingRequests lock"); if (_runningWorker == null) { VerboseTrace("MultiAgent worker queued"); // Create pipe used to forcefully wake up curl_multi_wait calls when something important changes. 
// This is created here rather than in Process so that the pipe is available immediately // for subsequent queue calls to use. Debug.Assert(_wakeupRequestedPipeFd == null, "Read pipe should have been cleared"); Debug.Assert(_requestWakeupPipeFd == null, "Write pipe should have been cleared"); unsafe { int* fds = stackalloc int[2]; Interop.CheckIo(Interop.Sys.Pipe(fds)); _wakeupRequestedPipeFd = new SafeFileHandle((IntPtr)fds[Interop.Sys.ReadEndOfPipe], true); _requestWakeupPipeFd = new SafeFileHandle((IntPtr)fds[Interop.Sys.WriteEndOfPipe], true); } // Kick off the processing task. It's "DenyChildAttach" to avoid any surprises if // code happens to create attached tasks, and it's LongRunning because this thread // is likely going to sit around for a while in a wait loop (and the more requests // are concurrently issued to the same agent, the longer the thread will be around). const TaskCreationOptions Options = TaskCreationOptions.DenyChildAttach | TaskCreationOptions.LongRunning; _runningWorker = new Task(s => { VerboseTrace("MultiAgent worker starting"); var thisRef = (MultiAgent)s; try { // Do the actual processing thisRef.WorkerLoop(); } catch (Exception exc) { Debug.Fail("Unexpected exception from processing loop: " + exc.ToString()); } finally { VerboseTrace("MultiAgent worker shutting down"); lock (thisRef._incomingRequests) { // Close our wakeup pipe (ignore close errors). // This is done while holding the lock to prevent // subsequent Queue calls to see an improperly configured // set of descriptors. thisRef._wakeupRequestedPipeFd.Dispose(); thisRef._wakeupRequestedPipeFd = null; thisRef._requestWakeupPipeFd.Dispose(); thisRef._requestWakeupPipeFd = null; // In the time between we stopped processing and now, // more requests could have been added. If they were // kick off another processing loop. 
thisRef._runningWorker = null; if (thisRef._incomingRequests.Count > 0) { thisRef.EnsureWorkerIsRunning(); } } } }, this, CancellationToken.None, Options); _runningWorker.Start(TaskScheduler.Default); // started after _runningWorker field set to avoid race conditions } else // _workerRunning == true { // The worker is already running. If there are already queued requests, we're done. // However, if there aren't any queued requests, Process could be blocked inside of // curl_multi_wait, and we want to make sure it wakes up to see that there additional // requests waiting to be handled. So we write to the wakeup pipe. Debug.Assert(_incomingRequests.Count >= 1, "We just queued a request, so the count should be at least 1"); if (_incomingRequests.Count == 1) { RequestWakeup(); } } } /// <summary>Write a byte to the wakeup pipe.</summary> private void RequestWakeup() { unsafe { VerboseTrace("Writing to wakeup pipe"); byte b = 1; Interop.CheckIo(Interop.Sys.Write(_requestWakeupPipeFd, &b, 1)); } } /// <summary>Clears data from the wakeup pipe.</summary> /// <remarks> /// This must only be called when we know there's data to be read. /// The MultiAgent could easily deadlock if it's called when there's no data in the pipe. /// </remarks> private unsafe void ReadFromWakeupPipeWhenKnownToContainData() { // It's possible but unlikely that there will be tons of extra data in the pipe, // more than we end up reading out here (it's unlikely because we only write a byte to the pipe when // transitioning from 0 to 1 incoming request). In that unlikely event, the worst // case will be that the next one or more waits will wake up immediately, with each one // subsequently clearing out more of the pipe. 
    const int ClearBufferSize = 64; // sufficiently large to clear the pipe in any normal case
    byte* clearBuf = stackalloc byte[ClearBufferSize];
    int bytesRead = Interop.CheckIo(Interop.Sys.Read(_wakeupRequestedPipeFd, clearBuf, ClearBufferSize));
    VerboseTraceIf(bytesRead > 1, "Read more than one byte from wakeup pipe: " + bytesRead);
}

/// <summary>Requests that libcurl unpause the connection associated with this request.</summary>
internal void RequestUnpause(EasyRequest easy)
{
    VerboseTrace(easy: easy);
    Queue(new IncomingRequest { Easy = easy, Type = IncomingRequestType.Unpause });
}

/// <summary>Creates and configures a new multi handle.</summary>
private SafeCurlMultiHandle CreateAndConfigureMultiHandle()
{
    // Create the new handle
    SafeCurlMultiHandle multiHandle = Interop.Http.MultiCreate();
    if (multiHandle.IsInvalid)
    {
        throw CreateHttpRequestException();
    }

    // In support of HTTP/2, enable HTTP/2 connections to be multiplexed if possible.
    // We must only do this if the version of libcurl being used supports HTTP/2 multiplexing.
    // Due to a change in a libcurl signature, if we try to make this call on an older libcurl,
    // we'll end up accidentally and unconditionally enabling HTTP 1.1 pipelining.
    if (s_supportsHttp2Multiplexing)
    {
        ThrowIfCURLMError(Interop.Http.MultiSetOptionLong(multiHandle,
            Interop.Http.CURLMoption.CURLMOPT_PIPELINING,
            (long)Interop.Http.CurlPipe.CURLPIPE_MULTIPLEX));
    }

    return multiHandle;
}

/// <summary>
/// The agent's main processing loop, run on the dedicated worker task: drains the incoming
/// request queue, drives libcurl via curl_multi_perform, completes finished transfers, and
/// blocks in curl_multi_wait (also watching the wakeup pipe) until there's more work to do.
/// Returns once there are no queued requests and no active operations remaining.
/// </summary>
private void WorkerLoop()
{
    Debug.Assert(!Monitor.IsEntered(_incomingRequests), "No locks should be held while invoking Process");
    Debug.Assert(_runningWorker != null && _runningWorker.Id == Task.CurrentId, "This is the worker, so it must be running");
    Debug.Assert(_wakeupRequestedPipeFd != null && !_wakeupRequestedPipeFd.IsInvalid, "Should have a valid pipe for wake ups");

    // Create the multi handle to use for this round of processing.  This one handle will be used
    // to service all easy requests currently available and all those that come in while
    // we're processing other requests.  Once the work quiesces and there are no more requests
    // to process, this multi handle will be released as the worker goes away.  The next
    // time a request arrives and a new worker is spun up, a new multi handle will be created.
    SafeCurlMultiHandle multiHandle = CreateAndConfigureMultiHandle();

    // Clear our active operations table.  This should already be clear, either because
    // all previous operations completed without unexpected exception, or in the case of an
    // unexpected exception we should have cleaned up gracefully anyway.  But just in case...
    Debug.Assert(_activeOperations.Count == 0, "We shouldn't have any active operations when starting processing.");
    _activeOperations.Clear();

    bool endingSuccessfully = false;
    try
    {
        // Continue processing as long as there are any active operations
        while (true)
        {
            // First handle any requests in the incoming requests queue.
            while (true)
            {
                IncomingRequest request;
                lock (_incomingRequests)
                {
                    if (_incomingRequests.Count == 0) break;
                    request = _incomingRequests.Dequeue();
                }
                HandleIncomingRequest(multiHandle, request);
            }

            // If we have no active operations, we're done.
            if (_activeOperations.Count == 0)
            {
                endingSuccessfully = true;
                return;
            }

            // We have one or more active operations.  Run any work that needs to be run.
            ThrowIfCURLMError(Interop.Http.MultiPerform(multiHandle));

            // Complete and remove any requests that have finished being processed.
            CURLMSG message;
            IntPtr easyHandle;
            CURLcode result;
            while (Interop.Http.MultiInfoRead(multiHandle, out message, out easyHandle, out result))
            {
                Debug.Assert(message == CURLMSG.CURLMSG_DONE, "CURLMSG_DONE is supposed to be the only message type");
                if (message == CURLMSG.CURLMSG_DONE)
                {
                    // Map the completing easy handle back to its EasyRequest via the GCHandle
                    // pointer we stashed in CURLOPT_PRIVATE when the request was activated.
                    IntPtr gcHandlePtr;
                    CURLcode getInfoResult = Interop.Http.EasyGetInfoPointer(easyHandle, CURLINFO.CURLINFO_PRIVATE, out gcHandlePtr);
                    Debug.Assert(getInfoResult == CURLcode.CURLE_OK, "Failed to get info on a completing easy handle");
                    if (getInfoResult == CURLcode.CURLE_OK)
                    {
                        ActiveRequest completedOperation;
                        bool gotActiveOp = _activeOperations.TryGetValue(gcHandlePtr, out completedOperation);
                        Debug.Assert(gotActiveOp, "Expected to find GCHandle ptr in active operations table");
                        if (gotActiveOp)
                        {
                            DeactivateActiveRequest(multiHandle, completedOperation.Easy, gcHandlePtr, completedOperation.CancellationRegistration);
                            FinishRequest(completedOperation.Easy, result);
                        }
                    }
                }
            }

            // Wait for more things to do.
            bool isWakeupRequestedPipeActive;
            bool isTimeout;
            ThrowIfCURLMError(Interop.Http.MultiWait(multiHandle, _wakeupRequestedPipeFd, out isWakeupRequestedPipeActive, out isTimeout));
            if (isWakeupRequestedPipeActive)
            {
                // We woke up (at least in part) because a wake-up was requested.
                // Read the data out of the pipe to clear it.
                Debug.Assert(!isTimeout, "should not have timed out if isExtraFileDescriptorActive");
                VerboseTrace("curl_multi_wait wake-up notification");
                ReadFromWakeupPipeWhenKnownToContainData();
            }
            VerboseTraceIf(isTimeout, "curl_multi_wait timeout");

            // PERF NOTE: curl_multi_wait uses poll (assuming it's available), which is O(N) in terms of the number of fds
            // being waited on.  If this ends up being a scalability bottleneck, we can look into using the curl_multi_socket_*
            // APIs, which would let us switch to using epoll by being notified when sockets file descriptors are added or
            // removed and configuring the epoll context with EPOLL_CTL_ADD/DEL, which at the expense of a lot of additional
            // complexity would let us turn the O(N) operation into an O(1) operation.  The additional complexity would come
            // not only in the form of additional callbacks and managing the socket collection, but also in the form of timer
            // management, which is necessary when using the curl_multi_socket_* APIs and which we avoid by using just
            // curl_multi_wait/perform.
        }
    }
    finally
    {
        // If we got an unexpected exception, something very bad happened.  We may have some
        // operations that we initiated but that weren't completed.  Make sure to clean up any
        // such operations, failing them and releasing their resources.
        if (_activeOperations.Count > 0)
        {
            Debug.Assert(!endingSuccessfully, "We should only have remaining operations if we got an unexpected exception");
            foreach (KeyValuePair<IntPtr, ActiveRequest> pair in _activeOperations)
            {
                ActiveRequest failingOperation = pair.Value;
                IntPtr failingOperationGcHandle = pair.Key;
                DeactivateActiveRequest(multiHandle, failingOperation.Easy, failingOperationGcHandle, failingOperation.CancellationRegistration);

                // Complete the operation's task and clean up any of its resources
                failingOperation.Easy.FailRequest(CreateHttpRequestException());
                failingOperation.Easy.Cleanup(); // no active processing remains, so cleanup
            }

            // Clear the table.
            _activeOperations.Clear();
        }

        // Finally, dispose of the multi handle.
        multiHandle.Dispose();
    }
}

/// <summary>Dispatches a single dequeued request (new, cancel, or unpause) on the worker thread.</summary>
private void HandleIncomingRequest(SafeCurlMultiHandle multiHandle, IncomingRequest request)
{
    Debug.Assert(!Monitor.IsEntered(_incomingRequests), "Incoming requests lock should only be held while accessing the queue");
    VerboseTrace("Type: " + request.Type, easy: request.Easy);

    EasyRequest easy = request.Easy;
    switch (request.Type)
    {
        case IncomingRequestType.New:
            ActivateNewRequest(multiHandle, easy);
            break;

        case IncomingRequestType.Cancel:
            Debug.Assert(easy._associatedMultiAgent == this, "Should only cancel associated easy requests");
            Debug.Assert(easy._cancellationToken.IsCancellationRequested, "Cancellation should have been requested");
            FindAndFailActiveRequest(multiHandle, easy, new OperationCanceledException(easy._cancellationToken));
            break;

        case IncomingRequestType.Unpause:
            Debug.Assert(easy._associatedMultiAgent == this, "Should only unpause associated easy requests");
            if (!easy._easyHandle.IsClosed)
            {
                // NOTE(review): this Debug.Assert has a side-effecting call (FindActiveRequest),
                // so gcHandlePtr/ar are only populated in debug builds; they are unused below,
                // which keeps release behavior correct.
                IntPtr gcHandlePtr;
                ActiveRequest ar;
                Debug.Assert(FindActiveRequest(easy, out gcHandlePtr, out ar), "Couldn't find active request for unpause");

                CURLcode unpauseResult = Interop.Http.EasyUnpause(easy._easyHandle);
                try
                {
                    ThrowIfCURLEError(unpauseResult);
                }
                catch (Exception exc)
                {
                    FindAndFailActiveRequest(multiHandle, easy, exc);
                }
            }
            break;

        default:
            Debug.Fail("Invalid request type: " + request.Type);
            break;
    }
}

/// <summary>
/// Associates a brand-new easy request with this agent, registers its callbacks and
/// GCHandle with libcurl, adds it to the multi handle, and records it in the active
/// operations table.  Fails the request immediately if cancellation was already requested.
/// </summary>
private void ActivateNewRequest(SafeCurlMultiHandle multiHandle, EasyRequest easy)
{
    Debug.Assert(easy != null, "We should never get a null request");
    Debug.Assert(easy._associatedMultiAgent == null, "New requests should not be associated with an agent yet");

    // If cancellation has been requested, complete the request proactively
    if (easy._cancellationToken.IsCancellationRequested)
    {
        easy.FailRequest(new OperationCanceledException(easy._cancellationToken));
        easy.Cleanup(); // no active processing remains, so cleanup
        return;
    }

    // Otherwise, configure it.  Most of the configuration was already done when the EasyRequest
    // was created, but there's additional configuration we need to do specific to this
    // multi agent, specifically telling the easy request about its own GCHandle and setting
    // up callbacks for data processing.  Once it's configured, add it to the multi handle.
    GCHandle gcHandle = GCHandle.Alloc(easy);
    IntPtr gcHandlePtr = GCHandle.ToIntPtr(gcHandle);
    try
    {
        easy._associatedMultiAgent = this;
        easy.SetCurlOption(CURLoption.CURLOPT_PRIVATE, gcHandlePtr);
        easy.SetCurlCallbacks(gcHandlePtr, s_receiveHeadersCallback, s_sendCallback, s_seekCallback, s_receiveBodyCallback);
        ThrowIfCURLMError(Interop.Http.MultiAddHandle(multiHandle, easy._easyHandle));
    }
    catch (Exception exc)
    {
        // On any configuration failure, free the GCHandle we just allocated so the
        // EasyRequest isn't pinned forever, then fail and clean up the request.
        gcHandle.Free();
        easy.FailRequest(exc);
        easy.Cleanup(); // no active processing remains, so cleanup
        return;
    }

    // And if cancellation can be requested, hook up a cancellation callback.
    // This callback will put the easy request back into the queue, which will
    // ensure that a wake-up request has been issued.  When we pull
    // the easy request out of the request queue, we'll see that it's already
    // associated with this agent, meaning that it's a cancellation request,
    // and we'll deal with it appropriately.
    var cancellationReg = default(CancellationTokenRegistration);
    if (easy._cancellationToken.CanBeCanceled)
    {
        cancellationReg = easy._cancellationToken.Register(s =>
        {
            var state = (Tuple<MultiAgent, EasyRequest>)s;
            state.Item1.Queue(new IncomingRequest { Easy = state.Item2, Type = IncomingRequestType.Cancel });
        }, Tuple.Create<MultiAgent, EasyRequest>(this, easy));
    }

    // Finally, add it to our map.
    _activeOperations.Add(
        gcHandlePtr,
        new ActiveRequest { Easy = easy, CancellationRegistration = cancellationReg });
}

/// <summary>
/// Detaches a request from libcurl and from this agent: removes the easy handle from the
/// multi handle, frees the associated GCHandle, removes the entry from the active
/// operations table, and disposes the cancellation registration.
/// </summary>
private void DeactivateActiveRequest(
    SafeCurlMultiHandle multiHandle, EasyRequest easy,
    IntPtr gcHandlePtr, CancellationTokenRegistration cancellationRegistration)
{
    // Remove the operation from the multi handle so we can shut down the multi handle cleanly
    CURLMcode removeResult = Interop.Http.MultiRemoveHandle(multiHandle, easy._easyHandle);
    Debug.Assert(removeResult == CURLMcode.CURLM_OK, "Failed to remove easy handle"); // ignore cleanup errors in release

    // Release the associated GCHandle so that it's not kept alive forever
    if (gcHandlePtr != IntPtr.Zero)
    {
        try
        {
            GCHandle.FromIntPtr(gcHandlePtr).Free();
            _activeOperations.Remove(gcHandlePtr);
        }
        catch (InvalidOperationException)
        {
            Debug.Fail("Couldn't get/free the GCHandle for an active operation while shutting down due to failure");
        }
    }

    // Undo cancellation registration
    cancellationRegistration.Dispose();
}

/// <summary>Finds the active-operations entry for the given easy request, if one exists.</summary>
private bool FindActiveRequest(EasyRequest easy, out IntPtr gcHandlePtr, out ActiveRequest activeRequest)
{
    // We maintain an IntPtr=>ActiveRequest mapping, which makes it cheap to look-up by GCHandle ptr but
    // expensive to look up by EasyRequest.  If we find this becoming a bottleneck, we can add a reverse
    // map that stores the other direction as well.
    foreach (KeyValuePair<IntPtr, ActiveRequest> pair in _activeOperations)
    {
        if (pair.Value.Easy == easy)
        {
            gcHandlePtr = pair.Key;
            activeRequest = pair.Value;
            return true;
        }
    }

    gcHandlePtr = IntPtr.Zero;
    activeRequest = default(ActiveRequest);
    return false;
}

/// <summary>Deactivates the request (if still active) and fails it with the supplied error.</summary>
private void FindAndFailActiveRequest(SafeCurlMultiHandle multiHandle, EasyRequest easy, Exception error)
{
    VerboseTrace("Error: " + error.Message, easy: easy);

    IntPtr gcHandlePtr;
    ActiveRequest activeRequest;
    if (FindActiveRequest(easy, out gcHandlePtr, out activeRequest))
    {
        DeactivateActiveRequest(multiHandle, easy, gcHandlePtr, activeRequest.CancellationRegistration);
        easy.FailRequest(error);
        easy.Cleanup(); // no active processing remains, so we can cleanup
    }
    else
    {
        Debug.Assert(easy.Task.IsCompleted, "We should only not be able to find the request if it failed or we started to send back the response.");
    }
}

/// <summary>
/// Final processing for a transfer libcurl reported as done: updates the handler's
/// credential cache and cookie container, then completes or fails the response task
/// based on the transfer's CURLcode result.
/// </summary>
private void FinishRequest(EasyRequest completedOperation, CURLcode messageResult)
{
    VerboseTrace("messageResult: " + messageResult, easy: completedOperation);

    if (completedOperation._responseMessage.StatusCode != HttpStatusCode.Unauthorized)
    {
        if (completedOperation._handler.PreAuthenticate)
        {
            long authAvailable;
            if (Interop.Http.EasyGetInfoLong(completedOperation._easyHandle, CURLINFO.CURLINFO_HTTPAUTH_AVAIL, out authAvailable) == CURLcode.CURLE_OK)
            {
                completedOperation._handler.AddCredentialToCache(
                    completedOperation._requestMessage.RequestUri, (CURLAUTH)authAvailable, completedOperation._networkCredential);
            }
            // Ignore errors: no need to fail for the sake of putting the credentials into the cache
        }

        completedOperation._handler.AddResponseCookies(
            completedOperation._requestMessage.RequestUri, completedOperation._responseMessage);
    }

    // Complete or fail the request
    try
    {
        // A redirect to an unsupported protocol is tolerated rather than treated as a failure.
        bool unsupportedProtocolRedirect = messageResult == CURLcode.CURLE_UNSUPPORTED_PROTOCOL && completedOperation._isRedirect;
        if (!unsupportedProtocolRedirect)
        {
            ThrowIfCURLEError(messageResult);
        }
        completedOperation.EnsureResponseMessagePublished();
    }
    catch (Exception exc)
    {
        completedOperation.FailRequest(exc);
    }

    // At this point, we've completed processing the entire request, either due to error
    // or due to completing the entire response.
    completedOperation.Cleanup();
}

/// <summary>
/// libcurl CURLOPT_HEADERFUNCTION callback: parses the status line or a single response
/// header and stores it on the response message.  Returns the number of bytes consumed,
/// or a different value to abort the transfer.
/// </summary>
private static ulong CurlReceiveHeadersCallback(IntPtr buffer, ulong size, ulong nitems, IntPtr context)
{
    CurlHandler.VerboseTrace("size: " + size + ", nitems: " + nitems);
    size *= nitems;
    if (size == 0)
    {
        return 0;
    }

    EasyRequest easy;
    if (TryGetEasyRequestFromContext(context, out easy))
    {
        try
        {
            // The callback is invoked once per header; multi-line headers get merged into a single line.
            string responseHeader = Marshal.PtrToStringAnsi(buffer).Trim();
            HttpResponseMessage response = easy._responseMessage;

            if (!TryParseStatusLine(response, responseHeader, easy))
            {
                int index = 0;
                string headerName = CurlResponseParseUtils.ReadHeaderName(responseHeader, out index);
                if (headerName != null)
                {
                    string headerValue = responseHeader.Substring(index).Trim();
                    // Headers that the response object rejects are treated as content headers.
                    if (!response.Headers.TryAddWithoutValidation(headerName, headerValue))
                    {
                        response.Content.Headers.TryAddWithoutValidation(headerName, headerValue);
                    }
                    else if (easy._isRedirect && string.Equals(headerName, HttpKnownHeaderNames.Location, StringComparison.OrdinalIgnoreCase))
                    {
                        HandleRedirectLocationHeader(easy, headerValue);
                    }
                }
            }

            return size;
        }
        catch (Exception ex)
        {
            easy.FailRequest(ex); // cleanup will be handled by main processing loop
        }
    }

    // Returning a value other than size fails the callback and forces
    // request completion with an error
    return size - 1;
}

/// <summary>
/// libcurl CURLOPT_WRITEFUNCTION callback: publishes the response message (if not yet done)
/// and transfers received body data to the response stream.  Returning anything other than
/// the full size aborts the transfer.
/// </summary>
private static ulong CurlReceiveBodyCallback(
    IntPtr buffer, ulong size, ulong nitems, IntPtr context)
{
    CurlHandler.VerboseTrace("size: " + size + ", nitems: " + nitems);
    size *= nitems;

    EasyRequest easy;
    if (TryGetEasyRequestFromContext(context, out easy))
    {
        try
        {
            if (!(easy.Task.IsCanceled || easy.Task.IsFaulted))
            {
                // Complete the task if it hasn't already been.  This will make the
                // stream available to consumers.  A previous write callback
                // may have already completed the task to publish the response.
                easy.EnsureResponseMessagePublished();

                // Try to transfer the data to a reader.  This will return either the
                // amount of data transferred (equal to the amount requested
                // to be transferred), or it will return a pause request.
                return easy._responseMessage.ResponseStream.TransferDataToStream(buffer, (long)size);
            }
        }
        catch (Exception ex)
        {
            easy.FailRequest(ex); // cleanup will be handled by main processing loop
        }
    }

    // Returning a value other than size fails the callback and forces
    // request completion with an error.
    CurlHandler.VerboseTrace("Error: returning a bad size to abort the request");
    return (size > 0) ? size - 1 : 1;
}

/// <summary>
/// libcurl CURLOPT_READFUNCTION callback: supplies request body data to libcurl from
/// the request's content stream, pausing the transfer when data isn't yet available.
/// </summary>
private static ulong CurlSendCallback(IntPtr buffer, ulong size, ulong nitems, IntPtr context)
{
    CurlHandler.VerboseTrace("size: " + size + ", nitems: " + nitems);
    int length = checked((int)(size * nitems));
    Debug.Assert(length <= RequestBufferSize, "length " + length + " should not be larger than RequestBufferSize " + RequestBufferSize);
    if (length == 0)
    {
        return 0;
    }

    EasyRequest easy;
    if (TryGetEasyRequestFromContext(context, out easy))
    {
        Debug.Assert(easy._requestContentStream != null, "We should only be in the send callback if we have a request content stream");
        Debug.Assert(easy._associatedMultiAgent != null, "The request should be associated with a multi agent.");

        try
        {
            // Transfer data from the request's content stream to libcurl
            return TransferDataFromRequestStream(buffer, length, easy);
        }
        catch (Exception ex)
        {
            easy.FailRequest(ex); // cleanup will be handled by main processing loop
        }
    }

    // Something went wrong.
    return Interop.Http.CURL_READFUNC_ABORT;
}

/// <summary>
/// Transfers up to <paramref name="length"/> data from the <paramref name="easy"/>'s
/// request content (non-memory) stream to the buffer.
/// </summary>
/// <returns>The number of bytes transferred.</returns>
private static ulong TransferDataFromRequestStream(IntPtr buffer, int length, EasyRequest easy)
{
    MultiAgent multi = easy._associatedMultiAgent;

    // First check to see whether there's any data available from a previous asynchronous read request.
    // If there is, the transfer state's Task field will be non-null, with its Result representing
    // the number of bytes read.  The Buffer will then contain all of that read data.  If the Count
    // is 0, then this is the first time we're checking that Task, and so we populate the Count
    // from that read result.  After that, we can transfer as much data remains between Offset and
    // Count.  Multiple callbacks may pull from that one read.
    EasyRequest.SendTransferState sts = easy._sendTransferState;
    if (sts != null)
    {
        // Is there a previous read that may still have data to be consumed?
        if (sts._task != null)
        {
            if (!sts._task.IsCompleted)
            {
                // We have a previous read that's not yet completed.  This should be quite rare, but it can
                // happen when we're unpaused prematurely, potentially due to the request still finishing
                // being sent as the server starts to send a response.  Since we still have the outstanding
                // read, we simply re-pause.  When the task completes (which could have happened immediately
                // after the check), the continuation we previously created will fire and queue an unpause.
                // Since all of this processing is single-threaded on the current thread, that unpause request
                // is guaranteed to happen after this re-pause.
                multi.VerboseTrace("Re-pausing reading after a spurious un-pause", easy: easy);
                return Interop.Http.CURL_READFUNC_PAUSE;
            }

            // Determine how many bytes were read on the last asynchronous read.
            // If nothing was read, then we're done and can simply return 0 to indicate
            // the end of the stream.
            int bytesRead = sts._task.GetAwaiter().GetResult(); // will throw if read failed
            Debug.Assert(bytesRead >= 0 && bytesRead <= sts._buffer.Length, "ReadAsync returned an invalid result length: " + bytesRead);
            if (bytesRead == 0)
            {
                multi.VerboseTrace("End of stream from stored task", easy: easy);
                sts.SetTaskOffsetCount(null, 0, 0);
                return 0;
            }

            // If Count is still 0, then this is the first time after the task completed
            // that we're examining the data: transfer the bytesRead to the Count.
            if (sts._count == 0)
            {
                multi.VerboseTrace("ReadAsync completed with bytes: " + bytesRead, easy: easy);
                sts._count = bytesRead;
            }

            // Now Offset and Count are both accurate.  Determine how much data we can copy to libcurl...
            int availableData = sts._count - sts._offset;
            Debug.Assert(availableData > 0, "There must be some data still available.");

            // ... and copy as much of that as libcurl will allow.
            int bytesToCopy = Math.Min(availableData, length);
            Marshal.Copy(sts._buffer, sts._offset, buffer, bytesToCopy);
            multi.VerboseTrace("Copied " + bytesToCopy + " bytes from request stream", easy: easy);

            // Update the offset.  If we've gone through all of the data, reset the state
            // so that the next time we're called back we'll do a new read.
            sts._offset += bytesToCopy;
            Debug.Assert(sts._offset <= sts._count, "Offset should never exceed count");
            if (sts._offset == sts._count)
            {
                sts.SetTaskOffsetCount(null, 0, 0);
            }

            // Return the amount of data copied
            Debug.Assert(bytesToCopy > 0, "We should never return 0 bytes here.");
            return (ulong)bytesToCopy;
        }

        // sts was non-null but sts.Task was null, meaning there was no previous task/data
        // from which to satisfy any of this request.
    }
    else // sts == null
    {
        // Allocate a transfer state object to use for the remainder of this request.
        easy._sendTransferState = sts = new EasyRequest.SendTransferState();
    }

    Debug.Assert(sts != null, "By this point we should have a transfer object");
    Debug.Assert(sts._task == null, "There shouldn't be a task now.");
    Debug.Assert(sts._count == 0, "Count should be zero.");
    Debug.Assert(sts._offset == 0, "Offset should be zero.");

    // If we get here, there was no previously read data available to copy.
    // Initiate a new asynchronous read.
    Task<int> asyncRead = easy._requestContentStream.ReadAsyncInternal(
        sts._buffer, 0, Math.Min(sts._buffer.Length, length), easy._cancellationToken);
    Debug.Assert(asyncRead != null, "Badly implemented stream returned a null task from ReadAsync");

    // Even though it's "Async", it's possible this read could complete synchronously or extremely quickly.
    // Check to see if it did, in which case we can also satisfy the libcurl request synchronously in this callback.
    if (asyncRead.IsCompleted)
    {
        multi.VerboseTrace("ReadAsync completed immediately", easy: easy);

        // Get the amount of data read.
        int bytesRead = asyncRead.GetAwaiter().GetResult(); // will throw if read failed
        if (bytesRead == 0)
        {
            multi.VerboseTrace("End of stream from quick returning ReadAsync", easy: easy);
            return 0;
        }

        // Copy as much as we can.
        int bytesToCopy = Math.Min(bytesRead, length);
        Debug.Assert(bytesToCopy > 0 && bytesToCopy <= sts._buffer.Length, "ReadAsync quickly returned an invalid result length: " + bytesToCopy);
        Marshal.Copy(sts._buffer, 0, buffer, bytesToCopy);
        multi.VerboseTrace("Copied " + bytesToCopy + " from quick returning ReadAsync", easy: easy);

        // If we read more than we were able to copy, stash it away for the next read.
        if (bytesToCopy < bytesRead)
        {
            multi.VerboseTrace("Stashing away " + (bytesRead - bytesToCopy) + " bytes for next read.", easy: easy);
            sts.SetTaskOffsetCount(asyncRead, bytesToCopy, bytesRead);
        }

        // Return the number of bytes read.
        return (ulong)bytesToCopy;
    }

    // Otherwise, the read completed asynchronously.  Store the task, and hook up a continuation
    // such that the connection will be unpaused once the task completes.
    sts.SetTaskOffsetCount(asyncRead, 0, 0);
    asyncRead.ContinueWith((t, s) =>
    {
        EasyRequest easyRef = (EasyRequest)s;
        easyRef._associatedMultiAgent.RequestUnpause(easyRef);
    }, easy, CancellationToken.None, TaskContinuationOptions.ExecuteSynchronously, TaskScheduler.Default);

    // Then pause the connection.
    multi.VerboseTrace("Pausing the connection", easy: easy);
    return Interop.Http.CURL_READFUNC_PAUSE;
}

/// <summary>
/// libcurl CURLOPT_SEEKFUNCTION callback: supports rewinding the request body to the
/// beginning (e.g. for resubmits) when the content stream can be reset; any other seek
/// is reported as unsupported.
/// </summary>
private static CurlSeekResult CurlSeekCallback(IntPtr context, long offset, int origin)
{
    CurlHandler.VerboseTrace("offset: " + offset + ", origin: " + origin);

    EasyRequest easy;
    if (TryGetEasyRequestFromContext(context, out easy))
    {
        try
        {
            // If libcul is requesting we seek back to the beginning and if the request
            // content stream is in a position to reset itself, reset and let libcurl
            // know we did the seek; otherwise, let it know we can't seek.
            if (offset == 0 && origin == (int)SeekOrigin.Begin &&
                easy._requestContentStream != null && easy._requestContentStream.TryReset())
            {
                // Dump any state associated with the old stream's position
                if (easy._sendTransferState != null)
                {
                    easy._sendTransferState.SetTaskOffsetCount(null, 0, 0);
                }

                // Restart the transfer
                easy._requestContentStream.Run();

                return CurlSeekResult.CURL_SEEKFUNC_OK;
            }
            else
            {
                return CurlSeekResult.CURL_SEEKFUNC_CANTSEEK;
            }
        }
        catch (Exception ex)
        {
            easy.FailRequest(ex); // cleanup will be handled by main processing loop
        }
    }

    // Something went wrong
    return CurlSeekResult.CURL_SEEKFUNC_FAIL;
}

/// <summary>Recovers the EasyRequest stored in the GCHandle whose pointer libcurl passed back as the callback context.</summary>
private static bool TryGetEasyRequestFromContext(IntPtr context, out EasyRequest easy)
{
    // Get the EasyRequest from the context
    try
    {
        GCHandle handle = GCHandle.FromIntPtr(context);
        easy = (EasyRequest)handle.Target;
        Debug.Assert(easy != null, "Expected non-null EasyRequest in GCHandle");
        return easy != null;
    }
    catch (InvalidCastException)
    {
        Debug.Fail("EasyRequest wasn't the GCHandle's Target");
    }
    catch (InvalidOperationException)
    {
        Debug.Fail("Invalid GCHandle");
    }

    easy = null;
    return false;
}

[Conditional(VerboseDebuggingConditional)]
private void VerboseTrace(string text = null, [CallerMemberName] string memberName = null, EasyRequest easy = null)
{
    CurlHandler.VerboseTrace(text, memberName, easy, agent: this);
}

[Conditional(VerboseDebuggingConditional)]
private void VerboseTraceIf(bool condition, string text = null, [CallerMemberName] string memberName = null, EasyRequest easy = null)
{
    if (condition)
    {
        CurlHandler.VerboseTrace(text, memberName, easy, agent: this);
    }
}

/// <summary>Represents an active request currently being processed by the agent.</summary>
private struct ActiveRequest
{
    public EasyRequest Easy;
    public CancellationTokenRegistration CancellationRegistration;
}

/// <summary>Represents an incoming request to be processed by the agent.</summary>
internal struct IncomingRequest
{
    public IncomingRequestType Type;
    public EasyRequest Easy;
}

/// <summary>The type of an incoming request to be processed by the agent.</summary>
internal enum IncomingRequestType : byte
{
    /// <summary>A new request that's never been submitted to an agent.</summary>
    New,
    /// <summary>A request to cancel a request previously submitted to the agent.</summary>
    Cancel,
    /// <summary>A request to unpause the connection associated with a request previously submitted to the agent.</summary>
    Unpause
}
}
}
}
using UnityEngine; using UnityEngine.UI; using System.Collections; using System.Collections.Generic; using System; namespace SimpleEasing { public static class ExtensionMethods{ #region Transform Easing /// <summary> /// Eases the transform position to the target position. If no start position value is provided, the current position /// will serve as the start value /// </summary> /// <returns>The to.</returns> /// <param name="trans">Trans.</param> /// <param name="start">Start.</param> /// <param name="target">Target.</param> /// <param name="length">Length.</param> /// <param name="easingChoice">Easing choice.</param> /// <param name="unscaled">If set to <c>true</c> unscaled.</param> /// <param name="repeat">Repeat.</param> /// <param name="onComplete">On complete.</param> public static Coroutine MoveTo(this Transform trans, Vector3 start, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { return TweenManager.instance.PlayTween(new Tween((Vector3 pos)=>{ trans.position = pos; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine MoveTo(this Transform trans, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { Vector3 start = trans.position; return SimpleEasing.ExtensionMethods.MoveTo(trans, start, target, length, easingChoice, unscaled, repeat, onComplete); } /// <summary> /// Eases the local transform position to the target local position. 
If no start position value is provided, the current position /// will serve as the start value /// </summary> /// <returns>The move to.</returns> /// <param name="trans">Trans.</param> /// <param name="start">Start.</param> /// <param name="target">Target.</param> /// <param name="length">Length.</param> /// <param name="easingChoice">Easing choice.</param> /// <param name="unscaled">If set to <c>true</c> unscaled.</param> /// <param name="repeat">Repeat.</param> /// <param name="onComplete">On complete.</param> public static Coroutine LocalMoveTo(this Transform trans, Vector3 start, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { return TweenManager.instance.PlayTween(new Tween((Vector3 pos)=>{ trans.localPosition = pos; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine LocalMoveTo(this Transform trans, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { Vector3 start = trans.localPosition; return SimpleEasing.ExtensionMethods.LocalMoveTo(trans, start, target, length, easingChoice, unscaled, repeat, onComplete); } /// <summary> /// Eases the transform scale to the target scale. 
If no start scale value is provided, the current transform /// scale will serve as the start value /// </summary> /// <returns>The to.</returns> /// <param name="trans">Trans.</param> /// <param name="start">Start.</param> /// <param name="target">Target.</param> /// <param name="length">Length.</param> /// <param name="easingChoice">Easing choice.</param> /// <param name="unscaled">If set to <c>true</c> unscaled.</param> /// <param name="repeat">Repeat.</param> /// <param name="onComplete">On complete.</param> public static Coroutine ScaleTo(this Transform trans, Vector3 start, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { return TweenManager.instance.PlayTween(new Tween((Vector3 scale)=>{ trans.localScale = scale; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine ScaleTo(this Transform trans, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { Vector3 start = trans.localScale; return ExtensionMethods.ScaleTo(trans, start, target, length, easingChoice, unscaled, repeat, onComplete); } public static Coroutine RotateTo(this Transform trans, Vector3 start, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { return SimpleEasing.ExtensionMethods.RotateTo(trans, Quaternion.Euler(start), Quaternion.Euler(target), length, easingChoice, unscaled, repeat, onComplete); } public static Coroutine RotateTo(this Transform trans, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { return SimpleEasing.ExtensionMethods.RotateTo(trans, Quaternion.Euler(target), length, easingChoice, unscaled, repeat, onComplete); } public static Coroutine RotateTo(this 
Transform trans, Quaternion target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { Quaternion start = trans.localRotation; return ExtensionMethods.RotateTo(trans, start, target, length, easingChoice, unscaled, repeat, onComplete); } /// <summary> /// Eases the transform rotation to the target rotation. If no start rotation value is provided, the current transform /// rotation will serve as the start value /// </summary> /// <returns>The to.</returns> /// <param name="trans">Trans.</param> /// <param name="start">Start.</param> /// <param name="target">Target.</param> /// <param name="length">Length.</param> /// <param name="easingChoice">Easing choice.</param> /// <param name="unscaled">If set to <c>true</c> unscaled.</param> /// <param name="repeat">Repeat.</param> /// <param name="onComplete">On complete.</param> public static Coroutine RotateTo(this Transform trans, Quaternion start, Quaternion target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { return TweenManager.instance.PlayTween(new Tween((Quaternion rot)=>{ trans.localRotation = rot; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } #endregion public static Coroutine FadeTo(this CanvasGroup cgroup, float target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { float start = cgroup.alpha; return TweenManager.instance.PlayTween(new Tween((float newAlpha)=>{ cgroup.alpha = newAlpha; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine EaseFill(this Image image, float target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { float start = image.fillAmount; return TweenManager.instance.PlayTween(new Tween((float 
newFill)=>{ image.fillAmount = newFill; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine EaseLayoutMinValues(this LayoutElement layout, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { Vector2 start = new Vector2(layout.minWidth, layout.minHeight); return TweenManager.instance.PlayTween(new Tween((Vector2 newDimensions)=>{ layout.minWidth = newDimensions.x; layout.minHeight = newDimensions.y; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine Jump(this ScrollRect scrollRect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { Vector2 start = scrollRect.normalizedPosition; return TweenManager.instance.PlayTween(new Tween((Vector2 newPosition)=>{ scrollRect.normalizedPosition = newPosition; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine JumpVertical(this ScrollRect scrollRect, float target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { float start = scrollRect.verticalNormalizedPosition; return TweenManager.instance.PlayTween(new Tween((float newPosition)=>{ scrollRect.verticalNormalizedPosition = newPosition; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public static Coroutine JumpHorizontal(this ScrollRect scrollRect, float target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null) { float start = scrollRect.horizontalNormalizedPosition; return TweenManager.instance.PlayTween(new Tween((float newPosition)=>{ scrollRect.horizontalNormalizedPosition = newPosition; }, start, target, length, easingChoice, unscaled, repeat, onComplete)); } public 
static Coroutine AnchoredPositionEase(this RectTransform rect, Vector3 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        // Tween anchoredPosition3D from its current value to target.
        Vector3 start = rect.anchoredPosition3D;
        return TweenManager.instance.PlayTween(new Tween((Vector3 newPosition) => { rect.anchoredPosition3D = newPosition; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the RectTransform pivot from its current value to the target pivot.
    /// </summary>
    public static Coroutine PivotEase(this RectTransform rect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Vector2 start = rect.pivot;
        return TweenManager.instance.PlayTween(new Tween((Vector2 newPivot) => { rect.pivot = newPivot; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the RectTransform anchorMax from its current value to the target.
    /// </summary>
    public static Coroutine AnchorMaxEase(this RectTransform rect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Vector2 start = rect.anchorMax;
        return TweenManager.instance.PlayTween(new Tween((Vector2 newMax) => { rect.anchorMax = newMax; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the RectTransform anchorMin from its current value to the target.
    /// </summary>
    public static Coroutine AnchorMinEase(this RectTransform rect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Vector2 start = rect.anchorMin;
        return TweenManager.instance.PlayTween(new Tween((Vector2 newMin) => { rect.anchorMin = newMin; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the RectTransform offsetMax from its current value to the target.
    /// </summary>
    public static Coroutine OffsetMaxEase(this RectTransform rect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Vector2 start = rect.offsetMax;
        return TweenManager.instance.PlayTween(new Tween((Vector2 newMax) => { rect.offsetMax = newMax; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the RectTransform offsetMin from its current value to the target.
    /// </summary>
    public static Coroutine OffsetMinEase(this RectTransform rect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Vector2 start = rect.offsetMin;
        // FIX: the setter previously wrote rect.anchorMin (copy-paste from AnchorMinEase),
        // so this method silently animated the wrong property. The start value was
        // already read from offsetMin, so offsetMin is clearly the intended target.
        return TweenManager.instance.PlayTween(new Tween((Vector2 newMin) => { rect.offsetMin = newMin; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the RectTransform sizeDelta from its current value to the target.
    /// </summary>
    public static Coroutine SizeDeltaEase(this RectTransform rect, Vector2 target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Vector2 start = rect.sizeDelta;
        return TweenManager.instance.PlayTween(new Tween((Vector2 newDelta) => { rect.sizeDelta = newDelta; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }

    /// <summary>
    /// Eases the material's main color from its current value to the target color.
    /// </summary>
    public static Coroutine EaseMainColor(this Material material, Color target, float length, EasingTypes easingChoice, bool unscaled = false, TweenRepeat repeat = TweenRepeat.Once, Action onComplete = null)
    {
        Color start = material.color;
        return TweenManager.instance.PlayTween(new Tween((Color newColor) => { material.color = newColor; }, start, target, length, easingChoice, unscaled, repeat, onComplete));
    }
}
}
// *********************************************************************** // Copyright (c) 2016 Charlie Poole // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// ***********************************************************************

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Xml;

namespace TestCentric.Gui.Presenters
{
    using Model;
    using Views;

    /// <summary>
    /// Presenter (MVP) for the test-properties view: wires model events to the
    /// view and renders the currently selected test item plus any result it has.
    /// </summary>
    public class TestPropertiesPresenter
    {
        private readonly ITestPropertiesView _view;
        private readonly ITestModel _model;

        // The most recently selected item; re-rendered on run completion and
        // when the "display hidden properties" option changes.
        private ITestItem _selectedItem;

        /// <summary>
        /// Creates the presenter, hides the view until a test is loaded and
        /// subscribes to model/view events.
        /// </summary>
        public TestPropertiesPresenter(ITestPropertiesView view, ITestModel model)
        {
            _view = view;
            _model = model;

            _view.Visible = false;

            WireUpEvents();
        }

        // Subscribes to the model and view events this presenter reacts to.
        private void WireUpEvents()
        {
            _model.Events.TestLoaded += (ea) => _view.Visible = true;
            _model.Events.TestReloaded += (ea) => _view.Visible = true;
            _model.Events.TestUnloaded += (ea) => _view.Visible = false;
            _model.Events.RunFinished += (ea) => DisplaySelectedItem();
            _model.Events.SelectedItemChanged += (ea) => OnSelectedItemChanged(ea.TestItem);
            _view.DisplayHiddenPropertiesChanged += () => DisplaySelectedItem();
        }

        // Remembers the new selection and refreshes the display.
        private void OnSelectedItemChanged(ITestItem testItem)
        {
            _selectedItem = testItem;
            DisplaySelectedItem();
        }

        // Renders the currently selected item: test info when it is a TestNode,
        // plus result info when a result is available for it.
        private void DisplaySelectedItem()
        {
            TestNode testNode = _selectedItem as TestNode;
            ResultNode resultNode = null;

            // TODO: Insert checks for errors in the XML
            if (_selectedItem != null)
            {
                _view.Header = _selectedItem.Name;

                if (testNode != null)
                {
                    _view.TestPanel.Visible = true;
                    _view.SuspendLayout();
                    DisplayTestInfo(testNode);
                    resultNode = _model.GetResultForTest(testNode.Id);
                    if (resultNode != null)
                        DisplayResultInfo(resultNode);
                    _view.ResumeLayout();
                }
            }

            // Non-TestNode selections (e.g. groups) hide the test panel entirely.
            _view.TestPanel.Visible = testNode != null;

            // HACK: results won't display on Linux otherwise
            if (Path.DirectorySeparatorChar == '/') // Running on Linux or Unix
                _view.ResultPanel.Visible = true;
            else
                _view.ResultPanel.Visible = resultNode != null;

            // TODO: We should actually try to set the font for bold items
            // dynamically, since the global application font may be changed.
        }

        // Copies the static test metadata onto the view.
        private void DisplayTestInfo(TestNode testNode)
        {
            _view.TestType = GetTestType(testNode);
            _view.FullName = testNode.FullName;
            _view.Description = testNode.GetProperty("Description");
            _view.Categories = testNode.GetPropertyList("Category");
            _view.TestCount = testNode.TestCount.ToString();
            _view.RunState = testNode.RunState.ToString();
            _view.SkipReason = testNode.GetProperty("_SKIPREASON");

            DisplayTestProperties(testNode);
        }

        // Joins all (optionally hidden) properties, one per line, into the view.
        private void DisplayTestProperties(TestNode testNode)
        {
            var sb = new StringBuilder();
            foreach (string item in testNode.GetAllProperties(_view.DisplayHiddenProperties))
            {
                if (sb.Length > 0)
                    sb.Append(Environment.NewLine);
                sb.Append(item);
            }

            _view.Properties = sb.ToString();
        }

        // Copies outcome, timing and assertion data from a result onto the view.
        private void DisplayResultInfo(ResultNode resultNode)
        {
            _view.Outcome = resultNode.Outcome.ToString();
            _view.ElapsedTime = resultNode.Duration.ToString("f3");
            _view.AssertCount = resultNode.AssertCount.ToString();

            DisplayAssertionResults(resultNode);
            DisplayOutput(resultNode);
        }

        // Builds the numbered list of assertion results. When the result XML has
        // no <assertion> entries, a synthetic one is created from the failure or
        // skip reason so the user still sees a message.
        private void DisplayAssertionResults(ResultNode resultNode)
        {
            StringBuilder sb;
            var assertionNodes = resultNode.Xml.SelectNodes("assertions/assertion");
            var assertionResults = new List<AssertionResult>();

            foreach (XmlNode assertion in assertionNodes)
                assertionResults.Add(new AssertionResult(assertion));

            // If there were no actual assertionresult entries, we fake
            // one if there is a message to display
            if (assertionResults.Count == 0)
            {
                if (resultNode.Outcome.Status == TestStatus.Failed)
                {
                    string status = resultNode.Outcome.Label ?? "Failed";
                    XmlNode failure = resultNode.Xml.SelectSingleNode("failure");
                    if (failure != null)
                        assertionResults.Add(new AssertionResult(failure, status));
                }
                else
                {
                    string status = resultNode.Outcome.Label ?? "Skipped";
                    XmlNode reason = resultNode.Xml.SelectSingleNode("reason");
                    if (reason != null)
                        assertionResults.Add(new AssertionResult(reason, status));
                }
            }

            sb = new StringBuilder();
            int index = 0;
            foreach (var assertion in assertionResults)
            {
                sb.AppendFormat("{0}) {1}\n", ++index, assertion.Status);
                sb.AppendLine(assertion.Message);
                if (assertion.StackTrace != null)
                    sb.AppendLine(AdjustStackTrace(assertion.StackTrace));
            }

            _view.Assertions = sb.ToString();
        }

        // Some versions of the framework return the stacktrace
        // without leading spaces, so we add them if needed.
        // TODO: Make sure this is valid across various cultures.
        private const string LEADING_SPACES = " ";

        // Prefixes every stack-trace line with LEADING_SPACES unless the trace
        // already starts with them (assumes all lines are formatted alike).
        private static string AdjustStackTrace(string stackTrace)
        {
            // Check if no adjustment needed. We assume that all
            // lines start the same - either with or without spaces.
            if (stackTrace.StartsWith(LEADING_SPACES))
                return stackTrace;

            var sr = new StringReader(stackTrace);
            var sb = new StringBuilder();

            string line = sr.ReadLine();
            while (line != null)
            {
                sb.Append(LEADING_SPACES);
                sb.AppendLine(line);
                line = sr.ReadLine();
            }

            return sb.ToString();
        }

        // Copies captured test output (if any) onto the view.
        private void DisplayOutput(ResultNode resultNode)
        {
            var output = resultNode.Xml.SelectSingleNode("output");
            _view.Output = output != null ? output.InnerText : "";
        }

        /// <summary>
        /// Returns the display type for a test node. A non-runnable "Assembly"
        /// whose file extension is neither .exe nor .dll is reported as "Unknown".
        /// </summary>
        public static string GetTestType(TestNode testNode)
        {
            if (testNode.RunState == RunState.NotRunnable && testNode.Type == "Assembly" && !String.IsNullOrEmpty(testNode.FullName))
            {
                var fi = new FileInfo(testNode.FullName);
                string extension = fi.Extension.ToLower();
                if (extension != ".exe" && extension != ".dll")
                    return "Unknown";
            }
            return testNode.Type;
        }

        #region Helper Methods

        // Sometimes, the message may have leading blank lines and/or
        // may be longer than Windows really wants to display.
        // NOTE(review): this helper is not referenced anywhere in this file;
        // it trims leading whitespace-only lines and caps length at 64000 chars.
        private string TrimMessage(string message)
        {
            if (message != null)
            {
                if (message.Length > 64000)
                    message = message.Substring(0, 64000);

                int start = 0;
                for (int i = 0; i < message.Length; i++)
                {
                    switch (message[i])
                    {
                        case ' ':
                        case '\t':
                            break;
                        case '\r':
                        case '\n':
                            // Remember position just past the most recent newline.
                            start = i + 1;
                            break;
                        default:
                            // First non-whitespace character: drop the blank lines before it.
                            return start == 0 ? message : message.Substring(start);
                    }
                }
            }

            return message;
        }

        #endregion

        #region Nested AssertionResult Class

        // Lightweight view of a single <assertion>, <failure> or <reason> node.
        public struct AssertionResult
        {
            public AssertionResult(XmlNode assertion, string status) : this(assertion)
            {
                Status = status;
            }

            public AssertionResult(XmlNode assertion)
            {
                Status = assertion.GetAttribute("label") ?? assertion.GetAttribute("result");
                Message = assertion.SelectSingleNode("message")?.InnerText;
                StackTrace = assertion.SelectSingleNode("stack-trace")?.InnerText;
            }

            public string Status { get; }
            public string Message { get; }
            public string StackTrace { get; }
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Xtc.Common.Cli.Commands;
using Microsoft.Xtc.Common.Services;
using Microsoft.Xtc.Common.Services.Logging;
using Microsoft.Xtc.TestCloud.ObjectModel;
using Microsoft.Xtc.TestCloud.Services;
using Microsoft.Xtc.TestCloud.Utilities;
using Newtonsoft.Json;
using LoggerExtensions = Microsoft.Xtc.Common.Services.Logging.LoggerExtensions;

namespace Microsoft.Xtc.TestCloud.Commands
{
    /// <summary>
    /// Generic command executor for currently supported XTC tests
    /// </summary>
    public class UploadXTCTestCommandExecutor : ICommandExecutor
    {
        private const string TestCloudEndpointEnvironmentVariable = "XTC_ENDPOINT";

        public static readonly EventId PackagingFileEventId = 1;
        public static readonly EventId CheckHashResultEventId = 2;
        public static readonly EventId UploadTestsResultEventId = 3;
        public static readonly EventId CheckStatusResultEventId = 4;
        public static readonly EventId CustomEndpointEventId = 5;

        // Fallback poll interval when the server does not suggest a wait time.
        protected static readonly TimeSpan _defaultWaitTime = TimeSpan.FromSeconds(10);
        protected static readonly Uri _defaultTestCloudUri = new Uri("https://testcloud.xamarin.com/");

        private readonly TestCloudProxy _testCloudProxy;
        private readonly LogsRecorder _logsRecorder;
        private readonly DSymDirectory _dSymDirectory;
        protected readonly IUploadTestsCommandOptions _options;
        private ILogger _logger;
        private IWorkspace _workspace;
        private string _testName;

        protected ILogger Logger
        {
            get { return _logger; }
            set { _logger = value; }
        }

        // Workspace and TestName must be assigned by the deriving executor
        // before ExecuteAsync is called.
        protected IWorkspace Workspace
        {
            get { return _workspace; }
            set { _workspace = value; }
        }

        protected string TestName
        {
            get { return _testName; }
            set { _testName = value; }
        }

        /// <summary>
        /// Creates the executor, resolves the Test Cloud endpoint (honoring the
        /// XTC_ENDPOINT environment variable) and prepares the upload proxy.
        /// </summary>
        /// <exception cref="ArgumentNullException">Any constructor argument is null.</exception>
        public UploadXTCTestCommandExecutor(IUploadTestsCommandOptions options, ILoggerService loggerService, LogsRecorder logsRecorder)
        {
            if (options == null)
                throw new ArgumentNullException(nameof(options));
            if (loggerService == null)
                throw new ArgumentNullException(nameof(loggerService));
            // FIX: logsRecorder was stored without validation although the other
            // arguments are checked; a null would only surface much later in
            // WriteAsyncJsonResultToConsole. Fail fast and consistently instead.
            if (logsRecorder == null)
                throw new ArgumentNullException(nameof(logsRecorder));

            _options = options;
            _logsRecorder = logsRecorder;
            _logger = loggerService.CreateLogger<UploadXTCTestCommandExecutor>();

            var testCloudUri = GetTestCloudUri();
            _testCloudProxy = new TestCloudProxy(testCloudUri, loggerService);
            _dSymDirectory = options.DSymDirectory != null ? new DSymDirectory(options.DSymDirectory) : null;
        }

        /// <summary>
        /// Validates command options, the app file (Android without shared
        /// runtime, or iOS), the workspace and the optional dSYM directory.
        /// </summary>
        /// <exception cref="CommandException">Validation fails.</exception>
        protected virtual void ValidateOptions()
        {
            _options.Validate();

            if (ValidationHelper.IsAndroidApp(_options.AppFile))
            {
                if (ValidationHelper.UsesSharedRuntime(_options.AppFile))
                {
                    // FIX: corrected typo "compile you app" -> "compile your app"
                    // in the user-facing error message.
                    throw new CommandException(
                        UploadTestsCommand.CommandName,
                        @"Xamarin Test Cloud doesn't yet support shared runtime apps. To test your app it needs to be compiled for release. You can learn how to compile your app for release here: http://docs.xamarin.com/guides/android/deployment%2C_testing%2C_and_metrics/publishing_an_application/part_1_-_preparing_an_application_for_release",
                        (int) UploadCommandExitCodes.InvalidAppFile);
                }
            }
            else if (!ValidationHelper.IsIosApp(_options.AppFile))
            {
                throw new CommandException(
                    UploadTestsCommand.CommandName,
                    @"Provided file with application must be either Android or iOS application",
                    (int) UploadCommandExitCodes.InvalidAppFile);
            }

            _workspace.Validate();
            _dSymDirectory?.Validate();
        }

        // Returns the Test Cloud endpoint, preferring the XTC_ENDPOINT
        // environment variable over the default production URI.
        private Uri GetTestCloudUri()
        {
            var customEndpoint = Environment.GetEnvironmentVariable(TestCloudEndpointEnvironmentVariable);
            if (string.IsNullOrEmpty(customEndpoint))
            {
                return _defaultTestCloudUri;
            }

            _logger.LogDebug(
                CustomEndpointEventId,
                $"Environment variable {TestCloudEndpointEnvironmentVariable} was set. " +
                $"Using custom Test Cloud endpoint URI: {customEndpoint}.");

            try
            {
                return new Uri(customEndpoint);
            }
            catch (UriFormatException)
            {
                throw new CommandException(
                    UploadTestsCommand.CommandName,
                    $"Invalid custom Test Cloud endpoint URI: {customEndpoint}",
                    (int)UploadCommandExitCodes.InvalidTestCloudEndpoint);
            }
        }

        /// <summary>
        /// Runs the full upload pipeline: validate, version check, hash
        /// negotiation, upload and (unless async) wait for the job result.
        /// Note: exits the process with the job's exit code in synchronous mode.
        /// </summary>
        public async Task ExecuteAsync()
        {
            ValidateOptions();
            await CheckVersionAsync();

            using (var sha256 = SHA256.Create())
            {
                var allFilesToUpload = GetAllFilesToUpload(sha256);
                var appFile = new UploadFileInfo(_options.AppFile, _options.AppFile, sha256.GetFileHash(_options.AppFile));
                var dSymFile = _dSymDirectory != null ?
                    new UploadFileInfo(
                        _dSymDirectory.GetDSymFile(),
                        _dSymDirectory.GetDSymFile(),
                        sha256.GetFileHash(_dSymDirectory.GetDSymFile()))
                    : null;

                var checkHashesResult = await CheckFileHashesAsync(appFile, dSymFile, allFilesToUpload);
                var uploadResult = await UploadTestsToTestCloud(checkHashesResult.AppFile, checkHashesResult.DSymFile, checkHashesResult.UploadFiles);

                if (!(_options.Async || _options.AsyncJson))
                {
                    var exitCode = await WaitForJob(uploadResult);
                    Environment.Exit(exitCode);
                }
                else if (_options.AsyncJson)
                {
                    WriteAsyncJsonResultToConsole(uploadResult.JobId);
                }
            }
        }

        // Asks the server whether this client version / argument set is still
        // supported; a server-reported error aborts the command.
        private async Task CheckVersionAsync()
        {
            using (_logger.BeginScope("Checking version"))
            {
                try
                {
                    var request = new CheckVersionRequest(_options.ToArgumentsArray());
                    var result = await _testCloudProxy.CheckVersionAsync(request);
                    if (result.ErrorMessage != null)
                    {
                        throw new CommandException(
                            UploadTestsCommand.CommandName,
                            result.ErrorMessage,
                            (int)UploadCommandExitCodes.ServerError);
                    }
                }
                catch (Exception ex)
                {
                    throw TranslateException(ex);
                }
            }
        }

        // Collects and hashes every workspace file that may need uploading.
        private IList<UploadFileInfo> GetAllFilesToUpload(HashAlgorithm hashAlgorithm)
        {
            using (_logger.BeginScope("Packaging"))
            {
                var result = _workspace.GetFilesToUpload(hashAlgorithm);
                foreach (var file in result)
                {
                    _logger.LogDebug(PackagingFileEventId, $"Packaging file {file.RelativePath}");
                }

                return result;
            }
        }

        // Asks the server which files it already has, so unchanged files are
        // not re-uploaded.
        private async Task<CheckHashesResult> CheckFileHashesAsync(
            UploadFileInfo appFile, UploadFileInfo dSymFile, IList<UploadFileInfo> allFilesToUpload)
        {
            using (_logger.BeginScope("Negotiating upload"))
            {
                try
                {
                    var request = new CheckFileHashesRequest(appFile, dSymFile, allFilesToUpload);
                    var result = await _testCloudProxy.CheckFileHashesAsync(request);
                    LogCheckHashesResponse(result);

                    return result;
                }
                catch (Exception ex)
                {
                    throw TranslateException(ex);
                }
            }
        }

        // Uploads the negotiated files together with the Test Cloud options
        // derived from the command line.
        private async Task<UploadTestsResult> UploadTestsToTestCloud(
            UploadFileInfo appFile,
            UploadFileInfo dSymFile,
            IList<UploadFileInfo> otherFiles)
        {
            using (_logger.BeginScope("Uploading negotiated files"))
            {
                try
                {
                    var request = new UploadTestsRequest(appFile, dSymFile, otherFiles);

                    request.TestCloudOptions["user"] = _options.User;
                    request.TestCloudOptions["device_selection"] = _options.Devices;
                    request.TestCloudOptions["locale"] = _options.Locale;
                    // The test-framework flag key is the framework name itself
                    // (set by the deriving executor via TestName).
                    request.TestCloudOptions[_testName] = "true";
                    request.TestCloudOptions["series"] = _options.Series;
                    request.TestCloudOptions["api_key"] = _options.ApiKey;

                    if (_options.AppName != null)
                    {
                        request.TestCloudOptions["app"] = _options.AppName;
                    }

                    if (_dSymDirectory != null)
                    {
                        request.TestCloudOptions["crash_reporting"] = "true";
                    }

                    foreach (var testParameter in _options.TestParameters)
                    {
                        request.TestParameters[testParameter.Key] = testParameter.Value;
                    }

                    var result = await _testCloudProxy.UploadTestsAsync(request);
                    LogUploadTestsResponse(result);

                    return result;
                }
                catch (Exception ex)
                {
                    throw TranslateException(ex);
                }
            }
        }

        // Polls the server until the job finishes, honoring the server-suggested
        // wait time between polls; returns the job's exit code.
        private async Task<int> WaitForJob(UploadTestsResult uploadTestsResult)
        {
            using (_logger.BeginScope("Waiting for test results"))
            {
                try
                {
                    var checkStatusRequest = new CheckStatusRequest(uploadTestsResult.JobId)
                    {
                        ApiKey = _options.ApiKey,
                        User = _options.User
                    };

                    while (true)
                    {
                        var checkStatusResult = await _testCloudProxy.CheckStatusAsync(checkStatusRequest);
                        LogCheckStatusResponse(checkStatusResult);

                        if (checkStatusResult.ExitCode != null)
                        {
                            return checkStatusResult.ExitCode.Value;
                        }
                        else
                        {
                            var waitTime = checkStatusResult.WaitTime != null ?
                                TimeSpan.FromSeconds(checkStatusResult.WaitTime.Value) : _defaultWaitTime;

                            await Task.Delay(waitTime);
                        }
                    }
                }
                catch (Exception ex)
                {
                    throw TranslateException(ex);
                }
            }
        }

        // Logs, per file, whether the server already had it.
        private void LogCheckHashesResponse(CheckHashesResult response)
        {
            foreach (var result in response.UploadFiles.OrderBy(fileInfo => fileInfo.FullPath))
            {
                var relativePath = FileHelper.GetRelativePath(result.FullPath, _workspace.WorkspacePath(), new PlatformService());

                _logger.LogDebug(
                    CheckHashResultEventId,
                    $"File {relativePath} was " + (result.WasAlreadyUploaded ? "already uploaded." : "not uploaded."));
            }
        }

        // Logs the enqueue summary: user, test type, team, and the accepted /
        // rejected device lists.
        private void LogUploadTestsResponse(UploadTestsResult response)
        {
            var logLines = new List<string>
            {
                "Tests enqueued",
                $"User: {response.UserEmail}",
                $"Test Type: {response.TestType}"
            };

            if (response.Team != null)
            {
                logLines.Add($"Team: {response.Team}");
            }

            if (response.RejectedDevices != null && response.RejectedDevices.Count > 0)
            {
                logLines.Add($"Skipping devices (you can update your selections via https://testcloud.xamarin.com):");
                logLines.Add(GetDevicesListLog(response.RejectedDevices));
            }

            logLines.Add($"Running on devices:");
            logLines.Add(GetDevicesListLog(response.AcceptedDevices));

            _logger.LogInformation(UploadTestsResultEventId, logLines);
        }

        // Logs server status messages, each prefixed with the current UTC time.
        private void LogCheckStatusResponse(CheckStatusResult response)
        {
            LoggerExtensions.LogInformation(_logger, CheckStatusResultEventId, response.Messages.Select(m => $"{DateTimeOffset.UtcNow.ToString("s")} {m}"));
        }

        // Emits the async-json result (job id, logs <= Warning, errors > Warning)
        // as a single JSON document on stdout.
        private void WriteAsyncJsonResultToConsole(string jobId)
        {
            var asyncJsonResult = new AsyncJsonResult()
            {
                TestRunId = jobId
            };

            var allLogs = _logsRecorder.GetRecordedLogs();
            asyncJsonResult.Logs = allLogs
                .Where(log => log.LogLevel <= LogLevel.Warning)
                .Select(log => _options.Debug ? log.ToDiagnosticString() : log.ToString())
                .ToList();

            asyncJsonResult.Errors = allLogs
                .Where(log => log.LogLevel > LogLevel.Warning)
                .Select(log => _options.Debug ? log.ToDiagnosticString() : log.ToString())
                .ToList();

            Console.WriteLine(JsonConvert.SerializeObject(asyncJsonResult));
        }

        // Formats a device list as indented lines for logging.
        private string GetDevicesListLog(IEnumerable<string> devices)
        {
            return devices.Aggregate(new StringBuilder(), (sb, d) => sb.AppendLine($"    {d}"), sb => sb.ToString());
        }

        // Wraps any failure in a CommandException; HTTP failures map to
        // ServerError, everything else to UnknownError.
        private Exception TranslateException(Exception ex)
        {
            var exitCode = ex is HttpRequestException ? UploadCommandExitCodes.ServerError : UploadCommandExitCodes.UnknownError;

            return new CommandException(UploadTestsCommand.CommandName, ex.Message, ex, (int)exitCode);
        }
    }
}
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.

using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;

namespace DotSpatial.Data.Forms
{
    /// <summary>
    /// ScrollingControl that provides autoscroll and custom draw that won't crash mono.
    /// </summary>
    [ToolboxItem(false)]
    public class ScrollingControl : Control
    {
        #region Fields

        private readonly Brush _controlBrush;
        private Brush _backcolorBrush;
        private Brush _backImageBrush;
        private Rectangle _controlRectangle;
        private Rectangle _documentRectangle;
        private bool _firstDrawing;
        private Size _pageSize;
        private Label _lblCorner;
        private HScrollBar _scrHorizontal;
        private VScrollBar _scrVertical;

        #endregion

        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="ScrollingControl"/> class.
        /// </summary>
        public ScrollingControl()
        {
            InitializeComponent();
            _backcolorBrush = new SolidBrush(base.BackColor);
            _controlBrush = new SolidBrush(SystemColors.Control);
            if (base.BackgroundImage != null)
            {
                _backImageBrush = new TextureBrush(base.BackgroundImage);
            }

            MinimumSize = new Size(5, 5);
        }

        #endregion

        #region Events

        /// <summary>
        /// Occurs after the base drawing content has been rendered to the page.
        /// </summary>
        public event EventHandler<PaintEventArgs> Initialized;

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the background color to use for this control
        /// </summary>
        public override Color BackColor
        {
            get
            {
                return base.BackColor;
            }

            set
            {
                // Replace the cached brush so painting uses the new color.
                _backcolorBrush?.Dispose();
                _backcolorBrush = new SolidBrush(value);
                base.BackColor = value;
            }
        }

        /// <summary>
        /// Gets or sets the background image for this control.
        /// </summary>
        public override Image BackgroundImage
        {
            get
            {
                return base.BackgroundImage;
            }

            set
            {
                base.BackgroundImage = value;
                _backImageBrush?.Dispose();
                if (value == null) return;

                _backImageBrush = new TextureBrush(BackgroundImage);

                // Grow the page so the whole image fits.
                Size s = _pageSize;
                if (s.Width < BackgroundImage.Width) s.Width = BackgroundImage.Width;
                if (s.Height < BackgroundImage.Height) s.Height = BackgroundImage.Height;
                _pageSize = s;
            }
        }

        /// <summary>
        /// Gets or sets the rectangular region of the control in page coordinates.
        /// </summary>
        public Rectangle ControlRectangle
        {
            get { return _controlRectangle; }
            set { _controlRectangle = value; }
        }

        /// <summary>
        /// Gets or sets the rectangle for the entire content, whether on the page buffer or not. X and Y for this are always 0.
        /// </summary>
        public virtual Rectangle DocumentRectangle
        {
            get { return _documentRectangle; }
            set { _documentRectangle = value; }
        }

        /// <summary>
        /// Gets or sets a value indicating whether or not horizontal scrolling is enabled
        /// </summary>
        public bool HorizontalScrollEnabled
        {
            get { return _scrHorizontal.Enabled; }
            set { _scrHorizontal.Enabled = value; }
        }

        /// <summary>
        /// Gets or sets a value indicating whether or not the page for this control has been drawn.
        /// </summary>
        public bool IsInitialized { get; set; }

        /// <summary>
        /// Gets or sets the page image being used as a buffer. This is useful
        /// for content changes that need to be made rapidly. First refresh
        /// a small region of this page, and then invalidate the client rectangle.
        /// </summary>
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public Bitmap Page { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether or not the scrolling should be reset on every resize or not.
        /// </summary>
        public bool ResetOnResize { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the vertical scroll should be permitted.
        /// </summary>
        public bool VerticalScrollEnabled
        {
            get { return _scrVertical.Enabled; }
            set { _scrVertical.Enabled = value; }
        }

        #endregion

        #region Methods

        /// <summary>
        /// Gets a rectangle in document coordinates for the specified rectangle in client coordinates.
        /// </summary>
        /// <param name="rect">Rectangle in client coordinates.</param>
        /// <returns>Rectangle in document coordinates.</returns>
        public Rectangle ClientToDocument(Rectangle rect)
        {
            return new Rectangle(rect.X + _controlRectangle.X, rect.Y + _controlRectangle.Y, rect.Width, rect.Height);
        }

        /// <summary>
        /// Translates a rectangle from document coordinates to coordinates relative to the client control.
        /// </summary>
        /// <param name="rect">Rectangle in document coordinates.</param>
        /// <returns>Rectangle in client coordinates.</returns>
        public Rectangle DocumentToClient(Rectangle rect)
        {
            return new Rectangle(rect.X - _controlRectangle.X, rect.Y - _controlRectangle.Y, rect.Width, rect.Height);
        }

        /// <summary>
        /// Recalculates the size and visibility of the scroll bars based on the current document.
        /// </summary>
        public void ResetScroll()
        {
            _controlRectangle.Width = ClientRectangle.Width;
            _controlRectangle.Height = ClientRectangle.Height;
            int dw = _documentRectangle.Width;
            int dh = _documentRectangle.Height;
            int cw = Width;
            int ch = Height;
            if (dw == 0 || dh == 0) return; // prevent divide by 0
            if (cw == 0 || ch == 0) return;

            _scrHorizontal.LargeChange = (cw * cw) / dw;
            _scrVertical.LargeChange = (ch * ch) / dh;
            _scrHorizontal.Maximum = dw;
            _scrVertical.Maximum = dh;

            if (dw <= cw)
            {
                // Document fits horizontally: no horizontal scrollbar needed.
                _controlRectangle.X = 0;
                _scrHorizontal.Value = _scrHorizontal.Minimum;
                _scrHorizontal.Visible = false;
            }
            else
            {
                if (_scrHorizontal.Enabled) _scrHorizontal.Visible = true;
            }

            if (dh <= ch)
            {
                // Document fits vertically: no vertical scrollbar needed.
                _controlRectangle.Y = 0;
                _scrVertical.Value = _scrVertical.Minimum;
                _scrVertical.Visible = false;
            }
            else
            {
                if (_scrVertical.Enabled) _scrVertical.Visible = true;
            }

            _lblCorner.Visible = _scrVertical.Visible || _scrHorizontal.Visible;
        }

        /// <summary>
        /// Disposes the unmanaged memory objects and optionally disposes the managed memory objects.
        /// </summary>
        /// <param name="disposing">Indicates whether managed objects should be disposed.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                _backcolorBrush?.Dispose();
                _controlBrush?.Dispose();
                _backImageBrush?.Dispose();
                Page?.Dispose();
            }

            base.Dispose(disposing);
        }

        /// <summary>
        /// Occurs during custom drawing.
        /// </summary>
        /// <param name="e">The paint event args.</param>
        protected virtual void OnDraw(PaintEventArgs e)
        {
            if (_firstDrawing == false)
            {
                ResetScroll();
                _firstDrawing = true;
            }

            e.Graphics.FillRectangle(_backcolorBrush, e.ClipRectangle); // in client coordinates, the clip-rectangle is the area to clear
            e.Graphics.DrawImage(Page, e.ClipRectangle, e.ClipRectangle, GraphicsUnit.Pixel);
        }

        /// <summary>
        /// Occurs during custom drawing when erasing things.
        /// </summary>
        /// <param name="e">The paint event args.</param>
        protected virtual void OnDrawBackground(PaintEventArgs e)
        {
        }

        /// <summary>
        /// Occurs when scrolling horizontally.
        /// </summary>
        /// <param name="sender">The sender that raised the event.</param>
        /// <param name="e">The event args.</param>
        protected virtual void OnHorizontalScroll(object sender, ScrollEventArgs e)
        {
            // FIX: this handler previously applied the VERTICAL scrollbar value to
            // _controlRectangle.Y, and the private wiring below compensated by
            // cross-connecting the events. The net base-class behavior was correct,
            // but the virtual methods fired for the wrong direction, misleading any
            // subclass that overrides them. Now horizontal handles horizontal.
            _controlRectangle.X = _scrHorizontal.Value;
            Initialize();
            Invalidate();
        }

        /// <summary>
        /// Fires the Initialized event.
        /// </summary>
        /// <param name="e">The paint event args.</param>
        protected virtual void OnInitialize(PaintEventArgs e)
        {
            Initialized?.Invoke(this, e);
        }

        /// <summary>
        /// On Paint only paints the specified clip rectangle, but paints it from the page buffer.
        /// </summary>
        /// <param name="e">The paint event args.</param>
        protected override void OnPaint(PaintEventArgs e)
        {
            // CGX TRY CATCH
            try
            {
                Rectangle clip = e.ClipRectangle;
                if (clip.IsEmpty) clip = ClientRectangle;
                if (IsInitialized == false || Page == null)
                {
                    Initialize(); // redraw the entire page buffer if necessary
                }

                using (var buffer = new Bitmap(clip.Width, clip.Height))
                using (var g = Graphics.FromImage(buffer))
                using (var mat = new Matrix())
                {
                    mat.Translate(-clip.X, -clip.Y); // draw in "client" coordinates
                    g.Transform = mat;

                    OnDraw(new PaintEventArgs(g, clip)); // draw content to the small temporary buffer.
                    e.Graphics.DrawImage(buffer, clip); // draw from our small, temporary buffer to the screen
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
        }

        /// <summary>
        /// Prevent flicker by preventing this.
        /// </summary>
        /// <param name="e">The paint event args.</param>
        protected override void OnPaintBackground(PaintEventArgs e)
        {
            // Do Nothing
        }

        /// <summary>
        /// Occurs when resizing.
        /// </summary>
        /// <param name="e">The event args.</param>
        protected override void OnResize(EventArgs e)
        {
            ResetScroll();
            base.OnResize(e);
        }

        /// <summary>
        /// Occurs when scrolling vertically.
        /// </summary>
        /// <param name="sender">The sender that raised the event.</param>
        /// <param name="e">The event args.</param>
        protected virtual void OnVerticalScroll(object sender, ScrollEventArgs e)
        {
            // FIX: see OnHorizontalScroll — directions were crossed; vertical now
            // handles vertical.
            _controlRectangle.Y = _scrVertical.Value;
            Initialize();
            Invalidate();
        }

        // Redraws the entire contents of the control, even if the clip rectangle is smaller.
        private void Initialize()
        {
            if (_documentRectangle.IsEmpty)
            {
                _documentRectangle = ClientRectangle;
            }

            if (_controlRectangle.IsEmpty)
            {
                _controlRectangle = ClientRectangle;
            }
            else
            {
                _controlRectangle.Width = ClientRectangle.Width;
                _controlRectangle.Height = ClientRectangle.Height;
            }

            // FIX: the previous Page bitmap was replaced without being disposed,
            // leaking GDI memory on every re-initialization (OnPaint calls
            // Initialize whenever IsInitialized is false).
            // NOTE(review): nothing in this file ever sets IsInitialized = true,
            // so the buffer is rebuilt on each paint — confirm whether subclasses
            // are expected to set it from their Initialized handler.
            Page?.Dispose();
            Page = new Bitmap(Width, Height);

            using (Graphics g = Graphics.FromImage(Page))
            {
                g.Clear(BackColor);
                if (BackgroundImage != null)
                {
                    if (BackgroundImageLayout == ImageLayout.None)
                    {
                        g.DrawImage(BackgroundImage, ClientRectangle, _controlRectangle, GraphicsUnit.Pixel);
                    }

                    if (BackgroundImageLayout == ImageLayout.Center)
                    {
                        int x = (Width - BackgroundImage.Width) / 2;
                        int y = (Height - BackgroundImage.Height) / 2;
                        g.DrawImage(BackgroundImage, new Point(x, y));
                    }

                    if (BackgroundImageLayout == ImageLayout.Stretch || BackgroundImageLayout == ImageLayout.Zoom)
                    {
                        g.DrawImage(BackgroundImage, ClientRectangle);
                    }

                    if (BackgroundImageLayout == ImageLayout.Tile)
                    {
                        g.FillRectangle(_backImageBrush, ClientRectangle);
                    }
                }

                // Graphics.Transform returns a copy, so these are independent
                // Matrix instances; dispose both when done.
                using (Matrix mat = g.Transform)
                using (Matrix oldMat = g.Transform)
                {
                    mat.Translate(-_controlRectangle.X, -_controlRectangle.Y);
                    g.Transform = mat;

                    OnInitialize(new PaintEventArgs(g, ClientRectangle));
                    g.Transform = oldMat;
                }
            }
        }

        private void InitializeComponent()
        {
            _scrVertical = new VScrollBar();
            _scrHorizontal = new HScrollBar();
            _lblCorner = new Label();
            SuspendLayout();

            // scrVertical
            _scrVertical.Anchor = (AnchorStyles.Top | AnchorStyles.Bottom) | AnchorStyles.Right;
            _scrVertical.Location = new Point(170, 0);
            _scrVertical.Name = "_scrVertical";
            _scrVertical.Size = new Size(17, 411);
            _scrVertical.TabIndex = 0;
            _scrVertical.Scroll += ScrVerticalScroll;

            // scrHorizontal
            _scrHorizontal.Anchor = (AnchorStyles.Bottom | AnchorStyles.Left) | AnchorStyles.Right;
            _scrHorizontal.Location = new Point(0, 411);
            _scrHorizontal.Name = "_scrHorizontal";
            _scrHorizontal.Size = new Size(169, 17);
            _scrHorizontal.TabIndex = 1;
            _scrHorizontal.Scroll += ScrHorizontalScroll;

            // lblCorner
            _lblCorner.Anchor = AnchorStyles.Bottom | AnchorStyles.Right;
            _lblCorner.Location = new Point(169, 411);
            _lblCorner.AutoSize = false;
            _lblCorner.Text = null;
            _lblCorner.Size = new Size(18, 17);
            _lblCorner.BackColor = SystemColors.Control;

            // ScrollingControl
            Controls.Add(_scrHorizontal);
            Controls.Add(_scrVertical);
            Controls.Add(_lblCorner);
            Name = "ScrollingControl";
            Size = new Size(187, 428);
            ResumeLayout(false);
        }

        // FIX: these private relays were cross-wired (horizontal event invoked
        // OnVerticalScroll and vice versa) to cancel out the swapped bodies of
        // the virtual handlers above. Both swaps are now removed together, so
        // the observable behavior is unchanged while names match reality.
        private void ScrHorizontalScroll(object sender, ScrollEventArgs e)
        {
            OnHorizontalScroll(sender, e);
        }

        private void ScrVerticalScroll(object sender, ScrollEventArgs e)
        {
            OnVerticalScroll(sender, e);
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Test.Utilities;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.CSharp.UnitTests
{
    // IOperation tests for dynamically-dispatched object creation (new C(arg) where
    // at least one argument is dynamic). Each test compiles the snippet in `source`,
    // extracts the /*<bind>*/.../*</bind>*/ expression, and asserts both the printed
    // operation tree and the expected diagnostics.
    public partial class IOperationTests : SemanticModelTestBase
    {
        // A single dynamic argument produces an IDynamicObjectCreationOperation
        // (overload resolution is deferred to runtime).
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_DynamicArgument()
        {
            string source = @" class C { public C(int i) { } void M(dynamic d) { var x = /*<bind>*/new C(d)/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(d)') Arguments(1): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(0) ArgumentRefKinds(0) Initializer: null ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Multiple applicable constructors: still a dynamic creation, no diagnostics.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_MultipleApplicableSymbols()
        {
            string source = @" class C { public C(int i) { } public C(long i) { } void M(dynamic d) { var x = /*<bind>*/new C(d)/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(d)') Arguments(1): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(0) ArgumentRefKinds(0) Initializer: null ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Mixed dynamic + statically-typed arguments are both recorded in Arguments.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_MultipleArgumentsAndApplicableSymbols()
        {
            string source = @" class C { public C(int i, char c) { } public C(long i, char c) { } void M(dynamic d) { char c = 'c'; var x = /*<bind>*/new C(d, c)/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(d, c)') Arguments(2): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ILocalReferenceOperation: c (OperationKind.LocalReference, Type: System.Char) (Syntax: 'c') ArgumentNames(0) ArgumentRefKinds(0) Initializer: null ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Named arguments surface in ArgumentNames, in lexical order.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_ArgumentNames()
        {
            string source = @" class C { public C(int i, char c) { } public C(long i, char c) { } void M(dynamic d, dynamic e) { var x = /*<bind>*/new C(i: d, c: e)/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(i: d, c: e)') Arguments(2): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') IParameterReferenceOperation: e (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'e') ArgumentNames(2): ""i"" ""c"" ArgumentRefKinds(0) Initializer: null ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // ref/out/by-value modifiers surface in ArgumentRefKinds (None for by-value).
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_ArgumentRefKinds()
        {
            string source = @" class C { public C(ref object i, out int j, char c) { j = 0; } void M(object d, dynamic e) { int k; var x = /*<bind>*/new C(ref d, out k, e)/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(ref d, out k, e)') Arguments(3): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: System.Object) (Syntax: 'd') ILocalReferenceOperation: k (OperationKind.LocalReference, Type: System.Int32) (Syntax: 'k') IParameterReferenceOperation: e (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'e') ArgumentNames(0) ArgumentRefKinds(3): Ref Out None Initializer: null ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // An object initializer attaches as an IObjectOrCollectionInitializerOperation.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_Initializer()
        {
            string source = @" class C { public int X; public C(char c) { } void M(dynamic d) { var x = /*<bind>*/new C(d) { X = 0 }/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(d) { X = 0 }') Arguments(1): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(0) ArgumentRefKinds(0) Initializer: IObjectOrCollectionInitializerOperation (OperationKind.ObjectOrCollectionInitializer, Type: C) (Syntax: '{ X = 0 }') Initializers(1): ISimpleAssignmentOperation (OperationKind.SimpleAssignment, Type: System.Int32) (Syntax: 'X = 0') Left: IFieldReferenceOperation: System.Int32 C.X (OperationKind.FieldReference, Type: System.Int32) (Syntax: 'X') Instance Receiver: IInstanceReferenceOperation (OperationKind.InstanceReference, Type: C, IsImplicit) (Syntax: 'X') Right: ILiteralOperation (OperationKind.Literal, Type: System.Int32, Constant: 0) (Syntax: '0') ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // Exercises everything at once: ref kinds, names (null for positional),
        // and an object initializer.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_AllFields()
        {
            string source = @" class C { public int X; public C(ref int i, char c) { } public C(ref int i, long c) { } void M(dynamic d) { int i = 0; var x = /*<bind>*/new C(ref i, c: d) { X = 0 }/*</bind>*/; } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C) (Syntax: 'new C(ref i ... ) { X = 0 }') Arguments(2): ILocalReferenceOperation: i (OperationKind.LocalReference, Type: System.Int32) (Syntax: 'i') IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ArgumentNames(2): ""null"" ""c"" ArgumentRefKinds(2): Ref None Initializer: IObjectOrCollectionInitializerOperation (OperationKind.ObjectOrCollectionInitializer, Type: C) (Syntax: '{ X = 0 }') Initializers(1): ISimpleAssignmentOperation (OperationKind.SimpleAssignment, Type: System.Int32) (Syntax: 'X = 0') Left: IFieldReferenceOperation: System.Int32 C.X (OperationKind.FieldReference, Type: System.Int32) (Syntax: 'X') Instance Receiver: IInstanceReferenceOperation (OperationKind.InstanceReference, Type: C, IsImplicit) (Syntax: 'X') Right: ILiteralOperation (OperationKind.Literal, Type: System.Int32, Constant: 0) (Syntax: '0') ";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // A bare anonymous method cannot be passed to a dynamic dispatch: CS1977 is
        // reported but the operation tree is still produced (marked IsInvalid).
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_ErrorBadDynamicMethodArgLambda()
        {
            string source = @" using System; class C { static void Main() { dynamic y = null; /*<bind>*/new C(delegate { }, y)/*</bind>*/; } public C(Action a, Action y) { } } ";
            string expectedOperationTree = @" IDynamicObjectCreationOperation (OperationKind.DynamicObjectCreation, Type: C, IsInvalid) (Syntax: 'new C(delegate { }, y)') Arguments(2): IAnonymousFunctionOperation (Symbol: lambda expression) (OperationKind.AnonymousFunction, Type: null, IsInvalid) (Syntax: 'delegate { }') IBlockOperation (1 statements) (OperationKind.Block, Type: null, IsInvalid) (Syntax: '{ }') IReturnOperation (OperationKind.Return, Type: null, IsInvalid, IsImplicit) (Syntax: '{ }') ReturnedValue: null ILocalReferenceOperation: y (OperationKind.LocalReference, Type: dynamic) (Syntax: 'y') ArgumentNames(0) ArgumentRefKinds(0) Initializer: null ";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS1977: Cannot use a lambda expression as an argument to a dynamically dispatched operation without first casting it to a delegate or expression tree type.
                // /*<bind>*/new C(delegate { }, y)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_BadDynamicMethodArgLambda, "delegate { }").WithLocation(9, 25)
            };

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // No constructor is applicable at compile time: the node degrades to an
        // IInvalidOperation with CS7036.
        // NOTE(review): method name has a typo ("OVerload"); left as-is because test
        // names are referenced by test runners/baselines — rename in a dedicated change.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void DynamicObjectCreation_OVerloadResolutionFailure()
        {
            string source = @" class C { public C() { } public C(int i, int j) { } void M(dynamic d) { var x = /*<bind>*/new C(d)/*</bind>*/; } } ";
            string expectedOperationTree = @" IInvalidOperation (OperationKind.Invalid, Type: C, IsInvalid) (Syntax: 'new C(d)') Children(1): IParameterReferenceOperation: d (OperationKind.ParameterReference, Type: dynamic) (Syntax: 'd') ";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS7036: There is no argument given that corresponds to the required formal parameter 'j' of 'C.C(int, int)'
                // var x = /*<bind>*/new C(d)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_NoCorrespondingArgument, "C").WithArguments("j", "C.C(int, int)").WithLocation(14, 31)
            };

            VerifyOperationTreeAndDiagnosticsForTest<ObjectCreationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }
    }
}
using System;
using MongoDB.Bson.Serialization.Attributes;
using Microsoft.AspNet.Identity;
using Common.Data;
using Newtonsoft.Json;
using System.Security.Principal;
using System.Security.Claims;
using System.Collections.Generic;
using System.Linq;

namespace KT.Data.Models
{
    /// <summary>
    /// Well-known custom claim type names used by the KT application.
    /// </summary>
    public class KTClaimTypes
    {
        public const string FirstName = "KT:FirstName";
        public const string LastName = "KT:LastName";
        public const string Roles = "KT:Roles";
        public const string OrganismRole = "KT:OrganismRole";
    }

    /// <summary>
    /// Application user persisted in MongoDB. Every property setter mirrors its value
    /// into an in-memory claim list (<see cref="Claims"/>) so the object round-trips
    /// to and from a <see cref="ClaimsIdentity"/>.
    /// </summary>
    public class UserIdentity : IUser<string>, IModel<string>
    {
        public UserIdentity()
        {
            Id = Guid.NewGuid().ToString();
        }

        /// <summary>Builds a user from a principal's claims, if any.</summary>
        public UserIdentity(IPrincipal principal)
        {
            Id = Guid.NewGuid().ToString();
            if (principal != null)
                LoadPrincipal(principal);
        }

        /// <summary>Builds a user from an existing claims identity.</summary>
        public UserIdentity(ClaimsIdentity claimsIdentity)
        {
            Id = Guid.NewGuid().ToString();
            if (claimsIdentity != null && claimsIdentity.Claims != null)
                LoadClaims(claimsIdentity.Claims);
        }

        // Interface IUser
        private string _id;
        [BsonId]
        public string Id
        {
            get { return _id; }
            set { _id = value; SetClaim(ClaimTypes.NameIdentifier, value); }
        }

        private string _userName = string.Empty;
        public string UserName
        {
            get { return _userName; }
            // FIX: guard against null before ToLower() (previously threw
            // NullReferenceException), matching the null handling of FirstName/LastName.
            set { _userName = (value ?? string.Empty).ToLower(); SetClaim(ClaimTypes.Name, _userName); }
        }

        private string _email = string.Empty;
        public string Email
        {
            get { return _email; }
            // FIX: same null guard as UserName.
            set { _email = (value ?? string.Empty).ToLower(); SetClaim(ClaimTypes.Email, _email); }
        }

        // KT specific data

        [JsonIgnore]
        public string PasswordHash { get; set; }

        [JsonIgnore]
        public string SecurityStamp { get; set; }

        private string _firstName = string.Empty;
        public string FirstName
        {
            get { return _firstName; }
            set { _firstName = (value != null ? value : string.Empty); SetClaim(KTClaimTypes.FirstName, _firstName); }
        }

        private string _lastName = string.Empty;
        public string LastName
        {
            get { return _lastName; }
            set { _lastName = (value != null ? value : string.Empty); SetClaim(KTClaimTypes.LastName, _lastName); }
        }

        public bool IsArchived { get; set; }

        // Display name; not persisted (derived from FirstName/LastName).
        [BsonIgnore]
        [JsonIgnore]
        public string Name
        {
            get { return $"{FirstName} {LastName}"; }
        }

        private HashSet<string> _roles = new HashSet<string>();
        public IEnumerable<string> Roles
        {
            get { return _roles; }
            set
            {
                if (value != null)
                    _roles = new HashSet<string>(value);
                else
                    _roles = new HashSet<string>();
                // Roles are stored in a single comma-separated claim.
                string strPerm = string.Join(",", _roles);
                SetClaim(KTClaimTypes.Roles, strPerm);
            }
        }

        private string _organismRole;
        public string OrganismRole
        {
            get { return _organismRole; }
            // FIX: removed stray empty statement (";;").
            set { _organismRole = (value != null ? value : string.Empty); SetClaim(KTClaimTypes.OrganismRole, _organismRole); }
        }

        /// <summary>True if the user holds the given role/permission.</summary>
        public bool HasPermission(string permissionName)
        {
            return _roles.Contains(permissionName);
        }

        /// <summary>True if the user holds at least one of the given roles/permissions.</summary>
        public bool HasAnyPermission(IEnumerable<string> permissions)
        {
            // FIX: tolerate a null sequence instead of throwing.
            return permissions != null && permissions.Any(p => _roles.Contains(p));
        }

        private List<Claim> _claims = new List<Claim>();

        // Claim mirror of this object's state; rebuilt by the setters, not persisted.
        [BsonIgnore]
        [JsonIgnore]
        public IEnumerable<Claim> Claims
        {
            get { return _claims; }
        }

        public DateTime CreatedOn { get; set; }
        public DateTime ModifiedOn { get; set; }

        private void LoadPrincipal(IPrincipal principal)
        {
            if (principal != null && principal.Identity != null)
            {
                ClaimsIdentity claimsIdentity = principal.Identity as ClaimsIdentity;
                // FIX: the "as" cast yields null for non-claims identities; check it
                // before dereferencing (previously a NullReferenceException).
                if (claimsIdentity != null && claimsIdentity.Claims != null)
                    LoadClaims(claimsIdentity.Claims);
            }
        }

        // Copies known claim types into typed properties; unknown claims are kept verbatim.
        private void LoadClaims(IEnumerable<Claim> claims)
        {
            if (claims != null)
            {
                foreach (var claim in claims)
                {
                    switch (claim.Type)
                    {
                        case KTClaimTypes.FirstName:
                            this.FirstName = claim.Value;
                            break;
                        case KTClaimTypes.LastName:
                            this.LastName = claim.Value;
                            break;
                        case KTClaimTypes.OrganismRole:
                            this.OrganismRole = claim.Value;
                            break;
                        case ClaimTypes.NameIdentifier:
                            this.Id = claim.Value;
                            break;
                        case ClaimTypes.Name:
                            this.UserName = claim.Value;
                            break;
                        case KTClaimTypes.Roles:
                            string[] permissions = claim.Value.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
                            _roles = new HashSet<string>(permissions);
                            break;
                        default:
                            SetClaim(claim.Type, claim.Value);
                            break;
                    }
                }
            }
        }

        // Replaces any existing claim of the given type with a single new value.
        private void SetClaim(string type, object value)
        {
            _claims.RemoveAll(c => c.Type == type);
            string val = value != null ? value.ToString() : string.Empty;
            _claims.Add(new Claim(type, val));
        }
    }
}
using System;
using System.Collections.Generic;
using Abp.Application.Services;
using Abp.AspNetCore.Configuration;
using Abp.Extensions;
using Castle.Windsor.MsDependencyInjection;
using Abp.Reflection;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.ApplicationModels;
using Microsoft.Extensions.DependencyInjection;
using System.Linq;
using System.Reflection;
using Abp.Collections.Extensions;
using Abp.Web.Api.ProxyScripting.Generators;
using JetBrains.Annotations;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.AspNetCore.Mvc.ActionConstraints;

namespace Abp.AspNetCore.Mvc.Conventions
{
    /// <summary>
    /// MVC application-model convention that exposes ABP application services as
    /// API controllers: strips service name postfixes, assigns conventional
    /// "api/services/{module}/{controller}/{action}" routes, infers HTTP verbs,
    /// and configures parameter binding and API-explorer visibility.
    /// </summary>
    public class AbpAppServiceConvention : IApplicationModelConvention
    {
        private readonly Lazy<AbpAspNetCoreConfiguration> _configuration;

        public AbpAppServiceConvention(IServiceCollection services)
        {
            // Lazily resolved: the bootstrapper/IoC container may not be fully built
            // when this convention is constructed.
            _configuration = new Lazy<AbpAspNetCoreConfiguration>(() =>
            {
                return services
                    .GetSingletonService<AbpBootstrapper>()
                    .IocManager
                    .Resolve<AbpAspNetCoreConfiguration>();
            }, true);
        }

        public void Apply(ApplicationModel application)
        {
            foreach (var controller in application.Controllers)
            {
                var type = controller.ControllerType.AsType();
                var configuration = GetControllerSettingOrNull(type);

                if (typeof(IApplicationService).GetTypeInfo().IsAssignableFrom(type))
                {
                    // App services: drop "AppService"/"Service"-style postfixes and
                    // wire up the full remote-service convention.
                    controller.ControllerName = controller.ControllerName.RemovePostFix(ApplicationService.CommonPostfixes);
                    configuration?.ControllerModelConfigurer(controller);
                    ConfigureArea(controller, configuration);
                    ConfigureRemoteService(controller, configuration);
                }
                else
                {
                    // Plain classes opt in via [RemoteService].
                    var remoteServiceAtt = ReflectionHelper.GetSingleAttributeOrDefault<RemoteServiceAttribute>(type.GetTypeInfo());
                    if (remoteServiceAtt != null && remoteServiceAtt.IsEnabledFor(type))
                    {
                        ConfigureRemoteService(controller, configuration);
                    }
                }
            }
        }

        // Sets the MVC "area" route value from the module name, unless already set.
        private void ConfigureArea(ControllerModel controller, [CanBeNull] AbpControllerAssemblySetting configuration)
        {
            if (configuration == null)
            {
                return;
            }

            if (controller.RouteValues.ContainsKey("area"))
            {
                return;
            }

            controller.RouteValues["area"] = configuration.ModuleName;
        }

        private void ConfigureRemoteService(ControllerModel controller, [CanBeNull] AbpControllerAssemblySetting configuration)
        {
            ConfigureApiExplorer(controller);
            ConfigureSelector(controller, configuration);
            ConfigureParameters(controller);
        }

        // Complex (non-primitive) parameters without explicit binding info are bound
        // from the request body, when the verb allows a body.
        private void ConfigureParameters(ControllerModel controller)
        {
            foreach (var action in controller.Actions)
            {
                foreach (var prm in action.Parameters)
                {
                    if (prm.BindingInfo != null)
                    {
                        continue;
                    }

                    if (!TypeHelper.IsPrimitiveExtendedIncludingNullable(prm.ParameterInfo.ParameterType))
                    {
                        if (CanUseFormBodyBinding(action, prm))
                        {
                            prm.BindingInfo = BindingInfo.GetBindingInfo(new[] { new FromBodyAttribute() });
                        }
                    }
                }
            }
        }

        // Body binding is disallowed for configured ignored types and for actions
        // whose HTTP methods are all body-less (GET/DELETE/TRACE/HEAD).
        private bool CanUseFormBodyBinding(ActionModel action, ParameterModel parameter)
        {
            if (_configuration.Value.FormBodyBindingIgnoredTypes.Any(t => t.IsAssignableFrom(parameter.ParameterInfo.ParameterType)))
            {
                return false;
            }

            foreach (var selector in action.Selectors)
            {
                if (selector.ActionConstraints == null)
                {
                    continue;
                }

                foreach (var actionConstraint in selector.ActionConstraints)
                {
                    var httpMethodActionConstraint = actionConstraint as HttpMethodActionConstraint;
                    if (httpMethodActionConstraint == null)
                    {
                        continue;
                    }

                    if (httpMethodActionConstraint.HttpMethods.All(hm => hm.IsIn("GET", "DELETE", "TRACE", "HEAD")))
                    {
                        return false;
                    }
                }
            }

            return true;
        }

        // Controller-level API explorer settings; visibility honors [RemoteService]
        // flags and defaults to visible.
        private void ConfigureApiExplorer(ControllerModel controller)
        {
            if (controller.ApiExplorer.GroupName.IsNullOrEmpty())
            {
                controller.ApiExplorer.GroupName = controller.ControllerName;
            }

            if (controller.ApiExplorer.IsVisible == null)
            {
                var controllerType = controller.ControllerType.AsType();
                var remoteServiceAtt = ReflectionHelper.GetSingleAttributeOrDefault<RemoteServiceAttribute>(controllerType.GetTypeInfo());
                if (remoteServiceAtt != null)
                {
                    controller.ApiExplorer.IsVisible =
                        remoteServiceAtt.IsEnabledFor(controllerType) &&
                        remoteServiceAtt.IsMetadataEnabledFor(controllerType);
                }
                else
                {
                    controller.ApiExplorer.IsVisible = true;
                }
            }

            foreach (var action in controller.Actions)
            {
                ConfigureApiExplorer(action);
            }
        }

        // Action-level API explorer visibility from a method-level [RemoteService].
        private void ConfigureApiExplorer(ActionModel action)
        {
            if (action.ApiExplorer.IsVisible == null)
            {
                var remoteServiceAtt = ReflectionHelper.GetSingleAttributeOrDefault<RemoteServiceAttribute>(action.ActionMethod);
                if (remoteServiceAtt != null)
                {
                    action.ApiExplorer.IsVisible =
                        remoteServiceAtt.IsEnabledFor(action.ActionMethod) &&
                        remoteServiceAtt.IsMetadataEnabledFor(action.ActionMethod);
                }
            }
        }

        // Skips convention routing entirely if the controller already carries an
        // attribute route on any selector.
        private void ConfigureSelector(ControllerModel controller, [CanBeNull] AbpControllerAssemblySetting configuration)
        {
            RemoveEmptySelectors(controller.Selectors);

            if (controller.Selectors.Any(selector => selector.AttributeRouteModel != null))
            {
                return;
            }

            var moduleName = GetModuleNameOrDefault(controller.ControllerType.AsType());

            foreach (var action in controller.Actions)
            {
                ConfigureSelector(moduleName, controller.ControllerName, action, configuration);
            }
        }

        private void ConfigureSelector(string moduleName, string controllerName, ActionModel action, [CanBeNull] AbpControllerAssemblySetting configuration)
        {
            RemoveEmptySelectors(action.Selectors);

            var remoteServiceAtt = ReflectionHelper.GetSingleAttributeOrDefault<RemoteServiceAttribute>(action.ActionMethod);
            if (remoteServiceAtt != null && !remoteServiceAtt.IsEnabledFor(action.ActionMethod))
            {
                return;
            }

            if (!action.Selectors.Any())
            {
                AddAbpServiceSelector(moduleName, controllerName, action, configuration);
            }
            else
            {
                NormalizeSelectorRoutes(moduleName, controllerName, action);
            }
        }

        // Creates the conventional selector: route + inferred (or default) HTTP verb.
        private void AddAbpServiceSelector(string moduleName, string controllerName, ActionModel action, [CanBeNull] AbpControllerAssemblySetting configuration)
        {
            var abpServiceSelectorModel = new SelectorModel
            {
                AttributeRouteModel = CreateAbpServiceAttributeRouteModel(moduleName, controllerName, action)
            };

            // Verb from the method-name prefix (Get*/Create*/...) only when the
            // assembly opted into conventional verbs; otherwise the ABP default.
            var verb = configuration?.UseConventionalHttpVerbs == true
                ? ProxyScriptingHelper.GetConventionalVerbForMethodName(action.ActionName)
                : ProxyScriptingHelper.DefaultHttpVerb;

            abpServiceSelectorModel.ActionConstraints.Add(new HttpMethodActionConstraint(new[] { verb }));

            action.Selectors.Add(abpServiceSelectorModel);
        }

        // Fills in the conventional route on selectors that only specify constraints.
        private static void NormalizeSelectorRoutes(string moduleName, string controllerName, ActionModel action)
        {
            foreach (var selector in action.Selectors)
            {
                if (selector.AttributeRouteModel == null)
                {
                    selector.AttributeRouteModel = CreateAbpServiceAttributeRouteModel(
                        moduleName,
                        controllerName,
                        action
                    );
                }
            }
        }

        private string GetModuleNameOrDefault(Type controllerType)
        {
            return GetControllerSettingOrNull(controllerType)?.ModuleName ??
                   AbpControllerAssemblySetting.DefaultServiceModuleName;
        }

        // First assembly setting whose type predicate accepts this controller, or null.
        [CanBeNull]
        private AbpControllerAssemblySetting GetControllerSettingOrNull(Type controllerType)
        {
            var settings = _configuration.Value.ControllerAssemblySettings.GetSettings(controllerType);
            return settings.FirstOrDefault(setting => setting.TypePredicate(controllerType));
        }

        private static AttributeRouteModel CreateAbpServiceAttributeRouteModel(string moduleName, string controllerName, ActionModel action)
        {
            return new AttributeRouteModel(
                new RouteAttribute(
                    $"api/services/{moduleName}/{controllerName}/{action.ActionName}"
                )
            );
        }

        private static void RemoveEmptySelectors(IList<SelectorModel> selectors)
        {
            selectors
                .Where(IsEmptySelector)
                .ToList()
                .ForEach(s => selectors.Remove(s));
        }

        private static bool IsEmptySelector(SelectorModel selector)
        {
            return selector.AttributeRouteModel == null && selector.ActionConstraints.IsNullOrEmpty();
        }
    }
}
// // AuthenticodeBase.cs: Authenticode signature base class // // Author: // Sebastien Pouliot <sebastien@ximian.com> // // (C) 2003 Motus Technologies Inc. (http://www.motus.com) // Copyright (C) 2004, 2006 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.IO; using System.Security.Cryptography; /* FIXME There are a number of problems and deficiencies in this code. - It requires the PE header to fit in 4K. This is not guaranteed by the file format and it is easy to construct valid files that violate it. i.e. with a large MS-DOS header. The code should just read the entire file into memory. - It has a number of missing or incorrect range checks. Incorrect, as in, checking that record or field starts within range, but does not end within range. - It removes/ignores COFF symbols. These rarely/never occur, but removing them is not likely correct. 
It is not mentioned in either of the two specifications. This seems to be extra unnecessary incorrect code. - There are two specifications, Authenticode and PE: https://download.microsoft.com/download/9/c/5/9c5b2167-8017-4bae-9fde-d599bac8184a/Authenticode_PE.docx https://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx https://msdn.microsoft.com/library/windows/desktop/ms680547(v=vs.85).aspx These are in contradiction regarding hashing of data after the sections. Such data is usually absent. More comparison is need between Mono runtime and desktop runtime/tools. The most common such data is an older signature, which is supposed to be ignored. The next most common is appended setup data, which isn't likely with managed code. However this code has nothing to do with signing managed code specifically, just PEs. There is a slight inconsistency in the Authenticode_PE.doc around the location of the signature vs. other data past sections. The picture has the signature first, the text puts last. - A buffer size of 4K is small and probably not performant. Buffering makes the code harder to update and correct, vs. reading the entire file into memory. - It does not validate NumberOfRvasAndSizes field. Usually it is valid. - It is missing a number of other validations. For example, the optional header magic was ignored, so in the interest of small change, we treat all non-0x20B values the same. Mail with Microsoft confirms the documents do not agree, and that the PE document is outdated and/or incorrect and/or referring to no longer supported v1 Authenticode, and that the intent is for the signature to be at the end, per the text and not the picture. And that data past the sections is to be hashed -- there rarely is any. 
The plan is therefore:
	read the entire file into memory
	add missing validation
	hash, excluding checksum, security directory, and security content
	place security content at the end, replacing what is there if anything
	remove the symbol code (here and in formatter)
	expose more offsets from here to cleanup the formatter code

There is also no unit tests for this code it seems.
*/

namespace Mono.Security.Authenticode {

	// References:
	// a.	http://www.cs.auckland.ac.nz/~pgut001/pubs/authenticode.txt

#if INSIDE_CORLIB
	internal
#else
	public
#endif
	// Authenticode signing authority level.
	enum Authority {
		Individual,
		Commercial,
		Maximum
	}

#if INSIDE_CORLIB
	internal
#else
	public
#endif
	// Base class for reading/hashing PE files for Authenticode signatures.
	// Parses the MZ/PE headers, locates the security directory, and computes the
	// Authenticode digest (file minus checksum, security directory entry, and any
	// existing signature blob). See the FIXME block above for known limitations.
	class AuthenticodeBase {

		public const string spcIndirectDataContext = "1.3.6.1.4.1.311.2.1.4";

		private byte[] fileblock;       // 4 KiB scratch/header buffer
		private FileStream fs;          // currently opened PE file
		private int blockNo;            // >= 1 once the first block has been read
		private int blockLength;        // bytes actually read into fileblock
		private int peOffset;           // offset of the "PE\0\0" signature
		private int dirSecurityOffset;  // IMAGE_DIRECTORY_ENTRY_SECURITY file offset
		private int dirSecuritySize;    // size of the security directory entry
		private int coffSymbolTableOffset;
		private bool pe64;              // true for PE32+ (optional header magic 0x20B)

		internal bool PE64 {
			get {
				if (blockNo < 1)
					ReadFirstBlock ();
				return pe64;
			}
		}

		public AuthenticodeBase ()
		{
			// FIXME Read the entire file into memory.
			// See earlier comments.
			fileblock = new byte [4096];
		}

		internal int PEOffset {
			get {
				if (blockNo < 1)
					ReadFirstBlock ();
				return peOffset;
			}
		}

		internal int CoffSymbolTableOffset {
			get {
				if (blockNo < 1)
					ReadFirstBlock ();
				return coffSymbolTableOffset;
			}
		}

		internal int SecurityOffset {
			get {
				if (blockNo < 1)
					ReadFirstBlock ();
				return dirSecurityOffset;
			}
		}

		// Opens the PE file read-only and resets the parse state.
		internal void Open (string filename)
		{
			if (fs != null)
				Close ();
			fs = new FileStream (filename, FileMode.Open, FileAccess.Read, FileShare.Read);
			blockNo = 0;
		}

		internal void Close ()
		{
			if (fs != null) {
				fs.Close ();
				fs = null;
			}
		}

		// Parses the header block, turning any ProcessFirstBlock error code into a
		// NotSupportedException.
		internal void ReadFirstBlock ()
		{
			int error = ProcessFirstBlock ();
			if (error != 0) {
				string msg = string.Format ("Cannot sign non PE files, e.g. .CAB or .MSI files (error {0}).", error);
				throw new NotSupportedException (msg);
			}
		}

		// Reads and validates the MZ/PE headers from the first 4 KiB.
		// Returns 0 on success; non-zero error codes: 1 = no file open,
		// 2 = too short, 3 = bad MZ magic, 4 = PE offset past EOF, 5 = bad PE magic.
		internal int ProcessFirstBlock ()
		{
			if (fs == null)
				return 1;

			fs.Position = 0;
			// read first block - it will include (100% sure)
			// the MZ header and (99.9% sure) the PE header
			blockLength = fs.Read (fileblock, 0, fileblock.Length);
			blockNo = 1;
			if (blockLength < 64)
				return 2;	// invalid PE file

			// 1. Validate the MZ header informations
			// 1.1. Check for magic MZ at start of header
			if (BitConverterLE.ToUInt16 (fileblock, 0) != 0x5A4D)
				return 3;

			// 1.2. Find the offset of the PE header
			peOffset = BitConverterLE.ToInt32 (fileblock, 60);
			if (peOffset > fileblock.Length) {
				// just in case (0.1%) this can actually happen
				// FIXME This does not mean the file is invalid,
				// just that this code cannot handle it.
				// FIXME Read the entire file into memory.
				// See earlier comments.
				string msg = String.Format ("Header size too big (> {0} bytes).", fileblock.Length);
				throw new NotSupportedException (msg);
			}
			// FIXME This verifies that PE starts within the file,
			// but not that it fits.
			if (peOffset > fs.Length)
				return 4;

			// 2. Read between DOS header and first part of PE header
			// 2.1. Check for magic PE at start of header
			//	PE - NT header ('P' 'E' 0x00 0x00)
			if (BitConverterLE.ToUInt32 (fileblock, peOffset) != 0x4550)
				return 5;

			// PE signature is followed by 20 byte file header, and
			// then 2 byte magic 0x10B for PE32 or 0x20B for PE32+,
			// or 0x107 for the obscure ROM case.
			// FIXME The code historically ignored this magic value
			// entirely, so we only treat 0x20B differently to maintain
			// this dubious behavior.
			// FIXME The code also lacks range checks in a number of places,
			// and will access arrays out of bounds for valid files.
			ushort magic = BitConverterLE.ToUInt16 (fileblock, peOffset + 24);
			const int IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20B;
			pe64 = magic == IMAGE_NT_OPTIONAL_HDR64_MAGIC;

			// FIXME This fails to validate NumberOfRvasAndSizes.

			// 2.2. Locate IMAGE_DIRECTORY_ENTRY_SECURITY (offset and size)
			// These offsets are from the documentation, but add 24 for
			// PE signature and file header.
			if (pe64) {
				dirSecurityOffset = BitConverterLE.ToInt32 (fileblock, peOffset + 168);
				dirSecuritySize = BitConverterLE.ToInt32 (fileblock, peOffset + 168 + 4);
			} else {
				dirSecurityOffset = BitConverterLE.ToInt32 (fileblock, peOffset + 152);
				dirSecuritySize = BitConverterLE.ToInt32 (fileblock, peOffset + 156);
			}

			// FIXME Remove this code and the dependency on it.
			coffSymbolTableOffset = BitConverterLE.ToInt32 (fileblock, peOffset + 12);

			return 0;
		}

		// Returns the raw signature blob (WIN_CERTIFICATE content, 8-byte header
		// stripped) from the security directory, or null when absent/too small.
		internal byte[] GetSecurityEntry ()
		{
			if (blockNo < 1)
				ReadFirstBlock ();

			if (dirSecuritySize > 8) {
				// remove header from size (not ASN.1 based)
				byte[] secEntry = new byte [dirSecuritySize - 8];
				// position after header and read entry
				fs.Position = dirSecurityOffset + 8;
				fs.Read (secEntry, 0, secEntry.Length);
				return secEntry;
			}
			return null;
		}

		// Computes the Authenticode digest of the open file: the whole file except
		// the checksum field, the security directory entry, and the existing
		// signature (or trailing COFF symbol table). Returns null on a short read.
		internal byte[] GetHash (HashAlgorithm hash)
		{
			if (blockNo < 1)
				ReadFirstBlock ();
			fs.Position = blockLength;

			// hash the rest of the file
			long n;
			int addsize = 0;
			// minus any authenticode signature (with 8 bytes header)
			if (dirSecurityOffset > 0) {
				// it is also possible that the signature block
				// starts within the block in memory (small EXE)
				if (dirSecurityOffset < blockLength) {
					blockLength = dirSecurityOffset;
					n = 0;
				} else {
					n = dirSecurityOffset - blockLength;
				}
			} else if (coffSymbolTableOffset > 0) {
				// zero out PointerToSymbolTable/NumberOfSymbols in the
				// in-memory header copy before hashing
				fileblock[PEOffset + 12] = 0;
				fileblock[PEOffset + 13] = 0;
				fileblock[PEOffset + 14] = 0;
				fileblock[PEOffset + 15] = 0;
				fileblock[PEOffset + 16] = 0;
				fileblock[PEOffset + 17] = 0;
				fileblock[PEOffset + 18] = 0;
				fileblock[PEOffset + 19] = 0;
				// it is also possible that the signature block
				// starts within the block in memory (small EXE)
				if (coffSymbolTableOffset < blockLength) {
					blockLength = coffSymbolTableOffset;
					n = 0;
				} else {
					n = coffSymbolTableOffset - blockLength;
				}
			} else {
				// pad hashed length to an 8-byte multiple
				addsize = (int) (fs.Length & 7);
				if (addsize > 0)
					addsize = 8 - addsize;

				n = fs.Length - blockLength;
			}

			// Authenticode(r) gymnastics
			// Hash from (generally) 0 to 215 (216 bytes)
			// 88 = 64 + 24
			// 64 is the offset of Checksum within OptionalHeader.
			// 24 is offset of OptionalHeader within PEHeader.
			int pe = peOffset + 88;
			hash.TransformBlock (fileblock, 0, pe, fileblock, 0);
			// then skip 4 for checksum
			pe += 4;

			if (pe64) {
				// security_directory, if present, is at offset 144 within OptionalHeader64
				// FIXME This code fails to check if the security_directory is present.
				// If it is absent, it may or may not be difficult to add, and reject
				// the file as valid but unsignable.
				// Checksum is at [64, 68].
				// 144 - 68 = 76
				// Hash from checksum to security_directory.
				hash.TransformBlock (fileblock, pe, 76, fileblock, pe);
				// then skip 8 bytes for IMAGE_DIRECTORY_ENTRY_SECURITY
				pe += 76 + 8;
			} else {
				// security_directory, if present, is at offset 128 within OptionalHeader32
				// FIXME This code fails to check if the security_directory is present.
				// If it is absent, it may or may not be difficult to add, and reject
				// the file as valid but unsignable.
				// Checksum is at [64, 68].
				// 128 - 68 = 60
				// Continue hashing from (generally) 220 to 279 (60 bytes)
				hash.TransformBlock (fileblock, pe, 60, fileblock, pe);
				// then skip 8 bytes for IMAGE_DIRECTORY_ENTRY_SECURITY
				pe += 68;
			}

			// everything is present so start the hashing
			if (n == 0) {
				// hash the (only) block
				hash.TransformFinalBlock (fileblock, pe, blockLength - pe);
			} else {
				// hash the last part of the first (already in memory) block
				hash.TransformBlock (fileblock, pe, blockLength - pe, fileblock, pe);

				// hash by blocks of 4096 bytes
				long blocks = (n >> 12);
				int remainder = (int)(n - (blocks << 12));
				if (remainder == 0) {
					blocks--;
					remainder = 4096;
				}
				// blocks
				while (blocks-- > 0) {
					fs.Read (fileblock, 0, fileblock.Length);
					hash.TransformBlock (fileblock, 0, fileblock.Length, fileblock, 0);
				}
				// remainder
				if (fs.Read (fileblock, 0, remainder) != remainder)
					return null;

				if (addsize > 0) {
					hash.TransformBlock (fileblock, 0, remainder, fileblock, 0);
					hash.TransformFinalBlock (new byte [addsize], 0, addsize);
				} else {
					hash.TransformFinalBlock (fileblock, 0, remainder);
				}
			}
			return hash.Hash;
		}

		// for compatibility only
		// Convenience wrapper: open, hash with the named algorithm, close.
		// Returns null on any failure (errors are deliberately swallowed).
		protected byte[] HashFile (string fileName, string hashName)
		{
			try {
				Open (fileName);
				HashAlgorithm hash = HashAlgorithm.Create (hashName);
				byte[] result = GetHash (hash);
				Close ();
				return result;
			}
			catch {
				return null;
			}
		}
	}
}
#region Copyright
// Copyright 2016 OSIsoft, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using System.Linq;
using OSIsoft.AF;
using OSIsoft.AF.Asset;
using OSIsoft.AF.UnitsOfMeasure;
using System.Collections.Generic;

namespace Ex4_Building_An_AF_Hierarchy_Sln
{
    /// <summary>
    /// Bonus exercise: builds the "Ethical Power Company" AF hierarchy on PISRV01:
    /// categories, enumeration sets, element templates, meter/city elements,
    /// attribute values, and weak references from cities to meters.
    /// </summary>
    static class Bonus
    {
        /// <summary>Entry point: runs every build step in dependency order.</summary>
        public static void Run()
        {
            AFDatabase database = GetOrCreateDatabase("PISRV01", "Ethical Power Company");
            CreateCategories(database);
            CreateEnumerationSets(database);
            CreateTemplates(database);
            CreateElements(database);
            SetAttributeValues(database);
            CreateCityElements(database);
            CreateWeakReferences(database);
        }

        /// <summary>
        /// Connects to <paramref name="servername"/> and returns the named database,
        /// creating it if absent. Returns null when the PI System cannot be resolved.
        /// </summary>
        private static AFDatabase GetOrCreateDatabase(string servername, string databasename)
        {
            PISystem assetServer = new PISystems()[servername];
            if (assetServer == null)
                return null;
            AFDatabase database = assetServer.Databases[databasename];
            if (database == null)
                database = assetServer.Databases.Add(databasename);
            return database;
        }

        /// <summary>
        /// Creates the element and attribute categories used by the templates.
        /// </summary>
        private static void CreateCategories(AFDatabase database)
        {
            if (database == null) return;

            // BUG FIX: the original added all five names to ElementCategories (and used
            // "Time - Series Data" with stray spaces), but the template builders look up
            // "Building Info", "Location" and "Time-Series Data" in AttributeCategories,
            // so those lookups returned null. Split element vs attribute categories and
            // use the exact spelling the lookups expect.
            var elementCategories = new List<string> { "Measures Energy", "Shows Status" };
            foreach (var item in elementCategories)
            {
                if (!database.ElementCategories.Contains(item))
                    database.ElementCategories.Add(item);
            }

            var attributeCategories = new List<string> { "Building Info", "Location", "Time-Series Data" };
            foreach (var item in attributeCategories)
            {
                if (!database.AttributeCategories.Contains(item))
                    database.AttributeCategories.Add(item);
            }

            if (database.IsDirty)
                database.CheckIn();
        }

        /// <summary>Creates the "Building Type" and "Meter Status" enumeration sets.</summary>
        private static void CreateEnumerationSets(AFDatabase database)
        {
            if (database == null) return;

            if (!database.EnumerationSets.Contains("Building Type"))
            {
                AFEnumerationSet bTypeEnum = database.EnumerationSets.Add("Building Type");
                bTypeEnum.Add("Residential", 0);
                bTypeEnum.Add("Business", 1);
            }

            if (!database.EnumerationSets.Contains("Meter Status"))
            {
                AFEnumerationSet mStatusEnum = database.EnumerationSets.Add("Meter Status");
                mStatusEnum.Add("Good", 0);
                mStatusEnum.Add("Bad", 1);
            }

            if (database.IsDirty)
                database.CheckIn();
        }

        /// <summary>Creates the MeterBasic, MeterAdvanced and City element templates.</summary>
        private static void CreateTemplates(AFDatabase database)
        {
            if (database == null) return;
            AFElementTemplate meterBasicTemplate = CreateMeterBasicTemplate(database);
            CreateMeterAdvancedTemplate(meterBasicTemplate);
            CreateCityTemplate(database);
            if (database.IsDirty)
                database.CheckIn();
        }

        /// <summary>
        /// Creates (or returns the existing) "MeterBasic" template with its
        /// static and PI Point attribute templates.
        /// </summary>
        private static AFElementTemplate CreateMeterBasicTemplate(AFDatabase database)
        {
            AFElementTemplate meterBasicTemplate = database.ElementTemplates["MeterBasic"];
            if (meterBasicTemplate != null)
                return meterBasicTemplate;

            UOM uom = database.PISystem.UOMDatabase.UOMs["kilowatt hour"];
            AFCategory mEnergyE = database.ElementCategories["Measures Energy"];
            AFCategory bInfoA = database.AttributeCategories["Building Info"];
            AFCategory locationA = database.AttributeCategories["Location"];
            AFCategory tsDataA = database.AttributeCategories["Time-Series Data"];
            AFEnumerationSet bTypeNum = database.EnumerationSets["Building Type"];

            meterBasicTemplate = database.ElementTemplates.Add("MeterBasic");
            meterBasicTemplate.Categories.Add(mEnergyE);

            AFAttributeTemplate substationAttrTemp = meterBasicTemplate.AttributeTemplates.Add("Substation");
            substationAttrTemp.Type = typeof(string);

            // NOTE(review): "Usage Limit" is typed string but carries a kWh UOM and is
            // later set to the number 350 — confirm whether it should be numeric.
            AFAttributeTemplate usageLimitAttrTemp = meterBasicTemplate.AttributeTemplates.Add("Usage Limit");
            usageLimitAttrTemp.Type = typeof(string);
            usageLimitAttrTemp.DefaultUOM = uom;

            AFAttributeTemplate buildingAttrTemp = meterBasicTemplate.AttributeTemplates.Add("Building");
            buildingAttrTemp.Type = typeof(string);
            buildingAttrTemp.Categories.Add(bInfoA);

            AFAttributeTemplate bTypeAttrTemp = meterBasicTemplate.AttributeTemplates.Add("Building Type");
            bTypeAttrTemp.TypeQualifier = bTypeNum;
            bTypeAttrTemp.Categories.Add(bInfoA);

            AFAttributeTemplate cityAttrTemp = meterBasicTemplate.AttributeTemplates.Add("City");
            cityAttrTemp.Type = typeof(string);
            cityAttrTemp.Categories.Add(locationA);

            AFAttributeTemplate energyUsageAttrTemp = meterBasicTemplate.AttributeTemplates.Add("Energy Usage");
            energyUsageAttrTemp.Type = typeof(Single);
            energyUsageAttrTemp.Categories.Add(tsDataA);
            energyUsageAttrTemp.DefaultUOM = uom;
            energyUsageAttrTemp.DataReferencePlugIn = database.PISystem.DataReferencePlugIns["PI Point"];
            energyUsageAttrTemp.ConfigString = @"\\%@\Configuration|PIDataArchiveName%\%Element%.%Attribute%;UOM=kWh";

            return meterBasicTemplate;
        }

        /// <summary>
        /// Creates (or completes) the "MeterAdvanced" template deriving from MeterBasic
        /// and adds its "Status" PI Point attribute template.
        /// </summary>
        private static void CreateMeterAdvancedTemplate(AFElementTemplate meterBasicTemplate)
        {
            AFDatabase database = meterBasicTemplate.Database;
            AFElementTemplate meterAdvancedTemplate = database.ElementTemplates["MeterAdvanced"];
            if (meterAdvancedTemplate == null)
                // BUG FIX: the Add() result was discarded, leaving the local null
                // and crashing on the BaseTemplate assignment below.
                meterAdvancedTemplate = database.ElementTemplates.Add("MeterAdvanced");

            AFCategory tsDataA = database.AttributeCategories["Time-Series Data"];
            AFEnumerationSet mStatusEnum = database.EnumerationSets["Meter Status"];

            meterAdvancedTemplate.BaseTemplate = meterBasicTemplate;

            AFAttributeTemplate statusAttrTemp = meterAdvancedTemplate.AttributeTemplates["Status"];
            if (statusAttrTemp == null)
                // BUG FIX: same discarded-Add pattern as above.
                statusAttrTemp = meterAdvancedTemplate.AttributeTemplates.Add("Status");
            statusAttrTemp.TypeQualifier = mStatusEnum;
            if (!statusAttrTemp.Categories.Contains(tsDataA))
                statusAttrTemp.Categories.Add(tsDataA);
            statusAttrTemp.DataReferencePlugIn = database.PISystem.DataReferencePlugIns["PI Point"];
            statusAttrTemp.ConfigString = @"\\%@\Configuration|PIDataArchiveName%\%Element%.%Attribute%";
        }

        /// <summary>
        /// Creates the "City" template with an "Energy Usage" PI Point attribute template.
        /// </summary>
        private static void CreateCityTemplate(AFDatabase database)
        {
            AFElementTemplate cityTemplate = database.ElementTemplates["City"];
            if (cityTemplate != null)
                return;
            // BUG FIX: the template was never created when absent, so the
            // AttributeTemplates access below dereferenced null.
            cityTemplate = database.ElementTemplates.Add("City");

            AFAttributeTemplate cityEnergyUsageAttrTemp = cityTemplate.AttributeTemplates.Add("Energy Usage");
            cityEnergyUsageAttrTemp.Type = typeof(Single);
            cityEnergyUsageAttrTemp.DefaultUOM = database.PISystem.UOMDatabase.UOMs["kilowatt hour"];
            cityEnergyUsageAttrTemp.DataReferencePlugIn = database.PISystem.DataReferencePlugIns["PI Point"];
            cityEnergyUsageAttrTemp.ConfigString = @"\\%@\Configuration|PIDataArchiveName%\%Element%.%Attribute%";
        }

        /// <summary>
        /// Creates the Configuration element (holding the PI Data Archive name used by
        /// the templates' ConfigStrings) and twelve meter elements under "Meters":
        /// Meter001..Meter008 from MeterBasic, Meter009..Meter012 from MeterAdvanced.
        /// </summary>
        private static void CreateElements(AFDatabase database)
        {
            if (database == null) return;

            // Small exception to the pattern: this method also creates one attribute.
            AFElement configuration;
            if (!database.Elements.Contains("Configuration"))
            {
                configuration = database.Elements.Add("Configuration");
                AFAttribute archiveNameAttr = configuration.Attributes.Add("PIDataArchiveName");
                archiveNameAttr.SetValue(new AFValue("PISRV01"));
            }

            AFElement meters = database.Elements["Meters"];
            if (meters == null)
                meters = database.Elements.Add("Meters");

            AFElementTemplate basic = database.ElementTemplates["MeterBasic"];
            AFElementTemplate advanced = database.ElementTemplates["MeterAdvanced"];

            foreach (int i in Enumerable.Range(1, 12))
            {
                string name = "Meter" + i.ToString("D3");
                if (!meters.Elements.Contains(name))
                {
                    AFElementTemplate eTemp = i <= 8 ? basic : advanced;
                    AFElement e = meters.Elements.Add(name, eTemp);
                }
            }

            if (database.IsDirty)
                database.CheckIn();
        }

        /// <summary>Populates the static attributes of Meter001 with sample values.</summary>
        private static void SetAttributeValues(AFDatabase database)
        {
            if (database == null) return;

            AFElement meter001 = database.Elements["Meters"].Elements["Meter001"];
            meter001.Attributes["Substation"].SetValue(new AFValue("SSA-01"));
            meter001.Attributes["Usage Limit"].SetValue(new AFValue(350));
            meter001.Attributes["Building"].SetValue(new AFValue("The Shard"));
            AFEnumerationValue bTypeValue = database.EnumerationSets["Building Type"]["Residential"];
            meter001.Attributes["Building Type"].SetValue(new AFValue(bTypeValue));
            meter001.Attributes["City"].SetValue(new AFValue("London"));
            // NOTE(review): no CheckIn here — presumably SetValue persists directly
            // for these static attributes; confirm against the AF SDK behavior.
        }

        /// <summary>Creates the "Geographical Locations" element with three city children.</summary>
        private static void CreateCityElements(AFDatabase database)
        {
            if (database == null) return;

            if (!database.Elements.Contains("Geographical Locations"))
            {
                AFElement geoLocations = database.Elements.Add("Geographical Locations");
                AFElementTemplate cityTemplate = database.ElementTemplates["City"];

                geoLocations.Elements.Add("London", cityTemplate);
                geoLocations.Elements.Add("Montreal", cityTemplate);
                geoLocations.Elements.Add("San Francisco", cityTemplate);
            }
            if (database.IsDirty)
                database.CheckIn();
        }

        /// <summary>
        /// Weak-references each meter under the city element named by its "City"
        /// attribute, creating missing city elements on the fly.
        /// </summary>
        private static void CreateWeakReferences(AFDatabase database)
        {
            if (database == null) return;

            AFReferenceType weakRefType = database.ReferenceTypes["Weak Reference"];
            AFElement meters = database.Elements["Meters"];
            AFElement locations = database.Elements["Geographical Locations"];
            AFElementTemplate cityTemplate = database.ElementTemplates["City"];

            foreach (AFElement meter in meters.Elements)
            {
                string cityName = meter.Attributes["City"].GetValue().ToString();
                if (string.IsNullOrEmpty(cityName))
                    continue;
                AFElement city = locations.Elements[cityName];
                if (city == null)
                    // BUG FIX: the Add() result was discarded, so `city` stayed null
                    // and the dereference below threw for any new city name.
                    city = locations.Elements.Add(cityName, cityTemplate);
                if (!city.Elements.Contains(meter.Name))
                    city.Elements.Add(meter, weakRefType);
            }

            if (database.IsDirty)
                database.CheckIn();
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/protobuf/duration.proto #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Google.Protobuf.WellKnownTypes { /// <summary>Holder for reflection information generated from google/protobuf/duration.proto</summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] public static partial class DurationReflection { #region Descriptor /// <summary>File descriptor for google/protobuf/duration.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static DurationReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Ch5nb29nbGUvcHJvdG9idWYvZHVyYXRpb24ucHJvdG8SD2dvb2dsZS5wcm90", "b2J1ZiIqCghEdXJhdGlvbhIPCgdzZWNvbmRzGAEgASgDEg0KBW5hbm9zGAIg", "ASgFQlAKE2NvbS5nb29nbGUucHJvdG9idWZCDUR1cmF0aW9uUHJvdG9QAaAB", "AaICA0dQQqoCHkdvb2dsZS5Qcm90b2J1Zi5XZWxsS25vd25UeXBlc2IGcHJv", "dG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] { new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Duration), global::Google.Protobuf.WellKnownTypes.Duration.Parser, new[]{ "Seconds", "Nanos" }, null, null, null) })); } #endregion } #region Messages /// <summary> /// A Duration represents a signed, fixed-length span of time represented /// as a count of seconds and fractions of seconds at nanosecond /// resolution. It is independent of any calendar and concepts like "day" /// or "month". It is related to Timestamp in that the difference between /// two Timestamp values is a Duration and it can be added or subtracted /// from a Timestamp. 
Range is approximately +-10,000 years. /// /// Example 1: Compute Duration from two Timestamps in pseudo code. /// /// Timestamp start = ...; /// Timestamp end = ...; /// Duration duration = ...; /// /// duration.seconds = end.seconds - start.seconds; /// duration.nanos = end.nanos - start.nanos; /// /// if (duration.seconds &lt; 0 &amp;&amp; duration.nanos > 0) { /// duration.seconds += 1; /// duration.nanos -= 1000000000; /// } else if (durations.seconds > 0 &amp;&amp; duration.nanos &lt; 0) { /// duration.seconds -= 1; /// duration.nanos += 1000000000; /// } /// /// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. /// /// Timestamp start = ...; /// Duration duration = ...; /// Timestamp end = ...; /// /// end.seconds = start.seconds + duration.seconds; /// end.nanos = start.nanos + duration.nanos; /// /// if (end.nanos &lt; 0) { /// end.seconds -= 1; /// end.nanos += 1000000000; /// } else if (end.nanos >= 1000000000) { /// end.seconds += 1; /// end.nanos -= 1000000000; /// } /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] public sealed partial class Duration : pb::IMessage<Duration> { private static readonly pb::MessageParser<Duration> _parser = new pb::MessageParser<Duration>(() => new Duration()); public static pb::MessageParser<Duration> Parser { get { return _parser; } } public static pbr::MessageDescriptor Descriptor { get { return global::Google.Protobuf.WellKnownTypes.DurationReflection.Descriptor.MessageTypes[0]; } } pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } public Duration() { OnConstruction(); } partial void OnConstruction(); public Duration(Duration other) : this() { seconds_ = other.seconds_; nanos_ = other.nanos_; } public Duration Clone() { return new Duration(this); } /// <summary>Field number for the "seconds" field.</summary> public const int SecondsFieldNumber = 1; private long seconds_; /// <summary> /// Signed seconds of the span of time. 
Must be from -315,576,000,000 /// to +315,576,000,000 inclusive. /// </summary> public long Seconds { get { return seconds_; } set { seconds_ = value; } } /// <summary>Field number for the "nanos" field.</summary> public const int NanosFieldNumber = 2; private int nanos_; /// <summary> /// Signed fractions of a second at nanosecond resolution of the span /// of time. Durations less than one second are represented with a 0 /// `seconds` field and a positive or negative `nanos` field. For durations /// of one second or more, a non-zero value for the `nanos` field must be /// of the same sign as the `seconds` field. Must be from -999,999,999 /// to +999,999,999 inclusive. /// </summary> public int Nanos { get { return nanos_; } set { nanos_ = value; } } public override bool Equals(object other) { return Equals(other as Duration); } public bool Equals(Duration other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Seconds != other.Seconds) return false; if (Nanos != other.Nanos) return false; return true; } public override int GetHashCode() { int hash = 1; if (Seconds != 0L) hash ^= Seconds.GetHashCode(); if (Nanos != 0) hash ^= Nanos.GetHashCode(); return hash; } public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } public void WriteTo(pb::CodedOutputStream output) { if (Seconds != 0L) { output.WriteRawTag(8); output.WriteInt64(Seconds); } if (Nanos != 0) { output.WriteRawTag(16); output.WriteInt32(Nanos); } } public int CalculateSize() { int size = 0; if (Seconds != 0L) { size += 1 + pb::CodedOutputStream.ComputeInt64Size(Seconds); } if (Nanos != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Nanos); } return size; } public void MergeFrom(Duration other) { if (other == null) { return; } if (other.Seconds != 0L) { Seconds = other.Seconds; } if (other.Nanos != 0) { Nanos = other.Nanos; } } public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = 
input.ReadTag()) != 0) { switch(tag) { default: input.SkipLastField(); break; case 8: { Seconds = input.ReadInt64(); break; } case 16: { Nanos = input.ReadInt32(); break; } } } } } #endregion } #endregion Designer generated code
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Azure.SignalR;
using Microsoft.Azure.SignalR.Tests.Common;
using Microsoft.Azure.WebJobs.Extensions.SignalRService;
using Moq;
using Xunit;

namespace SignalRServiceExtension.Tests
{
    /// <summary>
    /// Unit tests for <c>SignalRAsyncCollector</c>: verifies that each message /
    /// group-action shape is routed to the matching <c>IAzureSignalRSender</c> call.
    /// </summary>
    public class SignalRAsyncCollectorTests
    {
        /// <summary>Endpoint variants: an explicit pair of fake endpoints, and null.</summary>
        public static IEnumerable<object[]> GetEndpoints()
        {
            yield return new object[] { FakeEndpointUtils.GetFakeEndpoint(2).ToArray() };
            yield return new object[] { null };
        }

        [Theory]
        [MemberData(nameof(GetEndpoints))]
        public async Task AddAsync_WithBroadcastMessage_CallsSendToAll(ServiceEndpoint[] endpoints)
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRMessage>(signalRSenderMock.Object);

            await collector.AddAsync(new SignalRMessage
            {
                Target = "newMessage",
                Arguments = new object[] { "arg1", "arg2" },
                Endpoints = endpoints
            });

            signalRSenderMock.Verify(c => c.SendToAll(It.IsAny<SignalRData>()), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = (SignalRData)signalRSenderMock.Invocations[0].Arguments[0];
            Assert.Equal("newMessage", actualData.Target);
            Assert.Equal("arg1", actualData.Arguments[0]);
            Assert.Equal("arg2", actualData.Arguments[1]);
            Assert.Equal(endpoints, actualData.Endpoints);
        }

        [Fact]
        public async Task AddAsync_WithUserId_CallsSendToUser()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRMessage>(signalRSenderMock.Object);

            await collector.AddAsync(new SignalRMessage
            {
                UserId = "userId1",
                Target = "newMessage",
                Arguments = new object[] { "arg1", "arg2" }
            });

            signalRSenderMock.Verify(
                c => c.SendToUser("userId1", It.IsAny<SignalRData>()), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = (SignalRData)signalRSenderMock.Invocations[0].Arguments[1];
            Assert.Equal("newMessage", actualData.Target);
            Assert.Equal("arg1", actualData.Arguments[0]);
            Assert.Equal("arg2", actualData.Arguments[1]);
        }

        // FIX: renamed from AddAsync_WithUserId_CallsSendToGroup — the message
        // sets GroupName, not UserId, so the old name misdescribed the scenario.
        [Fact]
        public async Task AddAsync_WithGroupName_CallsSendToGroup()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRMessage>(signalRSenderMock.Object);

            await collector.AddAsync(new SignalRMessage
            {
                GroupName = "group1",
                Target = "newMessage",
                Arguments = new object[] { "arg1", "arg2" }
            });

            signalRSenderMock.Verify(
                c => c.SendToGroup("group1", It.IsAny<SignalRData>()), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = (SignalRData)signalRSenderMock.Invocations[0].Arguments[1];
            Assert.Equal("newMessage", actualData.Target);
            Assert.Equal("arg1", actualData.Arguments[0]);
            Assert.Equal("arg2", actualData.Arguments[1]);
        }

        [Theory]
        [MemberData(nameof(GetEndpoints))]
        public async Task AddAsync_WithUserId_CallsAddUserToGroup(ServiceEndpoint[] endpoints)
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRGroupAction>(signalRSenderMock.Object);
            var action = new SignalRGroupAction
            {
                UserId = "userId1",
                GroupName = "group1",
                Action = GroupAction.Add,
                Endpoints = endpoints
            };

            await collector.AddAsync(action);

            signalRSenderMock.Verify(
                c => c.AddUserToGroup(action), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = signalRSenderMock.Invocations[0].Arguments[0];
            Assert.Equal(action, actualData);
        }

        [Fact]
        public async Task AddAsync_WithUserId_CallsRemoveUserFromGroup()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRGroupAction>(signalRSenderMock.Object);
            var action = new SignalRGroupAction
            {
                UserId = "userId1",
                GroupName = "group1",
                Action = GroupAction.Remove
            };

            await collector.AddAsync(action);

            signalRSenderMock.Verify(
                c => c.RemoveUserFromGroup(action), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = signalRSenderMock.Invocations[0].Arguments[0];
            Assert.Equal(action, actualData);
        }

        [Fact]
        public async Task AddAsync_WithUserId_CallsRemoveUserFromAllGroups()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRGroupAction>(signalRSenderMock.Object);
            var action = new SignalRGroupAction
            {
                UserId = "userId1",
                Action = GroupAction.RemoveAll
            };

            await collector.AddAsync(action);

            signalRSenderMock.Verify(
                c => c.RemoveUserFromAllGroups(action), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = signalRSenderMock.Invocations[0].Arguments[0];
            Assert.Equal(action, actualData);
        }

        [Fact]
        public async Task AddAsync_InvalidTypeThrowException()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<object[]>(signalRSenderMock.Object);
            var item = new object[] { "arg1", "arg2" };

            await Assert.ThrowsAsync<ArgumentException>(() => collector.AddAsync(item));
        }

        [Fact]
        public async Task AddAsync_SendMessage_WithBothUserIdAndGroupName_UsePriorityOrder()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRMessage>(signalRSenderMock.Object);

            await collector.AddAsync(new SignalRMessage
            {
                UserId = "user1",
                GroupName = "group1",
                Target = "newMessage",
                Arguments = new object[] { "arg1", "arg2" }
            });

            // UserId wins over GroupName when both are set.
            signalRSenderMock.Verify(
                c => c.SendToUser("user1", It.IsAny<SignalRData>()), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = (SignalRData)signalRSenderMock.Invocations[0].Arguments[1];
            Assert.Equal("newMessage", actualData.Target);
            Assert.Equal("arg1", actualData.Arguments[0]);
            Assert.Equal("arg2", actualData.Arguments[1]);
        }

        // FIX: renamed from AddAsync_WithConnectionId_CallsSendToUser — the test
        // verifies SendToConnection, not SendToUser.
        [Fact]
        public async Task AddAsync_WithConnectionId_CallsSendToConnection()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRMessage>(signalRSenderMock.Object);

            await collector.AddAsync(new SignalRMessage
            {
                ConnectionId = "connection1",
                Target = "newMessage",
                Arguments = new object[] { "arg1", "arg2" }
            });

            signalRSenderMock.Verify(
                c => c.SendToConnection("connection1", It.IsAny<SignalRData>()), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = (SignalRData)signalRSenderMock.Invocations[0].Arguments[1];
            Assert.Equal("newMessage", actualData.Target);
            Assert.Equal("arg1", actualData.Arguments[0]);
            Assert.Equal("arg2", actualData.Arguments[1]);
        }

        [Fact]
        public async Task AddAsync_WithConnectionId_CallsAddConnectionToGroup()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRGroupAction>(signalRSenderMock.Object);
            var action = new SignalRGroupAction
            {
                ConnectionId = "connection1",
                GroupName = "group1",
                Action = GroupAction.Add
            };

            await collector.AddAsync(action);

            // FIX: verify the concrete action (as every sibling test does) instead
            // of It.IsAny, which would have matched any argument.
            signalRSenderMock.Verify(
                c => c.AddConnectionToGroup(action), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = signalRSenderMock.Invocations[0].Arguments[0];
            Assert.Equal(action, actualData);
        }

        [Fact]
        public async Task AddAsync_WithConnectionId_CallsRemoveConnectionFromGroup()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRGroupAction>(signalRSenderMock.Object);
            var action = new SignalRGroupAction
            {
                ConnectionId = "connection1",
                GroupName = "group1",
                Action = GroupAction.Remove
            };

            await collector.AddAsync(action);

            signalRSenderMock.Verify(
                c => c.RemoveConnectionFromGroup(action), Times.Once);
            signalRSenderMock.VerifyNoOtherCalls();
            var actualData = signalRSenderMock.Invocations[0].Arguments[0];
            Assert.Equal(action, actualData);
        }

        [Fact]
        public async Task AddAsync_GroupOperation_WithoutParametersThrowException()
        {
            var signalRSenderMock = new Mock<IAzureSignalRSender>();
            var collector = new SignalRAsyncCollector<SignalRGroupAction>(signalRSenderMock.Object);
            var item = new SignalRGroupAction
            {
                GroupName = "group1",
                Action = GroupAction.Add
            };

            await Assert.ThrowsAsync<ArgumentException>(() => collector.AddAsync(item));
        }
    }
}
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using Vevo;
using Vevo.Domain;
using Vevo.Domain.Users;
using Vevo.Shared.DataAccess;
using Vevo.WebAppLib;
using Vevo.WebUI.Ajax;
using Vevo.WebUI.Users;

/// <summary>
/// Admin grid for maintaining the country list: add, edit, enable/disable,
/// delete and reset countries via <c>DataAccessContext.CountryRepository</c>.
/// </summary>
public partial class AdminAdvanced_MainControls_CountryList : AdminAdvancedBaseUserControl
{
    private bool _emptyRow = false;
    // Grid column indices used to hide columns for read-only admins.
    private int _checkBoxColumn = 0;
    private int _editColumn = 4; // FIX: renamed from the misspelled "_editColunm" (private field, no external callers)

    // True when the grid is showing only the invisible placeholder row
    // (the data source was empty and a dummy row was injected).
    private bool IsContainingOnlyEmptyRow
    {
        get { return _emptyRow; }
        set { _emptyRow = value; }
    }

    protected void uxGrid_Sorting( object sender, GridViewSortEventArgs e )
    {
        GridHelper.SelectSorting( e.SortExpression );
        RefreshGrid();
    }

    // Sort-state helper, persisted in ViewState; default sort is CommonName.
    private GridViewHelper GridHelper
    {
        get
        {
            if (ViewState["GridHelper"] == null)
                ViewState["GridHelper"] = new GridViewHelper( uxGrid, "CommonName" );
            return (GridViewHelper) ViewState["GridHelper"];
        }
    }

    // Country code of the row currently being edited (empty when none).
    private string CurrentCountryCode
    {
        get
        {
            if (ViewState["CountryCode"] == null)
                return string.Empty;
            else
                return ViewState["CountryCode"].ToString();
        }
        set { ViewState["CountryCode"] = value; }
    }

    // Puts keyboard focus in the footer's "common name" textbox after Add is shown.
    private void SetFooterRowFocus()
    {
        Control textBox = uxGrid.FooterRow.FindControl( "uxCommonNameText" );
        AjaxUtilities.GetScriptManager( this ).SetFocus( textBox );
    }

    // Injects a placeholder row so the grid still renders (header/footer) when empty.
    private void CreateDummyRow( IList<Country> countryList )
    {
        Country country = new Country();
        country.CommonName = "Dummy";
        country.Enabled = true;
        countryList.Add( country );
    }

    protected void PopulateControls()
    {
        if (IsAdminModifiable())
        {
            DeleteVisible( true );
        }
        else
        {
            uxAddButton.Visible = false;
            DeleteVisible( false );
        }
        RefreshGrid();
    }

    // Shows/hides the bulk-action buttons and rewires the confirmation popups.
    // In non-Normal (test) mode the popups target dummy buttons so automated
    // tests are not blocked by modal confirmations.
    private void DeleteVisible( bool value )
    {
        uxDeleteButton.Visible = value;
        uxEnabledButton.Visible = value;
        uxDisableButton.Visible = value;
        uxResetButton.Visible = value;
        if (value)
        {
            if (AdminConfig.CurrentTestMode == AdminConfig.TestMode.Normal)
            {
                uxDeleteConfirmButton.TargetControlID = "uxDeleteButton";
                uxConfirmModalPopup.TargetControlID = "uxDeleteButton";

                uxResetConfirmButton.TargetControlID = "uxResetButton";
                uxReSetConfirmModalPopup.TargetControlID = "uxResetButton";
            }
            else
            {
                uxDeleteConfirmButton.TargetControlID = "uxDummyButton";
                uxConfirmModalPopup.TargetControlID = "uxDummyButton";

                uxResetConfirmButton.TargetControlID = "uxResetDummyButton";
                uxReSetConfirmModalPopup.TargetControlID = "uxResetDummyButton";
            }
        }
        else
        {
            uxDeleteConfirmButton.TargetControlID = "uxDummyButton";
            uxConfirmModalPopup.TargetControlID = "uxDummyButton";
        }
    }

    protected void uxGrid_DataBound( object sender, EventArgs e )
    {
    }

    // Deletes every checked row, then rebinds.
    protected void uxDeleteButton_Click( object sender, EventArgs e )
    {
        bool deleted = false;
        foreach (GridViewRow row in uxGrid.Rows)
        {
            CheckBox deleteCheck = (CheckBox) row.FindControl( "uxCheck" );
            if (deleteCheck != null && deleteCheck.Checked)
            {
                string countryCode = uxGrid.DataKeys[row.RowIndex]["CountryCode"].ToString();
                DataAccessContext.CountryRepository.Delete( countryCode );
                deleted = true;
            }
        }
        uxGrid.EditIndex = -1;
        if (deleted)
        {
            uxMessage.DisplayMessage( Resources.CustomerMessages.DeleteSuccess );
        }
        RefreshGrid();
    }

    // Enables every checked row, then rebinds.
    protected void uxEnabledButton_Click( object sender, EventArgs e )
    {
        bool enabled = false;
        foreach (GridViewRow row in uxGrid.Rows)
        {
            CheckBox deleteCheck = (CheckBox) row.FindControl( "uxCheck" );
            if (deleteCheck != null && deleteCheck.Checked)
            {
                string countryCode = uxGrid.DataKeys[row.RowIndex]["CountryCode"].ToString();
                Country country = DataAccessContext.CountryRepository.GetOne( countryCode );
                country.Enabled = true;
                DataAccessContext.CountryRepository.Update( country );
                enabled = true;
            }
        }
        uxGrid.EditIndex = -1;
        if (enabled)
        {
            uxMessage.DisplayMessage( Resources.CustomerMessages.EnabledSuccess );
        }
        RefreshGrid();
    }

    // Disables every checked row, then rebinds.
    protected void uxDisableButton_Click( object sender, EventArgs e )
    {
        bool enabled = false;
        foreach (GridViewRow row in uxGrid.Rows)
        {
            CheckBox deleteCheck = (CheckBox) row.FindControl( "uxCheck" );
            if (deleteCheck != null && deleteCheck.Checked)
            {
                string countryCode = uxGrid.DataKeys[row.RowIndex]["CountryCode"].ToString();
                Country country = DataAccessContext.CountryRepository.GetOne( countryCode );
                country.Enabled = false;
                DataAccessContext.CountryRepository.Update( country );
                enabled = true;
            }
        }
        uxGrid.EditIndex = -1;
        if (enabled)
        {
            uxMessage.DisplayMessage( Resources.CustomerMessages.DisableSuccess );
        }
        RefreshGrid();
    }

    protected void Page_Load( object sender, EventArgs e )
    {
    }

    // Bind on first render only; postbacks rebind via the event handlers.
    protected void Page_PreRender( object sender, EventArgs e )
    {
        if (!MainContext.IsPostBack)
            PopulateControls();
    }

    protected void uxResetButton_Click( object sender, EventArgs e )
    {
        ResetCountryData();
        RefreshGrid();
    }

    // Switches the grid into "add" mode: shows the footer entry row.
    protected void uxAddButton_Click( object sender, EventArgs e )
    {
        uxGrid.EditIndex = -1;
        uxGrid.ShowFooter = true;
        uxGrid.ShowHeader = true;
        RefreshGrid();
        uxAddButton.Visible = false;
        SetFooterRowFocus();
    }

    // Fetches all countries (enabled and disabled) in the current sort order.
    private IList<Country> CreateSourceList()
    {
        IList<Country> countryList = DataAccessContext.CountryRepository.GetAll(
            BoolFilter.ShowAll, GridHelper.GetFullSortText() );
        IList<Country> countryListSource = new List<Country>();
        for (int i = 0; i < countryList.Count; i++)
        {
            countryListSource.Add( countryList[i] );
        }
        return countryListSource;
    }

    // Rebinds the grid, hiding the dummy row when the source is empty and
    // tying Enter-key submission of the footer textboxes to the Add button.
    private void RefreshGrid()
    {
        IList<Country> countryList = CreateSourceList();
        if (countryList.Count == 0)
        {
            IsContainingOnlyEmptyRow = true;
            CreateDummyRow( countryList );
        }
        else
        {
            IsContainingOnlyEmptyRow = false;
            uxGrid.ShowHeader = true;
        }
        uxGrid.DataSource = countryList;
        uxGrid.DataBind();
        if (IsContainingOnlyEmptyRow)
        {
            uxGrid.Rows[0].Visible = false;
            uxGrid.Rows[0].Controls.Clear();
        }
        RefreshDeleteButton();
        if (uxGrid.ShowFooter)
        {
            Control commonNameText = uxGrid.FooterRow.FindControl( "uxCommonNameText" );
            Control countryCodeText = uxGrid.FooterRow.FindControl( "uxCountryCodeText" );
            Control addButton = uxGrid.FooterRow.FindControl( "uxAddButton" );
            WebUtilities.TieButton( this.Page, commonNameText, addButton );
            WebUtilities.TieButton( this.Page, countryCodeText, addButton );
        }
    }

    // Hides bulk buttons and the checkbox/edit columns for read-only admins.
    private void RefreshDeleteButton()
    {
        if (IsAdminModifiable())
        {
            if (IsContainingOnlyEmptyRow)
                DeleteVisible( false );
            else
                DeleteVisible( true );
        }
        else
        {
            DeleteVisible( false );
            uxGrid.Columns[_checkBoxColumn].Visible = false;
            uxGrid.Columns[_editColumn].Visible = false;
        }
    }

    private bool IsCountryAlreadyExisted( string countryCode )
    {
        Country country = DataAccessContext.CountryRepository.GetOne( countryCode );
        return !country.IsNull;
    }

    protected void uxGrid_RowEditing( object sender, GridViewEditEventArgs e )
    {
        uxGrid.EditIndex = e.NewEditIndex;
        RefreshGrid();
    }

    protected void uxGrid_CancelingEdit( object sender, GridViewCancelEditEventArgs e )
    {
        uxGrid.EditIndex = -1;
        CurrentCountryCode = "";
        RefreshGrid();
    }

    // Handles footer "Add" (create a new country) and row "Edit"
    // (remember the code being edited for the later update).
    protected void uxGrid_RowCommand( object sender, GridViewCommandEventArgs e )
    {
        if (e.CommandName == "Add")
        {
            try
            {
                string commonName = ((TextBox) uxGrid.FooterRow.FindControl( "uxCommonNameText" )).Text;
                string countryCode = ((TextBox) uxGrid.FooterRow.FindControl( "uxCountryCodeText" )).Text;
                bool enabled = ((CheckBox) uxGrid.FooterRow.FindControl( "uxEnabledCheck" )).Checked;

                Country country = DataAccessContext.CountryRepository.GetOne( countryCode );
                if (country.IsNull)
                {
                    Country newCountry = new Country();
                    newCountry.CountryCode = countryCode;
                    newCountry.CommonName = commonName;
                    // New entries sort last: SortOrder = current count.
                    newCountry.SortOrder = DataAccessContext.CountryRepository.GetAll(
                        BoolFilter.ShowAll, "CommonName" ).Count;
                    newCountry.Enabled = enabled;
                    DataAccessContext.CountryRepository.Create( newCountry );

                    ((TextBox) uxGrid.FooterRow.FindControl( "uxCommonNameText" )).Text = "";
                    ((TextBox) uxGrid.FooterRow.FindControl( "uxCountryCodeText" )).Text = "";
                    uxMessage.DisplayMessage( Resources.CountryListMessages.AddSuccess );
                }
                else
                    // NOTE(review): hard-coded message — consider moving to
                    // Resources.CountryListMessages like the other texts.
                    uxMessage.DisplayError( "Country code can't duplicate." );
            }
            catch (Exception ex)
            {
                // NOTE(review): the original exception is not passed as inner
                // exception, so the root cause is lost — confirm VevoException
                // has an (message, inner) overload before changing.
                string message;
                if (ex.InnerException is DuplicatedPrimaryKeyException)
                    message = Resources.CountryListMessages.AddErrorDuplicated;
                else
                    message = Resources.CountryListMessages.AddError;
                throw new VevoException( message );
            }
            RefreshGrid();
        }
        if (e.CommandName == "Edit")
        {
            try
            {
                CurrentCountryCode = e.CommandArgument.ToString();
            }
            catch (Exception ex)
            {
                uxMessage.DisplayError( ex.Message );
            }
        }
    }

    // Commits an in-row edit; keeps a duplicate-code guard and always cancels
    // the GridView's automatic update (e.Cancel = true) because persistence is manual.
    protected void uxGrid_RowUpdating( object sender, GridViewUpdateEventArgs e )
    {
        try
        {
            string commonName = ((TextBox) uxGrid.Rows[e.RowIndex].FindControl( "uxCommonNameText" )).Text;
            string countryCode = ((TextBox) uxGrid.Rows[e.RowIndex].FindControl( "uxCountryCodeText" )).Text;
            bool enabled = ((CheckBox) uxGrid.Rows[e.RowIndex].FindControl( "uxEnabledCheck" )).Checked;

            if (!String.IsNullOrEmpty( CurrentCountryCode ))
            {
                if (CurrentCountryCode == countryCode || !IsCountryAlreadyExisted( countryCode ))
                {
                    Country country = DataAccessContext.CountryRepository.GetOne( CurrentCountryCode );
                    country.CommonName = commonName;
                    country.Enabled = enabled;
                    DataAccessContext.CountryRepository.Update( country, countryCode );
                    uxMessage.DisplayMessage( Resources.CountryListMessages.UpdateSuccess );
                }
                else
                    uxMessage.DisplayError( Resources.CountryListMessages.UpdateErrorDuplicated );
            }

            // End editing
            uxGrid.EditIndex = -1;
            CurrentCountryCode = "";
            RefreshGrid();
        }
        catch (Exception ex)
        {
            // NOTE(review): the Add path throws VevoException while this path
            // throws ApplicationException — likely should be unified.
            string message;
            if (ex.InnerException is DuplicatedPrimaryKeyException)
                message = Resources.CountryListMessages.UpdateErrorDuplicated;
            else
                message = Resources.CountryListMessages.UpdateError;
            throw new ApplicationException( message );
        }
        finally
        {
            // Avoid calling Update() automatically by GridView
            e.Cancel = true;
        }
    }

    // Restores the built-in country list; failures are shown, not rethrown.
    private void ResetCountryData()
    {
        try
        {
            AddressUtilities.RestoreCountryCode();
            uxMessage.DisplayMessage( "Reset country list successfully." );
        }
        catch (Exception ex)
        {
            uxMessage.DisplayError( ex.Message );
        }
    }
}
using Signum.Utilities.DataStructures;
using System.Collections.ObjectModel;
using Signum.Utilities.Reflection;

namespace Signum.Engine.Linq;

/// <summary>
/// An extended expression comparer that understands the custom
/// <see cref="DbExpression"/> node types used by the LINQ provider.
/// Table aliases are compared through a scoped alias map so that two trees
/// that differ only in alias identity can still compare equal.
/// </summary>
internal class DbExpressionComparer : ExpressionComparer
{
    // Maps aliases of the left tree to the corresponding aliases of the right
    // tree; nested scopes are opened per SELECT / JOIN / projection.
    ScopedDictionary<Alias, Alias>? aliasMap;

    /// <summary>
    /// Opens a nested alias scope; disposing the result restores the previous scope.
    /// </summary>
    protected IDisposable AliasScope()
    {
        var saved = aliasMap;
        aliasMap = new ScopedDictionary<Alias, Alias>(aliasMap);
        return new Disposable(() => aliasMap = saved);
    }

    protected DbExpressionComparer(ScopedDictionary<ParameterExpression, ParameterExpression>? parameterScope, ScopedDictionary<Alias, Alias>? aliasScope, bool checkParameterNames)
        : base(parameterScope, checkParameterNames)
    {
        this.aliasMap = aliasScope;
    }

    /// <summary>
    /// Structural equality of two expression trees, including DbExpression nodes.
    /// </summary>
    public static bool AreEqual(Expression? a, Expression? b, ScopedDictionary<ParameterExpression, ParameterExpression>? parameterScope = null, ScopedDictionary<Alias, Alias>? aliasScope = null, bool checkParameterNames = false)
    {
        return new DbExpressionComparer(parameterScope, aliasScope, checkParameterNames).Compare(a, b);
    }

    protected override bool Compare(Expression? a, Expression? b)
    {
        bool result = ComparePrivate(a, b);

        if (result == false)
            result = !!result; //Breakpoint here to check the first offender

        return result;
    }

    private bool ComparePrivate(Expression? a, Expression? b)
    {
        if (a == b)
            return true;
        if (a == null || b == null)
            return false;
        if (a.NodeType != b.NodeType)
            return false;
        if (a.Type != b.Type)
            return false;

        // Plain LINQ nodes are handled by the base ExpressionComparer.
        if (!(a is DbExpression))
            return base.Compare(a, b);

        if (((DbExpression)a).DbNodeType != ((DbExpression)b).DbNodeType)
            return false;

        return ((DbExpression)a).DbNodeType switch
        {
            DbExpressionType.Table => CompareTable((TableExpression)a, (TableExpression)b),
            DbExpressionType.Column => CompareColumn((ColumnExpression)a, (ColumnExpression)b),
            DbExpressionType.Select => CompareSelect((SelectExpression)a, (SelectExpression)b),
            DbExpressionType.Join => CompareJoin((JoinExpression)a, (JoinExpression)b),
            DbExpressionType.SetOperator => CompareSetOperator((SetOperatorExpression)a, (SetOperatorExpression)b),
            DbExpressionType.Projection => CompareProjection((ProjectionExpression)a, (ProjectionExpression)b),
            DbExpressionType.ChildProjection => CompareChildProjection((ChildProjectionExpression)a, (ChildProjectionExpression)b),
            DbExpressionType.Aggregate => CompareAggregate((AggregateExpression)a, (AggregateExpression)b),
            DbExpressionType.AggregateRequest => CompareAggregateSubquery((AggregateRequestsExpression)a, (AggregateRequestsExpression)b),
            DbExpressionType.SqlCast => CompareSqlCast((SqlCastExpression)a, (SqlCastExpression)b),
            DbExpressionType.SqlFunction => CompareSqlFunction((SqlFunctionExpression)a, (SqlFunctionExpression)b),
            DbExpressionType.SqlTableValuedFunction => CompareTableValuedSqlFunction((SqlTableValuedFunctionExpression)a, (SqlTableValuedFunctionExpression)b),
            DbExpressionType.SqlConstant => CompareSqlConstant((SqlConstantExpression)a, (SqlConstantExpression)b),
            DbExpressionType.SqlLiteral => CompareSqlLiteral((SqlLiteralExpression)a, (SqlLiteralExpression)b),
            DbExpressionType.Case => CompareCase((CaseExpression)a, (CaseExpression)b),
            DbExpressionType.RowNumber => CompareRowNumber((RowNumberExpression)a, (RowNumberExpression)b),
            DbExpressionType.Like => CompareLike((LikeExpression)a, (LikeExpression)b),
            DbExpressionType.Scalar or DbExpressionType.Exists or DbExpressionType.In => CompareSubquery((SubqueryExpression)a, (SubqueryExpression)b),
            DbExpressionType.IsNull => CompareIsNull((IsNullExpression)a, (IsNullExpression)b),
            DbExpressionType.IsNotNull => CompareIsNotNull((IsNotNullExpression)a, (IsNotNullExpression)b),
            DbExpressionType.Delete => CompareDelete((DeleteExpression)a, (DeleteExpression)b),
            DbExpressionType.Update => CompareUpdate((UpdateExpression)a, (UpdateExpression)b),
            DbExpressionType.InsertSelect => CompareInsertSelect((InsertSelectExpression)a, (InsertSelectExpression)b),
            DbExpressionType.CommandAggregate => CompareCommandAggregate((CommandAggregateExpression)a, (CommandAggregateExpression)b),
            DbExpressionType.Entity => CompareEntityInit((EntityExpression)a, (EntityExpression)b),
            DbExpressionType.EmbeddedInit => CompareEmbeddedFieldInit((EmbeddedEntityExpression)a, (EmbeddedEntityExpression)b),
            DbExpressionType.MixinInit => CompareMixinFieldInit((MixinEntityExpression)a, (MixinEntityExpression)b),
            DbExpressionType.ImplementedBy => CompareImplementedBy((ImplementedByExpression)a, (ImplementedByExpression)b),
            DbExpressionType.ImplementedByAll => CompareImplementedByAll((ImplementedByAllExpression)a, (ImplementedByAllExpression)b),
            DbExpressionType.LiteReference => CompareLiteReference((LiteReferenceExpression)a, (LiteReferenceExpression)b),
            DbExpressionType.LiteValue => CompareLiteValue((LiteValueExpression)a, (LiteValueExpression)b),
            DbExpressionType.TypeEntity => CompareTypeFieldInit((TypeEntityExpression)a, (TypeEntityExpression)b),
            DbExpressionType.TypeImplementedBy => CompareTypeImplementedBy((TypeImplementedByExpression)a, (TypeImplementedByExpression)b),
            DbExpressionType.TypeImplementedByAll => CompareTypeImplementedByAll((TypeImplementedByAllExpression)a, (TypeImplementedByAllExpression)b),
            DbExpressionType.MList => CompareMList((MListExpression)a, (MListExpression)b),
            DbExpressionType.MListElement => CompareMListElement((MListElementExpression)a, (MListElementExpression)b),
            DbExpressionType.PrimaryKey => ComparePrimaryKey((PrimaryKeyExpression)a, (PrimaryKeyExpression)b),
            _ => throw new InvalidOperationException("Unexpected " + ((DbExpression)a).DbNodeType),
        };
    }

    protected virtual bool CompareTable(TableExpression a, TableExpression b)
    {
        return object.Equals(a.Name, b.Name);
    }

    protected virtual bool CompareColumn(ColumnExpression a, ColumnExpression b)
    {
        return CompareAlias(a.Alias, b.Alias) && a.Name == b.Name;
    }

    /// <summary>
    /// Aliases are equal when identical, or when the current alias scope maps
    /// the left alias to the right one.
    /// </summary>
    protected virtual bool CompareAlias(Alias? a, Alias? b)
    {
        if (a == null && b == null)
            return true;
        if (a == null || b == null)
            return false;

        if (aliasMap != null)
        {
            if (aliasMap.TryGetValue(a, out Alias? mapped))
                return mapped == b;
        }
        return a == b;
    }

    protected virtual bool CompareSelect(SelectExpression a, SelectExpression b)
    {
        if (!Compare(a.From, b.From))
            return false;

        // Everything inside the SELECT may reference the FROM aliases.
        using (AliasScope())
        {
            MapAliases(a.From!, b.From!);

            return Compare(a.Where, b.Where)
                && CompareList(a.OrderBy, b.OrderBy, CompareOrder)
                && CompareList(a.GroupBy, b.GroupBy, Compare)
                && a.IsDistinct == b.IsDistinct
                && CompareColumnDeclarations(a.Columns, b.Columns);
        }
    }

    /// <summary>
    /// Pairs up the known aliases of two (already equal) sources positionally.
    /// </summary>
    protected virtual void MapAliases(SourceExpression sourceA, SourceExpression sourceB)
    {
        for (int i = 0, n = sourceA.KnownAliases.Length; i < n; i++)
        {
            aliasMap!.Add(sourceA.KnownAliases[i], sourceB.KnownAliases[i]);
        }
    }

    protected virtual bool CompareOrder(OrderExpression a, OrderExpression b)
    {
        return a.OrderType == b.OrderType && Compare(a.Expression, b.Expression);
    }

    protected virtual bool CompareColumnDeclarations(ReadOnlyCollection<ColumnDeclaration> a, ReadOnlyCollection<ColumnDeclaration> b)
    {
        if (a == b)
            return true;
        if (a == null || b == null)
            return false;
        if (a.Count != b.Count)
            return false;
        for (int i = 0, n = a.Count; i < n; i++)
        {
            if (!CompareColumnDeclaration(a[i], b[i]))
                return false;
        }
        return true;
    }

    protected virtual bool CompareColumnDeclaration(ColumnDeclaration a, ColumnDeclaration b)
    {
        return a.Name == b.Name && Compare(a.Expression, b.Expression);
    }

    protected virtual bool CompareJoin(JoinExpression a, JoinExpression b)
    {
        if (a.JoinType != b.JoinType)
            return false;

        if (!Compare(a.Left, b.Left))
            return false;

        if (a.JoinType == JoinType.CrossApply || a.JoinType == JoinType.OuterApply)
        {
            // Apply joins: the right side may reference the left side's aliases,
            // so it must be compared inside the alias scope.
            using (AliasScope())
            {
                MapAliases(a.Left, b.Left);

                return Compare(a.Right, b.Right)
                    && Compare(a.Condition, b.Condition);
            }
        }
        else
        {
            // FIX: the original compared a.Left/b.Left a second time here even
            // though it was already compared above; the redundant call is removed.
            if (!Compare(a.Right, b.Right))
                return false;

            using (AliasScope())
            {
                MapAliases(a.Left, b.Left);
                MapAliases(a.Right, b.Right);

                return Compare(a.Condition, b.Condition);
            }
        }
    }

    protected virtual bool CompareSetOperator(SetOperatorExpression a, SetOperatorExpression b)
    {
        if (a.Operator != b.Operator)
            return false;
        if (!CompareAlias(a.Alias, b.Alias))
            return false;
        if (!Compare(a.Left, b.Left))
            return false;
        if (!Compare(a.Right, b.Right))
            return false;
        return true;
    }

    protected virtual bool CompareProjection(ProjectionExpression a, ProjectionExpression b)
    {
        if (a.UniqueFunction != b.UniqueFunction)
            return false;

        if (!Compare(a.Select, b.Select))
            return false;

        // The projector references the SELECT's aliases.
        using (AliasScope())
        {
            MapAliases(a.Select, b.Select);

            return Compare(a.Projector, b.Projector);
        }
    }

    private bool CompareChildProjection(ChildProjectionExpression a, ChildProjectionExpression b)
    {
        return Compare(a.Projection, b.Projection)
            && Compare(a.OuterKey, b.OuterKey)
            && a.IsLazyMList == b.IsLazyMList;
    }

    protected virtual bool CompareAggregate(AggregateExpression a, AggregateExpression b)
    {
        return a.AggregateFunction == b.AggregateFunction && CompareList(a.Arguments, b.Arguments, Compare);
    }

    protected virtual bool CompareAggregateSubquery(AggregateRequestsExpression a, AggregateRequestsExpression b)
    {
        return Compare(a.Aggregate, b.Aggregate)
            && a.GroupByAlias == b.GroupByAlias;
    }

    protected virtual bool CompareSqlCast(SqlCastExpression a, SqlCastExpression b)
    {
        return a.DbType.Equals(b.DbType)
            && Compare(a.Expression, b.Expression);
    }

    protected virtual bool CompareSqlFunction(SqlFunctionExpression a, SqlFunctionExpression b)
    {
        return a.SqlFunction == b.SqlFunction
            && Compare(a.Object, b.Object)
            && CompareList(a.Arguments, b.Arguments, Compare);
    }

    private bool CompareTableValuedSqlFunction(SqlTableValuedFunctionExpression a, SqlTableValuedFunctionExpression b)
    {
        return a.ViewTable == b.ViewTable
            && CompareAlias(a.Alias, b.Alias)
            && CompareList(a.Arguments, b.Arguments, Compare);
    }

    protected virtual bool CompareSqlConstant(SqlConstantExpression a, SqlConstantExpression b)
    {
        return object.Equals(a.Value, b.Value);
    }

    protected virtual bool CompareSqlLiteral(SqlLiteralExpression a, SqlLiteralExpression b)
    {
        return a.Value == b.Value;
    }

    protected virtual bool CompareCase(CaseExpression a, CaseExpression b)
    {
        return CompareList(a.Whens, b.Whens, CompareWhen)
            && Compare(a.DefaultValue, b.DefaultValue);
    }

    protected virtual bool CompareWhen(When a, When b)
    {
        return Compare(a.Condition, b.Condition)
            && Compare(a.Value, b.Value);
    }

    protected virtual bool CompareRowNumber(RowNumberExpression a, RowNumberExpression b)
    {
        return CompareList(a.OrderBy, b.OrderBy, CompareOrder);
    }

    protected virtual bool CompareLike(LikeExpression a, LikeExpression b)
    {
        return Compare(a.Expression, b.Expression)
            && Compare(a.Pattern, b.Pattern);
    }

    /// <summary>
    /// Dispatches SCALAR / EXISTS / IN subqueries to their specific comparison.
    /// </summary>
    protected virtual bool CompareSubquery(SubqueryExpression a, SubqueryExpression b)
    {
        if (a.NodeType != b.NodeType)
            return false;

        return a.DbNodeType switch
        {
            DbExpressionType.Scalar => CompareScalar((ScalarExpression)a, (ScalarExpression)b),
            DbExpressionType.Exists => CompareExists((ExistsExpression)a, (ExistsExpression)b),
            DbExpressionType.In => CompareIn((InExpression)a, (InExpression)b),
            _ => false,
        };
    }

    protected virtual bool CompareScalar(ScalarExpression a, ScalarExpression b)
    {
        return Compare(a.Select, b.Select);
    }

    protected virtual bool CompareExists(ExistsExpression a, ExistsExpression b)
    {
        return Compare(a.Select, b.Select);
    }

    protected virtual bool CompareIn(InExpression a, InExpression b)
    {
        return Compare(a.Expression, b.Expression)
            && Compare(a.Select, b.Select)
            && CompareValues(a.Values, b.Values);
    }

    protected virtual bool CompareValues(object[]? a, object[]? b)
    {
        if (a == b)
            return true;
        if (a == null || b == null)
            return false;
        return a.SequenceEqual(b);
    }

    protected virtual bool CompareIsNull(IsNullExpression a, IsNullExpression b)
    {
        return Compare(a.Expression, b.Expression);
    }

    protected virtual bool CompareIsNotNull(IsNotNullExpression a, IsNotNullExpression b)
    {
        return Compare(a.Expression, b.Expression);
    }

    protected virtual bool CompareDelete(DeleteExpression a, DeleteExpression b)
    {
        return a.Table == b.Table
            && a.UseHistoryTable == b.UseHistoryTable
            && Compare(a.Source, b.Source)
            && Compare(a.Where, b.Where)
            && a.ReturnRowCount == b.ReturnRowCount;
    }

    protected virtual bool CompareUpdate(UpdateExpression a, UpdateExpression b)
    {
        return a.Table == b.Table
            && a.UseHistoryTable == b.UseHistoryTable
            && CompareList(a.Assigments, b.Assigments, CompareAssigment)
            && Compare(a.Source, b.Source)
            && Compare(a.Where, b.Where)
            && a.ReturnRowCount == b.ReturnRowCount;
    }

    protected virtual bool CompareInsertSelect(InsertSelectExpression a, InsertSelectExpression b)
    {
        return a.Table == b.Table
            && a.UseHistoryTable == b.UseHistoryTable
            && CompareList(a.Assigments, b.Assigments, CompareAssigment)
            && Compare(a.Source, b.Source)
            && a.ReturnRowCount == b.ReturnRowCount;
    }

    protected virtual bool CompareAssigment(ColumnAssignment a, ColumnAssignment b)
    {
        return a.Column == b.Column && Compare(a.Expression, b.Expression);
    }

    protected virtual bool CompareCommandAggregate(CommandAggregateExpression a, CommandAggregateExpression b)
    {
        return CompareList(a.Commands, b.Commands, Compare);
    }

    protected virtual bool CompareEntityInit(EntityExpression a, EntityExpression b)
    {
        return a.Table == b.Table
            && CompareAlias(a.TableAlias, b.TableAlias)
            && Compare(a.ExternalId, b.ExternalId)
            && CompareList(a.Bindings, b.Bindings, CompareFieldBinding)
            && CompareList(a.Mixins, b.Mixins, CompareMixinFieldInit);
    }

    protected virtual bool CompareEmbeddedFieldInit(EmbeddedEntityExpression a, EmbeddedEntityExpression b)
    {
        return Compare(a.HasValue, b.HasValue)
            && a.FieldEmbedded == b.FieldEmbedded
            && CompareList(a.Bindings, b.Bindings, CompareFieldBinding);
    }

    protected virtual bool CompareMixinFieldInit(MixinEntityExpression a, MixinEntityExpression b)
    {
        return a.FieldMixin == b.FieldMixin
            && CompareList(a.Bindings, b.Bindings, CompareFieldBinding);
    }

    protected virtual bool CompareFieldBinding(FieldBinding a, FieldBinding b)
    {
        return ReflectionTools.FieldEquals(a.FieldInfo, b.FieldInfo) && Compare(a.Binding, b.Binding);
    }

    protected virtual bool CompareImplementedBy(ImplementedByExpression a, ImplementedByExpression b)
    {
        return CompareDictionaries(a.Implementations, b.Implementations, Compare);
    }

    protected virtual bool CompareImplementedByAll(ImplementedByAllExpression a, ImplementedByAllExpression b)
    {
        return Compare(a.TypeId, b.TypeId)
            && Compare(a.Id, b.Id);
    }

    protected virtual bool CompareLiteReference(LiteReferenceExpression a, LiteReferenceExpression b)
    {
        return Compare(a.Reference, b.Reference) && Compare(a.CustomToStr, b.CustomToStr);
    }

    protected virtual bool CompareLiteValue(LiteValueExpression a, LiteValueExpression b)
    {
        return Compare(a.Id, b.Id)
            && Compare(a.ToStr, b.ToStr)
            && Compare(a.TypeId, b.TypeId);
    }

    protected virtual bool CompareTypeFieldInit(TypeEntityExpression a, TypeEntityExpression b)
    {
        return a.TypeValue == b.TypeValue
            && Compare(a.ExternalId, b.ExternalId);
    }

    protected virtual bool CompareTypeImplementedBy(TypeImplementedByExpression a, TypeImplementedByExpression b)
    {
        return CompareDictionaries(a.TypeImplementations, b.TypeImplementations, Compare);
    }

    protected virtual bool CompareTypeImplementedByAll(TypeImplementedByAllExpression a, TypeImplementedByAllExpression b)
    {
        return Compare(a.TypeColumn, b.TypeColumn);
    }

    protected virtual bool CompareMList(MListExpression a, MListExpression b)
    {
        return a.TableMList == b.TableMList
            && Compare(a.BackID, b.BackID);
    }

    // NOTE(review): this overload is never reached from ComparePrivate's switch,
    // which has no DbExpressionType.AdditionalField case — confirm whether
    // AdditionalFieldExpression nodes should be dispatched here.
    protected virtual bool CompareMList(AdditionalFieldExpression a, AdditionalFieldExpression b)
    {
        return a.Route.Equals(b.Route)
            && Compare(a.BackID, b.BackID);
    }

    protected virtual bool CompareMListElement(MListElementExpression a, MListElementExpression b)
    {
        return a.Table == b.Table
            && Compare(a.RowId, b.RowId)
            && Compare(a.Element, b.Element)
            && Compare(a.Order, b.Order)
            && Compare(a.Parent, b.Parent);
    }

    protected virtual bool ComparePrimaryKey(PrimaryKeyExpression a, PrimaryKeyExpression b)
    {
        return Compare(a.Value, b.Value);
    }

    /// <summary>
    /// Equality comparer suitable for dictionaries keyed by expression trees.
    /// </summary>
    public static new IEqualityComparer<E> GetComparer<E>(bool checkParameterNames) where E : Expression
    {
        return new DbExpressionsEqualityComparer<E>(checkParameterNames);
    }

    class DbExpressionsEqualityComparer<E> : IEqualityComparer<E> where E : Expression
    {
        bool checkParameterNames;

        public DbExpressionsEqualityComparer(bool checkParameterNames)
        {
            this.checkParameterNames = checkParameterNames;
        }

        public bool Equals(E? x, E? y)
        {
            return DbExpressionComparer.AreEqual(x, y, checkParameterNames: this.checkParameterNames);
        }

        public int GetHashCode(E obj)
        {
            // Cheap hash: type + node kind + a name-based discriminator.
            return obj.Type.GetHashCode() ^ obj.NodeType.GetHashCode() ^ SpacialHash(obj);
        }

        private static int SpacialHash(Expression obj)
        {
            if (obj is MethodCallExpression mce)
                return mce.Method.Name.GetHashCode();

            if (obj is MemberExpression me)
                return me.Member.Name.GetHashCode();

            return 0;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace VanillaJq.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                // NOTE(review): Dictionary.Add throws if two registered action
                // samples share a media type — confirm registrations are unique.
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters",
            Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }
            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <see cref="TextSample"/> on success, an <see cref="InvalidSample"/> on failure.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // NOTE(review): blocking .Wait() on an async write — the
                    // stock HelpPage template does this; kept as-is.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    // NOTE(review): reader is not disposed; harmless here since
                    // ms is disposed in the finally block.
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Flattens an AggregateException to its first inner exception so the
        // error message shown on the help page is meaningful.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter supports a direction when it can read (request) or
        // write (response) the given type.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Enumerates registered action samples matching the controller/action
        // (case-insensitive), the parameter-name set (or the "*" wildcard) and
        // the requested direction.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped in TextSample so the help page renders them
        // as sample text rather than as an object.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
//
// (C) Copyright 2003-2011 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
using System;
using System.Collections.Generic;
using System.Text;
using System.Drawing;
using Autodesk.Revit.DB;
using System.Windows.Forms;

using Point = System.Drawing.Point;

namespace Revit.SDK.Samples.NewHostedSweep.CS
{
    /// <summary>
    /// Virtual track-ball helper: maps mouse and keyboard input on a 2D canvas
    /// into Revit rotation and scale transforms.
    /// </summary>
    public class TrackBall
    {
        /// <summary>
        /// Canvas width in pixels.
        /// </summary>
        private float m_width;

        /// <summary>
        /// Canvas height in pixels.
        /// </summary>
        private float m_height;

        /// <summary>
        /// Mouse position recorded at the previous event (canvas space).
        /// </summary>
        private Point m_lastPoint2D;

        /// <summary>
        /// Projection of the previous mouse position onto the track ball.
        /// </summary>
        private XYZ m_lastPoint3D;

        /// <summary>
        /// Rotation produced by the most recent tracking step.
        /// </summary>
        private Transform m_rotationResult = Transform.Identity;

        /// <summary>
        /// Scale factor produced by the most recent zoom step.
        /// </summary>
        private double m_scaleResult;

        /// <summary>
        /// Current rotation transform.
        /// </summary>
        public Transform Rotation
        {
            get { return m_rotationResult; }
            set { m_rotationResult = value; }
        }

        /// <summary>
        /// Current scale transform.
        /// </summary>
        public double Scale
        {
            get { return m_scaleResult; }
            set { m_scaleResult = value; }
        }

        /// <summary>
        /// Projects a 2D canvas point onto the virtual track ball
        /// (a unit sphere blended with a hyperbolic sheet near the rim).
        /// </summary>
        /// <param name="width">Canvas width</param>
        /// <param name="height">Canvas height</param>
        /// <param name="point">2D point</param>
        /// <returns>Projected point on the track ball</returns>
        private XYZ ProjectToTrackball(double width, double height, Point point)
        {
            // Map the canvas to [-1, 1] x [-1, 1] with +Y pointing up.
            double px = point.X / (width / 2) - 1;
            double py = 1 - point.Y / (height / 2);

            double dist = Math.Sqrt(px * px + py * py);
            double pz;
            if (dist < 0.70710678118654752440)
            {
                // Inside the sphere: lift the point onto the sphere surface.
                pz = Math.Sqrt(1 - dist * dist);
            }
            else
            {
                // Near the rim: fall back to the hyperbola z = (1/sqrt(2))^2 / d.
                double half = 1 / 1.41421356237309504880;
                pz = half * half / dist;
            }
            return new XYZ(px, py, pz);
        }

        /// <summary>
        /// Derives the rotation transform from the previous and current canvas points.
        /// </summary>
        /// <param name="currentPosition">2D point in canvas</param>
        private void Track(Point currentPosition)
        {
            XYZ current3D = ProjectToTrackball(m_width, m_height, currentPosition);
            XYZ axis = m_lastPoint3D.CrossProduct(current3D);
            if (axis.GetLength() == 0)
            {
                // Parallel directions yield no usable rotation axis.
                return;
            }

            double angle = m_lastPoint3D.AngleTo(current3D);
            m_rotationResult = Transform.get_Rotation(XYZ.Zero, axis, -angle);
            m_lastPoint3D = current3D;
        }

        /// <summary>
        /// Derives the scale transform from the vertical mouse travel.
        /// </summary>
        /// <param name="currentPosition">2D point in canvas</param>
        private void Zoom(Point currentPosition)
        {
            double deltaY = currentPosition.Y - m_lastPoint2D.Y;
            // e^(deltaY / 100) gives a smooth, always-positive factor; the 100 is arbitrary.
            m_scaleResult = Math.Exp(deltaY / 100);
        }

        /// <summary>
        /// Mouse down: reset transforms and remember the starting point.
        /// </summary>
        /// <param name="width">Canvas width</param>
        /// <param name="height">Canvas height</param>
        /// <param name="e">Mouse event data</param>
        public void OnMouseDown(float width, float height, MouseEventArgs e)
        {
            m_rotationResult = Transform.Identity;
            m_scaleResult = 1.0;
            m_width = width;
            m_height = height;
            m_lastPoint2D = e.Location;
            m_lastPoint3D = ProjectToTrackball(m_width, m_height, m_lastPoint2D);
        }

        /// <summary>
        /// Mouse move: left button rotates, right button zooms
        /// (rotation wins when both are pressed).
        /// </summary>
        /// <param name="e">Mouse event data</param>
        public void OnMouseMove(MouseEventArgs e)
        {
            Point current = e.Location;
            if (current == m_lastPoint2D)
            {
                // No movement: avoid a degenerate (zero-axis) update.
                return;
            }

            if (e.Button == MouseButtons.Left)
            {
                Track(current);
            }
            else if (e.Button == MouseButtons.Right)
            {
                Zoom(current);
            }

            m_lastPoint2D = current;
        }

        /// <summary>
        /// Arrow keys rotate by a fixed 0.1-radian step about X (up/down)
        /// or Y (left/right).
        /// </summary>
        /// <param name="e">Key event data</param>
        public void OnKeyDown(KeyEventArgs e)
        {
            // Defaults match Keys.Down: +0.1 radians about +X.
            XYZ axis = new XYZ(1.0, 0, 0);
            double angle = 0.1;

            if (e.KeyCode == Keys.Up)
            {
                angle = -angle;
            }
            else if (e.KeyCode == Keys.Left)
            {
                axis = new XYZ(0, 1.0, 0);
                angle = -angle;
            }
            else if (e.KeyCode == Keys.Right)
            {
                axis = new XYZ(0, 1.0, 0);
            }
            // Keys.Down and any other key keep the defaults.

            m_rotationResult = Transform.get_Rotation(XYZ.Zero, axis, angle);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace System.ComponentModel.DataAnnotations
{
    /// <summary>
    /// Cache of <see cref="ValidationAttribute" />s
    /// </summary>
    /// <remarks>
    /// This internal class serves as a cache of validation attributes and [Display] attributes.
    /// It exists both to help performance as well as to abstract away the differences between
    /// Reflection and TypeDescriptor.
    /// </remarks>
    internal class ValidationAttributeStore
    {
        private static readonly ValidationAttributeStore _singleton = new ValidationAttributeStore();
        private readonly Dictionary<Type, TypeStoreItem> _typeStoreItems = new Dictionary<Type, TypeStoreItem>();

        /// <summary>
        /// Gets the singleton <see cref="ValidationAttributeStore" />
        /// </summary>
        internal static ValidationAttributeStore Instance
        {
            get { return _singleton; }
        }

        /// <summary>
        /// Retrieves the type level validation attributes for the given type.
        /// </summary>
        /// <param name="validationContext">The context that describes the type. It cannot be null.</param>
        /// <returns>The collection of validation attributes. It could be empty.</returns>
        internal IEnumerable<ValidationAttribute> GetTypeValidationAttributes(ValidationContext validationContext)
        {
            EnsureValidationContext(validationContext);
            var item = GetTypeStoreItem(validationContext.ObjectType);
            return item.ValidationAttributes;
        }

        /// <summary>
        /// Retrieves the <see cref="DisplayAttribute" /> associated with the given type. It may be null.
        /// </summary>
        /// <param name="validationContext">The context that describes the type. It cannot be null.</param>
        /// <returns>The display attribute instance, if present.</returns>
        internal DisplayAttribute GetTypeDisplayAttribute(ValidationContext validationContext)
        {
            EnsureValidationContext(validationContext);
            var item = GetTypeStoreItem(validationContext.ObjectType);
            return item.DisplayAttribute;
        }

        /// <summary>
        /// Retrieves the set of validation attributes for the property
        /// </summary>
        /// <param name="validationContext">The context that describes the property. It cannot be null.</param>
        /// <returns>The collection of validation attributes. It could be empty.</returns>
        internal IEnumerable<ValidationAttribute> GetPropertyValidationAttributes(ValidationContext validationContext)
        {
            EnsureValidationContext(validationContext);
            var typeItem = GetTypeStoreItem(validationContext.ObjectType);
            var item = typeItem.GetPropertyStoreItem(validationContext.MemberName);
            return item.ValidationAttributes;
        }

        /// <summary>
        /// Retrieves the <see cref="DisplayAttribute" /> associated with the given property
        /// </summary>
        /// <param name="validationContext">The context that describes the property. It cannot be null.</param>
        /// <returns>The display attribute instance, if present.</returns>
        internal DisplayAttribute GetPropertyDisplayAttribute(ValidationContext validationContext)
        {
            EnsureValidationContext(validationContext);
            var typeItem = GetTypeStoreItem(validationContext.ObjectType);
            var item = typeItem.GetPropertyStoreItem(validationContext.MemberName);
            return item.DisplayAttribute;
        }

        /// <summary>
        /// Retrieves the Type of the given property.
        /// </summary>
        /// <param name="validationContext">The context that describes the property. It cannot be null.</param>
        /// <returns>The type of the specified property</returns>
        internal Type GetPropertyType(ValidationContext validationContext)
        {
            EnsureValidationContext(validationContext);
            var typeItem = GetTypeStoreItem(validationContext.ObjectType);
            var item = typeItem.GetPropertyStoreItem(validationContext.MemberName);
            return item.PropertyType;
        }

        /// <summary>
        /// Determines whether or not a given <see cref="ValidationContext" />'s
        /// <see cref="ValidationContext.MemberName" /> references a property on
        /// the <see cref="ValidationContext.ObjectType" />.
        /// </summary>
        /// <param name="validationContext">The <see cref="ValidationContext" /> to check.</param>
        /// <returns><c>true</c> when the <paramref name="validationContext" /> represents a property, <c>false</c> otherwise.</returns>
        internal bool IsPropertyContext(ValidationContext validationContext)
        {
            EnsureValidationContext(validationContext);
            var typeItem = GetTypeStoreItem(validationContext.ObjectType);
            PropertyStoreItem item;
            return typeItem.TryGetPropertyStoreItem(validationContext.MemberName, out item);
        }

        /// <summary>
        /// Retrieves or creates the store item for the given type
        /// </summary>
        /// <param name="type">The type whose store item is needed. It cannot be null</param>
        /// <returns>The type store item. It will not be null.</returns>
        private TypeStoreItem GetTypeStoreItem(Type type)
        {
            if (type == null)
            {
                throw new ArgumentNullException("type");
            }

            lock (_typeStoreItems)
            {
                TypeStoreItem item = null;
                if (!_typeStoreItems.TryGetValue(type, out item))
                {
                    // use CustomAttributeExtensions.GetCustomAttributes() to get inherited attributes as well as direct ones
                    var attributes = CustomAttributeExtensions.GetCustomAttributes(type.GetTypeInfo(), true);
                    item = new TypeStoreItem(type, attributes);
                    _typeStoreItems[type] = item;
                }
                return item;
            }
        }

        /// <summary>
        /// Throws an ArgumentNullException if the validation context is null
        /// </summary>
        /// <param name="validationContext">The context to check</param>
        private static void EnsureValidationContext(ValidationContext validationContext)
        {
            if (validationContext == null)
            {
                throw new ArgumentNullException("validationContext");
            }
        }

        // A property is considered public when either accessor is public.
        internal static bool IsPublic(PropertyInfo p)
        {
            return (p.GetMethod != null && p.GetMethod.IsPublic) || (p.SetMethod != null && p.SetMethod.IsPublic);
        }

        // A property is considered static when either accessor is static.
        internal static bool IsStatic(PropertyInfo p)
        {
            return (p.GetMethod != null && p.GetMethod.IsStatic) || (p.SetMethod != null && p.SetMethod.IsStatic);
        }

        /// <summary>
        /// Private abstract class for all store items
        /// </summary>
        private abstract class StoreItem
        {
            private readonly IEnumerable<ValidationAttribute> _validationAttributes;

            internal StoreItem(IEnumerable<Attribute> attributes)
            {
                _validationAttributes = attributes.OfType<ValidationAttribute>();
                DisplayAttribute = attributes.OfType<DisplayAttribute>().SingleOrDefault();
            }

            internal IEnumerable<ValidationAttribute> ValidationAttributes
            {
                get { return _validationAttributes; }
            }

            // Setter narrowed to private: the attribute is only assigned in the constructor.
            internal DisplayAttribute DisplayAttribute { get; private set; }
        }

        /// <summary>
        /// Private class to store data associated with a type
        /// </summary>
        private class TypeStoreItem : StoreItem
        {
            private readonly object _syncRoot = new object();
            private readonly Type _type;
            private Dictionary<string, PropertyStoreItem> _propertyStoreItems;

            internal TypeStoreItem(Type type, IEnumerable<Attribute> attributes)
                : base(attributes)
            {
                _type = type;
            }

            internal PropertyStoreItem GetPropertyStoreItem(string propertyName)
            {
                PropertyStoreItem item = null;
                if (!TryGetPropertyStoreItem(propertyName, out item))
                {
                    throw new ArgumentException(
                        string.Format(CultureInfo.CurrentCulture, SR.AttributeStore_Unknown_Property, _type.Name, propertyName),
                        "propertyName");
                }
                return item;
            }

            internal bool TryGetPropertyStoreItem(string propertyName, out PropertyStoreItem item)
            {
                if (string.IsNullOrEmpty(propertyName))
                {
                    throw new ArgumentNullException("propertyName");
                }

                // Lazily build the per-property cache; double-checked locking keeps
                // the common read path lock-free once the dictionary exists.
                if (_propertyStoreItems == null)
                {
                    lock (_syncRoot)
                    {
                        if (_propertyStoreItems == null)
                        {
                            _propertyStoreItems = CreatePropertyStoreItems();
                        }
                    }
                }
                return _propertyStoreItems.TryGetValue(propertyName, out item);
            }

            private Dictionary<string, PropertyStoreItem> CreatePropertyStoreItems()
            {
                var propertyStoreItems = new Dictionary<string, PropertyStoreItem>();

                // exclude index properties to match old TypeDescriptor functionality
                var properties = _type.GetRuntimeProperties()
                    .Where(prop => IsPublic(prop) && !prop.GetIndexParameters().Any());
                foreach (PropertyInfo property in properties)
                {
                    // use CustomAttributeExtensions.GetCustomAttributes() to get inherited attributes as well as direct ones
                    var item = new PropertyStoreItem(property.PropertyType, CustomAttributeExtensions.GetCustomAttributes(property, true));
                    propertyStoreItems[property.Name] = item;
                }

                return propertyStoreItems;
            }
        }

        /// <summary>
        /// Private class to store data associated with a property
        /// </summary>
        private class PropertyStoreItem : StoreItem
        {
            private readonly Type _propertyType;

            internal PropertyStoreItem(Type propertyType, IEnumerable<Attribute> attributes)
                : base(attributes)
            {
                _propertyType = propertyType;
            }

            internal Type PropertyType
            {
                get { return _propertyType; }
            }
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/speech/v1beta1/cloud_speech.proto
// Original file comments:
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// NOTE(review): this file is generated by the protocol buffer compiler from
// cloud_speech.proto. Do not hand-edit; regenerate from the .proto instead.
#region Designer generated code

using System;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core;

namespace Google.Cloud.Speech.V1Beta1 {
  /// <summary>
  /// Service that implements Google Cloud Speech API.
  /// </summary>
  public static class Speech
  {
    static readonly string __ServiceName = "google.cloud.speech.v1beta1.Speech";

    // Marshallers serialize/deserialize each protobuf message type on the wire.
    static readonly Marshaller<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest> __Marshaller_SyncRecognizeRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse> __Marshaller_SyncRecognizeResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest> __Marshaller_AsyncRecognizeRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.LongRunning.Operation> __Marshaller_Operation = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.LongRunning.Operation.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest> __Marshaller_StreamingRecognizeRequest = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest.Parser.ParseFrom);
    static readonly Marshaller<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse> __Marshaller_StreamingRecognizeResponse = Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse.Parser.ParseFrom);

    // Method descriptors for the three RPCs exposed by the service.
    static readonly Method<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest, global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse> __Method_SyncRecognize = new Method<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest, global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse>(
        MethodType.Unary,
        __ServiceName,
        "SyncRecognize",
        __Marshaller_SyncRecognizeRequest,
        __Marshaller_SyncRecognizeResponse);

    static readonly Method<global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest, global::Google.LongRunning.Operation> __Method_AsyncRecognize = new Method<global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest, global::Google.LongRunning.Operation>(
        MethodType.Unary,
        __ServiceName,
        "AsyncRecognize",
        __Marshaller_AsyncRecognizeRequest,
        __Marshaller_Operation);

    static readonly Method<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest, global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse> __Method_StreamingRecognize = new Method<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest, global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse>(
        MethodType.DuplexStreaming,
        __ServiceName,
        "StreamingRecognize",
        __Marshaller_StreamingRecognizeRequest,
        __Marshaller_StreamingRecognizeResponse);

    /// <summary>Service descriptor</summary>
    public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
    {
      get { return global::Google.Cloud.Speech.V1Beta1.CloudSpeechReflection.Descriptor.Services[0]; }
    }

    /// <summary>Base class for server-side implementations of Speech</summary>
    public abstract class SpeechBase
    {
      /// <summary>
      /// Perform synchronous speech-recognition: receive results after all audio
      /// has been sent and processed.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse> SyncRecognize(global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Perform asynchronous speech-recognition: receive results via the
      /// google.longrunning.Operations interface. Returns either an
      /// `Operation.error` or an `Operation.response` which contains
      /// an `AsyncRecognizeResponse` message.
      /// </summary>
      public virtual global::System.Threading.Tasks.Task<global::Google.LongRunning.Operation> AsyncRecognize(global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest request, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }

      /// <summary>
      /// Perform bidirectional streaming speech-recognition: receive results while
      /// sending audio. This method is only available via the gRPC API (not REST).
      /// </summary>
      public virtual global::System.Threading.Tasks.Task StreamingRecognize(IAsyncStreamReader<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest> requestStream, IServerStreamWriter<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse> responseStream, ServerCallContext context)
      {
        throw new RpcException(new Status(StatusCode.Unimplemented, ""));
      }
    }

    /// <summary>Client for Speech</summary>
    public class SpeechClient : ClientBase<SpeechClient>
    {
      /// <summary>Creates a new client for Speech</summary>
      /// <param name="channel">The channel to use to make remote calls.</param>
      public SpeechClient(Channel channel) : base(channel)
      {
      }
      /// <summary>Creates a new client for Speech that uses a custom <c>CallInvoker</c>.</summary>
      /// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
      public SpeechClient(CallInvoker callInvoker) : base(callInvoker)
      {
      }
      /// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
      protected SpeechClient() : base()
      {
      }
      /// <summary>Protected constructor to allow creation of configured clients.</summary>
      /// <param name="configuration">The client configuration.</param>
      protected SpeechClient(ClientBaseConfiguration configuration) : base(configuration)
      {
      }

      /// <summary>
      /// Perform synchronous speech-recognition: receive results after all audio
      /// has been sent and processed.
      /// </summary>
      public virtual global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse SyncRecognize(global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return SyncRecognize(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Perform synchronous speech-recognition: receive results after all audio
      /// has been sent and processed.
      /// </summary>
      public virtual global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse SyncRecognize(global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_SyncRecognize, null, options, request);
      }
      /// <summary>
      /// Perform synchronous speech-recognition: receive results after all audio
      /// has been sent and processed.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse> SyncRecognizeAsync(global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return SyncRecognizeAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Perform synchronous speech-recognition: receive results after all audio
      /// has been sent and processed.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.Cloud.Speech.V1Beta1.SyncRecognizeResponse> SyncRecognizeAsync(global::Google.Cloud.Speech.V1Beta1.SyncRecognizeRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_SyncRecognize, null, options, request);
      }
      /// <summary>
      /// Perform asynchronous speech-recognition: receive results via the
      /// google.longrunning.Operations interface. Returns either an
      /// `Operation.error` or an `Operation.response` which contains
      /// an `AsyncRecognizeResponse` message.
      /// </summary>
      public virtual global::Google.LongRunning.Operation AsyncRecognize(global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return AsyncRecognize(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Perform asynchronous speech-recognition: receive results via the
      /// google.longrunning.Operations interface. Returns either an
      /// `Operation.error` or an `Operation.response` which contains
      /// an `AsyncRecognizeResponse` message.
      /// </summary>
      public virtual global::Google.LongRunning.Operation AsyncRecognize(global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest request, CallOptions options)
      {
        return CallInvoker.BlockingUnaryCall(__Method_AsyncRecognize, null, options, request);
      }
      /// <summary>
      /// Perform asynchronous speech-recognition: receive results via the
      /// google.longrunning.Operations interface. Returns either an
      /// `Operation.error` or an `Operation.response` which contains
      /// an `AsyncRecognizeResponse` message.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.LongRunning.Operation> AsyncRecognizeAsync(global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest request, Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return AsyncRecognizeAsync(request, new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Perform asynchronous speech-recognition: receive results via the
      /// google.longrunning.Operations interface. Returns either an
      /// `Operation.error` or an `Operation.response` which contains
      /// an `AsyncRecognizeResponse` message.
      /// </summary>
      public virtual AsyncUnaryCall<global::Google.LongRunning.Operation> AsyncRecognizeAsync(global::Google.Cloud.Speech.V1Beta1.AsyncRecognizeRequest request, CallOptions options)
      {
        return CallInvoker.AsyncUnaryCall(__Method_AsyncRecognize, null, options, request);
      }
      /// <summary>
      /// Perform bidirectional streaming speech-recognition: receive results while
      /// sending audio. This method is only available via the gRPC API (not REST).
      /// </summary>
      public virtual AsyncDuplexStreamingCall<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest, global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse> StreamingRecognize(Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
      {
        return StreamingRecognize(new CallOptions(headers, deadline, cancellationToken));
      }
      /// <summary>
      /// Perform bidirectional streaming speech-recognition: receive results while
      /// sending audio. This method is only available via the gRPC API (not REST).
      /// </summary>
      public virtual AsyncDuplexStreamingCall<global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeRequest, global::Google.Cloud.Speech.V1Beta1.StreamingRecognizeResponse> StreamingRecognize(CallOptions options)
      {
        return CallInvoker.AsyncDuplexStreamingCall(__Method_StreamingRecognize, null, options);
      }
      protected override SpeechClient NewInstance(ClientBaseConfiguration configuration)
      {
        return new SpeechClient(configuration);
      }
    }

    /// <summary>Creates service definition that can be registered with a server</summary>
    public static ServerServiceDefinition BindService(SpeechBase serviceImpl)
    {
      return ServerServiceDefinition.CreateBuilder()
          .AddMethod(__Method_SyncRecognize, serviceImpl.SyncRecognize)
          .AddMethod(__Method_AsyncRecognize, serviceImpl.AsyncRecognize)
          .AddMethod(__Method_StreamingRecognize, serviceImpl.StreamingRecognize).Build();
    }

  }
}
#endregion
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using Azure;
using Azure.Data.Tables;
using Microsoft.Extensions.Logging;
using Orleans.AzureUtils.Utilities;
using Orleans.Reminders.AzureStorage;

namespace Orleans.Runtime.ReminderService
{
    /// <summary>
    /// Azure Table entity describing one reminder registration.
    /// </summary>
    internal class ReminderTableEntry : ITableEntity
    {
        public string GrainReference { get; set; } // Part of RowKey
        public string ReminderName { get; set; } // Part of RowKey
        public string ServiceId { get; set; } // Part of PartitionKey
        public string DeploymentId { get; set; }
        public string StartAt { get; set; }
        public string Period { get; set; }
        public string GrainRefConsistentHash { get; set; } // Part of PartitionKey

        public string PartitionKey { get; set; }
        public string RowKey { get; set; }
        public DateTimeOffset? Timestamp { get; set; }
        public ETag ETag { get; set; }

        /// <summary>Builds the row key "{grainKey}-{reminderName}", sanitized for table storage.</summary>
        public static string ConstructRowKey(GrainReference grainRef, string reminderName)
        {
            var key = string.Format("{0}-{1}", grainRef.ToKeyString(), reminderName);
            return AzureTableUtils.SanitizeTableProperty(key);
        }

        /// <summary>
        /// Returns row-key bounds covering all reminders of <paramref name="grainRef"/>:
        /// lower bound is "{grainKey}-", upper bound uses the character after '-'.
        /// </summary>
        public static (string LowerBound, string UpperBound) ConstructRowKeyBounds(GrainReference grainRef)
        {
            var baseKey = AzureTableUtils.SanitizeTableProperty(grainRef.ToKeyString());
            return (baseKey + '-', baseKey + (char)('-' + 1));
        }

        public static string ConstructPartitionKey(string serviceId, GrainReference grainRef)
        {
            return ConstructPartitionKey(serviceId, grainRef.GetUniformHashCode());
        }

        /// <summary>Builds the partition key "{serviceId}_{hash:X8}".</summary>
        public static string ConstructPartitionKey(string serviceId, uint number)
        {
            // IMPORTANT NOTE: Other code using this return data is very sensitive to format changes,
            // so take great care when making any changes here!!!
            // The fixed-width upper-case hex encoding ("X8") of the uint hash keeps lexicographic
            // string ordering identical to numeric ordering, which the range comparisons in
            // FindReminderEntries(begin, end) depend on.
            return AzureTableUtils.SanitizeTableProperty($"{serviceId}_{number:X8}");
        }

        /// <summary>
        /// Returns partition-key bounds covering every reminder of the service:
        /// lower bound is "{serviceId}_", upper bound uses the character after '_'.
        /// </summary>
        public static (string LowerBound, string UpperBound) ConstructPartitionKeyBounds(string serviceId)
        {
            var baseKey = AzureTableUtils.SanitizeTableProperty(serviceId);
            return (baseKey + '_', baseKey + (char)('_' + 1));
        }

        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("Reminder [");
            sb.Append(" PartitionKey=").Append(PartitionKey);
            sb.Append(" RowKey=").Append(RowKey);
            sb.Append(" GrainReference=").Append(GrainReference);
            sb.Append(" ReminderName=").Append(ReminderName);
            sb.Append(" Deployment=").Append(DeploymentId);
            sb.Append(" ServiceId=").Append(ServiceId);
            sb.Append(" StartAt=").Append(StartAt);
            sb.Append(" Period=").Append(Period);
            sb.Append(" GrainRefConsistentHash=").Append(GrainRefConsistentHash);
            sb.Append("]");
            return sb.ToString();
        }
    }

    /// <summary>
    /// Data-access manager for the Azure Table that stores reminder entries.
    /// </summary>
    internal class RemindersTableManager : AzureTableDataManager<ReminderTableEntry>
    {
        public string ServiceId { get; private set; }
        public string ClusterId { get; private set; }

        /// <summary>
        /// Creates a manager and ensures the underlying table exists; wraps any
        /// storage failure in an <see cref="OrleansException"/>.
        /// </summary>
        public static async Task<RemindersTableManager> GetManager(string serviceId, string clusterId, ILoggerFactory loggerFactory, AzureStorageOperationOptions options)
        {
            var singleton = new RemindersTableManager(serviceId, clusterId, options, loggerFactory);
            try
            {
                singleton.Logger.Info("Creating RemindersTableManager for service id {0} and clusterId {1}.", serviceId, clusterId);
                await singleton.InitTableAsync();
            }
            catch (Exception ex)
            {
                string errorMsg = $"Exception trying to create or connect to the Azure table: {ex.Message}";
                singleton.Logger.Error((int)AzureReminderErrorCode.AzureTable_39, errorMsg, ex);
                throw new OrleansException(errorMsg, ex);
            }
            return singleton;
        }

        private RemindersTableManager(
            string serviceId,
            string clusterId,
            AzureStorageOperationOptions options,
            ILoggerFactory loggerFactory)
            : base(options, loggerFactory.CreateLogger<RemindersTableManager>())
        {
            ClusterId = clusterId;
            ServiceId = serviceId;
        }

        /// <summary>
        /// Finds all reminder entries whose grain hash falls in the ring range (begin, end].
        /// When begin == end the whole ring is returned; when begin > end the range wraps
        /// around, so the query is the union of the two disjoint sub-ranges.
        /// </summary>
        internal async Task<List<(ReminderTableEntry Entity, string ETag)>> FindReminderEntries(uint begin, uint end)
        {
            string sBegin = ReminderTableEntry.ConstructPartitionKey(ServiceId, begin);
            string sEnd = ReminderTableEntry.ConstructPartitionKey(ServiceId, end);
            string query;
            if (begin < end)
            {
                // Query between the specified lower and upper bounds.
                // Note that the lower bound is exclusive and the upper bound is inclusive in the below query.
                query = TableClient.CreateQueryFilter($"(PartitionKey gt {sBegin}) and (PartitionKey le {sEnd})");
            }
            else
            {
                var (partitionKeyLowerBound, partitionKeyUpperBound) = ReminderTableEntry.ConstructPartitionKeyBounds(ServiceId);
                if (begin == end)
                {
                    // Query the entire range
                    query = TableClient.CreateQueryFilter($"(PartitionKey gt {partitionKeyLowerBound}) and (PartitionKey lt {partitionKeyUpperBound})");
                }
                else
                {
                    // (begin > end)
                    // Query wraps around the ends of the range, so the query is the union of two disjunct queries
                    // Include everything outside of the (begin, end] range, which wraps around to become:
                    // [partitionKeyLowerBound, end] OR (begin, partitionKeyUpperBound]
                    Debug.Assert(begin > end);
                    query = TableClient.CreateQueryFilter($"((PartitionKey gt {partitionKeyLowerBound}) and (PartitionKey le {sEnd})) or ((PartitionKey gt {sBegin}) and (PartitionKey lt {partitionKeyUpperBound}))");
                }
            }

            var queryResults = await ReadTableEntriesAndEtagsAsync(query);
            return queryResults.ToList();
        }

        /// <summary>Finds every reminder entry registered for the given grain.</summary>
        internal async Task<List<(ReminderTableEntry Entity, string ETag)>> FindReminderEntries(GrainReference grainRef)
        {
            var partitionKey = ReminderTableEntry.ConstructPartitionKey(ServiceId, grainRef);
            var (rowKeyLowerBound, rowKeyUpperBound) = ReminderTableEntry.ConstructRowKeyBounds(grainRef);
            var query = TableClient.CreateQueryFilter($"(PartitionKey eq {partitionKey}) and ((RowKey gt {rowKeyLowerBound}) and (RowKey le {rowKeyUpperBound}))");
            var queryResults = await ReadTableEntriesAndEtagsAsync(query);
            return queryResults.ToList();
        }

        /// <summary>Reads a single reminder entry (with ETag) by grain and reminder name.</summary>
        internal async Task<(ReminderTableEntry Entity, string ETag)> FindReminderEntry(GrainReference grainRef, string reminderName)
        {
            string partitionKey = ReminderTableEntry.ConstructPartitionKey(ServiceId, grainRef);
            string rowKey = ReminderTableEntry.ConstructRowKey(grainRef, reminderName);
            return await ReadSingleTableEntryAsync(partitionKey, rowKey);
        }

        private Task<List<(ReminderTableEntry Entity, string ETag)>> FindAllReminderEntries()
        {
            // begin == end scans the entire hash ring.
            return FindReminderEntries(0, 0);
        }

        /// <summary>
        /// Inserts or updates the given entry. Returns the new ETag, or null when the
        /// operation failed because of a storage contention (precondition) error.
        /// </summary>
        internal async Task<string> UpsertRow(ReminderTableEntry reminderEntry)
        {
            try
            {
                return await UpsertTableEntryAsync(reminderEntry);
            }
            catch (Exception exc)
            {
                HttpStatusCode httpStatusCode;
                string restStatus;
                if (AzureTableUtils.EvaluateException(exc, out httpStatusCode, out restStatus))
                {
                    if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("UpsertRow failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
                    if (AzureTableUtils.IsContentionError(httpStatusCode)) return null; // null signals contention to the caller
                }
                throw;
            }
        }

        /// <summary>
        /// Deletes the entry if its ETag still matches; returns false on contention,
        /// true on success.
        /// </summary>
        internal async Task<bool> DeleteReminderEntryConditionally(ReminderTableEntry reminderEntry, string eTag)
        {
            try
            {
                await DeleteTableEntryAsync(reminderEntry, eTag);
                return true;
            }
            catch (Exception exc)
            {
                HttpStatusCode httpStatusCode;
                string restStatus;
                if (AzureTableUtils.EvaluateException(exc, out httpStatusCode, out restStatus))
                {
                    if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("DeleteReminderEntryConditionally failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
                    if (AzureTableUtils.IsContentionError(httpStatusCode)) return false;
                }
                throw;
            }
        }

        /// <summary>
        /// Deletes every reminder entry belonging to this service and cluster.
        /// A single bulk delete cannot span partitions, so entries are grouped by
        /// grain hash (one partition per group) and deleted in batches per group.
        /// </summary>
        internal async Task DeleteTableEntries()
        {
            List<(ReminderTableEntry Entity, string ETag)> entries = await FindAllReminderEntries();

            var tasks = new List<Task>();
            var groupedByHash = entries
                .Where(tuple => tuple.Entity.ServiceId.Equals(ServiceId))
                .Where(tuple => tuple.Entity.DeploymentId.Equals(ClusterId)) // delete only entries that belong to our DeploymentId.
                .GroupBy(x => x.Entity.GrainRefConsistentHash).ToDictionary(g => g.Key, g => g.ToList());

            foreach (var entriesPerPartition in groupedByHash.Values)
            {
                foreach (var batch in entriesPerPartition.BatchIEnumerable(this.StoragePolicyOptions.MaxBulkUpdateRows))
                {
                    tasks.Add(DeleteTableEntriesAsync(batch));
                }
            }

            await Task.WhenAll(tasks);
        }
    }
}
// Copyright (c) 2010-2013 SharpDX - Alexandre Mutel // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // Extract from MonoDevelop.TextTemplating engine. // We are only using the tokenizer here and a simplified T4 implementation. // Tokeniser.cs // // Author: // Michael Hutchinson <mhutchinson@novell.com> // // Copyright (c) 2009 Novell, Inc. (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. 
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

using System;

namespace SharpGen.TextTemplating
{
    /// <summary>
    /// Immutable (file, line, column) source position used to report template
    /// parse locations. Lines and columns are 1-based; <see cref="Empty"/> is (-1, -1).
    /// </summary>
    public struct Location : IEquatable<Location>
    {
        public Location(string fileName, int line, int column)
            : this()
        {
            FileName = fileName;
            Column = column;
            Line = line;
        }

        public int Line { get; private set; }

        public int Column { get; private set; }

        public string FileName { get; private set; }

        /// <summary>Sentinel value representing "no location".</summary>
        public static Location Empty
        {
            get { return new Location(null, -1, -1); }
        }

        /// <summary>Returns a new location at the start (column 1) of the next line.</summary>
        public Location AddLine()
        {
            return new Location(this.FileName, this.Line + 1, 1);
        }

        /// <summary>Returns a new location advanced by one column.</summary>
        public Location AddCol()
        {
            return AddCols(1);
        }

        /// <summary>Returns a new location advanced by <paramref name="number"/> columns.</summary>
        public Location AddCols(int number)
        {
            return new Location(this.FileName, this.Line, this.Column + number);
        }

        public override string ToString()
        {
            return string.Format(System.Globalization.CultureInfo.InvariantCulture, "[{0} ({1},{2})]", FileName, Line, Column);
        }

        public bool Equals(Location other)
        {
            return other.Line == Line && other.Column == Column && other.FileName == FileName;
        }
    }

    /// <summary>
    /// Hand-written tokenizer for T4-style text templates: plain content,
    /// &lt;# ... #&gt; blocks, &lt;#= ... #&gt; expressions, &lt;#+ ... #&gt; helpers
    /// and &lt;#@ name attr="value" #&gt; directives.
    /// Call <see cref="Advance"/> repeatedly; after each call the token's kind is in
    /// <see cref="State"/>, its text in <see cref="Value"/>, and its source positions
    /// in <see cref="Location"/> / <see cref="TagStartLocation"/> / <see cref="TagEndLocation"/>.
    /// </summary>
    public class Tokeniser
    {
        string content;                  // full template text being scanned
        int position = 0;                // current scan offset into content
        string value;                    // text of the token produced by the last Advance()
        State nextState = State.Content; // state the scanner will publish on the next Advance()
        Location nextStateLocation;      // source position where the next token starts
        Location nextStateTagStartLocation;

        public Tokeniser(string fileName, string content)
        {
            State = State.Content;
            this.content = content;
            this.Location = this.nextStateLocation = this.nextStateTagStartLocation = new Location(fileName, 1, 1);
        }

        /// <summary>
        /// Moves to the next token. Returns false once EOF has been published;
        /// otherwise publishes the pending state/locations and scans ahead for the
        /// state that will follow it.
        /// </summary>
        public bool Advance()
        {
            value = null;
            State = nextState;
            Location = nextStateLocation;
            TagStartLocation = nextStateTagStartLocation;
            if (nextState == State.EOF)
                return false;
            nextState = GetNextStateAndCurrentValue();
            return true;
        }

        // Dispatches to the scanning routine appropriate for the just-published state.
        State GetNextStateAndCurrentValue()
        {
            switch (State)
            {
                case State.Block:
                case State.Expression:
                case State.Helper:
                    return GetBlockEnd();
                case State.Directive:
                    return NextStateInDirective();
                case State.Content:
                    return NextStateInContent();
                case State.DirectiveName:
                    return GetDirectiveName();
                case State.DirectiveValue:
                    return GetDirectiveValue();
                default:
                    throw new InvalidOperationException("Unexpected state '" + State.ToString() + "'");
            }
        }

        // Scans to the closing "#>" of a block/expression/helper, tracking line/column.
        // A "\#>" sequence does not close the block (the '#' is escaped).
        State GetBlockEnd()
        {
            int start = position;
            for (; position < content.Length; position++)
            {
                char c = content[position];
                nextStateTagStartLocation = nextStateLocation;
                nextStateLocation = nextStateLocation.AddCol();
                if (c == '\r')
                {
                    // Treat CRLF as a single newline.
                    if (position + 1 < content.Length && content[position + 1] == '\n')
                        position++;
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (c == '\n')
                {
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (c == '>' && content[position - 1] == '#' && content[position - 2] != '\\')
                {
                    // value excludes the trailing '#' of the "#>" terminator.
                    value = content.Substring(start, position - start - 1);
                    position++;
                    TagEndLocation = nextStateLocation;

                    //skip newlines directly after blocks, unless they're expressions
                    if (State != State.Expression && (position += IsNewLine()) > 0)
                    {
                        nextStateLocation = nextStateLocation.AddLine();
                    }
                    return State.Content;
                }
            }
            throw new ParserException("Unexpected end of file.", nextStateLocation);
        }

        // Reads a directive's name: the run of letters/digits starting at position.
        State GetDirectiveName()
        {
            int start = position;
            for (; position < content.Length; position++)
            {
                char c = content[position];
                if (!Char.IsLetterOrDigit(c))
                {
                    value = content.Substring(start, position - start);
                    return State.Directive;
                }
                else
                {
                    nextStateLocation = nextStateLocation.AddCol();
                }
            }
            throw new ParserException("Unexpected end of file.", nextStateLocation);
        }

        // Reads a quoted attribute value (single or double quotes); value excludes the quotes.
        State GetDirectiveValue()
        {
            int start = position;
            int delimiter = '\0'; // '\0' until the opening quote is seen; then that quote char
            for (; position < content.Length; position++)
            {
                char c = content[position];
                nextStateLocation = nextStateLocation.AddCol();
                if (c == '\r')
                {
                    if (position + 1 < content.Length && content[position + 1] == '\n')
                        position++;
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (c == '\n')
                    nextStateLocation = nextStateLocation.AddLine();
                if (delimiter == '\0')
                {
                    if (c == '\'' || c == '"')
                    {
                        start = position;
                        delimiter = c;
                    }
                    else if (!Char.IsWhiteSpace(c))
                    {
                        throw new ParserException("Unexpected character '" + c + "'. Expecting attribute value.", nextStateLocation);
                    }
                    continue;
                }
                if (c == delimiter)
                {
                    value = content.Substring(start + 1, position - start - 1);
                    position++;
                    return State.Directive;
                }
            }
            throw new ParserException("Unexpected end of file.", nextStateLocation); ;
        }

        // Scans literal template content until a "<#" opener (or EOF); the char after
        // "<#" selects the next state: '@' directive, '=' expression, '+' helper, else block.
        State NextStateInContent()
        {
            int start = position;
            for (; position < content.Length; position++)
            {
                char c = content[position];
                nextStateTagStartLocation = nextStateLocation;
                nextStateLocation = nextStateLocation.AddCol();
                if (c == '\r')
                {
                    if (position + 1 < content.Length && content[position + 1] == '\n')
                        position++;
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (c == '\n')
                {
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (c == '<' && position + 2 < content.Length && content[position + 1] == '#')
                {
                    TagEndLocation = nextStateLocation;
                    char type = content[position + 2];
                    if (type == '@')
                    {
                        nextStateLocation = nextStateLocation.AddCols(2);
                        value = content.Substring(start, position - start);
                        position += 3;
                        return State.Directive;
                    }
                    else if (type == '=')
                    {
                        nextStateLocation = nextStateLocation.AddCols(2);
                        value = content.Substring(start, position - start);
                        position += 3;
                        return State.Expression;
                    }
                    else if (type == '+')
                    {
                        nextStateLocation = nextStateLocation.AddCols(2);
                        value = content.Substring(start, position - start);
                        position += 3;
                        return State.Helper;
                    }
                    else
                    {
                        value = content.Substring(start, position - start);
                        nextStateLocation = nextStateLocation.AddCol();
                        position += 2;
                        return State.Block;
                    }
                }
            }
            //EOF is only valid when we're in content
            value = content.Substring(start);
            return State.EOF;
        }

        // Returns the number of characters (0-2) in the newline sequence at the
        // current position: "\r", "\n" or "\r\n".
        int IsNewLine()
        {
            int found = 0;
            if (position < content.Length && content[position] == '\r')
            {
                found++;
            }
            if (position + found < content.Length && content[position + found] == '\n')
            {
                found++;
            }
            return found;
        }

        // Scans inside a <#@ ... #> directive: attribute names, '=' separators,
        // quoted values, and the closing "#>".
        State NextStateInDirective()
        {
            for (; position < content.Length; position++)
            {
                char c = content[position];
                if (c == '\r')
                {
                    if (position + 1 < content.Length && content[position + 1] == '\n')
                        position++;
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (c == '\n')
                {
                    nextStateLocation = nextStateLocation.AddLine();
                }
                else if (Char.IsLetter(c))
                {
                    return State.DirectiveName;
                }
                else if (c == '=')
                {
                    nextStateLocation = nextStateLocation.AddCol();
                    position++;
                    return State.DirectiveValue;
                }
                else if (c == '#' && position + 1 < content.Length && content[position + 1] == '>')
                {
                    position += 2;
                    TagEndLocation = nextStateLocation.AddCols(2);
                    nextStateLocation = nextStateLocation.AddCols(3);

                    //skip newlines directly after directives
                    if ((position += IsNewLine()) > 0)
                    {
                        nextStateLocation = nextStateLocation.AddLine();
                    }
                    return State.Content;
                }
                else if (!Char.IsWhiteSpace(c))
                {
                    throw new ParserException("Directive ended unexpectedly with character '" + c + "'", nextStateLocation);
                }
                else
                {
                    nextStateLocation = nextStateLocation.AddCol();
                }
            }
            throw new ParserException("Unexpected end of file.", nextStateLocation);
        }

        /// <summary>Kind of the token produced by the last <see cref="Advance"/>.</summary>
        public State State { get; private set; }

        /// <summary>Current scan offset into <see cref="Content"/>.</summary>
        public int Position
        {
            get { return position; }
        }

        /// <summary>The full template text being tokenized.</summary>
        public string Content
        {
            get { return content; }
        }

        /// <summary>Text of the current token (null immediately after EOF).</summary>
        public string Value
        {
            get { return value; }
        }

        /// <summary>Source position where the current token starts.</summary>
        public Location Location { get; private set; }

        /// <summary>Source position of the current token's opening tag.</summary>
        public Location TagStartLocation { get; private set; }

        /// <summary>Source position just past the current token's closing tag.</summary>
        public Location TagEndLocation { get; private set; }
    }

    /// <summary>Token kinds produced by <see cref="Tokeniser"/>.</summary>
    public enum State
    {
        Content = 0,
        Directive,
        Expression,
        Block,
        Helper,
        DirectiveName,
        DirectiveValue,
        Name,
        EOF
    }

    /// <summary>Thrown by <see cref="Tokeniser"/> on malformed template input,
    /// carrying the source <see cref="Location"/> of the error.</summary>
    public class ParserException : Exception
    {
        public ParserException(string message, Location location)
            : base(message)
        {
            Location = location;
        }

        public Location Location { get; private set; }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Foundation;
using UIKit;
using MonoTouch.Dialog;
using PubNubMessaging.Core;
using CoreGraphics;

namespace PubnubMessagingExample
{
    /// <summary>
    /// Entry screen of the PubNub iOS sample. Builds a MonoTouch.Dialog form for the
    /// connection settings (keys, channels, cipher, UUID, optional proxy) and, once the
    /// input validates, creates a <c>Pubnub</c> client and pushes the subscribe example.
    /// </summary>
    public partial class Pubnub_MessagingMain : DialogViewController
    {
        PubnubProxy proxy = null; // populated only when the proxy switch is enabled
        Pubnub pubnub = null;     // created by ValidateAndInitPubnub on "Launch Example"

        public override void ViewDidAppear (bool animated)
        {
            // This screen never shows a toolbar.
            AppDelegate.navigation.ToolbarHidden = true;
            base.ViewDidAppear (animated);
        }

        public Pubnub_MessagingMain () : base (UITableViewStyle.Grouped, null)
        {
            UIView labelView = new UIView (new CGRect (0, 0, this.View.Bounds.Width, 24));
            int left = 20;
            // iPads get a wider left margin for the header label.
            string hardwareVer = DeviceHardware.Version.ToString ().ToLower ();
            if (hardwareVer.IndexOf ("ipad") >= 0) {
                left = 55;
            }

            labelView.AddSubview (new UILabel (new CGRect (left, 10, this.View.Bounds.Width - left, 24)) {
                Font = UIFont.BoldSystemFontOfSize (16),
                BackgroundColor = UIColor.Clear,
                TextColor = UIColor.FromRGB (76, 86, 108),
                Text = "Basic Settings"
            });

            var headerMultipleChannels = new UILabel (new CGRect (0, 0, this.View.Bounds.Width, 30)) {
                Font = UIFont.SystemFontOfSize (12),
                TextColor = UIColor.Brown,
                BackgroundColor = UIColor.Clear,
                LineBreakMode = UILineBreakMode.WordWrap,
                Lines = 0,
                TextAlignment = UITextAlignment.Center
            };
            headerMultipleChannels.Text = "Enter multiple channel/channelgroup names separated by comma";

            // All key/channel entry fields disable autocapitalization/autocorrection
            // because the values are case-sensitive identifiers.
            EntryElement entrySubscribeKey = new EntryElement ("Subscribe Key", "Enter Subscribe Key", "demo");
            entrySubscribeKey.AutocapitalizationType = UITextAutocapitalizationType.None;
            entrySubscribeKey.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entryPublishKey = new EntryElement ("Publish Key", "Enter Publish Key", "demo");
            entryPublishKey.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryPublishKey.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entrySecretKey = new EntryElement ("Secret Key", "Enter Secret Key", "demo");
            entrySecretKey.AutocapitalizationType = UITextAutocapitalizationType.None;
            entrySecretKey.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entryChannelName = new EntryElement ("Channel(s)", "Enter Channel Name", "");
            entryChannelName.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryChannelName.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entryChannelGroupName = new EntryElement ("ChannelGroup(s)", "Enter ChannelGroup Name", "");
            entryChannelGroupName.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryChannelGroupName.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entryCipher = new EntryElement ("Cipher", "Enter Cipher", "");
            entryCipher.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryCipher.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entryProxyServer = new EntryElement ("Server", "Enter Server", "");
            entryProxyServer.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryProxyServer.AutocorrectionType = UITextAutocorrectionType.No;

            EntryElement entryProxyPort = new EntryElement ("Port", "Enter Port", "");

            EntryElement entryProxyUser = new EntryElement ("Username", "Enter Username", "");
            entryProxyUser.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryProxyUser.AutocorrectionType = UITextAutocorrectionType.No;

            // Last argument true = password (masked) entry.
            EntryElement entryProxyPassword = new EntryElement ("Password", "Enter Password", "", true);

            EntryElement entryCustonUuid = new EntryElement ("CustomUuid", "Enter Custom UUID", "");
            entryCustonUuid.AutocapitalizationType = UITextAutocapitalizationType.None;
            entryCustonUuid.AutocorrectionType = UITextAutocorrectionType.No;

            BooleanElement proxyEnabled = new BooleanElement ("Proxy", false);

            BooleanElement sslEnabled = new BooleanElement ("Enable SSL", false);

            Root = new RootElement ("Pubnub Messaging") {
                new Section (labelView) {
                },
                new Section (headerMultipleChannels) {
                },
                new Section ("Enter Subscribe Key.") {
                    entrySubscribeKey
                },
                new Section ("Enter Publish key.") {
                    entryPublishKey
                },
                new Section ("Enter Secret key.") {
                    entrySecretKey
                },
                new Section () {
                    entryChannelName,
                    sslEnabled
                },
                new Section(){
                    entryChannelGroupName
                },
                new Section ("Enter cipher key for encryption. Leave blank for unencrypted transfer.") {
                    entryCipher
                },
                new Section ("Enter custom UUID or leave blank to use the default UUID") {
                    entryCustonUuid
                },
                new Section () {
                    new RootElement ("Proxy Settings", 0, 0) {
                        new Section () {
                            proxyEnabled
                        },
                        new Section ("Configuration") {
                            entryProxyServer,
                            entryProxyPort,
                            entryProxyUser,
                            entryProxyPassword
                        },
                    }
                },
                new Section () {
                    // Validates all fields, builds the Pubnub client, then pushes the
                    // subscribe screen. Validation failures show a UIAlertView instead.
                    new StyledStringElement ("Launch Example", () => {
                        bool errorFree = true;
                        errorFree = ValidateAndInitPubnub (entryChannelName.Value, entryChannelGroupName.Value, entryCipher.Value,
                            sslEnabled.Value, entryCustonUuid.Value, proxyEnabled.Value, entryProxyPort.Value,
                            entryProxyUser.Value, entryProxyServer.Value, entryProxyPassword.Value,
                            entrySubscribeKey.Value, entryPublishKey.Value, entrySecretKey.Value
                        );
                        if (errorFree) {
                            new Pubnub_MessagingSub (entryChannelName.Value, entryChannelGroupName.Value, entryCipher.Value, sslEnabled.Value, pubnub);
                        }
                    }) {
                        BackgroundColor = UIColor.Blue,
                        TextColor = UIColor.White,
                        Alignment = UITextAlignment.Center
                    },
                },
                /*new Section() {
                    new StyledStringElement ("Launch Speed Test", () => {
                        bool errorFree = true;
                        errorFree = ValidateAndInitPubnub(entryChannelName.Value, entryCipher.Value, sslEnabled.Value, entryCustonUuid.Value,
                            proxyEnabled.Value, entryProxyPort.Value, entryProxyUser.Value, entryProxyServer.Value, entryProxyPassword.Value
                        );
                        if(errorFree)
                        {
                            new Pubnub_MessagingSpeedTest(entryChannelName.Value, entryCipher.Value, sslEnabled.Value, pubnub);
                        }
                    }) {
                        BackgroundColor = UIColor.Blue,
                        TextColor = UIColor.White,
                        Alignment = UITextAlignment.Center
                    },
                }*/
            };
        }

        /// <summary>
        /// Validates the form input and, on success, initializes the <see cref="pubnub"/>
        /// client (and <see cref="proxy"/> when proxying is enabled).
        /// Returns true when everything validated; shows a UIAlertView and returns
        /// false otherwise.
        /// </summary>
        bool ValidateAndInitPubnub (string channelName, string channelGroupName, string cipher, bool ssl, string customUuid,
            bool proxyEnabled, string proxyPort, string proxyUser, string proxyServer, string proxyPass,
            string subscribeKey, string publishKey, string secretKey
        )
        {
            bool errorFree = true;
            // At least one of channel / channel-group is required.
            if (String.IsNullOrWhiteSpace (channelName) && String.IsNullOrWhiteSpace (channelGroupName)) {
                errorFree = false;
                new UIAlertView ("Error!", "Please enter either channel name or channelgroup name or both", null, "OK").Show ();
            }

            if (errorFree) {
                pubnub = new Pubnub (publishKey, subscribeKey, secretKey, cipher, ssl);
                if (!String.IsNullOrWhiteSpace (customUuid.Trim ())) {
                    pubnub.SessionUUID = customUuid.Trim ();
                }
            }

            if ((errorFree) && (proxyEnabled)) {
                int port;
                // Proxy port must parse as an integer in the valid TCP port range.
                if (Int32.TryParse (proxyPort, out port) && ((port >= 1) && (port <= 65535))) {
                    proxy = new PubnubProxy ();
                    proxy.ProxyServer = proxyServer;
                    proxy.ProxyPort = port;
                    proxy.ProxyUserName = proxyUser;
                    proxy.ProxyPassword = proxyPass;
                    try {
                        pubnub.Proxy = proxy;
                    } catch (MissingMemberException mse) {
                        // The client library rejects incomplete proxy settings.
                        errorFree = false;
                        proxyEnabled = false;
                        Console.WriteLine (mse.Message);
                        new UIAlertView ("Error!", "Proxy settings invalid, please re-enter the details.", null, "OK").Show ();
                    }
                } else {
                    errorFree = false;
                    new UIAlertView ("Error!", "Proxy port must be a valid integer between 1 to 65535", null, "OK").Show ();
                }
            }
            return errorFree;
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="BitVector32.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

namespace System.Collections.Specialized {

    using System.Diagnostics;
    using System.Text;
    using System;
    using Microsoft.Win32;

    /// <devdoc>
    ///    <para>Provides a simple light bit vector with easy integer or Boolean access to
    ///       a 32 bit storage.</para>
    /// </devdoc>
    public struct BitVector32 {
        // All 32 bits of state live in this single field.
        private uint data;

        /// <devdoc>
        /// <para>Initializes a new instance of the BitVector32 structure with the specified internal data.</para>
        /// </devdoc>
        public BitVector32(int data) {
            this.data = (uint)data;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the BitVector32 structure with the information in the specified
        ///    value.</para>
        /// </devdoc>
        public BitVector32(BitVector32 value) {
            this.data = value.data;
        }

        /// <devdoc>
        ///    <para>Gets or sets a value indicating whether all the specified bits are set.
        ///       The indexer argument is a bit mask (from CreateMask), not a bit index.</para>
        /// </devdoc>
        public bool this[int bit] {
            get {
                // True only when every bit of the mask is set in data.
                return (data & bit) == (uint)bit;
            }
            set {
                if (value) {
                    data |= (uint)bit;
                }
                else {
                    data &= ~(uint)bit;
                }
            }
        }

        /// <devdoc>
        ///    <para>Gets or sets the value for the specified section.</para>
        /// </devdoc>
        public int this[Section section] {
            get {
                // Shift the masked bits back down to yield the section's raw value.
                return (int)((data & (uint)(section.Mask << section.Offset)) >> section.Offset);
            }
            set {
#if DEBUG
                if ((value & section.Mask) != value) {
                    Debug.Fail("Value out of bounds on BitVector32 Section Set!");
                }
#endif
                value <<= section.Offset;
                int offsetMask = (0xFFFF & (int)section.Mask) << section.Offset;
                // Clear the section's bits, then OR in the new value.
                data = (data & ~(uint)offsetMask) | ((uint)value & (uint)offsetMask);
            }
        }

        /// <devdoc>
        ///    returns the raw data stored in this bit vector...
        /// </devdoc>
        public int Data {
            get {
                return (int)data;
            }
        }

        // Counts the number of set bits in a right-aligned, hole-free mask.
        private static short CountBitsSet(short mask) {
            // yes, I know there are better algorithms, however, we know the
            // bits are always right aligned, with no holes (i.e. always 00000111,
            // never 000100011), so this is just fine...
            //
            short value = 0;
            while ((mask & 0x1) != 0) {
                value++;
                mask >>= 1;
            }
            return value;
        }

        /// <devdoc>
        ///    <para> Creates the first mask in a series.</para>
        /// </devdoc>
        public static int CreateMask() {
            return CreateMask(0);
        }

        /// <devdoc>
        ///     Creates the next mask in a series.
        /// </devdoc>
        public static int CreateMask(int previous) {
            if (previous == 0) {
                return 1;
            }

            if (previous == unchecked((int)0x80000000)) {
                // The previous mask was already the highest bit; no room left.
                throw new InvalidOperationException(SR.GetString(SR.BitVectorFull));
            }

            return previous << 1;
        }

        /// <devdoc>
        ///     Given a highValue, creates the mask
        /// </devdoc>
        private static short CreateMaskFromHighValue(short highValue) {
            // Determine how many bits are needed to represent highValue,
            // then build a right-aligned mask of that many ones.
            short required = 16;
            while ((highValue & 0x8000) == 0) {
                required--;
                highValue <<= 1;
            }

            ushort value = 0;
            while (required > 0) {
                required--;
                value <<= 1;
                value |= 0x1;
            }

            return unchecked((short) value);
        }

        /// <devdoc>
        ///    <para>Creates the first section in a series, with the specified maximum value.</para>
        /// </devdoc>
        public static Section CreateSection(short maxValue) {
            return CreateSectionHelper(maxValue, 0, 0);
        }

        /// <devdoc>
        ///    <para>Creates the next section in a series, with the specified maximum value.</para>
        /// </devdoc>
        public static Section CreateSection(short maxValue, Section previous) {
            return CreateSectionHelper(maxValue, previous.Mask, previous.Offset);
        }

        private static Section CreateSectionHelper(short maxValue, short priorMask, short priorOffset) {
            if (maxValue < 1) {
                throw new ArgumentException(SR.GetString(SR.Argument_InvalidValue, "maxValue", 0), "maxValue");
            }
#if DEBUG
            int maskCheck = CreateMaskFromHighValue(maxValue);
            int offsetCheck = priorOffset + CountBitsSet(priorMask);
            Debug.Assert(maskCheck <= short.MaxValue && offsetCheck < 32, "Overflow on BitVector32");
#endif
            // The new section starts where the previous one's bits end.
            short offset = (short)(priorOffset + CountBitsSet(priorMask));
            if (offset >= 32) {
                throw new InvalidOperationException(SR.GetString(SR.BitVectorFull));
            }
            return new Section(CreateMaskFromHighValue(maxValue), offset);
        }

        public override bool Equals(object o) {
            if (!(o is BitVector32)) {
                return false;
            }

            return data == ((BitVector32)o).data;
        }

        public override int GetHashCode() {
            // Hash the backing field directly, consistent with Equals.
            // (base.GetHashCode() delegated to ValueType's reflection-based
            // implementation, which boxes and is slow.)
            return data.GetHashCode();
        }

        /// <devdoc>
        ///    Renders the vector as "BitVector32{...}" with the 32 bits, most
        ///    significant first.
        /// </devdoc>
        public static string ToString(BitVector32 value) {
            StringBuilder sb = new StringBuilder(/*"BitVector32{".Length*/12 + /*32 bits*/32 + /*"}".Length"*/1);
            sb.Append("BitVector32{");
            int locdata = (int)value.data;
            for (int i=0; i<32; i++) {
                if ((locdata & 0x80000000) != 0) {
                    sb.Append("1");
                }
                else {
                    sb.Append("0");
                }
                locdata <<= 1;
            }
            sb.Append("}");
            return sb.ToString();
        }

        /// <devdoc>
        /// </devdoc>
        public override string ToString() {
            return BitVector32.ToString(this);
        }

        /// <devdoc>
        ///    <para>
        ///       Represents an section of the vector that can contain a integer number.
        ///       A section is a right-aligned Mask plus the Offset at which it sits
        ///       within the 32-bit vector.</para>
        /// </devdoc>
        public struct Section {
            private readonly short mask;
            private readonly short offset;

            internal Section(short mask, short offset) {
                this.mask = mask;
                this.offset = offset;
            }

            public short Mask {
                get {
                    return mask;
                }
            }

            public short Offset {
                get {
                    return offset;
                }
            }

            public override bool Equals(object o) {
                if (o is Section)
                    return Equals((Section)o);
                else
                    return false;
            }

            public bool Equals(Section obj) {
                return obj.mask == mask && obj.offset == offset;
            }

            public static bool operator ==(Section a, Section b) {
                return a.Equals(b);
            }

            public static bool operator !=(Section a, Section b) {
                return !(a == b);
            }

            public override int GetHashCode() {
                // Combine both fields so equal sections hash equally, without
                // ValueType's reflection-based base implementation. Mask fills the
                // high 16 bits, offset the low 16, so distinct sections differ.
                return unchecked((mask << 16) | (ushort)offset);
            }

            /// <devdoc>
            /// </devdoc>
            public static string ToString(Section value) {
                return "Section{0x" + Convert.ToString(value.Mask, 16) + ", 0x" + Convert.ToString(value.Offset, 16) + "}";
            }

            /// <devdoc>
            /// </devdoc>
            public override string ToString() {
                return Section.ToString(this);
            }
        }
    }
}
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reflection;
using System.Text;
using System.Collections;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
using System.Globalization;
using Newtonsoft.Json.Serialization;

namespace Newtonsoft.Json.Utilities
{
    /// <summary>
    /// Internal collection helpers: null/empty checks, distinct-add extensions,
    /// dictionary-type detection, collection-constructor resolution, and
    /// jagged-to-multidimensional array conversion.
    /// </summary>
    internal static class CollectionUtils
    {
        /// <summary>
        /// Determines whether the collection is <c>null</c> or empty.
        /// </summary>
        /// <param name="collection">The collection.</param>
        /// <returns>
        ///  <c>true</c> if the collection is <c>null</c> or empty; otherwise, <c>false</c>.
        /// </returns>
        public static bool IsNullOrEmpty<T>(ICollection<T> collection)
        {
            if (collection != null)
            {
                return (collection.Count == 0);
            }
            return true;
        }

        /// <summary>
        /// Adds the elements of the specified collection to the specified generic IList.
        /// </summary>
        /// <param name="initial">The list to add to.</param>
        /// <param name="collection">The collection of elements to add. A null collection is a no-op.</param>
        public static void AddRange<T>(this IList<T> initial, IEnumerable<T> collection)
        {
            if (initial == null)
            {
                throw new ArgumentNullException(nameof(initial));
            }

            if (collection == null)
            {
                return;
            }

            foreach (T value in collection)
            {
                initial.Add(value);
            }
        }

#if (NET20 || NET35 || PORTABLE40)
        // Non-generic overload for frameworks without covariant IEnumerable<T>.
        public static void AddRange<T>(this IList<T> initial, IEnumerable collection)
        {
            ValidationUtils.ArgumentNotNull(initial, nameof(initial));

            // because earlier versions of .NET didn't support covariant generics
            initial.AddRange(collection.Cast<T>());
        }
#endif

        // True when the type is a non-generic IDictionary, a closed IDictionary<,>,
        // or (on newer frameworks) an IReadOnlyDictionary<,> implementation.
        public static bool IsDictionaryType(Type type)
        {
            ValidationUtils.ArgumentNotNull(type, nameof(type));

            if (typeof(IDictionary).IsAssignableFrom(type))
            {
                return true;
            }
            if (ReflectionUtils.ImplementsGenericDefinition(type, typeof(IDictionary<,>)))
            {
                return true;
            }
#if !(NET40 || NET35 || NET20 || PORTABLE40)
            if (ReflectionUtils.ImplementsGenericDefinition(type, typeof(IReadOnlyDictionary<,>)))
            {
                return true;
            }
#endif

            return false;
        }

        // Resolves a single-argument constructor accepting IList<collectionItemType>.
        public static ConstructorInfo ResolveEnumerableCollectionConstructor(Type collectionType, Type collectionItemType)
        {
            Type genericConstructorArgument = typeof(IList<>).MakeGenericType(collectionItemType);
            return ResolveEnumerableCollectionConstructor(collectionType, collectionItemType, genericConstructorArgument);
        }

        // Finds a one-parameter constructor on collectionType: prefers an exact
        // IEnumerable<collectionItemType> parameter, otherwise the first constructor
        // whose parameter can accept constructorArgumentType. Returns null when none match.
        public static ConstructorInfo ResolveEnumerableCollectionConstructor(Type collectionType, Type collectionItemType, Type constructorArgumentType)
        {
            Type genericEnumerable = typeof(IEnumerable<>).MakeGenericType(collectionItemType);
            ConstructorInfo match = null;

            foreach (ConstructorInfo constructor in collectionType.GetConstructors(BindingFlags.Public | BindingFlags.Instance))
            {
                IList<ParameterInfo> parameters = constructor.GetParameters();

                if (parameters.Count == 1)
                {
                    Type parameterType = parameters[0].ParameterType;

                    if (genericEnumerable == parameterType)
                    {
                        // exact match
                        match = constructor;
                        break;
                    }

                    // incase we can't find an exact match, use first inexact
                    if (match == null)
                    {
                        if (parameterType.IsAssignableFrom(constructorArgumentType))
                        {
                            match = constructor;
                        }
                    }
                }
            }

            return match;
        }

        // Adds value only if not already present (default equality). Returns true when added.
        public static bool AddDistinct<T>(this IList<T> list, T value)
        {
            return list.AddDistinct(value, EqualityComparer<T>.Default);
        }

        // Adds value only if not already present per the given comparer. Returns true when added.
        public static bool AddDistinct<T>(this IList<T> list, T value, IEqualityComparer<T> comparer)
        {
            if (list.ContainsValue(value, comparer))
            {
                return false;
            }

            list.Add(value);
            return true;
        }

        // this is here because LINQ Bridge doesn't support Contains with IEqualityComparer<T>
        public static bool ContainsValue<TSource>(this IEnumerable<TSource> source, TSource value, IEqualityComparer<TSource> comparer)
        {
            if (comparer == null)
            {
                comparer = EqualityComparer<TSource>.Default;
            }

            if (source == null)
            {
                throw new ArgumentNullException(nameof(source));
            }

            foreach (TSource local in source)
            {
                if (comparer.Equals(local, value))
                {
                    return true;
                }
            }

            return false;
        }

        // Adds each value distinctly; returns true only if every value was newly added.
        public static bool AddRangeDistinct<T>(this IList<T> list, IEnumerable<T> values, IEqualityComparer<T> comparer)
        {
            bool allAdded = true;
            foreach (T value in values)
            {
                if (!list.AddDistinct(value, comparer))
                {
                    allAdded = false;
                }
            }

            return allAdded;
        }

        // Index of the first element satisfying the predicate, or -1 if none.
        public static int IndexOf<T>(this IEnumerable<T> collection, Func<T, bool> predicate)
        {
            int index = 0;
            foreach (T value in collection)
            {
                if (predicate(value))
                {
                    return index;
                }

                index++;
            }

            return -1;
        }

        // Linear Contains using a non-generic comparer (no LINQ overload takes one).
        public static bool Contains<T>(this List<T> list, T value, IEqualityComparer comparer)
        {
            for (int i = 0; i < list.Count; i++)
            {
                if (comparer.Equals(value, list[i]))
                {
                    return true;
                }
            }
            return false;
        }

        // Index of the element that is the same reference as item, or -1 if none.
        public static int IndexOfReference<T>(this List<T> list, T item)
        {
            for (int i = 0; i < list.Count; i++)
            {
                if (ReferenceEquals(item, list[i]))
                {
                    return i;
                }
            }
            return -1;
        }

        // Measures the length of each nesting level of a jagged IList, descending
        // through the first element, up to dimensionsCount levels.
        private static IList<int> GetDimensions(IList values, int dimensionsCount)
        {
            IList<int> dimensions = new List<int>();

            IList currentArray = values;
            while (true)
            {
                dimensions.Add(currentArray.Count);

                // don't keep calculating dimensions for arrays inside the value array
                if (dimensions.Count == dimensionsCount)
                {
                    break;
                }

                if (currentArray.Count == 0)
                {
                    break;
                }

                object v = currentArray[0];
                if (v is IList)
                {
                    currentArray = (IList)v;
                }
                else
                {
                    break;
                }
            }

            return dimensions;
        }

        // Recursively copies values from a jagged IList into the multidimensional
        // array, one dimension per recursion level; indices tracks the path so far.
        // Throws when the jagged source is not cubical (ragged lengths).
        private static void CopyFromJaggedToMultidimensionalArray(IList values, Array multidimensionalArray, int[] indices)
        {
            int dimension = indices.Length;
            if (dimension == multidimensionalArray.Rank)
            {
                // Reached a leaf: copy the scalar value.
                multidimensionalArray.SetValue(JaggedArrayGetValue(values, indices), indices);
                return;
            }

            int dimensionLength = multidimensionalArray.GetLength(dimension);
            IList list = (IList)JaggedArrayGetValue(values, indices);
            int currentValuesLength = list.Count;
            if (currentValuesLength != dimensionLength)
            {
                throw new Exception("Cannot deserialize non-cubical array as multidimensional array.");
            }

            int[] newIndices = new int[dimension + 1];
            for (int i = 0; i < dimension; i++)
            {
                newIndices[i] = indices[i];
            }

            for (int i = 0; i < multidimensionalArray.GetLength(dimension); i++)
            {
                newIndices[dimension] = i;
                CopyFromJaggedToMultidimensionalArray(values, multidimensionalArray, newIndices);
            }
        }

        // Walks the jagged IList along the given index path and returns the element
        // there (a scalar at the final index, otherwise the nested list).
        private static object JaggedArrayGetValue(IList values, int[] indices)
        {
            IList currentList = values;
            for (int i = 0; i < indices.Length; i++)
            {
                int index = indices[i];
                if (i == indices.Length - 1)
                {
                    return currentList[index];
                }
                else
                {
                    currentList = (IList)currentList[index];
                }
            }
            return currentList;
        }

        // Converts a jagged IList into a multidimensional Array of the given element
        // type and rank; missing dimensions (empty source) are sized 0.
        public static Array ToMultidimensionalArray(IList values, Type type, int rank)
        {
            IList<int> dimensions = GetDimensions(values, rank);

            while (dimensions.Count < rank)
            {
                dimensions.Add(0);
            }

            Array multidimensionalArray = Array.CreateInstance(type, dimensions.ToArray());
            CopyFromJaggedToMultidimensionalArray(values, multidimensionalArray, new int[0]);

            return multidimensionalArray;
        }
    }
}