context
stringlengths
2.52k
185k
gt
stringclasses
1 value
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Management.Automation;
using System.Management.Automation.Internal;
using System.Management.Automation.Provider;

using Dbg = System.Management.Automation;

namespace Microsoft.PowerShell.Commands
{
    /// <summary>
    /// A command to get the content of an item at a specified path.
    /// </summary>
    [Cmdlet(VerbsCommon.Get, "Content", DefaultParameterSetName = "Path", SupportsTransactions = true, HelpUri = "https://go.microsoft.com/fwlink/?LinkID=113310")]
    public class GetContentCommand : ContentCommandBase
    {
        #region Parameters

        /// <summary>
        /// The number of content items to retrieve per block.
        /// By default this value is 1 which means read one block
        /// at a time. To read all blocks at once, set this value
        /// to a negative number.
        /// </summary>
        [Parameter(ValueFromPipelineByPropertyName = true)]
        public long ReadCount { get; set; } = 1;

        /// <summary>
        /// The number of content items to retrieve. By default this
        /// value is -1 which means read all the content.
        /// </summary>
        [Parameter(ValueFromPipelineByPropertyName = true)]
        [Alias("First", "Head")]
        public long TotalCount
        {
            get
            {
                return _totalCount;
            }

            set
            {
                _totalCount = value;
                // Remember that the user explicitly bound this parameter so
                // ProcessRecord can reject TotalCount+Tail used together.
                _totalCountSpecified = true;
            }
        }

        // True only when the TotalCount parameter was explicitly set by the caller.
        private bool _totalCountSpecified = false;

        /// <summary>
        /// The number of content items to retrieve from the back of the file.
        /// </summary>
        [Parameter(ValueFromPipelineByPropertyName = true)]
        [Alias("Last")]
        public int Tail
        {
            set
            {
                _backCount = value;
                // Remember explicit binding for the TotalCount/Tail conflict check.
                _tailSpecified = true;
            }

            get
            {
                return _backCount;
            }
        }

        // Backing store for Tail; -1 means "not specified" (read forwards).
        private int _backCount = -1;
        private bool _tailSpecified = false;

        /// <summary>
        /// A virtual method for retrieving the dynamic parameters for a cmdlet. Derived cmdlets
        /// that require dynamic parameters should override this method and return the
        /// dynamic parameter object.
        /// </summary>
        /// <param name="context">
        /// The context under which the command is running.
        /// </param>
        /// <returns>
        /// An object representing the dynamic parameters for the cmdlet or null if there
        /// are none.
        /// </returns>
        internal override object GetDynamicParameters(CmdletProviderContext context)
        {
            // Ask the provider that owns the first path for its reader-specific
            // dynamic parameters; fall back to the current location (".") when
            // no path was supplied.
            if (Path != null && Path.Length > 0)
            {
                return InvokeProvider.Content.GetContentReaderDynamicParameters(Path[0], context);
            }

            return InvokeProvider.Content.GetContentReaderDynamicParameters(".", context);
        }

        #endregion Parameters

        #region parameter data

        /// <summary>
        /// The number of content items to retrieve.
        /// </summary>
        private long _totalCount = -1;

        #endregion parameter data

        #region Command code

        /// <summary>
        /// Gets the content of an item at the specified path.
        /// </summary>
        protected override void ProcessRecord()
        {
            // TotalCount and Tail should not be specified at the same time.
            // Throw out terminating error if this is the case.
            if (_totalCountSpecified && _tailSpecified)
            {
                string errMsg = StringUtil.Format(SessionStateStrings.GetContent_TailAndHeadCannotCoexist, "TotalCount", "Tail");
                ErrorRecord error = new ErrorRecord(new InvalidOperationException(errMsg), "TailAndHeadCannotCoexist", ErrorCategory.InvalidOperation, null);
                WriteError(error);
                return;
            }

            if (TotalCount == 0)
            {
                // Don't read anything
                return;
            }

            // Get the content readers
            CmdletProviderContext currentContext = CmdletProviderContext;
            contentStreams = this.GetContentReaders(Path, currentContext);

            try
            {
                // Iterate through the content holders reading the content
                foreach (ContentHolder holder in contentStreams)
                {
                    long countRead = 0;

                    Dbg.Diagnostics.Assert(
                        holder.Reader != null,
                        "All holders should have a reader assigned");

                    // -Tail is only implemented for the file system provider's reader.
                    if (_tailSpecified && !(holder.Reader is FileSystemContentReaderWriter))
                    {
                        string errMsg = SessionStateStrings.GetContent_TailNotSupported;
                        ErrorRecord error = new ErrorRecord(new InvalidOperationException(errMsg), "TailNotSupported", ErrorCategory.InvalidOperation, Tail);
                        WriteError(error);
                        continue;
                    }

                    // If Tail is negative, we are supposed to read all content out. This is same
                    // as reading forwards. So we read forwards in this case.
                    // If Tail is positive, we seek the right position. Or, if the seek failed
                    // because of an unsupported encoding, we scan forward to get the tail content.
                    if (Tail >= 0)
                    {
                        bool seekSuccess = false;

                        try
                        {
                            seekSuccess = SeekPositionForTail(holder.Reader);
                        }
                        catch (Exception e)
                        {
                            ProviderInvocationException providerException =
                                new ProviderInvocationException(
                                    "ProviderContentReadError",
                                    SessionStateStrings.ProviderContentReadError,
                                    holder.PathInfo.Provider,
                                    holder.PathInfo.Path,
                                    e);

                            // Log a provider health event
                            MshLog.LogProviderHealthEvent(
                                this.Context,
                                holder.PathInfo.Provider.Name,
                                providerException,
                                Severity.Warning);

                            WriteError(new ErrorRecord(
                                providerException.ErrorRecord,
                                providerException));

                            continue;
                        }

                        // If the seek was successful, we start to read forwards from that
                        // point. Otherwise, we need to scan forwards to get the tail content.
                        if (!seekSuccess && !ScanForwardsForTail(holder, currentContext))
                        {
                            continue;
                        }
                    }

                    if (TotalCount != 0)
                    {
                        IList results = null;

                        do
                        {
                            long countToRead = ReadCount;

                            // Make sure we only ask for the amount the user wanted
                            // I am using TotalCount - countToRead so that I don't
                            // have to worry about overflow
                            if ((TotalCount > 0) && (TotalCount - countToRead < countRead))
                            {
                                countToRead = TotalCount - countRead;
                            }

                            try
                            {
                                results = holder.Reader.Read(countToRead);
                            }
                            catch (Exception e) // Catch-all OK. 3rd party callout
                            {
                                ProviderInvocationException providerException =
                                    new ProviderInvocationException(
                                        "ProviderContentReadError",
                                        SessionStateStrings.ProviderContentReadError,
                                        holder.PathInfo.Provider,
                                        holder.PathInfo.Path,
                                        e);

                                // Log a provider health event
                                MshLog.LogProviderHealthEvent(
                                    this.Context,
                                    holder.PathInfo.Provider.Name,
                                    providerException,
                                    Severity.Warning);

                                WriteError(new ErrorRecord(
                                    providerException.ErrorRecord,
                                    providerException));

                                break;
                            }

                            if (results != null && results.Count > 0)
                            {
                                countRead += results.Count;
                                if (ReadCount == 1)
                                {
                                    // Write out the content as a single object
                                    WriteContentObject(results[0], countRead, holder.PathInfo, currentContext);
                                }
                                else
                                {
                                    // Write out the content as an array of objects
                                    WriteContentObject(results, countRead, holder.PathInfo, currentContext);
                                }
                            }
                        } while (results != null && results.Count > 0 && ((TotalCount < 0) || countRead < TotalCount));
                    }
                }
            }
            finally
            {
                // close all the content readers
                CloseContent(contentStreams, false);

                // Empty the content holder array
                contentStreams = new List<ContentHolder>();
            }
        }

        /// <summary>
        /// Scan forwards to get the tail content.
        /// Used when a backward seek is not possible (e.g. unsupported encoding);
        /// keeps a sliding-window queue of the last <see cref="Tail"/> items while
        /// reading the whole stream forwards, then writes the window out.
        /// </summary>
        /// <param name="holder"></param>
        /// <param name="currentContext"></param>
        /// <returns>
        /// true if no error occurred
        /// false if there was an error
        /// </returns>
        private bool ScanForwardsForTail(ContentHolder holder, CmdletProviderContext currentContext)
        {
            var fsReader = holder.Reader as FileSystemContentReaderWriter;
            Dbg.Diagnostics.Assert(fsReader != null, "Tail is only supported for FileSystemContentReaderWriter");
            var tailResultQueue = new Queue<object>();
            IList results = null;
            ErrorRecord error = null;

            do
            {
                try
                {
                    results = fsReader.ReadWithoutWaitingChanges(ReadCount);
                }
                catch (Exception e)
                {
                    ProviderInvocationException providerException =
                        new ProviderInvocationException(
                            "ProviderContentReadError",
                            SessionStateStrings.ProviderContentReadError,
                            holder.PathInfo.Provider,
                            holder.PathInfo.Path,
                            e);

                    // Log a provider health event
                    MshLog.LogProviderHealthEvent(
                        this.Context,
                        holder.PathInfo.Provider.Name,
                        providerException,
                        Severity.Warning);

                    // Create and save the error record. The error record
                    // will be written outside the while loop.
                    // This is to make sure the accumulated results get written
                    // out before the error record when the 'scanForwardForTail' is true.
                    error = new ErrorRecord(
                        providerException.ErrorRecord,
                        providerException);

                    break;
                }

                if (results != null && results.Count > 0)
                {
                    foreach (object entry in results)
                    {
                        // Sliding window: once the queue holds Tail items, drop the
                        // oldest before adding the next, so only the last Tail remain.
                        if (tailResultQueue.Count == Tail)
                            tailResultQueue.Dequeue();

                        tailResultQueue.Enqueue(entry);
                    }
                }
            } while (results != null && results.Count > 0);

            if (tailResultQueue.Count > 0)
            {
                // Respect the ReadCount parameter.
                // Output single object when ReadCount == 1; Output array otherwise
                int count = 0;
                if (ReadCount <= 0 || (ReadCount >= tailResultQueue.Count && ReadCount != 1))
                {
                    // Everything fits in one batch: emit the whole window as one array.
                    count = tailResultQueue.Count;
                    ArrayList outputList = new ArrayList();
                    while (tailResultQueue.Count > 0)
                    {
                        outputList.Add(tailResultQueue.Dequeue());
                    }

                    // Write out the content as an array of objects
                    WriteContentObject(outputList.ToArray(), count, holder.PathInfo, currentContext);
                }
                else if (ReadCount == 1)
                {
                    // Write out the content as single object
                    while (tailResultQueue.Count > 0)
                        WriteContentObject(tailResultQueue.Dequeue(), count++, holder.PathInfo, currentContext);
                }
                else // ReadCount < Queue.Count
                {
                    // Emit full ReadCount-sized batches, then one final partial batch.
                    while (tailResultQueue.Count >= ReadCount)
                    {
                        ArrayList outputList = new ArrayList();
                        for (int idx = 0; idx < ReadCount; idx++, count++)
                            outputList.Add(tailResultQueue.Dequeue());

                        // Write out the content as an array of objects
                        WriteContentObject(outputList.ToArray(), count, holder.PathInfo, currentContext);
                    }

                    int remainder = tailResultQueue.Count;
                    if (remainder > 0)
                    {
                        ArrayList outputList = new ArrayList();
                        for (; remainder > 0; remainder--, count++)
                            outputList.Add(tailResultQueue.Dequeue());

                        // Write out the content as an array of objects
                        WriteContentObject(outputList.ToArray(), count, holder.PathInfo, currentContext);
                    }
                }
            }

            if (error != null)
            {
                WriteError(error);
                return false;
            }

            return true;
        }

        /// <summary>
        /// Seek position to the right place.
        /// </summary>
        /// <param name="reader">
        /// reader should be able to be casted to FileSystemContentReader
        /// </param>
        /// <returns>
        /// true if the stream pointer is moved to the right place
        /// false if we cannot seek
        /// </returns>
        private bool SeekPositionForTail(IContentReader reader)
        {
            var fsReader = reader as FileSystemContentReaderWriter;
            Dbg.Diagnostics.Assert(fsReader != null, "Tail is only supported for FileSystemContentReaderWriter");

            try
            {
                fsReader.SeekItemsBackward(Tail);
                return true;
            }
            catch (BackReaderEncodingNotSupportedException)
            {
                // Move to the head; caller will fall back to ScanForwardsForTail.
                fsReader.Seek(0, SeekOrigin.Begin);
                return false;
            }
        }

        /// <summary>
        /// Be sure to clean up.
        /// </summary>
        protected override void EndProcessing()
        {
            Dispose(true);
        }

        #endregion Command code
    }
}
using System;
using System.Collections.Specialized;
using System.ComponentModel;
using System.Linq;
using System.Management.Automation;
using CodeOwls.SeeShell.Common.Attributes;
using CodeOwls.SeeShell.Common.Charts;
using CodeOwls.SeeShell.Common.DataSources;
using CodeOwls.SeeShell.Common.Exceptions;
using CodeOwls.SeeShell.Common.Utility;

namespace CodeOwls.SeeShell.Common.ViewModels.Charts
{
    /// <summary>
    /// View model for a single chart series. Watches its data source and, once the
    /// first data item is available, derives and configures the X/Y axes that fit
    /// the configured <see cref="SeriesType"/> and member paths.
    /// </summary>
    public class ChartSeriesViewModel : SingleDataSourceViewModelBase
    {
        private static readonly Log Log = new Log( typeof( ChartSeriesViewModel));
        private ChartSeriesType _seriesType;
        private string _valueMemberPath;
        private string _highMemberPath;
        private string _lowMemberPath;
        private string _xMemberPath;
        private string _yMemberPath;
        private string _labelMemberPath;
        private string _fillMemberPath;
        private string _radiusMemberPath;
        private string _angleMemberPath;
        private ChartAxisViewModel _xAxis;
        private ChartAxisViewModel _yAxis;

        public ChartSeriesViewModel()
        {
            SeriesType = ChartSeriesType.Line;
            // Re-derive axes whenever a non-axis property changes (see OnPropertyChanged).
            PropertyChanged += OnPropertyChanged;
        }

        /// <summary>Raised after ConfigureAxes successfully (re)assigns both axes.</summary>
        public event EventHandler AxesUpdated;

        private void OnPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            // Skip reconfiguration for axis-property changes (would recurse) and
            // once both axes have already been assigned.
            if( e.PropertyName.Contains("Axis") || (null != _xAxis && null != _yAxis ) )
            {
                return;
            }

            ConfigureAxes();
        }

        protected override void OnDataSourceChanged(IPowerShellDataSource oldDataSource, IPowerShellDataSource newDataSource)
        {
            ConfigureAxes( newDataSource );
            base.OnDataSourceChanged(oldDataSource, newDataSource);
        }

        private bool _enableConfigureAxes;

        /// <summary>
        /// Gate for automatic axis configuration; setting it (to any value)
        /// immediately attempts a reconfiguration.
        /// </summary>
        public bool EnableConfigureAxes
        {
            get { return _enableConfigureAxes; }
            set
            {
                _enableConfigureAxes = value;
                ConfigureAxes();
            }
        }

        // True when the series has enough member-path configuration for its
        // series type to derive axes.
        bool CanConfigureAxes
        {
            get
            {
                return EnableConfigureAxes &&
                       ( CanConfigureCategoricalValue ||
                         CanConfigureCategoricalRange ||
                         CanConfigureScatter ||
                         CanConfigureRadialOrPolar );
            }
        }

        private bool CanConfigureCategoricalValue
        {
            get { return SeriesType.IsCategoricalValue() && null != this.ValueMemberPath && null != this.LabelMemberPath; }
        }

        private bool CanConfigureCategoricalRange
        {
            get { return SeriesType.IsCategoricalRange() && null != this.HighMemberPath && null != this.LowMemberPath && null != this.LabelMemberPath; }
        }

        private bool CanConfigureScatter
        {
            get { return SeriesType.IsScatter() && null != this.XMemberPath && null != this.YMemberPath; }
        }

        bool CanConfigureRadialOrPolar
        {
            get { return (SeriesType.IsPolar() || SeriesType.IsRadial() ) && null != this.AngleMemberPath && null != this.RadiusMemberPath; }
        }

        void ConfigureAxes()
        {
            ConfigureAxes(DataSource);
        }

        // Attempts configuration from the first existing data item; if none is
        // available yet, defers until the collection receives its first item.
        void ConfigureAxes( IPowerShellDataSource dataSource )
        {
            if (null != dataSource && null != dataSource.Data)
            {
                bool configured = false;
                if (0 != dataSource.Data.Count)
                {
                    var o = dataSource.Data[0];
                    configured = TryConfigureAxes(o as SolidPSObjectBase);
                }

                if (!configured)
                {
                    dataSource.Data.CollectionChanged += OnFirstDataItem;
                }
            }
        }

        // One-shot handler: unsubscribes itself on the first Add and configures
        // axes from that first item.
        private void OnFirstDataItem(object sender, NotifyCollectionChangedEventArgs e)
        {
            if (e.Action != NotifyCollectionChangedAction.Add )
            {
                return;
            }

            DataSource.Data.CollectionChanged -= OnFirstDataItem;
            TryConfigureAxes(e.NewItems[0] as SolidPSObjectBase);
        }

        // Wraps ConfigureAxes(o); any configuration exception is reported as an
        // ErrorRecord on the UI thread. Note it returns true on failure as well,
        // so the caller does not keep re-subscribing for further data items.
        bool TryConfigureAxes( SolidPSObjectBase o )
        {
            try
            {
                return ConfigureAxes(o);
            }
            catch (Exception e)
            {
                if (null != AllRecords && null != Dispatcher)
                {
                    Dispatcher.BeginInvoke( (Action) (() => AllRecords.Add(new ErrorRecord(e, "SeeShell.Charts.AxisConfiguration", ErrorCategory.InvalidData, this))));
                }

                return true;
            }
        }

        // Core dispatch: picks the axis-construction strategy for the series type,
        // verifies the resulting axes against the sample data item, wires scale
        // descriptors, and raises AxesUpdated.
        private bool ConfigureAxes(SolidPSObjectBase o)
        {
            if (!CanConfigureAxes)
            {
                return false;
            }

            if( SeriesType.IsCategorical() )
            {
                ConfigureCategoricalAxes(o);
            }
            else if( SeriesType.IsCategoricalRange())
            {
                ConfigureCategoricalRange(o);
            }
            else if ( SeriesType.IsScatter() )
            {
                ConfigureScatterAxes(o);
            }
            else if( SeriesType.IsPolar())
            {
                ConfigurePolarAxis(o);
            }
            else if( SeriesType.IsRadial())
            {
                ConfigureRadialAxis(o);
            }
            else
            {
                throw new InvalidOperationException("Unanticipated series type encountered during axis determination: " + SeriesType);
            }

            VerifyAxisTypes(o);

            var sdx = GetScaleForProperty(XAxis.ValueMemberPath);
            var sdy = GetScaleForProperty(YAxis.ValueMemberPath);

            // X always gets a scale descriptor, creating one (with a fresh color) if none exists.
            if( null == sdx )
            {
                sdx = new ScaleDescriptor( XAxis.ValueMemberPath, ColorManager.AssignColor() );
            }

            XAxis.AxisScaleDescriptors.Add( new ScaleDescriptorAssignment{ PropertyName = sdx.Name, Scale = sdx } );

            // Y only gets descriptors when a scale already exists; all data-source
            // scales sharing its name are assigned too.
            if( null != sdy )
            {
                var sdym = new[]{ sdy }.Union( DataSource.Scales.Select(s=>s.Scale) )
                    .Where(s => s.Name == sdy.Name)
                    .ToList()
                    .ConvertAll(a=> new ScaleDescriptorAssignment{PropertyName = a.Name, Scale = a});
                sdym.ToList().ForEach( YAxis.AxisScaleDescriptors.Add );
            }

            var ev = AxesUpdated;
            if( null != ev )
            {
                ev(this, EventArgs.Empty);
            }

            return true;
        }

        // Throws when the chosen axis types or the sample item's property types
        // are invalid for this series type.
        private void VerifyAxisTypes(SolidPSObjectBase dataItem)
        {
            Log.InfoFormat( "verifying data type [{0}] for axis", dataItem.GetType().FullName );
            var validAxisTypes = SeriesType.ValidAxisTypes();
            if (!validAxisTypes.Contains(XAxis.AxisType))
            {
                throw new InvalidChartAxisTypeException(SeriesType, XAxis.AxisType);
            }

            if (!validAxisTypes.Contains(YAxis.AxisType))
            {
                throw new InvalidChartAxisTypeException(SeriesType, YAxis.AxisType);
            }

            VerifyAxisForData(XAxis, dataItem);
            VerifyAxisForData(YAxis, dataItem);
            VerifyPropertyForRadius(dataItem);
        }

        private void VerifyPropertyForRadius(SolidPSObjectBase dataItem)
        {
            if( String.IsNullOrEmpty( RadiusMemberPath) )
            {
                return;
            }

            var prop = dataItem.GetPropertyByName(RadiusMemberPath);
            if( ! IsNumericType( prop ))
            {
                throw new InvalidChartValueMemberException( SeriesType, prop, RadiusMemberPath, "Radius");
            }
        }

        // NOTE(review): parameter is named xAxis but is called for both X and Y axes.
        private void VerifyAxisForData(ChartAxisViewModel xAxis, SolidPSObjectBase dataItem)
        {
            var prop = dataItem.GetPropertyByName(xAxis.ValueMemberPath);
            if( null == prop )
            {
                throw new ChartAxisValueMemberDoesNotExistException( SeriesType, xAxis.AxisType, xAxis.ValueMemberPath );
            }

            if( ! IsPropertyValidForAxis( prop, xAxis ))
            {
                throw new InvalidChartAxisValueMemberException(SeriesType, xAxis.AxisType, dataItem as SolidPSObjectBase, xAxis.ValueMemberPath);
            }
        }

        // Numeric axis types require a numeric-looking value; category axes accept anything.
        private bool IsPropertyValidForAxis(PSPropertyInfo prop, ChartAxisViewModel axis)
        {
            if( SeriesType == ChartSeriesType.Timeline )
            {
                return axis.AxisType != ChartAxisType.CategoryX;
            }

            switch( axis.AxisType )
            {
                case (ChartAxisType.NumericAngle):
                case( ChartAxisType.NumericRadius ):
                case( ChartAxisType.NumericX ):
                case( ChartAxisType.NumericY ):
                {
                    return IsNumericType(prop);
                }
                //case( ChartAxisType.CategoryDateTimeX ):
                //case( ChartAxisType.CategoryX ):
                //case (ChartAxisType.CategoryAngle):
                default:
                    return true;
            }
        }

        // "Numeric" here means: the value's declared type can be resolved AND its
        // string form parses as a double.
        // NOTE(review): the resolved 'type' is only used as a null-gate, and the
        // catch swallows all parse/runspace errors, treating them as non-numeric —
        // presumably intentional best-effort; confirm before changing.
        private static bool IsNumericType(PSPropertyInfo prop)
        {
            if( null == prop )
            {
                return false;
            }

            var type = Type.GetType(prop.TypeNameOfValue, false, true);
            if (null == type)
            {
                return false;
            }

            double d;
            using (DefaultRunspaceManager.ForCurrentThread)
            {
                try
                {
                    return Double.TryParse(prop.Value.ToString(), out d);
                }
                catch
                {
                }
            }

            return false;
        }

        // Radial: X = category angle axis, Y = numeric radius axis.
        // NOTE(review): the local 'value' is computed but never used — dead read of
        // AngleMemberPath (possibly kept for its side effect? verify).
        private void ConfigureRadialAxis(object o)
        {
            var i = o as SolidPSObjectBase;
            var value = i.GetPropValue<object>(AngleMemberPath);

            var r = new ChartAxisViewModel
            {
                AxisType = ChartAxisType.NumericRadius,
                AxisLocation = AxisLocation.InsideRight,
                Name = this.RadiusMemberPath,
                DataSource = this.DataSource,
                Dispatcher = this.Dispatcher,
                ValueMemberPath = RadiusMemberPath,
            };
            var a = new ChartAxisViewModel()
            {
                AxisType = ChartAxisType.CategoryAngle,
                //AxisLocation = AxisLocation.OutsideTop,
                AxisLocation = AxisLocation.OutsideBottom,
                Name = this.AngleMemberPath,
                DataSource = this.DataSource,
                ValueMemberPath = AngleMemberPath,
                LabelTemplate = "{" + LabelMemberPath + "}"
            };
            this.XAxis = a;
            this.YAxis = r;
        }

        // Polar: X = numeric angle axis, Y = numeric radius axis.
        private void ConfigurePolarAxis(object o)
        {
            var r = new ChartAxisViewModel
            {
                AxisType = ChartAxisType.NumericRadius,
                Name = this.RadiusMemberPath,
                DataSource = this.DataSource,
                Dispatcher = this.Dispatcher,
                ValueMemberPath = RadiusMemberPath,
            };
            var a = new ChartAxisViewModel()
            {
                AxisType = ChartAxisType.NumericAngle,
                AxisLocation=AxisLocation.OutsideBottom,
                //AxisLocation=AxisLocation.OutsideTop,
                Name = this.AngleMemberPath,
                DataSource = this.DataSource,
                ValueMemberPath = AngleMemberPath,
                LabelTemplate = "{" + LabelMemberPath + "}"
            };
            this.XAxis = a;
            this.YAxis = r;
        }

        // Scatter: numeric X/Y axes from XMemberPath/YMemberPath.
        private void ConfigureScatterAxes(object o)
        {
            var x = new ChartAxisViewModel
            {
                AxisType = ChartAxisType.NumericX,
                Name = XMemberPath,
                DataSource = DataSource,
                ValueMemberPath = XMemberPath,
                LabelTemplate = "{" + LabelMemberPath + "}",
                AxisLocation = AxisLocation.OutsideBottom
            };
            var y = new ChartAxisViewModel
            {
                AxisType = ChartAxisType.NumericY,
                Name = YMemberPath,
                DataSource = DataSource,
                ValueMemberPath = YMemberPath,
                AxisLocation = AxisLocation.OutsideLeft
            };
            XAxis = x;
            YAxis = y;
        }

        // Range series: label-driven category X (DateTime-aware), numeric Y from
        // the low (or high) member path.
        private void ConfigureCategoricalRange(SolidPSObjectBase o)
        {
            var pso = o as SolidPSObjectBase;
            var xtype = GetPropertyType(pso, this.LabelMemberPath);
            ChartAxisType xaxistype = ChartAxisType.CategoryX;
            ChartAxisType yaxistype = ChartAxisType.NumericY;
            if (xtype == typeof(DateTime))
            {
                xaxistype = ChartAxisType.CategoryDateTimeX;
            }

            var x = new ChartAxisViewModel
            {
                AxisType = xaxistype,
                Name = LabelMemberPath,
                DataSource = DataSource,
                ValueMemberPath = LabelMemberPath,
                LabelTemplate = "{" + LabelMemberPath + "}",
                AxisLocation = AxisLocation.OutsideBottom,
            };
            var y = new ChartAxisViewModel
            {
                AxisType = yaxistype,
                Name = LowMemberPath ?? HighMemberPath,
                DataSource = DataSource,
                ValueMemberPath = LowMemberPath ?? HighMemberPath,
                AxisLocation = AxisLocation.OutsideLeft
            };
            XAxis = x;
            YAxis = y;
        }

        // Value series: label-driven category X (DateTime-aware), numeric Y from
        // ValueMemberPath.
        private void ConfigureCategoricalAxes(object o)
        {
            var pso = o as SolidPSObjectBase;
            var xtype = GetPropertyType(pso, this.LabelMemberPath);
            ChartAxisType xaxistype = ChartAxisType.CategoryX;
            ChartAxisType yaxistype = ChartAxisType.NumericY;
            if( xtype == typeof( DateTime ))
            {
                xaxistype = ChartAxisType.CategoryDateTimeX;
            }

            var x = new ChartAxisViewModel
            {
                AxisType = xaxistype,
                Name = LabelMemberPath,
                DataSource = DataSource,
                ValueMemberPath = LabelMemberPath,
                LabelTemplate = "{" + LabelMemberPath + "}",
                AxisLocation = AxisLocation.OutsideBottom,
            };
            var y = new ChartAxisViewModel
            {
                AxisType = yaxistype,
                Name = ValueMemberPath,
                DataSource = DataSource,
                ValueMemberPath = ValueMemberPath,
                AxisLocation = AxisLocation.OutsideLeft
            };
            XAxis = x;
            YAxis = y;
        }

        // Resolves the runtime type of a property: prefers the live value's type,
        // falls back to scanning loaded assemblies for the declared type name
        // (note: declared name is matched case-insensitively via ToLowerInvariant).
        private Type GetPropertyType(SolidPSObjectBase pso, string propertyName)
        {
            var value = pso.GetPropValue<object>(propertyName);
            if( null != value )
            {
                return value.GetType();
            }

            var xtypename = pso.GetPropTypeName(propertyName);
            if( null == xtypename )
            {
                return typeof (object);
            }

            var xtype = (from asm in AppDomain.CurrentDomain.GetAssemblies()
                         from t in asm.GetTypes()
                         where t.FullName.ToLowerInvariant() == xtypename
                         select t).FirstOrDefault();
            return xtype;
        }

        [Parameter()]
        public ChartSeriesType SeriesType
        {
            get { return _seriesType; }
            set { _seriesType = value; NotifyOfPropertyChange(() => SeriesType); }
        }

        [Parameter()]
        public string ValueMemberPath
        {
            get { return _valueMemberPath; }
            set { _valueMemberPath = value; NotifyOfPropertyChange(() => ValueMemberPath); }
        }

        [Parameter()]
        [PathArgumentTransformation]
        public ChartAxisViewModel XAxis
        {
            get { return _xAxis; }
            set { _xAxis = value; NotifyOfPropertyChange(() => XAxis); }
        }

        [Parameter()]
        [PathArgumentTransformation]
        public ChartAxisViewModel YAxis
        {
            get { return _yAxis; }
            set { _yAxis = value; NotifyOfPropertyChange(() => YAxis); }
        }

        [Parameter()]
        public string HighMemberPath
        {
            get { return _highMemberPath; }
            set { _highMemberPath = value; NotifyOfPropertyChange(() => HighMemberPath); }
        }

        [Parameter()]
        public string LowMemberPath
        {
            get { return _lowMemberPath; }
            set { _lowMemberPath = value; NotifyOfPropertyChange(() => LowMemberPath); }
        }

        [Parameter()]
        public string XMemberPath
        {
            get { return _xMemberPath; }
            set { _xMemberPath = value; NotifyOfPropertyChange(() => XMemberPath); }
        }

        [Parameter()]
        public string YMemberPath
        {
            get { return _yMemberPath; }
            set { _yMemberPath = value; NotifyOfPropertyChange(() => YMemberPath); }
        }

        [Parameter()]
        public string LabelMemberPath
        {
            get { return _labelMemberPath; }
            set { _labelMemberPath = value; NotifyOfPropertyChange(() => LabelMemberPath); }
        }

        [Parameter()]
        public string FillMemberPath
        {
            get { return _fillMemberPath; }
            set { _fillMemberPath = value; NotifyOfPropertyChange(() => FillMemberPath); }
        }

        [Parameter()]
        public string RadiusMemberPath
        {
            get { return _radiusMemberPath; }
            set { _radiusMemberPath = value; NotifyOfPropertyChange(() => RadiusMemberPath); }
        }

        [Parameter()]
        public string AngleMemberPath
        {
            get { return _angleMemberPath; }
            set { _angleMemberPath = value; NotifyOfPropertyChange(() => AngleMemberPath); }
        }

        public object LiteralSeriesName { get; set; }

        public object LiteralByName { get; set; }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenMetaverse;

namespace OpenSim.Region.Physics.Manager
{
    public delegate void physicsCrash();

    public delegate void RaycastCallback(bool hitYN, Vector3 collisionPoint, uint localid, float distance, Vector3 normal);
    public delegate void RayCallback(List<ContactResult> list);

    public delegate void JointMoved(PhysicsJoint joint);
    public delegate void JointDeactivated(PhysicsJoint joint);
    public delegate void JointErrorMessage(PhysicsJoint joint, string message); // this refers to an "error message due to a problem", not "amount of joint constraint violation"

    public enum RayFilterFlags : ushort
    {
        // the flags
        water = 0x01,
        land = 0x02,
        agent = 0x04,
        nonphysical = 0x08,
        physical = 0x10,
        phantom = 0x20,
        volumedtc = 0x40,

        // ray cast colision control (may only work for meshs)
        ContactsUnImportant = 0x2000,
        BackFaceCull = 0x4000,
        ClosestHit = 0x8000,

        // some combinations
        LSLPhantom = phantom | volumedtc,
        PrimsNonPhantom = nonphysical | physical,
        PrimsNonPhantomAgents = nonphysical | physical | agent,
        AllPrims = nonphysical | phantom | volumedtc | physical,
        AllButLand = agent | nonphysical | physical | phantom | volumedtc,
        ClosestAndBackCull = ClosestHit | BackFaceCull,

        All = 0x3f
    }

    public delegate void RequestAssetDelegate(UUID assetID, AssetReceivedDelegate callback);
    public delegate void AssetReceivedDelegate(AssetBase asset);

    /// <summary>
    /// Contact result from a raycast.
    /// </summary>
    public struct ContactResult
    {
        public Vector3 Pos;
        public float Depth;
        public uint ConsumerID;
        public Vector3 Normal;
    }

    /// <summary>
    /// Base class for a physics engine backend. Concrete engines (e.g. ODE,
    /// BulletSim) implement the abstract members; the virtual members provide
    /// safe no-op defaults for optional capabilities (joints, raycasting,
    /// region combining, engine-specific extensions).
    /// </summary>
    public abstract class PhysicsScene
    {
        // private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// A unique identifying string for this instance of the physics engine.
        /// Useful in debug messages to distinguish one OdeScene instance from another.
        /// Usually set to include the region name that the physics engine is acting for.
        /// </summary>
        public string Name { get; protected set; }

        /// <summary>
        /// A string identifying the family of this physics engine. Most common values returned
        /// are "OpenDynamicsEngine" and "BulletSim" but others are possible.
        /// </summary>
        public string EngineType { get; protected set; }

        // The only thing that should register for this event is the SceneGraph
        // Anything else could cause problems.
        public event physicsCrash OnPhysicsCrash;

        /// <summary>A do-nothing scene for use where no physics engine is configured.</summary>
        public static PhysicsScene Null
        {
            get { return new NullPhysicsScene(); }
        }

        public RequestAssetDelegate RequestAssetMethod { get; set; }

        public virtual void TriggerPhysicsBasedRestart()
        {
            // Snapshot the delegate before the null check to avoid a race with unsubscription.
            physicsCrash handler = OnPhysicsCrash;
            if (handler != null)
            {
                OnPhysicsCrash();
            }
        }

        public abstract void Initialise(IMesher meshmerizer, IConfigSource config);

        /// <summary>
        /// Add an avatar
        /// </summary>
        /// <param name="avName"></param>
        /// <param name="position"></param>
        /// <param name="size"></param>
        /// <param name="isFlying"></param>
        /// <returns></returns>
        public abstract PhysicsActor AddAvatar(string avName, Vector3 position, Vector3 size, bool isFlying);

        /// <summary>
        /// Add an avatar
        /// </summary>
        /// <param name="localID"></param>
        /// <param name="avName"></param>
        /// <param name="position"></param>
        /// <param name="size"></param>
        /// <param name="isFlying"></param>
        /// <returns></returns>
        public virtual PhysicsActor AddAvatar(uint localID, string avName, Vector3 position, Vector3 size, bool isFlying)
        {
            PhysicsActor ret = AddAvatar(avName, position, size, isFlying);

            if (ret != null)
                ret.LocalID = localID;

            return ret;
        }

        /// <summary>
        /// Remove an avatar.
        /// </summary>
        /// <param name="actor"></param>
        public abstract void RemoveAvatar(PhysicsActor actor);

        /// <summary>
        /// Remove a prim.
        /// </summary>
        /// <param name="prim"></param>
        public abstract void RemovePrim(PhysicsActor prim);

        public abstract PhysicsActor AddPrimShape(string primName, PrimitiveBaseShape pbs, Vector3 position,
                                                  Vector3 size, Quaternion rotation, bool isPhysical, uint localid);

        // Default implementation ignores isPhantom/shapetype and delegates to the
        // simpler overload; engines that support these options override this.
        public virtual PhysicsActor AddPrimShape(string primName, PrimitiveBaseShape pbs, Vector3 position,
                                                 Vector3 size, Quaternion rotation, bool isPhysical, bool isPhantom, byte shapetype, uint localid)
        {
            return AddPrimShape(primName, pbs, position, size, rotation, isPhysical, localid);
        }

        public virtual float TimeDilation
        {
            get { return 1.0f; }
        }

        public virtual bool SupportsNINJAJoints
        {
            get { return false; }
        }

        public virtual PhysicsJoint RequestJointCreation(string objectNameInScene, PhysicsJointType jointType, Vector3 position,
                                                         Quaternion rotation, string parms, List<string> bodyNames, string trackedBodyName, Quaternion localRotation)
        { return null; }

        public virtual void RequestJointDeletion(string objectNameInScene)
        { return; }

        public virtual void RemoveAllJointsConnectedToActorThreadLocked(PhysicsActor actor)
        { return; }

        public virtual void DumpJointInfo()
        { return; }

        public event JointMoved OnJointMoved;

        protected virtual void DoJointMoved(PhysicsJoint joint)
        {
            // We need this to allow subclasses (but not other classes) to invoke the event; C# does
            // not allow subclasses to invoke the parent class event.
            if (OnJointMoved != null)
            {
                OnJointMoved(joint);
            }
        }

        public event JointDeactivated OnJointDeactivated;

        protected virtual void DoJointDeactivated(PhysicsJoint joint)
        {
            // We need this to allow subclasses (but not other classes) to invoke the event; C# does
            // not allow subclasses to invoke the parent class event.
            if (OnJointDeactivated != null)
            {
                OnJointDeactivated(joint);
            }
        }

        public event JointErrorMessage OnJointErrorMessage;

        protected virtual void DoJointErrorMessage(PhysicsJoint joint, string message)
        {
            // We need this to allow subclasses (but not other classes) to invoke the event; C# does
            // not allow subclasses to invoke the parent class event.
            if (OnJointErrorMessage != null)
            {
                OnJointErrorMessage(joint, message);
            }
        }

        public virtual Vector3 GetJointAnchor(PhysicsJoint joint)
        { return Vector3.Zero; }

        public virtual Vector3 GetJointAxis(PhysicsJoint joint)
        { return Vector3.Zero; }

        public abstract void AddPhysicsActorTaint(PhysicsActor prim);

        /// <summary>
        /// Perform a simulation of the current physics scene over the given timestep.
        /// </summary>
        /// <param name="timeStep"></param>
        /// <returns>The number of frames simulated over that period.</returns>
        public abstract float Simulate(float timeStep);

        /// <summary>
        /// Get statistics about this scene.
        /// </summary>
        /// <remarks>This facility is currently experimental and subject to change.</remarks>
        /// <returns>
        /// A dictionary where the key is the statistic name. If no statistics are supplied then returns null.
        /// </returns>
        public virtual Dictionary<string, float> GetStats() { return null; }

        public abstract void GetResults();

        public abstract void SetTerrain(float[] heightMap);

        public abstract void SetWaterLevel(float baseheight);

        public abstract void DeleteTerrain();

        public abstract void Dispose();

        public abstract Dictionary<uint, float> GetTopColliders();

        public abstract bool IsThreaded { get; }

        /// <summary>
        /// True if the physics plugin supports raycasting against the physics scene
        /// </summary>
        public virtual bool SupportsRayCast()
        {
            return false;
        }

        public virtual bool SupportsCombining()
        {
            return false;
        }

        public virtual void Combine(PhysicsScene pScene, Vector3 offset, Vector3 extents) {}

        public virtual void UnCombine(PhysicsScene pScene) {}

        /// <summary>
        /// Queue a raycast against the physics scene.
        /// The provided callback method will be called when the raycast is complete
        ///
        /// Many physics engines don't support collision testing at the same time as
        /// manipulating the physics scene, so we queue the request up and callback
        /// a custom method when the raycast is complete.
        /// This allows physics engines that give an immediate result to callback immediately
        /// and ones that don't, to callback when it gets a result back.
        ///
        /// ODE for example will not allow you to change the scene while collision testing or
        /// it asserts, 'opteration not valid for locked space'. This includes adding a ray to the scene.
        ///
        /// This is named RayCastWorld to not conflict with modrex's Raycast method.
        /// </summary>
        /// <param name="position">Origin of the ray</param>
        /// <param name="direction">Direction of the ray</param>
        /// <param name="length">Length of ray in meters</param>
        /// <param name="retMethod">Method to call when the raycast is complete</param>
        public virtual void RaycastWorld(Vector3 position, Vector3 direction, float length, RaycastCallback retMethod)
        {
            // Default: report "no hit" immediately (sentinel distance, zero vectors).
            if (retMethod != null)
                retMethod(false, Vector3.Zero, 0, 999999999999f, Vector3.Zero);
        }

        public virtual void RaycastWorld(Vector3 position, Vector3 direction, float length, int Count, RayCallback retMethod)
        {
            // Default: report an empty contact list immediately.
            if (retMethod != null)
                retMethod(new List<ContactResult>());
        }

        public virtual List<ContactResult> RaycastWorld(Vector3 position, Vector3 direction, float length, int Count)
        {
            return new List<ContactResult>();
        }

        public virtual object RaycastWorld(Vector3 position, Vector3 direction, float length, int Count, RayFilterFlags filter)
        {
            return null;
        }

        public virtual bool SupportsRaycastWorldFiltered()
        {
            return false;
        }

        // Extendable interface for new, physics engine specific operations
        public virtual object Extension(string pFunct, params object[] pParams)
        {
            // A NOP if the extension thing is not implemented by the physics engine
            return null;
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation.  All rights reserved.
//------------------------------------------------------------

namespace System.ServiceModel.Channels
{
    using System.Runtime.CompilerServices;
    using System.IdentityModel.Selectors;
    using System.IdentityModel.Tokens;
    using System.ServiceModel;
    using System.ServiceModel.Security;
    using System.Transactions;
    using System.ServiceModel.Security.Tokens;
    using System.Net.Security;
    using System.Security.Cryptography.X509Certificates;
    using SR = System.ServiceModel.SR;

    /// <summary>
    /// Native MSMQ message used on the send path.  The constructor translates the channel
    /// factory's configuration (durability, TTL, dead-letter queue, transport security) into
    /// the corresponding native MSMQ message properties (PROPID_M_*).
    /// </summary>
    class MsmqOutputMessage<TChannel> : NativeMsmqMessage
    {
        // One field per native message property this message may attach.  Fields other than
        // body/messageId/delivery/authLevel/trace are created lazily, only when the factory
        // configuration requires them.
        BufferProperty body;
        IntProperty bodyType;
        ByteProperty delivery;
        IntProperty timeToReachQueue;
        IntProperty timeToBeReceived;
        ByteProperty journal;
        StringProperty deadLetterQueue;
        IntProperty senderIdType;
        IntProperty authLevel;
        BufferProperty senderCert;
        IntProperty privLevel;
        ByteProperty trace;
        BufferProperty messageId;
        IntProperty encryptionAlgorithm;
        IntProperty hashAlgorithm;

        public MsmqOutputMessage(MsmqChannelFactoryBase<TChannel> factory, int bodySize, EndpointAddress remoteAddress)
            : this(factory, bodySize, remoteAddress, 0)
        { }

        /// <summary>
        /// Builds the property set from the factory configuration.
        /// </summary>
        /// <param name="factory">Source of send-side configuration (durability, TTL, security, tracing).</param>
        /// <param name="bodySize">Size in bytes to reserve for the message body buffer.</param>
        /// <param name="remoteAddress">Destination address (not read here; presumably used by callers/subclasses — TODO confirm).</param>
        /// <param name="additionalPropertyCount">Extra native property slots requested by subclasses.</param>
        protected MsmqOutputMessage(MsmqChannelFactoryBase<TChannel> factory, int bodySize, EndpointAddress remoteAddress, int additionalPropertyCount)
            : base(15 + additionalPropertyCount) // 15 = max number of properties this class itself may attach (one per field above)
        {
            this.body = new BufferProperty(this, UnsafeNativeMethods.PROPID_M_BODY, bodySize);
            this.messageId = new BufferProperty(this, UnsafeNativeMethods.PROPID_M_MSGID, UnsafeNativeMethods.PROPID_M_MSGID_SIZE);

            // Body is an opaque byte array (VT_VECTOR | VT_UI1).
            EnsureBodyTypeProperty(UnsafeNativeMethods.VT_VECTOR | UnsafeNativeMethods.VT_UI1);
            EnsureJournalProperty((byte)UnsafeNativeMethods.MQMSG_JOURNAL, factory.UseSourceJournal);

            // Durable messages are written to disk (RECOVERABLE); otherwise in-memory EXPRESS delivery.
            this.delivery = new ByteProperty(this, UnsafeNativeMethods.PROPID_M_DELIVERY);
            if (factory.Durable)
            {
                this.delivery.Value = (byte)UnsafeNativeMethods.MQMSG_DELIVERY_RECOVERABLE;
            }
            else
            {
                this.delivery.Value = (byte)UnsafeNativeMethods.MQMSG_DELIVERY_EXPRESS;
            }

            // A finite TimeToLive maps to both time-to-reach-queue and time-to-be-received (in seconds).
            if (factory.TimeToLive != TimeSpan.MaxValue)
            {
                int totalSeconds = MsmqDuration.FromTimeSpan(factory.TimeToLive);
                EnsureTimeToReachQueueProperty(totalSeconds);
                this.timeToBeReceived = new IntProperty(this, UnsafeNativeMethods.PROPID_M_TIME_TO_BE_RECEIVED, totalSeconds);
            }

            // Dead-letter handling is expressed via the MQMSG_DEADLETTER flag on the journal property;
            // a Custom DLQ additionally names the queue to use.
            switch (factory.DeadLetterQueue)
            {
                case DeadLetterQueue.None:
                    EnsureJournalProperty((byte)UnsafeNativeMethods.MQMSG_DEADLETTER, false);
                    break;
                case DeadLetterQueue.System:
                    EnsureJournalProperty((byte)UnsafeNativeMethods.MQMSG_DEADLETTER, true);
                    break;
                case DeadLetterQueue.Custom:
                    EnsureJournalProperty((byte)UnsafeNativeMethods.MQMSG_DEADLETTER, true);
                    EnsureDeadLetterQueueProperty(factory.DeadLetterQueuePathName);
                    break;
            }

            if (MsmqAuthenticationMode.WindowsDomain == factory.MsmqTransportSecurity.MsmqAuthenticationMode)
            {
                // Windows auth: sender identified by SID, message always authenticated.
                EnsureSenderIdTypeProperty(UnsafeNativeMethods.MQMSG_SENDERID_TYPE_SID);
                this.authLevel = new IntProperty(this, UnsafeNativeMethods.PROPID_M_AUTH_LEVEL,
                    UnsafeNativeMethods.MQMSG_AUTH_LEVEL_ALWAYS);
                this.hashAlgorithm = new IntProperty(
                    this,
                    UnsafeNativeMethods.PROPID_M_HASH_ALG,
                    MsmqSecureHashAlgorithmHelper.ToInt32(factory.MsmqTransportSecurity.MsmqSecureHashAlgorithm));
                if (ProtectionLevel.EncryptAndSign == factory.MsmqTransportSecurity.MsmqProtectionLevel)
                {
                    // Body encryption on top of signing.
                    this.privLevel = new IntProperty(this, UnsafeNativeMethods.PROPID_M_PRIV_LEVEL,
                        UnsafeNativeMethods.MQMSG_PRIV_LEVEL_BODY_ENHANCED);
                    this.encryptionAlgorithm = new IntProperty(
                        this,
                        UnsafeNativeMethods.PROPID_M_ENCRYPTION_ALG,
                        MsmqEncryptionAlgorithmHelper.ToInt32(factory.MsmqTransportSecurity.MsmqEncryptionAlgorithm));
                }
            }
            else if (MsmqAuthenticationMode.Certificate == factory.MsmqTransportSecurity.MsmqAuthenticationMode)
            {
                // Certificate auth: no SID; the sender certificate buffer is attached here and
                // filled in later by ApplyCertificateIfNeeded.
                this.authLevel = new IntProperty(this, UnsafeNativeMethods.PROPID_M_AUTH_LEVEL,
                    UnsafeNativeMethods.MQMSG_AUTH_LEVEL_ALWAYS);
                this.hashAlgorithm = new IntProperty(
                    this,
                    UnsafeNativeMethods.PROPID_M_HASH_ALG,
                    MsmqSecureHashAlgorithmHelper.ToInt32(factory.MsmqTransportSecurity.MsmqSecureHashAlgorithm));
                if (ProtectionLevel.EncryptAndSign == factory.MsmqTransportSecurity.MsmqProtectionLevel)
                {
                    this.privLevel = new IntProperty(this, UnsafeNativeMethods.PROPID_M_PRIV_LEVEL,
                        UnsafeNativeMethods.MQMSG_PRIV_LEVEL_BODY_ENHANCED);
                    this.encryptionAlgorithm = new IntProperty(
                        this,
                        UnsafeNativeMethods.PROPID_M_ENCRYPTION_ALG,
                        MsmqEncryptionAlgorithmHelper.ToInt32(factory.MsmqTransportSecurity.MsmqEncryptionAlgorithm));
                }
                EnsureSenderIdTypeProperty(UnsafeNativeMethods.MQMSG_SENDERID_TYPE_NONE);
                this.senderCert = new BufferProperty(this, UnsafeNativeMethods.PROPID_M_SENDER_CERT);
            }
            else
            {
                // No transport authentication.
                this.authLevel = new IntProperty(this, UnsafeNativeMethods.PROPID_M_AUTH_LEVEL,
                    UnsafeNativeMethods.MQMSG_AUTH_LEVEL_NONE);
                EnsureSenderIdTypeProperty(UnsafeNativeMethods.MQMSG_SENDERID_TYPE_NONE);
            }

            // Route-tracing to the report queue when enabled on the factory.
            this.trace = new ByteProperty(this, UnsafeNativeMethods.PROPID_M_TRACE,
                (byte)(factory.UseMsmqTracing ? UnsafeNativeMethods.MQMSG_SEND_ROUTE_TO_REPORT_QUEUE : UnsafeNativeMethods.MQMSG_TRACE_NONE));
        }

        // Buffer that receives the serialized message body before the native send.
        public BufferProperty Body
        {
            get { return this.body; }
        }

        // Native message id buffer (filled by MSMQ on send).
        public BufferProperty MessageId
        {
            get { return this.messageId; }
        }

        /// <summary>
        /// In Certificate authentication mode, resolves the client certificate from the token
        /// provider and attaches its raw data as the sender certificate property.
        /// No-op in any other authentication mode.
        /// </summary>
        /// <exception cref="InvalidOperationException">No client certificate could be found.</exception>
        internal void ApplyCertificateIfNeeded(SecurityTokenProviderContainer certificateTokenProvider,
            MsmqAuthenticationMode authenticationMode, TimeSpan timeout)
        {
            if (MsmqAuthenticationMode.Certificate == authenticationMode)
            {
                if (certificateTokenProvider == null)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("certificateTokenProvider");
                }
                X509Certificate2 clientCertificate = certificateTokenProvider.GetCertificate(timeout);
                if (clientCertificate == null)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperCritical(new InvalidOperationException(SR.GetString(SR.MsmqCertificateNotFound)));
                this.senderCert.SetBufferReference(clientCertificate.GetRawCertData());
            }
        }

        // Ensure* helpers: create the backing native property on first use, then set its value.

        protected void EnsureBodyTypeProperty(int value)
        {
            if (this.bodyType == null)
            {
                this.bodyType = new IntProperty(this, UnsafeNativeMethods.PROPID_M_BODY_TYPE);
            }
            this.bodyType.Value = value;
        }

        protected void EnsureDeadLetterQueueProperty(string value)
        {
            // An empty path means "no custom dead-letter queue": the property is not attached at all.
            if (value.Length > 0)
            {
                if (this.deadLetterQueue == null)
                {
                    this.deadLetterQueue = new StringProperty(this, UnsafeNativeMethods.PROPID_M_DEADLETTER_QUEUE, value);
                }
                else
                {
                    this.deadLetterQueue.SetValue(value);
                }
            }
        }

        protected void EnsureSenderIdTypeProperty(int value)
        {
            if (this.senderIdType == null)
            {
                this.senderIdType = new IntProperty(this, UnsafeNativeMethods.PROPID_M_SENDERID_TYPE);
            }
            this.senderIdType.Value = value;
        }

        protected void EnsureTimeToReachQueueProperty(int value)
        {
            if (this.timeToReachQueue == null)
            {
                this.timeToReachQueue = new IntProperty(this, UnsafeNativeMethods.PROPID_M_TIME_TO_REACH_QUEUE);
            }
            this.timeToReachQueue.Value = value;
        }

        // The journal property is a bit field shared by MQMSG_JOURNAL and MQMSG_DEADLETTER;
        // this sets or clears a single flag without disturbing the others.
        protected void EnsureJournalProperty(byte flag, bool isFlagSet)
        {
            if (this.journal == null)
            {
                this.journal = new ByteProperty(this, UnsafeNativeMethods.PROPID_M_JOURNAL);
            }
            if (isFlagSet)
            {
                this.journal.Value |= flag;
            }
            else
            {
                this.journal.Value &= (byte)(~flag);
            }
        }
    }
}
using System;
using System.Linq;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
using BenchmarkDotNet.Jobs;
using BenchmarkDotNet.Tests.Loggers;
using BenchmarkDotNet.Tests.XUnit;
using Xunit;
using Xunit.Abstractions;

namespace BenchmarkDotNet.IntegrationTests
{
    /// <summary>
    /// Verifies that [GlobalSetup]/[GlobalCleanup]/[IterationSetup]/[IterationCleanup] and the
    /// benchmark itself run in the expected order, for sync, Task, Task&lt;T&gt;, ValueTask and
    /// ValueTask&lt;T&gt; setup/cleanup signatures.  Each method prints a marker line; the test
    /// collects the marker lines from the log and compares them to the expected sequence.
    /// </summary>
    public class AllSetupAndCleanupTest : BenchmarkTestExecutor
    {
        private const string Prefix = "// ### Called: ";
        private const string GlobalSetupCalled = Prefix + "GlobalSetup";
        private const string GlobalCleanupCalled = Prefix + "GlobalCleanup";
        private const string IterationSetupCalled = Prefix + "IterationSetup";
        private const string IterationCleanupCalled = Prefix + "IterationCleanup";
        private const string BenchmarkCalled = Prefix + "Benchmark";

        // Expected marker sequence for a Monitoring job with 2 warmup + 3 target iterations,
        // composed from the constants above so the literals exist in exactly one place.
        private readonly string[] expectedLogLines =
        {
            GlobalSetupCalled,
            IterationSetupCalled + " (1)",   // MainWarmup1
            BenchmarkCalled,                 // MainWarmup1
            IterationCleanupCalled + " (1)", // MainWarmup1
            IterationSetupCalled + " (2)",   // MainWarmup2
            BenchmarkCalled,                 // MainWarmup2
            IterationCleanupCalled + " (2)", // MainWarmup2
            IterationSetupCalled + " (3)",   // MainTarget1
            BenchmarkCalled,                 // MainTarget1
            IterationCleanupCalled + " (3)", // MainTarget1
            IterationSetupCalled + " (4)",   // MainTarget2
            BenchmarkCalled,                 // MainTarget2
            IterationCleanupCalled + " (4)", // MainTarget2
            IterationSetupCalled + " (5)",   // MainTarget3
            BenchmarkCalled,                 // MainTarget3
            IterationCleanupCalled + " (5)", // MainTarget3
            GlobalCleanupCalled
        };

        public AllSetupAndCleanupTest(ITestOutputHelper output) : base(output) { }

        // Monitoring job with a single invocation per iteration so every call is observable.
        private static Job CreateMiniJob()
            => Job.Default
                  .WithStrategy(RunStrategy.Monitoring)
                  .WithWarmupCount(2)
                  .WithIterationCount(3)
                  .WithInvocationCount(1)
                  .WithUnrollFactor(1)
                  .WithId("MiniJob");

        // Extracts the marker lines from the captured log, echoes them for diagnostics,
        // and asserts they match the expected sequence exactly.
        private void AssertSetupCleanupLogs(OutputLogger logger)
        {
            var actualLogLines = logger.GetLog().Split('\r', '\n').Where(line => line.StartsWith(Prefix)).ToArray();
            foreach (string line in actualLogLines)
                Output.WriteLine(line);

            Assert.Equal(expectedLogLines, actualLogLines);
        }

        [Fact]
        public void AllSetupAndCleanupMethodRunsTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger, CreateMiniJob());

            CanExecute<AllSetupAndCleanupAttributeBenchmarks>(config);

            AssertSetupCleanupLogs(logger);
        }

        public class AllSetupAndCleanupAttributeBenchmarks
        {
            private int setupCounter;
            private int cleanupCounter;

            [IterationSetup]
            public void IterationSetup() => Console.WriteLine(IterationSetupCalled + " (" + ++setupCounter + ")");

            [IterationCleanup]
            public void IterationCleanup() => Console.WriteLine(IterationCleanupCalled + " (" + ++cleanupCounter + ")");

            [GlobalSetup]
            public void GlobalSetup() => Console.WriteLine(GlobalSetupCalled);

            [GlobalCleanup]
            public void GlobalCleanup() => Console.WriteLine(GlobalCleanupCalled);

            [Benchmark]
            public void Benchmark() => Console.WriteLine(BenchmarkCalled);
        }

        [Fact]
        public void AllSetupAndCleanupMethodRunsAsyncTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger, CreateMiniJob());

            CanExecute<AllSetupAndCleanupAttributeBenchmarksAsync>(config);

            AssertSetupCleanupLogs(logger);
        }

        public class AllSetupAndCleanupAttributeBenchmarksAsync
        {
            private int setupCounter;
            private int cleanupCounter;

            [IterationSetup]
            public void IterationSetup() => Console.WriteLine(IterationSetupCalled + " (" + ++setupCounter + ")");

            [IterationCleanup]
            public void IterationCleanup() => Console.WriteLine(IterationCleanupCalled + " (" + ++cleanupCounter + ")");

            [GlobalSetup]
            public Task GlobalSetup() => Console.Out.WriteLineAsync(GlobalSetupCalled);

            [GlobalCleanup]
            public Task GlobalCleanup() => Console.Out.WriteLineAsync(GlobalCleanupCalled);

            [Benchmark]
            public Task Benchmark() => Console.Out.WriteLineAsync(BenchmarkCalled);
        }

        [Fact]
        public void AllSetupAndCleanupMethodRunsAsyncTaskSetupTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger, CreateMiniJob());

            CanExecute<AllSetupAndCleanupAttributeBenchmarksAsyncTaskSetup>(config);

            AssertSetupCleanupLogs(logger);
        }

        public class AllSetupAndCleanupAttributeBenchmarksAsyncTaskSetup
        {
            private int setupCounter;
            private int cleanupCounter;

            [IterationSetup]
            public void IterationSetup() => Console.WriteLine(IterationSetupCalled + " (" + ++setupCounter + ")");

            [IterationCleanup]
            public void IterationCleanup() => Console.WriteLine(IterationCleanupCalled + " (" + ++cleanupCounter + ")");

            [GlobalSetup]
            public Task GlobalSetup() => Console.Out.WriteLineAsync(GlobalSetupCalled);

            [GlobalCleanup]
            public Task GlobalCleanup() => Console.Out.WriteLineAsync(GlobalCleanupCalled);

            [Benchmark]
            public void Benchmark() => Console.WriteLine(BenchmarkCalled);
        }

        [Fact]
        public void AllSetupAndCleanupMethodRunsAsyncGenericTaskSetupTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger, CreateMiniJob());

            CanExecute<AllSetupAndCleanupAttributeBenchmarksAsyncGenericTaskSetup>(config);

            AssertSetupCleanupLogs(logger);
        }

        public class AllSetupAndCleanupAttributeBenchmarksAsyncGenericTaskSetup
        {
            private int setupCounter;
            private int cleanupCounter;

            [IterationSetup]
            public void IterationSetup() => Console.WriteLine(IterationSetupCalled + " (" + ++setupCounter + ")");

            [IterationCleanup]
            public void IterationCleanup() => Console.WriteLine(IterationCleanupCalled + " (" + ++cleanupCounter + ")");

            [GlobalSetup]
            public async Task<int> GlobalSetup()
            {
                await Console.Out.WriteLineAsync(GlobalSetupCalled);
                return 42;
            }

            [GlobalCleanup]
            public async Task<int> GlobalCleanup()
            {
                await Console.Out.WriteLineAsync(GlobalCleanupCalled);
                return 42;
            }

            [Benchmark]
            public void Benchmark() => Console.WriteLine(BenchmarkCalled);
        }

        [Fact]
        public void AllSetupAndCleanupMethodRunsAsyncValueTaskSetupTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger, CreateMiniJob());

            CanExecute<AllSetupAndCleanupAttributeBenchmarksAsyncValueTaskSetup>(config);

            AssertSetupCleanupLogs(logger);
        }

        public class AllSetupAndCleanupAttributeBenchmarksAsyncValueTaskSetup
        {
            private int setupCounter;
            private int cleanupCounter;

            [IterationSetup]
            public void IterationSetup() => Console.WriteLine(IterationSetupCalled + " (" + ++setupCounter + ")");

            [IterationCleanup]
            public void IterationCleanup() => Console.WriteLine(IterationCleanupCalled + " (" + ++cleanupCounter + ")");

            [GlobalSetup]
            public ValueTask GlobalSetup() => new ValueTask(Console.Out.WriteLineAsync(GlobalSetupCalled));

            [GlobalCleanup]
            public ValueTask GlobalCleanup() => new ValueTask(Console.Out.WriteLineAsync(GlobalCleanupCalled));

            [Benchmark]
            public void Benchmark() => Console.WriteLine(BenchmarkCalled);
        }

        [FactNotGitHubActionsWindows]
        public void AllSetupAndCleanupMethodRunsAsyncGenericValueTaskSetupTest()
        {
            var logger = new OutputLogger(Output);
            var config = CreateSimpleConfig(logger, CreateMiniJob());

            CanExecute<AllSetupAndCleanupAttributeBenchmarksAsyncGenericValueTaskSetup>(config);

            AssertSetupCleanupLogs(logger);
        }

        public class AllSetupAndCleanupAttributeBenchmarksAsyncGenericValueTaskSetup
        {
            private int setupCounter;
            private int cleanupCounter;

            [IterationSetup]
            public void IterationSetup() => Console.WriteLine(IterationSetupCalled + " (" + ++setupCounter + ")");

            [IterationCleanup]
            public void IterationCleanup() => Console.WriteLine(IterationCleanupCalled + " (" + ++cleanupCounter + ")");

            [GlobalSetup]
            public async ValueTask<int> GlobalSetup()
            {
                await Console.Out.WriteLineAsync(GlobalSetupCalled);
                return 42;
            }

            [GlobalCleanup]
            public async ValueTask<int> GlobalCleanup()
            {
                await Console.Out.WriteLineAsync(GlobalCleanupCalled);
                return 42;
            }

            [Benchmark]
            public void Benchmark() => Console.WriteLine(BenchmarkCalled);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data.Common;
using System.Data.SqlTypes;
using System.Diagnostics;
using System.IO;
using System.Globalization;
using System.Reflection;
using System.Xml;
using MSS = Microsoft.SqlServer.Server;

using Microsoft.SqlServer.Server;

namespace System.Data.SqlClient
{
    using Res = System.SR;

    // Marker base class for streamed parameter values ("data feeds") sent to the server
    // without being fully buffered.  Each subclass wraps one source kind.
    internal abstract class DataFeed
    {
    }

    // Data feed backed by a raw byte Stream.
    internal class StreamDataFeed : DataFeed
    {
        internal Stream _source;

        internal StreamDataFeed(Stream source)
        {
            _source = source;
        }
    }

    // Data feed backed by a TextReader (character data).
    internal class TextDataFeed : DataFeed
    {
        internal TextReader _source;

        internal TextDataFeed(TextReader source)
        {
            _source = source;
        }
    }

    // Data feed backed by an XmlReader (XML data).
    internal class XmlDataFeed : DataFeed
    {
        internal XmlReader _source;

        internal XmlDataFeed(XmlReader source)
        {
            _source = source;
        }
    }

    public sealed partial class SqlParameter : DbParameter
    {
        private MetaType _metaType;                         // user-requested type mapping (DbType/SqlDbType); null until a type is set
        private SqlCollation _collation;                    // set from the TDS return value, not by the user (see Collation property)
        private string _xmlSchemaCollectionDatabase;
        private string _xmlSchemaCollectionOwningSchema;
        private string _xmlSchemaCollectionName;
        private string _typeName;                           // TVP/UDT type name
        private string _parameterName;
        private byte _precision;
        private byte _scale;
        private bool _hasScale; // V1.0 compat, ignore _hasScale
        private MetaType _internalMetaType;                 // type actually used on the wire
        private SqlBuffer _sqlBufferReturnValue;            // holds a value returned by the server
        private INullable _valueAsINullable;                // cached cast of _value when it is a SqlType
        private bool _isSqlParameterSqlType;
        private bool _isNull = true;
        private bool _coercedValueIsSqlType;                // state for value coercion; used by other parts of this partial class
        private bool _coercedValueIsDataFeed;
        private int _actualSize = -1;                       // cached wire size; -1 means "recompute"

        public SqlParameter() : base()
        {
        }

        public SqlParameter(string parameterName, SqlDbType dbType) : this()
        {
            this.ParameterName = parameterName;
            this.SqlDbType = dbType;
        }

        public SqlParameter(string parameterName, object value) : this()
        {
Debug.Assert(!(value is SqlDbType), "use SqlParameter(string, SqlDbType)"); this.ParameterName = parameterName; this.Value = value; } public SqlParameter(string parameterName, SqlDbType dbType, int size) : this() { this.ParameterName = parameterName; this.SqlDbType = dbType; this.Size = size; } public SqlParameter(string parameterName, SqlDbType dbType, int size, string sourceColumn) : this() { this.ParameterName = parameterName; this.SqlDbType = dbType; this.Size = size; this.SourceColumn = sourceColumn; } // // currently the user can't set this value. it gets set by the return value from tds // internal SqlCollation Collation { get { return _collation; } set { _collation = value; } } public SqlCompareOptions CompareInfo { // Bits 21 through 25 represent the CompareInfo get { SqlCollation collation = _collation; if (null != collation) { return collation.SqlCompareOptions; } return SqlCompareOptions.None; } set { SqlCollation collation = _collation; if (null == collation) { _collation = collation = new SqlCollation(); } if ((value & SqlString.x_iValidSqlCompareOptionMask) != value) { throw ADP.ArgumentOutOfRange(nameof(CompareInfo)); } collation.SqlCompareOptions = value; } } public string XmlSchemaCollectionDatabase { get { string xmlSchemaCollectionDatabase = _xmlSchemaCollectionDatabase; return ((xmlSchemaCollectionDatabase != null) ? xmlSchemaCollectionDatabase : ADP.StrEmpty); } set { _xmlSchemaCollectionDatabase = value; } } public string XmlSchemaCollectionOwningSchema { get { string xmlSchemaCollectionOwningSchema = _xmlSchemaCollectionOwningSchema; return ((xmlSchemaCollectionOwningSchema != null) ? xmlSchemaCollectionOwningSchema : ADP.StrEmpty); } set { _xmlSchemaCollectionOwningSchema = value; } } public string XmlSchemaCollectionName { get { string xmlSchemaCollectionName = _xmlSchemaCollectionName; return ((xmlSchemaCollectionName != null) ? 
xmlSchemaCollectionName : ADP.StrEmpty); } set { _xmlSchemaCollectionName = value; } } override public DbType DbType { get { return GetMetaTypeOnly().DbType; } set { MetaType metatype = _metaType; if ((null == metatype) || (metatype.DbType != value) || // Two special datetime cases for backward compat // DbType.Date and DbType.Time should always be treated as setting DbType.DateTime instead value == DbType.Date || value == DbType.Time) { PropertyTypeChanging(); _metaType = MetaType.GetMetaTypeFromDbType(value); } } } public override void ResetDbType() { ResetSqlDbType(); } internal MetaType InternalMetaType { get { Debug.Assert(null != _internalMetaType, "null InternalMetaType"); return _internalMetaType; } set { _internalMetaType = value; } } public int LocaleId { // Lowest 20 bits represent LocaleId get { SqlCollation collation = _collation; if (null != collation) { return collation.LCID; } return 0; } set { SqlCollation collation = _collation; if (null == collation) { _collation = collation = new SqlCollation(); } if (value != (SqlCollation.MaskLcid & value)) { throw ADP.ArgumentOutOfRange(nameof(LocaleId)); } collation.LCID = value; } } internal bool SizeInferred { get { return 0 == _size; } } internal MSS.SmiParameterMetaData MetaDataForSmi(out ParameterPeekAheadValue peekAhead) { peekAhead = null; MetaType mt = ValidateTypeLengths(); long actualLen = GetActualSize(); long maxLen = this.Size; // GetActualSize returns bytes length, but smi expects char length for // character types, so adjust if (!mt.IsLong) { if (SqlDbType.NChar == mt.SqlDbType || SqlDbType.NVarChar == mt.SqlDbType) { actualLen = actualLen / sizeof(char); } if (actualLen > maxLen) { maxLen = actualLen; } } // Determine maxLength for types that ValidateTypeLengths won't figure out if (0 == maxLen) { if (SqlDbType.Binary == mt.SqlDbType || SqlDbType.VarBinary == mt.SqlDbType) { maxLen = MSS.SmiMetaData.MaxBinaryLength; } else if (SqlDbType.Char == mt.SqlDbType || SqlDbType.VarChar == 
mt.SqlDbType) { maxLen = MSS.SmiMetaData.MaxANSICharacters; } else if (SqlDbType.NChar == mt.SqlDbType || SqlDbType.NVarChar == mt.SqlDbType) { maxLen = MSS.SmiMetaData.MaxUnicodeCharacters; } } else if ((maxLen > MSS.SmiMetaData.MaxBinaryLength && (SqlDbType.Binary == mt.SqlDbType || SqlDbType.VarBinary == mt.SqlDbType)) || (maxLen > MSS.SmiMetaData.MaxANSICharacters && (SqlDbType.Char == mt.SqlDbType || SqlDbType.VarChar == mt.SqlDbType)) || (maxLen > MSS.SmiMetaData.MaxUnicodeCharacters && (SqlDbType.NChar == mt.SqlDbType || SqlDbType.NVarChar == mt.SqlDbType))) { maxLen = -1; } int localeId = LocaleId; if (0 == localeId && mt.IsCharType) { object value = GetCoercedValue(); if (value is SqlString && !((SqlString)value).IsNull) { localeId = ((SqlString)value).LCID; } else { localeId = Locale.GetCurrentCultureLcid(); } } SqlCompareOptions compareOpts = CompareInfo; if (0 == compareOpts && mt.IsCharType) { object value = GetCoercedValue(); if (value is SqlString && !((SqlString)value).IsNull) { compareOpts = ((SqlString)value).SqlCompareOptions; } else { compareOpts = MSS.SmiMetaData.GetDefaultForType(mt.SqlDbType).CompareOptions; } } string typeSpecificNamePart1 = null; string typeSpecificNamePart2 = null; string typeSpecificNamePart3 = null; if (SqlDbType.Xml == mt.SqlDbType) { typeSpecificNamePart1 = this.XmlSchemaCollectionDatabase; typeSpecificNamePart2 = this.XmlSchemaCollectionOwningSchema; typeSpecificNamePart3 = this.XmlSchemaCollectionName; } else if (SqlDbType.Udt == mt.SqlDbType || (SqlDbType.Structured == mt.SqlDbType && !string.IsNullOrEmpty(this.TypeName))) { // Split the input name. The type name is specified as single 3 part name. 
// NOTE: ParseTypeName throws if format is incorrect
String[] names;
if (SqlDbType.Udt == mt.SqlDbType)
{
    // UDT parameters are not supported by this code path at all.
    throw ADP.DbTypeNotSupported(SqlDbType.Udt.ToString());
}
else
{
    names = ParseTypeName(this.TypeName);
}

// Distribute the 1..3 identifier parts right-to-left: the last part is always
// the type name, then schema, then database.
if (1 == names.Length)
{
    typeSpecificNamePart3 = names[0];
}
else if (2 == names.Length)
{
    typeSpecificNamePart2 = names[0];
    typeSpecificNamePart3 = names[1];
}
else if (3 == names.Length)
{
    typeSpecificNamePart1 = names[0];
    typeSpecificNamePart2 = names[1];
    typeSpecificNamePart3 = names[2];
}
else
{
    throw ADP.ArgumentOutOfRange(nameof(names));
}

// Each identifier part is limited to the server-name length cap.
if ((!string.IsNullOrEmpty(typeSpecificNamePart1) && TdsEnums.MAX_SERVERNAME < typeSpecificNamePart1.Length)
    || (!string.IsNullOrEmpty(typeSpecificNamePart2) && TdsEnums.MAX_SERVERNAME < typeSpecificNamePart2.Length)
    || (!string.IsNullOrEmpty(typeSpecificNamePart3) && TdsEnums.MAX_SERVERNAME < typeSpecificNamePart3.Length))
{
    throw ADP.ArgumentOutOfRange(nameof(names));
}
}

byte precision = GetActualPrecision();
byte scale = GetActualScale();

// precision for decimal types may still need adjustment.
if (SqlDbType.Decimal == mt.SqlDbType)
{
    if (0 == precision)
    {
        precision = TdsEnums.DEFAULT_NUMERIC_PRECISION;
    }
}

// Sub-field determination
List<SmiExtendedMetaData> fields = null;
MSS.SmiMetaDataPropertyCollection extendedProperties = null;
if (SqlDbType.Structured == mt.SqlDbType)
{
    GetActualFieldsAndProperties(out fields, out extendedProperties, out peekAhead);
}

return new MSS.SmiParameterMetaData(mt.SqlDbType,
                                    maxLen,
                                    precision,
                                    scale,
                                    localeId,
                                    compareOpts,
                                    SqlDbType.Structured == mt.SqlDbType,
                                    fields,
                                    extendedProperties,
                                    this.ParameterNameFixed,
                                    typeSpecificNamePart1,
                                    typeSpecificNamePart2,
                                    typeSpecificNamePart3,
                                    this.Direction);
}

// True when the value currently held was supplied as a System.Data.SqlTypes
// (INullable) instance rather than a plain CLR value.
internal bool ParameterIsSqlType
{
    get
    {
        return _isSqlParameterSqlType;
    }
    set
    {
        _isSqlParameterSqlType = value;
    }
}

/// <summary>
/// Name of the parameter. Never returns null (empty string instead).
/// Setter enforces the TDS parameter-name length limit, allowing one extra
/// character when the name already starts with '@'.
/// </summary>
override public string ParameterName
{
    get
    {
        string parameterName = _parameterName;
        return ((null != parameterName) ? parameterName : ADP.StrEmpty);
    }
    set
    {
        if (string.IsNullOrEmpty(value)
            || (value.Length < TdsEnums.MAX_PARAMETER_NAME_LENGTH)
            || (('@' == value[0]) && (value.Length <= TdsEnums.MAX_PARAMETER_NAME_LENGTH)))
        {
            if (_parameterName != value)
            {
                PropertyChanging();
                _parameterName = value;
            }
        }
        else
        {
            throw SQL.InvalidParameterNameLength(value);
        }
    }
}

// ParameterName guaranteed to carry the leading '@' required on the wire.
internal string ParameterNameFixed
{
    get
    {
        string parameterName = ParameterName;
        if ((0 < parameterName.Length) && ('@' != parameterName[0]))
        {
            parameterName = "@" + parameterName;
        }
        Debug.Assert(parameterName.Length <= TdsEnums.MAX_PARAMETER_NAME_LENGTH, "parameter name too long");
        return parameterName;
    }
}

public override Byte Precision
{
    get
    {
        return PrecisionInternal;
    }
    set
    {
        PrecisionInternal = value;
    }
}

// Backing implementation for Precision. When unset (0) and the type is
// Decimal, the precision is inferred from the current SqlValue.
internal byte PrecisionInternal
{
    get
    {
        byte precision = _precision;
        SqlDbType dbtype = GetMetaSqlDbTypeOnly();
        if ((0 == precision) && (SqlDbType.Decimal == dbtype))
        {
            precision = ValuePrecision(SqlValue);
        }
        return precision;
    }
    set
    {
        SqlDbType sqlDbType = SqlDbType;
        if (sqlDbType == SqlDbType.Decimal && value > TdsEnums.MAX_NUMERIC_PRECISION)
        {
            throw SQL.PrecisionValueOutOfRange(value);
        }
        if (_precision != value)
        {
            PropertyChanging();
            _precision = value;
        }
    }
}

private bool ShouldSerializePrecision()
{
    return (0 != _precision);
}

public override Byte Scale
{
    get
    {
        return ScaleInternal;
    }
    set
    {
        ScaleInternal = value;
    }
}

// Backing implementation for Scale. When unset (0) and the type is
// Decimal, the scale is inferred from the current SqlValue.
internal byte ScaleInternal
{
    get
    {
        byte scale = _scale;
        SqlDbType dbtype = GetMetaSqlDbTypeOnly();
        if ((0 == scale) && (SqlDbType.Decimal == dbtype))
        {
            scale = ValueScale(SqlValue);
        }
        return scale;
    }
    set
    {
        if (_scale != value || !_hasScale)
        {
            PropertyChanging();
            _scale = value;
            _hasScale = true;
            _actualSize = -1;   // Invalidate actual size such that it is re-calculated
        }
    }
}

private bool ShouldSerializeScale()
{
    return (0 != _scale); // V1.0 compat, ignore _hasScale
}

/// <summary>
/// SQL Server data type of the parameter. Setting a new type resets the
/// cached meta type and coerced value.
/// </summary>
public SqlDbType SqlDbType
{
    get
    {
        return GetMetaTypeOnly().SqlDbType;
    }
    set
    {
        MetaType metatype = _metaType;
        // HACK!!!
        // We didn't want to expose SmallVarBinary on SqlDbType so we
        // stuck it at the end of SqlDbType in v1.0, except that now
        // we have new data types after that and it's smack dab in the
        // middle of the valid range.  To prevent folks from setting
        // this invalid value we have to have this code here until we
        // can take the time to fix it later.
        if ((SqlDbType)TdsEnums.SmallVarBinary == value)
        {
            throw SQL.InvalidSqlDbType(value);
        }
        if ((null == metatype) || (metatype.SqlDbType != value))
        {
            PropertyTypeChanging();
            _metaType = MetaType.GetMetaTypeFromSqlDbType(value, value == SqlDbType.Structured);
        }
    }
}

private bool ShouldSerializeSqlDbType()
{
    return (null != _metaType);
}

// Clears any explicitly-set type so it will be re-inferred from the value.
public void ResetSqlDbType()
{
    if (null != _metaType)
    {
        PropertyTypeChanging();
        _metaType = null;
    }
}

/// <summary>
/// The parameter value expressed as a System.Data.SqlTypes instance where
/// possible; DBNull maps to the typed null SqlValue. Setting delegates to
/// <see cref="Value"/>.
/// </summary>
public object SqlValue
{
    get
    {
        if (_value != null)
        {
            if (_value == DBNull.Value)
            {
                return MetaType.GetNullSqlValue(GetMetaTypeOnly().SqlType);
            }
            if (_value is INullable)
            {
                return _value;
            }

            // For Date and DateTime2, return the CLR object directly without converting it to a SqlValue
            // GetMetaTypeOnly() will convert _value to a string in the case of char or char[], so only check
            // the SqlDbType for DateTime.
            if (_value is DateTime)
            {
                SqlDbType sqlDbType = GetMetaTypeOnly().SqlDbType;
                if (sqlDbType == SqlDbType.Date || sqlDbType == SqlDbType.DateTime2)
                {
                    return _value;
                }
            }

            return (MetaType.GetSqlValueFromComVariant(_value));
        }
        else if (_sqlBufferReturnValue != null)
        {
            // Value came back from the server.
            return _sqlBufferReturnValue.SqlValue;
        }
        return null;
    }
    set
    {
        Value = value;
    }
}

/// <summary>
/// Server type name for Structured (TVP) parameters. Never returns null.
/// </summary>
public String TypeName
{
    get
    {
        string typeName = _typeName;
        return ((null != typeName) ? typeName : ADP.StrEmpty);
    }
    set
    {
        _typeName = value;
    }
}

/// <summary>
/// The CLR value of the parameter. Setting clears any server return value
/// and cached coercion state, and recomputes nullness / SqlType tracking.
/// </summary>
override public object Value
{ // V1.2.3300, XXXParameter V1.0.3300
    get
    {
        if (_value != null)
        {
            return _value;
        }
        else if (_sqlBufferReturnValue != null)
        {
            if (ParameterIsSqlType)
            {
                return _sqlBufferReturnValue.SqlValue;
            }
            return _sqlBufferReturnValue.Value;
        }
        return null;
    }
    set
    {
        _value = value;
        _sqlBufferReturnValue = null;
        _coercedValue = null;
        _valueAsINullable = _value as INullable;
        _isSqlParameterSqlType = (_valueAsINullable != null);
        _isNull = ((_value == null) || (_value == DBNull.Value) || ((_isSqlParameterSqlType) && (_valueAsINullable.IsNull)));
        _actualSize = -1;
    }
}

internal INullable ValueAsINullable
{
    get
    {
        return _valueAsINullable;
    }
}

// Whether the parameter currently represents a null value. For Udt the
// cached flag is recomputed on every access because the value can mutate.
internal bool IsNull
{
    get
    {
        // NOTE: Udts can change their value any time
        if (_internalMetaType.SqlDbType == Data.SqlDbType.Udt)
        {
            _isNull = ((_value == null) || (_value == DBNull.Value) || ((_isSqlParameterSqlType) && (_valueAsINullable.IsNull)));
        }
        return _isNull;
    }
}

//
// always returns data in bytes - except for non-unicode chars, which will be in number of chars
//
internal int GetActualSize()
{
    MetaType mt = InternalMetaType;
    SqlDbType actualType = mt.SqlDbType;
    // NOTE: Users can change the Udt at any time, so we may need to recalculate
    if ((_actualSize == -1) || (actualType == Data.SqlDbType.Udt))
    {
        _actualSize = 0;
        object val = GetCoercedValue();
        bool isSqlVariant = false;

        if (IsNull && !mt.IsVarTime)
        {
            return 0;
        }

        // if this is a backend SQLVariant type, then infer the TDS type from the SQLVariant type
        if (actualType == SqlDbType.Variant)
        {
            mt = MetaType.GetMetaTypeFromValue(val, streamAllowed: false);
            actualType = MetaType.GetSqlDataType(mt.TDSType, 0 /*no user type*/, 0 /*non-nullable type*/).SqlDbType;
            isSqlVariant = true;
        }

        if (mt.IsFixed)
        {
            _actualSize = mt.FixedLength;
        }
        else
        {
            // @hack: until we have ForceOffset behavior we have the following semantics:
            // @hack: if the user supplies a Size through the Size property or constructor,
            // @hack: we only send a MAX of Size bytes over.  If the actualSize is < Size, then
            // @hack: we send over actualSize
            int coercedSize = 0;

            // get the actual length of the data, in bytes
            switch (actualType)
            {
                case SqlDbType.NChar:
                case SqlDbType.NVarChar:
                case SqlDbType.NText:
                case SqlDbType.Xml:
                    {
                        coercedSize = ((!_isNull) && (!_coercedValueIsDataFeed)) ? (StringSize(val, _coercedValueIsSqlType)) : 0;
                        _actualSize = (ShouldSerializeSize() ? Size : 0);
                        _actualSize = ((ShouldSerializeSize() && (_actualSize <= coercedSize)) ? _actualSize : coercedSize);
                        if (_actualSize == -1)
                            _actualSize = coercedSize;
                        // Unicode types: convert char count to byte count.
                        _actualSize <<= 1;
                    }
                    break;
                case SqlDbType.Char:
                case SqlDbType.VarChar:
                case SqlDbType.Text:
                    {
                        // for these types, ActualSize is the num of chars, not actual bytes - since non-unicode chars are not always uniform size
                        coercedSize = ((!_isNull) && (!_coercedValueIsDataFeed)) ? (StringSize(val, _coercedValueIsSqlType)) : 0;
                        _actualSize = (ShouldSerializeSize() ? Size : 0);
                        _actualSize = ((ShouldSerializeSize() && (_actualSize <= coercedSize)) ? _actualSize : coercedSize);
                        if (_actualSize == -1)
                            _actualSize = coercedSize;
                    }
                    break;
                case SqlDbType.Binary:
                case SqlDbType.VarBinary:
                case SqlDbType.Image:
                case SqlDbType.Timestamp:
                    coercedSize = ((!_isNull) && (!_coercedValueIsDataFeed)) ? (BinarySize(val, _coercedValueIsSqlType)) : 0;
                    _actualSize = (ShouldSerializeSize() ? Size : 0);
                    _actualSize = ((ShouldSerializeSize() && (_actualSize <= coercedSize)) ? _actualSize : coercedSize);
                    if (_actualSize == -1)
                        _actualSize = coercedSize;
                    break;
                case SqlDbType.Udt:
                    throw ADP.DbTypeNotSupported(SqlDbType.Udt.ToString());
                case SqlDbType.Structured:
                    coercedSize = -1;
                    break;
                case SqlDbType.Time:
                    _actualSize = (isSqlVariant ? 5 : MetaType.GetTimeSizeFromScale(GetActualScale()));
                    break;
                case SqlDbType.DateTime2:
                    // Date in number of days (3 bytes) + time
                    _actualSize = 3 + (isSqlVariant ? 5 : MetaType.GetTimeSizeFromScale(GetActualScale()));
                    break;
                case SqlDbType.DateTimeOffset:
                    // Date in days (3 bytes) + offset in minutes (2 bytes) + time
                    _actualSize = 5 + (isSqlVariant ? 5 : MetaType.GetTimeSizeFromScale(GetActualScale()));
                    break;
                default:
                    Debug.Assert(false, "Unknown variable length type!");
                    break;
            } // switch

            // don't even send big values over to the variant
            if (isSqlVariant && (coercedSize > TdsEnums.TYPE_SIZE_LIMIT))
                throw SQL.ParameterInvalidVariant(this.ParameterName);
        }
    }
    return _actualSize;
}

// Converts 'value' into the CLR/stream shape required by 'destinationType'.
// 'coercedToDataFeed' reports whether the result was wrapped in a streaming
// DataFeed; 'typeChanged' reports whether the returned object differs in type
// from the input.
// Coerced Value is also used in SqlBulkCopy.ConvertValue(object value, _SqlMetaData metadata)
internal static object CoerceValue(object value, MetaType destinationType, out bool coercedToDataFeed, out bool typeChanged, bool allowStreaming = true)
{
    Debug.Assert(!(value is DataFeed), "Value provided should not already be a data feed");
    Debug.Assert(!ADP.IsNull(value), "Value provided should not be null");
    Debug.Assert(null != destinationType, "null destinationType");

    coercedToDataFeed = false;
    typeChanged = false;
    Type currentType = value.GetType();

    if ((typeof(object) != destinationType.ClassType) &&
            (currentType != destinationType.ClassType) &&
            ((currentType != destinationType.SqlType) || (SqlDbType.Xml == destinationType.SqlDbType)))
    {
        // Special case for Xml types (since we need to convert SqlXml into a string)
        try
        {
            // Assume that the type changed
            typeChanged = true;
            if ((typeof(string) == destinationType.ClassType))
            {
                // For Xml data, destination Type is always string
                if (typeof(SqlXml) == currentType)
                {
                    value = MetaType.GetStringFromXml((XmlReader)(((SqlXml)value).CreateReader()));
                }
                else if (typeof(SqlString) == currentType)
                {
                    typeChanged = false;   // Do nothing
                }
                else if (typeof(XmlReader).IsAssignableFrom(currentType))
                {
                    if (allowStreaming)
                    {
                        coercedToDataFeed = true;
                        value = new XmlDataFeed((XmlReader)value);
                    }
                    else
                    {
                        value = MetaType.GetStringFromXml((XmlReader)value);
                    }
                }
                else if (typeof(char[]) == currentType)
                {
                    value = new string((char[])value);
                }
                else if (typeof(SqlChars) == currentType)
                {
                    value = new string(((SqlChars)value).Value);
                }
                else if (value is TextReader && allowStreaming)
                {
                    coercedToDataFeed = true;
                    value = new TextDataFeed((TextReader)value);
                }
                else
                {
                    value = Convert.ChangeType(value, destinationType.ClassType, (IFormatProvider)null);
                }
            }
            else if ((DbType.Currency == destinationType.DbType) && (typeof(string) == currentType))
            {
                value = Decimal.Parse((string)value, NumberStyles.Currency, (IFormatProvider)null); // WebData 99376
            }
            else if ((typeof(SqlBytes) == currentType) && (typeof(byte[]) == destinationType.ClassType))
            {
                typeChanged = false;    // Do nothing
            }
            else if ((typeof(string) == currentType) && (SqlDbType.Time == destinationType.SqlDbType))
            {
                value = TimeSpan.Parse((string)value);
            }
            else if ((typeof(string) == currentType) && (SqlDbType.DateTimeOffset == destinationType.SqlDbType))
            {
                value = DateTimeOffset.Parse((string)value, (IFormatProvider)null);
            }
            else if ((typeof(DateTime) == currentType) && (SqlDbType.DateTimeOffset == destinationType.SqlDbType))
            {
                value = new DateTimeOffset((DateTime)value);
            }
            else if (TdsEnums.SQLTABLE == destinationType.TDSType && (
                        value is DbDataReader ||
                        value is System.Collections.Generic.IEnumerable<SqlDataRecord>))
            {
                // no conversion for TVPs.
                typeChanged = false;
            }
            else if (destinationType.ClassType == typeof(byte[]) && value is Stream && allowStreaming)
            {
                coercedToDataFeed = true;
                value = new StreamDataFeed((Stream)value);
            }
            else
            {
                value = Convert.ChangeType(value, destinationType.ClassType, (IFormatProvider)null);
            }
        }
        catch (Exception e)
        {
            if (!ADP.IsCatchableExceptionType(e))
            {
                throw;
            }
            throw ADP.ParameterConversionFailed(value, destinationType.ClassType, e); // WebData 75433
        }
    }

    Debug.Assert(allowStreaming || !coercedToDataFeed, "Streaming is not allowed, but type was coerced into a data feed");
    Debug.Assert(value.GetType() == currentType ^ typeChanged, "Incorrect value for typeChanged");
    return value;
}

// When the target type is not PLP-capable, drains any streaming DataFeed into
// an in-memory value (string / byte[]), honoring Size as an upper bound.
internal void FixStreamDataForNonPLP()
{
    object value = GetCoercedValue();
    AssertCachedPropertiesAreValid();
    if (!_coercedValueIsDataFeed)
    {
        return;
    }
    _coercedValueIsDataFeed = false;

    if (value is TextDataFeed)
    {
        if (Size > 0)
        {
            char[] buffer = new char[Size];
            int nRead = ((TextDataFeed)value)._source.ReadBlock(buffer, 0, Size);
            CoercedValue = new string(buffer, 0, nRead);
        }
        else
        {
            CoercedValue = ((TextDataFeed)value)._source.ReadToEnd();
        }
        return;
    }

    if (value is StreamDataFeed)
    {
        if (Size > 0)
        {
            byte[] buffer = new byte[Size];
            int totalRead = 0;
            Stream sourceStream = ((StreamDataFeed)value)._source;
            // Loop because Stream.Read may return fewer bytes than requested.
            while (totalRead < Size)
            {
                int nRead = sourceStream.Read(buffer, totalRead, Size - totalRead);
                if (nRead == 0)
                {
                    break;
                }
                totalRead += nRead;
            }
            if (totalRead < Size)
            {
                Array.Resize(ref buffer, totalRead);
            }
            CoercedValue = buffer;
        }
        else
        {
            MemoryStream ms = new MemoryStream();
            ((StreamDataFeed)value)._source.CopyTo(ms);
            CoercedValue = ms.ToArray();
        }
        return;
    }

    if (value is XmlDataFeed)
    {
        CoercedValue = MetaType.GetStringFromXml(((XmlDataFeed)value)._source);
        return;
    }

    // We should have returned before reaching here
    Debug.Assert(false, "_coercedValueIsDataFeed was true, but the value was not a known DataFeed type");
}

internal byte GetActualPrecision()
{
    return ShouldSerializePrecision() ? PrecisionInternal : ValuePrecision(CoercedValue);
}

internal byte GetActualScale()
{
    if (ShouldSerializeScale())
    {
        return ScaleInternal;
    }

    // issue: how could a user specify 0 as the actual scale?
    if (GetMetaTypeOnly().IsVarTime)
    {
        return TdsEnums.DEFAULT_VARTIME_SCALE;
    }
    return ValueScale(CoercedValue);
}

internal int GetParameterSize()
{
    return ShouldSerializeSize() ? Size : ValueSize(CoercedValue);
}

// For Structured (TVP) parameters, extracts the column metadata and extended
// properties (unique key / server default / sort order) from the value, which
// may be a SqlDataReader, an IEnumerable<SqlDataRecord>, or a DbDataReader.
// 'peekAhead' carries the already-advanced enumerator + first record so the
// sender does not have to rewind.
private void GetActualFieldsAndProperties(out List<MSS.SmiExtendedMetaData> fields, out SmiMetaDataPropertyCollection props, out ParameterPeekAheadValue peekAhead)
{
    fields = null;
    props = null;
    peekAhead = null;

    object value = GetCoercedValue();
    if (value is SqlDataReader)
    {
        fields = new List<MSS.SmiExtendedMetaData>(((SqlDataReader)value).GetInternalSmiMetaData());
        if (fields.Count <= 0)
        {
            throw SQL.NotEnoughColumnsInStructuredType();
        }

        bool[] keyCols = new bool[fields.Count];
        bool hasKey = false;
        for (int i = 0; i < fields.Count; i++)
        {
            MSS.SmiQueryMetaData qmd = fields[i] as MSS.SmiQueryMetaData;
            if (null != qmd && !qmd.IsKey.IsNull && qmd.IsKey.Value)
            {
                keyCols[i] = true;
                hasKey = true;
            }
        }

        // Add unique key property, if any found.
        if (hasKey)
        {
            props = new SmiMetaDataPropertyCollection();
            props[MSS.SmiPropertySelector.UniqueKey] = new MSS.SmiUniqueKeyProperty(new List<bool>(keyCols));
        }
    }
    else if (value is IEnumerable<SqlDataRecord>)
    {
        // must grab the first record of the enumerator to get the metadata
        IEnumerator<MSS.SqlDataRecord> enumerator = ((IEnumerable<MSS.SqlDataRecord>)value).GetEnumerator();
        MSS.SqlDataRecord firstRecord = null;
        try
        {
            // no need for fields if there's no rows or no columns -- we'll be sending a null instance anyway.
            if (enumerator.MoveNext())
            {
                firstRecord = enumerator.Current;
                int fieldCount = firstRecord.FieldCount;
                if (0 < fieldCount)
                {
                    // It's valid!  Grab those fields.
                    bool[] keyCols = new bool[fieldCount];
                    bool[] defaultFields = new bool[fieldCount];
                    bool[] sortOrdinalSpecified = new bool[fieldCount];
                    int maxSortOrdinal = -1;  // largest sort ordinal seen, used to optimize locating holes in the list
                    bool hasKey = false;
                    bool hasDefault = false;
                    int sortCount = 0;
                    SmiOrderProperty.SmiColumnOrder[] sort = new SmiOrderProperty.SmiColumnOrder[fieldCount];
                    fields = new List<MSS.SmiExtendedMetaData>(fieldCount);
                    for (int i = 0; i < fieldCount; i++)
                    {
                        SqlMetaData colMeta = firstRecord.GetSqlMetaData(i);
                        fields.Add(MSS.MetaDataUtilsSmi.SqlMetaDataToSmiExtendedMetaData(colMeta));
                        if (colMeta.IsUniqueKey)
                        {
                            keyCols[i] = true;
                            hasKey = true;
                        }

                        if (colMeta.UseServerDefault)
                        {
                            defaultFields[i] = true;
                            hasDefault = true;
                        }

                        sort[i].Order = colMeta.SortOrder;
                        if (SortOrder.Unspecified != colMeta.SortOrder)
                        {
                            // SqlMetaData takes care of checking for negative sort ordinals with specified sort order

                            // bail early if there's no way sort order could be monotonically increasing
                            if (fieldCount <= colMeta.SortOrdinal)
                            {
                                throw SQL.SortOrdinalGreaterThanFieldCount(i, colMeta.SortOrdinal);
                            }

                            // Check to make sure we haven't seen this ordinal before
                            if (sortOrdinalSpecified[colMeta.SortOrdinal])
                            {
                                throw SQL.DuplicateSortOrdinal(colMeta.SortOrdinal);
                            }

                            sort[i].SortOrdinal = colMeta.SortOrdinal;
                            sortOrdinalSpecified[colMeta.SortOrdinal] = true;
                            if (colMeta.SortOrdinal > maxSortOrdinal)
                            {
                                maxSortOrdinal = colMeta.SortOrdinal;
                            }
                            sortCount++;
                        }
                    }

                    if (hasKey)
                    {
                        props = new SmiMetaDataPropertyCollection();
                        props[MSS.SmiPropertySelector.UniqueKey] = new MSS.SmiUniqueKeyProperty(new List<bool>(keyCols));
                    }

                    if (hasDefault)
                    {
                        // May have already created props list in unique key handling
                        if (null == props)
                        {
                            props = new SmiMetaDataPropertyCollection();
                        }
                        props[MSS.SmiPropertySelector.DefaultFields] = new MSS.SmiDefaultFieldsProperty(new List<bool>(defaultFields));
                    }

                    if (0 < sortCount)
                    {
                        // validate monotonically increasing sort order.
                        // Since we already checked for duplicates, we just need
                        // to watch for values outside of the sortCount range.
                        if (maxSortOrdinal >= sortCount)
                        {
                            // there is at least one hole, find the first one
                            int i;
                            for (i = 0; i < sortCount; i++)
                            {
                                if (!sortOrdinalSpecified[i])
                                {
                                    break;
                                }
                            }
                            Debug.Assert(i < sortCount, "SqlParameter.GetActualFieldsAndProperties: SortOrdinal hole-finding algorithm failed!");
                            throw SQL.MissingSortOrdinal(i);
                        }

                        // May have already created props list
                        if (null == props)
                        {
                            props = new SmiMetaDataPropertyCollection();
                        }
                        props[MSS.SmiPropertySelector.SortOrder] = new MSS.SmiOrderProperty(
                                new List<SmiOrderProperty.SmiColumnOrder>(sort));
                    }

                    // pack it up so we don't have to rewind to send the first value
                    peekAhead = new ParameterPeekAheadValue();
                    peekAhead.Enumerator = enumerator;
                    peekAhead.FirstRecord = firstRecord;

                    // now that it's all packaged, make sure we don't dispose it.
                    enumerator = null;
                }
                else
                {
                    throw SQL.NotEnoughColumnsInStructuredType();
                }
            }
            else
            {
                throw SQL.IEnumerableOfSqlDataRecordHasNoRows();
            }
        }
        finally
        {
            if (enumerator != null)
            {
                enumerator.Dispose();
            }
        }
    }
    else if (value is DbDataReader)
    {
        // For ProjectK\CoreCLR, DbDataReader no longer supports GetSchema
        // So instead we will attempt to generate the metadata from the Field Type alone
        var reader = (DbDataReader)value;
        if (reader.FieldCount <= 0)
        {
            throw SQL.NotEnoughColumnsInStructuredType();
        }

        fields = new List<MSS.SmiExtendedMetaData>(reader.FieldCount);
        for (int i = 0; i < reader.FieldCount; i++)
        {
            fields.Add(MSS.MetaDataUtilsSmi.SmiMetaDataFromType(reader.GetName(i), reader.GetFieldType(i)));
        }
    }
}

// Returns the cached coerced value, computing (and caching) it on first use.
// Udt values are always recomputed because their contents may change.
internal object GetCoercedValue()
{
    // NOTE: User can change the Udt at any time
    if ((null == _coercedValue) || (_internalMetaType.SqlDbType == Data.SqlDbType.Udt))
    {
        // will also be set during parameter Validation
        bool isDataFeed = Value is DataFeed;
        if ((IsNull) || (isDataFeed))
        {
            // No coercion is done for DataFeeds and Nulls
            _coercedValue = Value;
            _coercedValueIsSqlType = (_coercedValue == null) ? false : _isSqlParameterSqlType; // set to null for output parameters that keeps _isSqlParameterSqlType
            _coercedValueIsDataFeed = isDataFeed;
            _actualSize = IsNull ? 0 : -1;
        }
        else
        {
            bool typeChanged;
            _coercedValue = CoerceValue(Value, _internalMetaType, out _coercedValueIsDataFeed, out typeChanged);
            _coercedValueIsSqlType = ((_isSqlParameterSqlType) && (!typeChanged));  // Type changed always results in a CLR type
            _actualSize = -1;
        }
    }
    AssertCachedPropertiesAreValid();
    return _coercedValue;
}

internal bool CoercedValueIsSqlType
{
    get
    {
        if (null == _coercedValue)
        {
            GetCoercedValue();
        }
        AssertCachedPropertiesAreValid();
        return _coercedValueIsSqlType;
    }
}

internal bool CoercedValueIsDataFeed
{
    get
    {
        if (null == _coercedValue)
        {
            GetCoercedValue();
        }
        AssertCachedPropertiesAreValid();
        return _coercedValueIsDataFeed;
    }
}

// Debug-only consistency check of the cached coercion flags.
[Conditional("DEBUG")]
internal void AssertCachedPropertiesAreValid()
{
    AssertPropertiesAreValid(_coercedValue, _coercedValueIsSqlType, _coercedValueIsDataFeed, IsNull);
}

// Debug-only: verifies the supplied flags agree with the actual value.
[Conditional("DEBUG")]
internal void AssertPropertiesAreValid(object value, bool? isSqlType = null, bool? isDataFeed = null, bool? isNull = null)
{
    Debug.Assert(!isSqlType.HasValue || (isSqlType.Value == (value is INullable)), "isSqlType is incorrect");
    Debug.Assert(!isDataFeed.HasValue || (isDataFeed.Value == (value is DataFeed)), "isDataFeed is incorrect");
    Debug.Assert(!isNull.HasValue || (isNull.Value == ADP.IsNull(value)), "isNull is incorrect");
}

private SqlDbType GetMetaSqlDbTypeOnly()
{
    MetaType metaType = _metaType;
    if (null == metaType)
    { // infer the type from the value
        metaType = MetaType.GetDefaultMetaType();
    }
    return metaType.SqlDbType;
}

// This may not be a good thing to do in case someone overloads the parameter type but I
// don't want to go from SqlDbType -> metaType -> TDSType
private MetaType GetMetaTypeOnly()
{
    if (null != _metaType)
    {
        return _metaType;
    }
    if (null != _value && DBNull.Value != _value)
    {
        // We have a value set by the user then just use that value
        // char and char[] are not directly supported so we convert those values to string
        if (_value is char)
        {
            _value = _value.ToString();
        }
        else if (Value is char[])
        {
            _value = new string((char[])_value);
        }
        return MetaType.GetMetaTypeFromValue(_value, inferLen: false);
    }
    else if (null != _sqlBufferReturnValue)
    {  // value came back from the server
        Type valueType = _sqlBufferReturnValue.GetTypeFromStorageType(_isSqlParameterSqlType);
        if (null != valueType)
        {
            return MetaType.GetMetaTypeFromType(valueType);
        }
    }
    return MetaType.GetDefaultMetaType();
}

// Validates that the parameter carries enough metadata to be used by a
// prepared command (explicit type, and size/precision/scale where needed).
internal void Prepare(SqlCommand cmd)
{
    if (null == _metaType)
    {
        throw ADP.PrepareParameterType(cmd);
    }
    else if (!ShouldSerializeSize() && !_metaType.IsFixed)
    {
        throw ADP.PrepareParameterSize(cmd);
    }
    else if ((!ShouldSerializePrecision() && !ShouldSerializeScale()) && (_metaType.SqlDbType == SqlDbType.Decimal))
    {
        throw ADP.PrepareParameterScale(cmd, SqlDbType.ToString());
    }
}

private void PropertyChanging()
{
    _internalMetaType = null;
}

private void PropertyTypeChanging()
{
    PropertyChanging();
    CoercedValue = null;
}

// Installs a value returned from the server, resetting all cached state.
internal void SetSqlBuffer(SqlBuffer buff)
{
    _sqlBufferReturnValue = buff;
    _value = null;
    _coercedValue = null;
    _isNull = _sqlBufferReturnValue.IsNull;
    _coercedValueIsDataFeed = false;
    _coercedValueIsSqlType = false;
    _actualSize = -1;
}

// Full validation of the parameter before it is sent, including the
// output-parameter size check and Structured (TVP) restrictions.
internal void Validate(int index, bool isCommandProc)
{
    MetaType metaType = GetMetaTypeOnly();
    _internalMetaType = metaType;

    // NOTE: (General Criteria): SqlParameter does a Size Validation check and would fail if the size is 0.
    //       This condition filters all scenarios where we view a valid size 0.
    if (ADP.IsDirection(this, ParameterDirection.Output) &&
        !ADP.IsDirection(this, ParameterDirection.ReturnValue) &&
        (!metaType.IsFixed) &&
        !ShouldSerializeSize() &&
        ((null == _value) || (_value == DBNull.Value)) &&
        (SqlDbType != SqlDbType.Timestamp) &&
        (SqlDbType != SqlDbType.Udt) &&
        // Output parameter with size 0 throws for XML, TEXT, NTEXT, IMAGE.
        (SqlDbType != SqlDbType.Xml) &&
        !metaType.IsVarTime)
    {
        throw ADP.UninitializedParameterSize(index, metaType.ClassType);
    }

    if (metaType.SqlDbType != SqlDbType.Udt && Direction != ParameterDirection.Output)
    {
        GetCoercedValue();
    }

    // Validate structured-type-specific details.
    if (metaType.SqlDbType == SqlDbType.Structured)
    {
        if (!isCommandProc && string.IsNullOrEmpty(TypeName))
            throw SQL.MustSetTypeNameForParam(metaType.TypeName, this.ParameterName);

        if (ParameterDirection.Input != this.Direction)
        {
            throw SQL.UnsupportedTVPOutputParameter(this.Direction, this.ParameterName);
        }

        if (DBNull.Value == GetCoercedValue())
        {
            throw SQL.DBNullNotSupportedForTVPValues(this.ParameterName);
        }
    }
    else if (!string.IsNullOrEmpty(TypeName))
    {
        throw SQL.UnexpectedTypeNameForNonStructParams(this.ParameterName);
    }
}

// func will change type to that with a 4 byte length if the type has a two
// byte length and a parameter length > than that expressible in 2 bytes
internal MetaType ValidateTypeLengths()
{
    MetaType mt = InternalMetaType;
    // Since the server will automatically reject any
    // char, varchar, binary, varbinary, nchar, or nvarchar parameter that has a
    // byte sizeInCharacters > 8000 bytes, we promote the parameter to image, text, or ntext.  This
    // allows the user to specify a parameter type using a COM+ datatype and be able to
    // use that parameter against a BLOB column.
    if ((SqlDbType.Udt != mt.SqlDbType) &&
        (false == mt.IsFixed) &&
        (false == mt.IsLong))
    { // if type has 2 byte length
        long actualSizeInBytes = this.GetActualSize();
        long sizeInCharacters = this.Size;

        // 'actualSizeInBytes' is the size of value passed;
        // 'sizeInCharacters' is the parameter size;
        // 'actualSizeInBytes' is in bytes;
        // 'this.Size' is in characters;
        // 'sizeInCharacters' is in characters;
        // 'TdsEnums.TYPE_SIZE_LIMIT' is in bytes;
        // For Non-NCharType and for non-Yukon or greater variables, size should be maintained;
        // Modified variable names from 'size' to 'sizeInCharacters', 'actualSize' to 'actualSizeInBytes', and
        // 'maxSize' to 'maxSizeInBytes'
        // The idea is to
        // Keeping these goals in mind - the following are the changes we are making

        long maxSizeInBytes = 0;
        if (mt.IsNCharType)
            maxSizeInBytes = ((sizeInCharacters * sizeof(char)) > actualSizeInBytes) ? sizeInCharacters * sizeof(char) : actualSizeInBytes;
        else
        {
            // Notes:
            // Elevation from (n)(var)char (4001+) to (n)text succeeds without failure only with Yukon and greater.
            // it fails in sql server 2000
            maxSizeInBytes = (sizeInCharacters > actualSizeInBytes) ? sizeInCharacters : actualSizeInBytes;
        }

        if ((maxSizeInBytes > TdsEnums.TYPE_SIZE_LIMIT) || (_coercedValueIsDataFeed) ||
            (sizeInCharacters == -1) || (actualSizeInBytes == -1))
        { // is size > size able to be described by 2 bytes
            // Convert the parameter to its max type
            mt = MetaType.GetMaxMetaTypeFromMetaType(mt);
            _metaType = mt;
            InternalMetaType = mt;
            if (!mt.IsPlp)
            {
                if (mt.SqlDbType == SqlDbType.Xml)
                {
                    throw ADP.InvalidMetaDataValue(); //Xml should always have IsPartialLength = true
                }
                if (mt.SqlDbType == SqlDbType.NVarChar
                    || mt.SqlDbType == SqlDbType.VarChar
                    || mt.SqlDbType == SqlDbType.VarBinary)
                {
                    Size = (int)(SmiMetaData.UnlimitedMaxLengthIndicator);
                }
            }
        }
    }
    return mt;
}

private byte ValuePrecision(object value)
{
    if (value is SqlDecimal)
    {
        if (((SqlDecimal)value).IsNull)
            return 0;
        return ((SqlDecimal)value).Precision;
    }
    return ValuePrecisionCore(value);
}

private byte ValueScale(object value)
{
    if (value is SqlDecimal)
    {
        if (((SqlDecimal)value).IsNull)
            return 0;
        return ((SqlDecimal)value).Scale;
    }
    return ValueScaleCore(value);
}

// Length in characters of a string-ish value (SqlString/SqlChars or
// string/char[]/char); 0 when the shape is unrecognized.
private static int StringSize(object value, bool isSqlType)
{
    if (isSqlType)
    {
        Debug.Assert(!((INullable)value).IsNull, "Should not call StringSize on null values");
        if (value is SqlString)
        {
            return ((SqlString)value).Value.Length;
        }
        if (value is SqlChars)
        {
            return ((SqlChars)value).Value.Length;
        }
    }
    else
    {
        string svalue = (value as string);
        if (null != svalue)
        {
            return svalue.Length;
        }
        char[] cvalue = (value as char[]);
        if (null != cvalue)
        {
            return cvalue.Length;
        }
        if (value is char)
        {
            return 1;
        }
    }

    // Didn't match, unknown size
    return 0;
}

// Length in bytes of a binary-ish value (SqlBinary/SqlBytes or byte[]/byte);
// 0 when the shape is unrecognized.
// NOTE(review): the assert message says "StringSize" — looks like a
// copy-paste from StringSize above; the check itself is correct.
private static int BinarySize(object value, bool isSqlType)
{
    if (isSqlType)
    {
        Debug.Assert(!((INullable)value).IsNull, "Should not call StringSize on null values");
        if (value is SqlBinary)
        {
            return ((SqlBinary)value).Length;
        }
        if (value is SqlBytes)
        {
            return ((SqlBytes)value).Value.Length;
        }
    }
    else
    {
        byte[] bvalue = (value as byte[]);
        if (null != bvalue)
        {
            return bvalue.Length;
        }
        if (value is byte)
        {
            return 1;
        }
    }

    // Didn't match, unknown size
    return 0;
}

private int ValueSize(object value)
{
    if (value is SqlString)
    {
        if (((SqlString)value).IsNull)
            return 0;
        return ((SqlString)value).Value.Length;
    }
    if (value is SqlChars)
    {
        if (((SqlChars)value).IsNull)
            return 0;
        return ((SqlChars)value).Value.Length;
    }
    if (value is SqlBinary)
    {
        if (((SqlBinary)value).IsNull)
            return 0;
        return ((SqlBinary)value).Length;
    }
    if (value is SqlBytes)
    {
        if (((SqlBytes)value).IsNull)
            return 0;
        return (int)(((SqlBytes)value).Length);
    }
    if (value is DataFeed)
    {
        // Unknown length
        return 0;
    }
    return ValueSizeCore(value);
}

// parse an string of the form db.schema.name where any of the three components
// might have "[" "]" and dots within it.
// returns:
//   [0] dbname (or null)
//   [1] schema (or null)
//   [2] name
// NOTE: if perf/space implications of Regex is not a problem, we can get rid
// of this and use a simple regex to do the parsing
internal static string[] ParseTypeName(string typeName)
{
    Debug.Assert(null != typeName, "null typename passed to ParseTypeName");

    try
    {
        string errorMsg;
        {
            errorMsg = Res.SQL_TypeName;
        }
        return MultipartIdentifier.ParseMultipartIdentifier(typeName, "[\"", "]\"", '.', 3, true, errorMsg, true);
    }
    catch (ArgumentException)
    {
        {
            throw SQL.InvalidParameterTypeNameFormat();
        }
    }
}
}
}
// Copyright(C) 2002-2012 Hugo Rumayor Montemayor, All rights reserved.
using System;
using System.Text;
using System.IO;
using System.Diagnostics.CodeAnalysis;
using ID3Lib.Exceptions;

namespace ID3Lib
{
    #region Global Fields
    /// <summary>
    /// Type of text used in frame
    /// </summary>
    [SuppressMessage("Microsoft.Design", "CA1028:EnumStorageShouldBeInt32")]
    public enum TextCode : byte
    {
        /// <summary>
        /// ASCII(ISO-8859-1)
        /// </summary>
        Ascii = 0x00,
        /// <summary>
        /// Unicode with BOM
        /// </summary>
        Utf16 = 0x01,
        /// <summary>
        /// BigEndian Unicode without BOM
        /// </summary>
        Utf16BE = 0x02,
        /// <summary>
        /// Encoded Unicode
        /// </summary>
        Utf8 = 0x03
    };
    #endregion

    /// <summary>
    /// Manages binary to text and vice versa format conversions.
    /// The "Read"/"Write" methods handle null-terminated strings; the
    /// "ReadXxxEnd"/"WriteXxxEnd" variants consume/produce the remainder of a
    /// frame with no terminator.
    /// </summary>
    internal static class TextBuilder
    {
        #region Methods

        /// <summary>
        /// Reads a null-terminated string of the given encoding from <paramref name="frame"/>,
        /// advancing <paramref name="index"/> past the string and its terminator.
        /// </summary>
        /// <exception cref="InvalidFrameException">Unknown text code or malformed string.</exception>
        public static string ReadText(byte[] frame, ref int index, TextCode code)
        {
            switch (code)
            {
                case TextCode.Ascii:
                    {
                        return ReadASCII(frame, ref index);
                    }
                case TextCode.Utf16:
                    {
                        return ReadUTF16(frame, ref index);
                    }
                case TextCode.Utf16BE:
                    {
                        return ReadUTF16BE(frame, ref index);
                    }
                case TextCode.Utf8:
                    {
                        return ReadUTF8(frame, ref index);
                    }
                default:
                    {
                        throw new InvalidFrameException("Invalid text code string type.");
                    }
            }
        }

        /// <summary>
        /// Reads a string occupying the rest of <paramref name="frame"/> (no terminator).
        /// </summary>
        /// <exception cref="InvalidFrameException">Unknown text code or malformed string.</exception>
        public static string ReadTextEnd(byte[] frame, int index, TextCode code)
        {
            switch (code)
            {
                case TextCode.Ascii:
                    {
                        return ReadASCIIEnd(frame, index);
                    }
                case TextCode.Utf16:
                    {
                        return ReadUTF16End(frame, index);
                    }
                case TextCode.Utf16BE:
                    {
                        return ReadUTF16BEEnd(frame, index);
                    }
                case TextCode.Utf8:
                    {
                        return ReadUTF8End(frame, index);
                    }
                default:
                    {
                        throw new InvalidFrameException("Invalid text code string type.");
                    }
            }
        }

        /// <summary>
        /// Reads a null-terminated "ASCII" (really Windows-1252) string.
        /// Returns null for an empty string (terminator only), matching the
        /// historical behavior of this class.
        /// </summary>
        public static string ReadASCII(byte[] frame, ref int index)
        {
            string text = null;
            int count = Memory.FindByte(frame, 0, index);
            if (count == -1)
                throw new InvalidFrameException("Invalid ASCII string size");

            if (count > 0)
            {
                var encoding = Encoding.GetEncoding(1252); // Should be ASCII
                text = encoding.GetString(frame, index, count);
                index += count; // add the read bytes
            }
            index++; // jump an end of line byte
            return text;
        }

        /// <summary>
        /// Reads a null-terminated UTF-16 string with BOM (or 0x0000 for empty).
        /// </summary>
        public static string ReadUTF16(byte[] frame, ref int index)
        {
            // check for empty string first, and throw a useful exception
            // otherwise we'll get an out-of-range exception when we look for the BOM
            // BUG FIX: the original test was 'index >= frame.Length - 2', which
            // also rejected a valid two-byte 0x0000 (empty string) terminator
            // sitting exactly at the end of the frame. We only need two bytes
            // available to inspect the BOM.
            if (index > frame.Length - 2)
                throw new InvalidFrameException("ReadUTF16: string must be terminated");

            if (frame[index] == 0xfe && frame[index + 1] == 0xff) // Big Endian
            {
                index += 2;
                return ReadUTF16BE(frame, ref index);
            }
            if (frame[index] == 0xff && frame[index + 1] == 0xfe) // Little Endian
            {
                index += 2;
                return ReadUTF16LE(frame, ref index);
            }
            if (frame[index] == 0x00 && frame[index + 1] == 0x00) // empty string
            {
                index += 2;
                return "";
            }
            throw new InvalidFrameException("Invalid UTF16 string.");
        }

        /// <summary>
        /// Reads a null-terminated big-endian UTF-16 string (no BOM expected).
        /// </summary>
        public static string ReadUTF16BE(byte[] frame, ref int index)
        {
            UnicodeEncoding encoding = new UnicodeEncoding(true, false);
            int count = Memory.FindShort(frame, 0, index);
            if (count == -1)
                throw new InvalidFrameException("Invalid UTF16BE string size");

            // we can safely let count==0 fall through
            string text = encoding.GetString(frame, index, count);
            index += count; // add the bytes read
            index += 2; // skip the EOL
            return text;
        }

        /// <summary>
        /// Reads a null-terminated little-endian UTF-16 string (BOM already consumed).
        /// </summary>
        private static string ReadUTF16LE(byte[] frame, ref int index)
        {
            UnicodeEncoding encoding = new UnicodeEncoding(false, false);
            int count = Memory.FindShort(frame, 0, index);
            if (count == -1)
                throw new InvalidFrameException("Invalid UTF16LE string size");

            // we can safely let count==0 fall through
            string text = encoding.GetString(frame, index, count);
            index += count; // add the bytes read
            index += 2; // skip the EOL
            return text;
        }

        /// <summary>
        /// Reads a null-terminated UTF-8 string. Returns null for an empty
        /// string (terminator only), matching <see cref="ReadASCII"/>.
        /// </summary>
        public static string ReadUTF8(byte[] frame, ref int index)
        {
            string text = null;
            int count = Memory.FindByte(frame, 0, index);
            if (count == -1)
            {
                throw new InvalidFrameException("Invalid UTF8 string size");
            }
            if (count > 0)
            {
                text = UTF8Encoding.UTF8.GetString(frame, index, count);
                index += count; // add the read bytes
            }
            index++; // jump an end of line byte
            return text;
        }

        /// <summary>
        /// Reads an "ASCII" (Windows-1252) string occupying the rest of the frame.
        /// </summary>
        public static string ReadASCIIEnd(byte[] frame, int index)
        {
            Encoding encoding = Encoding.GetEncoding(1252); // Should be ASCII
            return encoding.GetString(frame, index, frame.Length - index);
        }

        /// <summary>
        /// Reads a UTF-16 string with BOM occupying the rest of the frame.
        /// </summary>
        public static string ReadUTF16End(byte[] frame, int index)
        {
            // check for empty string first
            // otherwise we'll get an exception when we look for the BOM
            // SourceForge bug ID: 2686976
            if (index >= frame.Length - 2)
                return "";

            if (frame[index] == 0xfe && frame[index + 1] == 0xff) // Big Endian
                return ReadUTF16BEEnd(frame, index + 2);
            if (frame[index] == 0xff && frame[index + 1] == 0xfe) // Little Endian
                return ReadUTF16LEEnd(frame, index + 2);
            throw new InvalidFrameException("Invalid UTF16 string.");
        }

        /// <summary>
        /// Reads a big-endian UTF-16 string occupying the rest of the frame.
        /// </summary>
        public static string ReadUTF16BEEnd(byte[] frame, int index)
        {
            var encoding = new UnicodeEncoding(true, false);
            return encoding.GetString(frame, index, frame.Length - index);
        }

        private static string ReadUTF16LEEnd(byte[] frame, int index)
        {
            var encoding = new UnicodeEncoding(false, false);
            return encoding.GetString(frame, index, frame.Length - index);
        }

        /// <summary>
        /// Reads a UTF-8 string occupying the rest of the frame.
        /// </summary>
        public static string ReadUTF8End(byte[] frame, int index)
        {
            return UTF8Encoding.UTF8.GetString(frame, index, frame.Length - index);
        }

        // Write routines

        /// <summary>
        /// Encodes <paramref name="text"/> as a null-terminated string of the given encoding.
        /// </summary>
        /// <exception cref="InvalidFrameException">Unknown text code.</exception>
        public static byte[] WriteText(string text, TextCode code)
        {
            switch (code)
            {
                case TextCode.Ascii:
                    {
                        return WriteASCII(text);
                    }
                case TextCode.Utf16:
                    {
                        return WriteUTF16(text);
                    }
                case TextCode.Utf16BE:
                    {
                        return WriteUTF16BE(text);
                    }
                case TextCode.Utf8:
                    {
                        return WriteUTF8(text);
                    }
                default:
                    {
                        throw new InvalidFrameException("Invalid text code string type.");
                    }
            }
        }

        /// <summary>
        /// Encodes <paramref name="text"/> with no terminator (frame tail).
        /// </summary>
        /// <exception cref="InvalidFrameException">Unknown text code.</exception>
        public static byte[] WriteTextEnd(string text, TextCode code)
        {
            switch (code)
            {
                case TextCode.Ascii:
                    {
                        return WriteASCIIEnd(text);
                    }
                case TextCode.Utf16:
                    {
                        return WriteUTF16End(text);
                    }
                case TextCode.Utf16BE:
                    {
                        return WriteUTF16BEEnd(text);
                    }
                case TextCode.Utf8:
                    {
                        return WriteUTF8End(text);
                    }
                default:
                    {
                        throw new InvalidFrameException("Invalid text code string type.");
                    }
            }
        }

        /// <summary>
        /// Encodes as null-terminated "ASCII" (Windows-1252); null/empty input
        /// produces a lone terminator byte.
        /// </summary>
        public static byte[] WriteASCII(string text)
        {
            var buffer = new MemoryStream();
            var writer = new BinaryWriter(buffer);
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                writer.Write((byte)0);
                return buffer.ToArray();
            }
            var encoding = Encoding.GetEncoding(1252); // Should be ASCII
            writer.Write(encoding.GetBytes(text));
            writer.Write((byte)0); //EOL
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as null-terminated little-endian UTF-16 with BOM; null/empty
        /// input produces a lone two-byte terminator.
        /// </summary>
        public static byte[] WriteUTF16(string text)
        {
            var buffer = new MemoryStream();
            var writer = new BinaryWriter(buffer);
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                writer.Write((ushort)0);
                return buffer.ToArray();
            }
            writer.Write((byte)0xff); //Little endian, we have UTF16BE for big endian
            writer.Write((byte)0xfe);
            var encoding = new UnicodeEncoding(false, false);
            writer.Write(encoding.GetBytes(text));
            writer.Write((ushort)0);
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as null-terminated big-endian UTF-16 (no BOM); null/empty
        /// input produces a lone two-byte terminator.
        /// </summary>
        public static byte[] WriteUTF16BE(string text)
        {
            var buffer = new MemoryStream();
            var writer = new BinaryWriter(buffer);
            var encoding = new UnicodeEncoding(true, false);
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                writer.Write((ushort)0);
                return buffer.ToArray();
            }
            writer.Write(encoding.GetBytes(text));
            writer.Write((ushort)0);
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as null-terminated UTF-8; null/empty input produces a lone
        /// terminator byte.
        /// </summary>
        public static byte[] WriteUTF8(string text)
        {
            var buffer = new MemoryStream();
            var writer = new BinaryWriter(buffer);
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                writer.Write((byte)0);
                return buffer.ToArray();
            }
            writer.Write(UTF8Encoding.UTF8.GetBytes(text));
            writer.Write((byte)0);
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as "ASCII" (Windows-1252) with no terminator; null/empty
        /// input produces an empty array.
        /// </summary>
        public static byte[] WriteASCIIEnd(string text)
        {
            if (String.IsNullOrEmpty(text))
            {
                return new byte[0];
            }
            var buffer = new MemoryStream();
            var writer = new BinaryWriter(buffer);
            Encoding encoding = Encoding.GetEncoding(1252); // Should be ASCII
            writer.Write(encoding.GetBytes(text));
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as little-endian UTF-16 with BOM and no terminator; null/empty
        /// input produces an empty array.
        /// </summary>
        public static byte[] WriteUTF16End(string text)
        {
            // BUG FIX: the original computed the MemoryStream capacity as
            // 'text.Length + 2' BEFORE the null/empty check, so a null
            // argument crashed with NullReferenceException instead of
            // returning an empty array like every other Write* method.
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                return new byte[0];
            }
            // Capacity: 2 BOM bytes + 2 bytes per UTF-16 code unit.
            var buffer = new MemoryStream(text.Length * 2 + 2);
            var writer = new BinaryWriter(buffer);
            writer.Write((byte)0xff); // Little endian
            writer.Write((byte)0xfe);
            var encoding = new UnicodeEncoding(false, false);
            writer.Write(encoding.GetBytes(text));
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as big-endian UTF-16 (no BOM, no terminator); null/empty
        /// input produces an empty array.
        /// </summary>
        public static byte[] WriteUTF16BEEnd(string text)
        {
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                return new byte[0];
            }
            MemoryStream buffer = new MemoryStream();
            BinaryWriter writer = new BinaryWriter(buffer);
            UnicodeEncoding encoding = new UnicodeEncoding(true, false);
            writer.Write(encoding.GetBytes(text));
            return buffer.ToArray();
        }

        /// <summary>
        /// Encodes as UTF-8 with no terminator; null/empty input produces an
        /// empty array.
        /// </summary>
        public static byte[] WriteUTF8End(string text)
        {
            if (String.IsNullOrEmpty(text)) //Write a null string
            {
                return new byte[0];
            }
            MemoryStream buffer = new MemoryStream();
            BinaryWriter writer = new BinaryWriter(buffer);
            writer.Write(UTF8Encoding.UTF8.GetBytes(text));
            return buffer.ToArray();
        }
        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Web;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.DataProtection;
using Microsoft.AspNetCore.DataProtection.KeyManagement;
using Microsoft.AspNetCore.DataProtection.XmlEncryption;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Localization;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Localization;
using Microsoft.Extensions.Options;
using Microsoft.Net.Http.Headers;
using OrchardCore;
using OrchardCore.Environment.Extensions;
using OrchardCore.Environment.Shell;
using OrchardCore.Environment.Shell.Builders;
using OrchardCore.Environment.Shell.Configuration;
using OrchardCore.Environment.Shell.Descriptor.Models;
using OrchardCore.Environment.Shell.Models;
using OrchardCore.Localization;
using OrchardCore.Locking;
using OrchardCore.Locking.Distributed;
using OrchardCore.Modules;
using OrchardCore.Modules.FileProviders;
using SameSiteMode = Microsoft.AspNetCore.Http.SameSiteMode;

namespace Microsoft.Extensions.DependencyInjection
{
    public static class ServiceCollectionExtensions
    {
        /// <summary>
        /// Adds OrchardCore services to the host service collection.
        /// </summary>
        public static OrchardCoreBuilder AddOrchardCore(this IServiceCollection services)
        {
            if (services == null)
            {
                throw new ArgumentNullException(nameof(services));
            }

            // If an instance of OrchardCoreBuilder exists reuse it,
            // so we can call AddOrchardCore several times.
            var builder = services
                .LastOrDefault(d => d.ServiceType == typeof(OrchardCoreBuilder))?
                .ImplementationInstance as OrchardCoreBuilder;

            if (builder == null)
            {
                builder = new OrchardCoreBuilder(services);
                services.AddSingleton(builder);

                // Order matters here only in so far as later tenant-level registrations may
                // remove/replace descriptors added by earlier ones (e.g. routing, data protection).
                AddDefaultServices(builder);
                AddShellServices(services);
                AddExtensionServices(builder);
                AddStaticFiles(builder);
                AddRouting(builder);
                AddAntiForgery(builder);
                AddSameSiteCookieBackwardsCompatibility(builder);
                AddAuthentication(builder);
                AddDataProtection(builder);

                // Register the list of services to be resolved later on
                services.AddSingleton(services);
            }

            return builder;
        }

        /// <summary>
        /// Adds OrchardCore services to the host service collection and let the app change
        /// the default behavior and set of features through a configure action.
        /// </summary>
        public static IServiceCollection AddOrchardCore(this IServiceCollection services, Action<OrchardCoreBuilder> configure)
        {
            var builder = services.AddOrchardCore();

            configure?.Invoke(builder);

            return services;
        }

        /// <summary>
        /// Registers host-level framework services (logging, options, localization, clock, helpers)
        /// and the tenant-level lock services.
        /// </summary>
        private static void AddDefaultServices(OrchardCoreBuilder builder)
        {
            var services = builder.ApplicationServices;

            services.AddLogging();
            services.AddOptions();

            // These services might be moved at a higher level if no components from OrchardCore needs them.
            services.AddLocalization();

            // For performance, prevents the 'ResourceManagerStringLocalizer' from being used.
            // Also support pluralization.
            services.AddSingleton<IStringLocalizerFactory, NullStringLocalizerFactory>();
            services.AddSingleton<IHtmlLocalizerFactory, NullHtmlLocalizerFactory>();

            services.AddWebEncoders();

            services.AddHttpContextAccessor();

            services.AddSingleton<IClock, Clock>();
            services.AddScoped<ILocalClock, LocalClock>();

            services.AddScoped<ILocalizationService, DefaultLocalizationService>();
            services.AddScoped<ICalendarManager, DefaultCalendarManager>();
            services.AddScoped<ICalendarSelector, DefaultCalendarSelector>();

            services.AddSingleton<IPoweredByMiddlewareOptions, PoweredByMiddlewareOptions>();
            services.AddScoped<IOrchardHelper, DefaultOrchardHelper>();

            builder.ConfigureServices(s =>
            {
                s.AddSingleton<ILock, LocalLock>();
                // NOTE(review): 'LocalLock' is only registered above under the 'ILock' service type;
                // resolving it here by its concrete type may fail at runtime — verify this factory.
                s.AddSingleton<IDistributedLock>(sp => sp.GetRequiredService<LocalLock>());
            });
        }

        /// <summary>
        /// Registers the shell (tenant) infrastructure and the application's primary and
        /// default shell features.
        /// </summary>
        private static void AddShellServices(IServiceCollection services)
        {
            // Use a single tenant and all features by default
            services.AddHostingShellServices();
            services.AddAllFeaturesDescriptor();

            // Registers the application primary feature.
            services.AddTransient(sp => new ShellFeature
            (
                sp.GetRequiredService<IHostEnvironment>().ApplicationName, alwaysEnabled: true)
            );

            // Registers the application default feature.
            services.AddTransient(sp => new ShellFeature
            (
                Application.DefaultFeatureId, alwaysEnabled: true)
            );
        }

        /// <summary>
        /// Registers the module/extension discovery services at the host level and the
        /// extension manager at the tenant level.
        /// </summary>
        private static void AddExtensionServices(OrchardCoreBuilder builder)
        {
            builder.ApplicationServices.AddSingleton<IModuleNamesProvider, AssemblyAttributeModuleNamesProvider>();
            builder.ApplicationServices.AddSingleton<IApplicationContext, ModularApplicationContext>();

            builder.ApplicationServices.AddExtensionManagerHost();

            builder.ConfigureServices(services =>
            {
                services.AddExtensionManager();
            });
        }

        /// <summary>
        /// Adds tenant level configuration to serve static files from modules
        /// </summary>
        private static void AddStaticFiles(OrchardCoreBuilder builder)
        {
            builder.ConfigureServices(services =>
            {
                services.AddSingleton<IModuleStaticFileProvider>(serviceProvider =>
                {
                    var env = serviceProvider.GetRequiredService<IHostEnvironment>();
                    var appContext = serviceProvider.GetRequiredService<IApplicationContext>();

                    IModuleStaticFileProvider fileProvider;
                    if (env.IsDevelopment())
                    {
                        // In development, also serve files straight from the module project folders
                        // so edits show up without re-embedding.
                        var fileProviders = new List<IStaticFileProvider>
                        {
                            new ModuleProjectStaticFileProvider(appContext),
                            new ModuleEmbeddedStaticFileProvider(appContext)
                        };
                        fileProvider = new ModuleCompositeStaticFileProvider(fileProviders);
                    }
                    else
                    {
                        fileProvider = new ModuleEmbeddedStaticFileProvider(appContext);
                    }

                    return fileProvider;
                });

                services.AddSingleton<IStaticFileProvider>(serviceProvider =>
                {
                    return serviceProvider.GetRequiredService<IModuleStaticFileProvider>();
                });
            });

            builder.Configure((app, routes, serviceProvider) =>
            {
                var fileProvider = serviceProvider.GetRequiredService<IModuleStaticFileProvider>();

                var options = serviceProvider.GetRequiredService<IOptions<StaticFileOptions>>().Value;

                options.RequestPath = "";
                options.FileProvider = fileProvider;

                var shellConfiguration = serviceProvider.GetRequiredService<IShellConfiguration>();

                // NOTE(review): 's-max-age' is not a standard Cache-Control directive (the RFC spells
                // it 's-maxage') — verify the intended default value.
                var cacheControl = shellConfiguration.GetValue("StaticFileOptions:CacheControl", "public, max-age=2592000, s-max-age=31557600");

                // Cache static files for a year as they are coming from embedded resources and should not vary
                options.OnPrepareResponse = ctx =>
                {
                    ctx.Context.Response.Headers[HeaderNames.CacheControl] = cacheControl;
                };

                app.UseStaticFiles(options);
            });
        }

        /// <summary>
        /// Adds isolated tenant level routing services.
        /// </summary>
        private static void AddRouting(OrchardCoreBuilder builder)
        {
            // 'AddRouting()' is called by the host.

            builder.ConfigureServices(collection =>
            {
                // The routing system is not tenant aware and uses a global list of endpoint data sources which is
                // setup by the default configuration of 'RouteOptions' and mutated on each call of 'UseEndPoints()'.
                // So, we need isolated routing singletons (and a default configuration) per tenant.

                // Discover which implementation types 'AddRouting()' registers as singletons (or as the
                // 'RouteOptions' setup) by running it against a scratch collection.
                var implementationTypesToRemove = new ServiceCollection().AddRouting()
                    .Where(sd => sd.Lifetime == ServiceLifetime.Singleton || sd.ServiceType == typeof(IConfigureOptions<RouteOptions>))
                    .Select(sd => sd.GetImplementationType())
                    .ToArray();

                // Remove the host-cloned copies of those registrations from the tenant collection.
                var descriptorsToRemove = collection
                    .Where(sd => (sd is ClonedSingletonDescriptor || sd.ServiceType == typeof(IConfigureOptions<RouteOptions>)) &&
                        implementationTypesToRemove.Contains(sd.GetImplementationType()))
                    .ToArray();

                foreach (var descriptor in descriptorsToRemove)
                {
                    collection.Remove(descriptor);
                }

                // Re-register routing so each tenant owns fresh singletons.
                collection.AddRouting();
            },
            order: int.MinValue + 100);
        }

        /// <summary>
        /// Adds host and tenant level antiforgery services.
        /// </summary>
        private static void AddAntiForgery(OrchardCoreBuilder builder)
        {
            builder.ApplicationServices.AddAntiforgery();

            builder.ConfigureServices((services, serviceProvider) =>
            {
                var settings = serviceProvider.GetRequiredService<ShellSettings>();
                var environment = serviceProvider.GetRequiredService<IHostEnvironment>();

                // Cookie name is derived from tenant name + content root so it is unique per deployment.
                var cookieName = "orchantiforgery_" + HttpUtility.UrlEncode(settings.Name + environment.ContentRootPath);

                // If uninitialized, we use the host services.
                if (settings.State == TenantState.Uninitialized)
                {
                    // And delete a cookie that may have been created by another instance.
                    var httpContextAccessor = serviceProvider.GetRequiredService<IHttpContextAccessor>();

                    // Use case when creating a container without ambient context.
                    if (httpContextAccessor.HttpContext == null)
                    {
                        return;
                    }

                    // Use case when creating a container in a deferred task.
                    if (httpContextAccessor.HttpContext.Response.HasStarted)
                    {
                        return;
                    }

                    httpContextAccessor.HttpContext.Response.Cookies.Delete(cookieName);

                    return;
                }

                // Re-register the antiforgery services to be tenant-aware.
                var collection = new ServiceCollection()
                    .AddAntiforgery(options =>
                    {
                        options.Cookie.Name = cookieName;

                        // Don't set the cookie builder 'Path' so that it uses the 'IAuthenticationFeature' value
                        // set by the pipeline and coming from the request 'PathBase' which already ends with the
                        // tenant prefix but may also start by a path related e.g to a virtual folder.
                    });

                services.Add(collection);
            });
        }

        /// <summary>
        /// Adds backwards compatibility to the handling of SameSite cookies.
        /// </summary>
        private static void AddSameSiteCookieBackwardsCompatibility(OrchardCoreBuilder builder)
        {
            builder.ConfigureServices(services =>
            {
                services.Configure<CookiePolicyOptions>(options =>
                {
                    options.MinimumSameSitePolicy = SameSiteMode.Unspecified;
                    options.OnAppendCookie = cookieContext => CheckSameSiteBackwardsCompatiblity(cookieContext.Context, cookieContext.CookieOptions);
                    options.OnDeleteCookie = cookieContext => CheckSameSiteBackwardsCompatiblity(cookieContext.Context, cookieContext.CookieOptions);
                });
            })
            .Configure(app =>
            {
                app.UseCookiePolicy();
            });
        }

        /// <summary>
        /// Downgrades SameSite=None to Unspecified for user agents known to mishandle it
        /// (iOS 12, macOS 10.14 Safari, Chrome 50-69). Mutates <paramref name="options"/> in place.
        /// </summary>
        private static void CheckSameSiteBackwardsCompatiblity(HttpContext httpContext, CookieOptions options)
        {
            var userAgent = httpContext.Request.Headers["User-Agent"].ToString();

            if (options.SameSite == SameSiteMode.None)
            {
                if (string.IsNullOrEmpty(userAgent))
                {
                    return;
                }

                // Cover all iOS based browsers here. This includes:
                // - Safari on iOS 12 for iPhone, iPod Touch, iPad
                // - WkWebview on iOS 12 for iPhone, iPod Touch, iPad
                // - Chrome on iOS 12 for iPhone, iPod Touch, iPad
                // All of which are broken by SameSite=None, because they use the iOS networking stack
                if (userAgent.Contains("CPU iPhone OS 12") || userAgent.Contains("iPad; CPU OS 12"))
                {
                    options.SameSite = AspNetCore.Http.SameSiteMode.Unspecified;

                    return;
                }

                // Cover Mac OS X based browsers that use the Mac OS networking stack. This includes:
                // - Safari on Mac OS X.
                // This does not include:
                // - Chrome on Mac OS X
                // Because they do not use the Mac OS networking stack.
                if (userAgent.Contains("Macintosh; Intel Mac OS X 10_14") &&
                    userAgent.Contains("Version/") && userAgent.Contains("Safari"))
                {
                    options.SameSite = AspNetCore.Http.SameSiteMode.Unspecified;

                    return;
                }

                // Cover Chrome 50-69, because some versions are broken by SameSite=None,
                // and none in this range require it.
                // Note: this covers some pre-Chromium Edge versions,
                // but pre-Chromium Edge does not require SameSite=None.
                if (userAgent.Contains("Chrome/5") || userAgent.Contains("Chrome/6"))
                {
                    options.SameSite = AspNetCore.Http.SameSiteMode.Unspecified;
                }
            }
        }

        /// <summary>
        /// Adds host and tenant level authentication services and configuration.
        /// </summary>
        private static void AddAuthentication(OrchardCoreBuilder builder)
        {
            builder.ApplicationServices.AddAuthentication();

            builder.ConfigureServices(services =>
            {
                services.AddAuthentication();

                // IAuthenticationSchemeProvider is already registered at the host level.
                // We need to register it again so it is taken into account at the tenant level
                // because it holds a reference to an underlying dictionary, responsible of storing
                // the registered schemes which need to be distinct for each tenant.
                services.AddSingleton<IAuthenticationSchemeProvider, AuthenticationSchemeProvider>();
            })
            .Configure(app =>
            {
                app.UseAuthentication();
            });
        }

        /// <summary>
        /// Adds tenant level data protection services.
        /// </summary>
        private static void AddDataProtection(OrchardCoreBuilder builder)
        {
            builder.ConfigureServices((services, serviceProvider) =>
            {
                var settings = serviceProvider.GetRequiredService<ShellSettings>();
                var options = serviceProvider.GetRequiredService<IOptions<ShellOptions>>();

                // The 'FileSystemXmlRepository' will create the directory, but only if it is not overridden.
                var directory = new DirectoryInfo(Path.Combine(
                    options.Value.ShellsApplicationDataPath,
                    options.Value.ShellsContainerName,
                    settings.Name, "DataProtection-Keys"));

                // Re-register the data protection services to be tenant-aware so that modules that internally
                // rely on IDataProtector/IDataProtectionProvider automatically get an isolated instance that
                // manages its own key ring and doesn't allow decrypting payloads encrypted by another tenant.
                // By default, the key ring is stored in the tenant directory of the configured App_Data path.
                var collection = new ServiceCollection()
                    .AddDataProtection()
                    .PersistKeysToFileSystem(directory)
                    .SetApplicationName(settings.Name)
                    .AddKeyManagementOptions(o => o.XmlEncryptor = o.XmlEncryptor ?? new NullXmlEncryptor())
                    .Services;

                // Remove any previously registered options setups.
                services.RemoveAll<IConfigureOptions<KeyManagementOptions>>();
                services.RemoveAll<IConfigureOptions<DataProtectionOptions>>();

                services.Add(collection);
            });
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+ // // // // Types for awaiting Task and Task<T>. These types are emitted from Task{<T>}.GetAwaiter // and Task{<T>}.ConfigureAwait. They are meant to be used only by the compiler, e.g. // // await nonGenericTask; // ===================== // var $awaiter = nonGenericTask.GetAwaiter(); // if (!$awaiter.IsCompleted) // { // SPILL: // $builder.AwaitUnsafeOnCompleted(ref $awaiter, ref this); // return; // Label: // UNSPILL; // } // $awaiter.GetResult(); // // result += await genericTask.ConfigureAwait(false); // =================================================================================== // var $awaiter = genericTask.ConfigureAwait(false).GetAwaiter(); // if (!$awaiter.IsCompleted) // { // SPILL; // $builder.AwaitUnsafeOnCompleted(ref $awaiter, ref this); // return; // Label: // UNSPILL; // } // result += $awaiter.GetResult(); // // =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- using System.Diagnostics; using System.Diagnostics.Tracing; using System.Threading; using System.Threading.Tasks; #if !CORECLR using Internal.Threading.Tasks.Tracing; #endif // NOTE: For performance reasons, initialization is not verified. If a developer // incorrectly initializes a task awaiter, which should only be done by the compiler, // NullReferenceExceptions may be generated (the alternative would be for us to detect // this case and then throw a different exception instead). This is the same tradeoff // that's made with other compiler-focused value types like List<T>.Enumerator. 
namespace System.Runtime.CompilerServices
{
    /// <summary>Provides an awaiter for awaiting a <see cref="System.Threading.Tasks.Task"/>.</summary>
    /// <remarks>This type is intended for compiler use only.</remarks>
    public readonly struct TaskAwaiter : ICriticalNotifyCompletion, ITaskAwaiter
    {
        // WARNING: Unsafe.As is used to access the generic TaskAwaiter<> as TaskAwaiter.
        // Its layout must remain the same.

        /// <summary>The task being awaited.</summary>
        internal readonly Task m_task;

        /// <summary>Initializes the <see cref="TaskAwaiter"/>.</summary>
        /// <param name="task">The <see cref="System.Threading.Tasks.Task"/> to be awaited.</param>
        internal TaskAwaiter(Task task)
        {
            Debug.Assert(task != null, "Constructing an awaiter requires a task to await.");
            m_task = task;
        }

        /// <summary>Gets whether the task being awaited is completed.</summary>
        /// <remarks>This property is intended for compiler use rather than use directly in code.</remarks>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        public bool IsCompleted
        {
            get { return m_task.IsCompleted; }
        }

        /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary>
        /// <param name="continuation">The action to invoke when the await operation completes.</param>
        /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception>
        /// <exception cref="System.InvalidOperationException">The awaiter was not properly initialized.</exception>
        /// <remarks>This method is intended for compiler use rather than use directly in code.</remarks>
        public void OnCompleted(Action continuation)
        {
            OnCompletedInternal(m_task, continuation, continueOnCapturedContext: true, flowExecutionContext: true);
        }

        /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary>
        /// <param name="continuation">The action to invoke when the await operation completes.</param>
        /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception>
        /// <exception cref="System.InvalidOperationException">The awaiter was not properly initialized.</exception>
        /// <remarks>This method is intended for compiler use rather than use directly in code. Unlike
        /// <see cref="OnCompleted"/>, it does not flow ExecutionContext.</remarks>
        public void UnsafeOnCompleted(Action continuation)
        {
            OnCompletedInternal(m_task, continuation, continueOnCapturedContext: true, flowExecutionContext: false);
        }

        /// <summary>Ends the await on the completed <see cref="System.Threading.Tasks.Task"/>.</summary>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        /// <exception cref="System.Threading.Tasks.TaskCanceledException">The task was canceled.</exception>
        /// <exception cref="System.Exception">The task completed in a Faulted state.</exception>
        [StackTraceHidden]
        public void GetResult()
        {
            ValidateEnd(m_task);
        }

        /// <summary>
        /// Fast checks for the end of an await operation to determine whether more needs to be done
        /// prior to completing the await.
        /// </summary>
        /// <param name="task">The awaited task.</param>
        [StackTraceHidden]
        internal static void ValidateEnd(Task task)
        {
            // Fast checks that can be inlined.
            if (task.IsWaitNotificationEnabledOrNotRanToCompletion)
            {
                // If either the end await bit is set or we're not completed successfully,
                // fall back to the slower path.
                HandleNonSuccessAndDebuggerNotification(task);
            }
        }

        /// <summary>
        /// Ensures the task is completed, triggers any necessary debugger breakpoints for completing
        /// the await on the task, and throws an exception if the task did not complete successfully.
        /// </summary>
        /// <param name="task">The awaited task.</param>
        [StackTraceHidden]
        private static void HandleNonSuccessAndDebuggerNotification(Task task)
        {
            // NOTE: The JIT refuses to inline ValidateEnd when it contains the contents
            // of HandleNonSuccessAndDebuggerNotification, hence the separation.

            // Synchronously wait for the task to complete.  When used by the compiler,
            // the task will already be complete.  This code exists only for direct GetResult use,
            // for cases where the same exception propagation semantics used by "await" are desired,
            // but where for one reason or another synchronous rather than asynchronous waiting is needed.
            if (!task.IsCompleted)
            {
                bool taskCompleted = task.InternalWait(Timeout.Infinite, default);
                Debug.Assert(taskCompleted, "With an infinite timeout, the task should have always completed.");
            }

            // Now that we're done, alert the debugger if so requested
            task.NotifyDebuggerOfWaitCompletionIfNecessary();

            // And throw an exception if the task is faulted or canceled.
            if (!task.IsCompletedSuccessfully) ThrowForNonSuccess(task);
        }

        /// <summary>Throws an exception to handle a task that completed in a state other than RanToCompletion.</summary>
        [StackTraceHidden]
        private static void ThrowForNonSuccess(Task task)
        {
            Debug.Assert(task.IsCompleted, "Task must have been completed by now.");
            Debug.Assert(task.Status != TaskStatus.RanToCompletion, "Task should not be completed successfully.");

            // Handle whether the task has been canceled or faulted
            switch (task.Status)
            {
                // If the task completed in a canceled state, throw an OperationCanceledException.
                // This will either be the OCE that actually caused the task to cancel, or it will be a new
                // TaskCanceledException. TCE derives from OCE, and by throwing it we automatically pick up the
                // completed task's CancellationToken if it has one, including that CT in the OCE.
                case TaskStatus.Canceled:
                    var oceEdi = task.GetCancellationExceptionDispatchInfo();
                    if (oceEdi != null)
                    {
                        oceEdi.Throw();
                        Debug.Fail("Throw() should have thrown");
                    }
                    throw new TaskCanceledException(task);

                // If the task faulted, throw its first exception,
                // even if it contained more than one.
                case TaskStatus.Faulted:
                    var edis = task.GetExceptionDispatchInfos();
                    if (edis.Count > 0)
                    {
                        edis[0].Throw();
                        Debug.Fail("Throw() should have thrown");
                        break; // Necessary to compile: non-reachable, but compiler can't determine that
                    }
                    else
                    {
                        Debug.Fail("There should be exceptions if we're Faulted.");
                        throw task.Exception;
                    }
            }
        }

        /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary>
        /// <param name="task">The task being awaited.</param>
        /// <param name="continuation">The action to invoke when the await operation completes.</param>
        /// <param name="continueOnCapturedContext">Whether to capture and marshal back to the current context.</param>
        /// <param name="flowExecutionContext">Whether to flow ExecutionContext across the await.</param>
        /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        /// <remarks>This method is intended for compiler use rather than use directly in code.</remarks>
        internal static void OnCompletedInternal(Task task, Action continuation, bool continueOnCapturedContext, bool flowExecutionContext)
        {
            if (continuation == null) throw new ArgumentNullException(nameof(continuation));

            // If TaskWait* ETW events are enabled, trace a beginning event for this await
            // and set up an ending event to be traced when the asynchronous await completes.
            if (
#if CORECLR
                TplEtwProvider.Log.IsEnabled() || Task.s_asyncDebuggingEnabled
#else
                TaskTrace.Enabled
#endif
                )
            {
                continuation = OutputWaitEtwEvents(task, continuation);
            }

            // Set the continuation onto the awaited task.
            task.SetContinuationForAwait(continuation, continueOnCapturedContext, flowExecutionContext);
        }

#if CORECLR
        /// <summary>Schedules the state machine box's continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary>
        /// <param name="task">The task being awaited.</param>
        /// <param name="stateMachineBox">The boxed state machine whose MoveNext is invoked when the await operation completes.</param>
        /// <param name="continueOnCapturedContext">Whether to capture and marshal back to the current context.</param>
        /// <remarks>This method is intended for compiler use rather than use directly in code. It never flows ExecutionContext.</remarks>
        internal static void UnsafeOnCompletedInternal(Task task, IAsyncStateMachineBox stateMachineBox, bool continueOnCapturedContext)
        {
            Debug.Assert(stateMachineBox != null);

            // If TaskWait* ETW events are enabled, trace a beginning event for this await
            // and set up an ending event to be traced when the asynchronous await completes.
            if (TplEtwProvider.Log.IsEnabled() || Task.s_asyncDebuggingEnabled)
            {
                task.SetContinuationForAwait(OutputWaitEtwEvents(task, stateMachineBox.MoveNextAction), continueOnCapturedContext, flowExecutionContext: false);
            }
            else
            {
                task.UnsafeSetContinuationForAwait(stateMachineBox, continueOnCapturedContext);
            }
        }
#endif

        /// <summary>
        /// Outputs a WaitBegin ETW event, and augments the continuation action to output a WaitEnd ETW event.
        /// </summary>
        /// <param name="task">The task being awaited.</param>
        /// <param name="continuation">The action to invoke when the await operation completes.</param>
        /// <returns>The action to use as the actual continuation.</returns>
        private static Action OutputWaitEtwEvents(Task task, Action continuation)
        {
            Debug.Assert(task != null, "Need a task to wait on");
            Debug.Assert(continuation != null, "Need a continuation to invoke when the wait completes");

#if CORECLR
            if (Task.s_asyncDebuggingEnabled)
            {
                Task.AddToActiveTasks(task);
            }

            var etwLog = TplEtwProvider.Log;

            if (etwLog.IsEnabled())
            {
                // ETW event for Task Wait Begin
                var currentTaskAtBegin = Task.InternalCurrent;

                // If this task's continuation is another task, get it.
                var continuationTask = AsyncMethodBuilderCore.TryGetContinuationTask(continuation);
                etwLog.TaskWaitBegin(
                    (currentTaskAtBegin != null ? currentTaskAtBegin.m_taskScheduler.Id : TaskScheduler.Default.Id),
                    (currentTaskAtBegin != null ? currentTaskAtBegin.Id : 0),
                    task.Id, TplEtwProvider.TaskWaitBehavior.Asynchronous,
                    (continuationTask != null ? continuationTask.Id : 0));
            }
#else
            Debug.Assert(TaskTrace.Enabled, "Should only be used when ETW tracing is enabled");

            // ETW event for Task Wait Begin
            var currentTaskAtBegin = Task.InternalCurrent;
            TaskTrace.TaskWaitBegin_Asynchronous(
                (currentTaskAtBegin != null ? currentTaskAtBegin.m_taskScheduler.Id : TaskScheduler.Default.Id),
                (currentTaskAtBegin != null ? currentTaskAtBegin.Id : 0),
                task.Id);
#endif

            // Create a continuation action that outputs the end event and then invokes the user
            // provided delegate.  This incurs the allocations for the closure/delegate, but only if the event
            // is enabled, and in doing so it allows us to pass the awaited task's information into the end event
            // in a purely pay-for-play manner (the alternatively would be to increase the size of TaskAwaiter
            // just for this ETW purpose, not pay-for-play, since GetResult would need to know whether a real yield occurred).
#if CORECLR
            return AsyncMethodBuilderCore.CreateContinuationWrapper(continuation, (innerContinuation, innerTask) =>
            {
                if (Task.s_asyncDebuggingEnabled)
                {
                    Task.RemoveFromActiveTasks(innerTask.Id);
                }

                TplEtwProvider innerEtwLog = TplEtwProvider.Log;

                // ETW event for Task Wait End.
                Guid prevActivityId = new Guid();
                bool bEtwLogEnabled = innerEtwLog.IsEnabled();
                if (bEtwLogEnabled)
                {
                    var currentTaskAtEnd = Task.InternalCurrent;
                    innerEtwLog.TaskWaitEnd(
                        (currentTaskAtEnd != null ? currentTaskAtEnd.m_taskScheduler.Id : TaskScheduler.Default.Id),
                        (currentTaskAtEnd != null ? currentTaskAtEnd.Id : 0),
                        innerTask.Id);

                    // Ensure the continuation runs under the activity ID of the task that completed for the
                    // case the antecedent is a promise (in the other cases this is already the case).
                    if (innerEtwLog.TasksSetActivityIds && (innerTask.Options & (TaskCreationOptions)InternalTaskOptions.PromiseTask) != 0)
                        EventSource.SetCurrentThreadActivityId(TplEtwProvider.CreateGuidForTaskID(innerTask.Id), out prevActivityId);
                }

                // Invoke the original continuation provided to OnCompleted.
                innerContinuation();

                if (bEtwLogEnabled)
                {
                    innerEtwLog.TaskWaitContinuationComplete(innerTask.Id);
                    if (innerEtwLog.TasksSetActivityIds && (innerTask.Options & (TaskCreationOptions)InternalTaskOptions.PromiseTask) != 0)
                        EventSource.SetCurrentThreadActivityId(prevActivityId);
                }
            }, task);
#else
            return () =>
            {
                // ETW event for Task Wait End.
                if (TaskTrace.Enabled)
                {
                    var currentTaskAtEnd = Task.InternalCurrent;
                    TaskTrace.TaskWaitEnd(
                        (currentTaskAtEnd != null ? currentTaskAtEnd.m_taskScheduler.Id : TaskScheduler.Default.Id),
                        (currentTaskAtEnd != null ? currentTaskAtEnd.Id : 0),
                        task.Id);
                }

                // Invoke the original continuation provided to OnCompleted.
                continuation();
            };
#endif
        }
    }

    /// <summary>Provides an awaiter for awaiting a <see cref="System.Threading.Tasks.Task{TResult}"/>.</summary>
    /// <remarks>This type is intended for compiler use only.</remarks>
    public readonly struct TaskAwaiter<TResult> : ICriticalNotifyCompletion, ITaskAwaiter
    {
        // WARNING: Unsafe.As is used to access TaskAwaiter<> as the non-generic TaskAwaiter.
        // Its layout must remain the same.

        /// <summary>The task being awaited.</summary>
        private readonly Task<TResult> m_task;

        /// <summary>Initializes the <see cref="TaskAwaiter{TResult}"/>.</summary>
        /// <param name="task">The <see cref="System.Threading.Tasks.Task{TResult}"/> to be awaited.</param>
        internal TaskAwaiter(Task<TResult> task)
        {
            Debug.Assert(task != null, "Constructing an awaiter requires a task to await.");
            m_task = task;
        }

        /// <summary>Gets whether the task being awaited is completed.</summary>
        /// <remarks>This property is intended for compiler use rather than use directly in code.</remarks>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        public bool IsCompleted
        {
            get { return m_task.IsCompleted; }
        }

        /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary>
        /// <param name="continuation">The action to invoke when the await operation completes.</param>
        /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        /// <remarks>This method is intended for compiler use rather than use directly in code.</remarks>
        public void OnCompleted(Action continuation)
        {
            TaskAwaiter.OnCompletedInternal(m_task, continuation, continueOnCapturedContext: true, flowExecutionContext: true);
        }

        /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary>
        /// <param name="continuation">The action to invoke when the await operation completes.</param>
        /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        /// <remarks>This method is intended for compiler use rather than use directly in code. Unlike
        /// <see cref="OnCompleted"/>, it does not flow ExecutionContext.</remarks>
        public void UnsafeOnCompleted(Action continuation)
        {
            TaskAwaiter.OnCompletedInternal(m_task, continuation, continueOnCapturedContext: true, flowExecutionContext: false);
        }

        /// <summary>Ends the await on the completed <see cref="System.Threading.Tasks.Task{TResult}"/>.</summary>
        /// <returns>The result of the completed <see cref="System.Threading.Tasks.Task{TResult}"/>.</returns>
        /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception>
        /// <exception cref="System.Threading.Tasks.TaskCanceledException">The task was canceled.</exception>
        /// <exception cref="System.Exception">The task completed in a Faulted state.</exception>
        [StackTraceHidden]
        public TResult GetResult()
        {
            TaskAwaiter.ValidateEnd(m_task);
            return m_task.ResultOnSuccess;
        }
    }

    /// <summary>
    /// Marker interface used to know whether a particular awaiter is either a
    /// TaskAwaiter or a TaskAwaiter`1. It must not be implemented by any other
    /// awaiters.
    /// </summary>
    internal interface ITaskAwaiter { }

    /// <summary>
    /// Marker interface used to know whether a particular awaiter is either a
    /// CTA.ConfiguredTaskAwaiter or a CTA`1.ConfiguredTaskAwaiter. It must not
    /// be implemented by any other awaiters.
/// </summary> internal interface IConfiguredTaskAwaiter { } /// <summary>Provides an awaitable object that allows for configured awaits on <see cref="System.Threading.Tasks.Task"/>.</summary> /// <remarks>This type is intended for compiler use only.</remarks> public readonly struct ConfiguredTaskAwaitable { /// <summary>The task being awaited.</summary> private readonly ConfiguredTaskAwaitable.ConfiguredTaskAwaiter m_configuredTaskAwaiter; /// <summary>Initializes the <see cref="ConfiguredTaskAwaitable"/>.</summary> /// <param name="task">The awaitable <see cref="System.Threading.Tasks.Task"/>.</param> /// <param name="continueOnCapturedContext"> /// true to attempt to marshal the continuation back to the original context captured; otherwise, false. /// </param> internal ConfiguredTaskAwaitable(Task task, bool continueOnCapturedContext) { Debug.Assert(task != null, "Constructing an awaitable requires a task to await."); m_configuredTaskAwaiter = new ConfiguredTaskAwaitable.ConfiguredTaskAwaiter(task, continueOnCapturedContext); } /// <summary>Gets an awaiter for this awaitable.</summary> /// <returns>The awaiter.</returns> public ConfiguredTaskAwaitable.ConfiguredTaskAwaiter GetAwaiter() { return m_configuredTaskAwaiter; } /// <summary>Provides an awaiter for a <see cref="ConfiguredTaskAwaitable"/>.</summary> /// <remarks>This type is intended for compiler use only.</remarks> public readonly struct ConfiguredTaskAwaiter : ICriticalNotifyCompletion, IConfiguredTaskAwaiter { // WARNING: Unsafe.As is used to access the generic ConfiguredTaskAwaiter as this. // Its layout must remain the same. 
/// <summary>The task being awaited.</summary> internal readonly Task m_task; /// <summary>Whether to attempt marshaling back to the original context.</summary> internal readonly bool m_continueOnCapturedContext; /// <summary>Initializes the <see cref="ConfiguredTaskAwaiter"/>.</summary> /// <param name="task">The <see cref="System.Threading.Tasks.Task"/> to await.</param> /// <param name="continueOnCapturedContext"> /// true to attempt to marshal the continuation back to the original context captured /// when BeginAwait is called; otherwise, false. /// </param> internal ConfiguredTaskAwaiter(Task task, bool continueOnCapturedContext) { Debug.Assert(task != null, "Constructing an awaiter requires a task to await."); m_task = task; m_continueOnCapturedContext = continueOnCapturedContext; } /// <summary>Gets whether the task being awaited is completed.</summary> /// <remarks>This property is intended for compiler user rather than use directly in code.</remarks> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> public bool IsCompleted { get { return m_task.IsCompleted; } } /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary> /// <param name="continuation">The action to invoke when the await operation completes.</param> /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> /// <remarks>This method is intended for compiler user rather than use directly in code.</remarks> public void OnCompleted(Action continuation) { TaskAwaiter.OnCompletedInternal(m_task, continuation, m_continueOnCapturedContext, flowExecutionContext: true); } /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated 
with this <see cref="TaskAwaiter"/>.</summary> /// <param name="continuation">The action to invoke when the await operation completes.</param> /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> /// <remarks>This method is intended for compiler user rather than use directly in code.</remarks> public void UnsafeOnCompleted(Action continuation) { TaskAwaiter.OnCompletedInternal(m_task, continuation, m_continueOnCapturedContext, flowExecutionContext: false); } /// <summary>Ends the await on the completed <see cref="System.Threading.Tasks.Task"/>.</summary> /// <returns>The result of the completed <see cref="System.Threading.Tasks.Task{TResult}"/>.</returns> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> /// <exception cref="System.Threading.Tasks.TaskCanceledException">The task was canceled.</exception> /// <exception cref="System.Exception">The task completed in a Faulted state.</exception> [StackTraceHidden] public void GetResult() { TaskAwaiter.ValidateEnd(m_task); } } } /// <summary>Provides an awaitable object that allows for configured awaits on <see cref="System.Threading.Tasks.Task{TResult}"/>.</summary> /// <remarks>This type is intended for compiler use only.</remarks> public readonly struct ConfiguredTaskAwaitable<TResult> { /// <summary>The underlying awaitable on whose logic this awaitable relies.</summary> private readonly ConfiguredTaskAwaitable<TResult>.ConfiguredTaskAwaiter m_configuredTaskAwaiter; /// <summary>Initializes the <see cref="ConfiguredTaskAwaitable{TResult}"/>.</summary> /// <param name="task">The awaitable <see cref="System.Threading.Tasks.Task{TResult}"/>.</param> /// <param name="continueOnCapturedContext"> /// true to attempt to marshal the continuation back to the original context 
captured; otherwise, false. /// </param> internal ConfiguredTaskAwaitable(Task<TResult> task, bool continueOnCapturedContext) { m_configuredTaskAwaiter = new ConfiguredTaskAwaitable<TResult>.ConfiguredTaskAwaiter(task, continueOnCapturedContext); } /// <summary>Gets an awaiter for this awaitable.</summary> /// <returns>The awaiter.</returns> public ConfiguredTaskAwaitable<TResult>.ConfiguredTaskAwaiter GetAwaiter() { return m_configuredTaskAwaiter; } /// <summary>Provides an awaiter for a <see cref="ConfiguredTaskAwaitable{TResult}"/>.</summary> /// <remarks>This type is intended for compiler use only.</remarks> public readonly struct ConfiguredTaskAwaiter : ICriticalNotifyCompletion, IConfiguredTaskAwaiter { // WARNING: Unsafe.As is used to access this as the non-generic ConfiguredTaskAwaiter. // Its layout must remain the same. /// <summary>The task being awaited.</summary> private readonly Task<TResult> m_task; /// <summary>Whether to attempt marshaling back to the original context.</summary> private readonly bool m_continueOnCapturedContext; /// <summary>Initializes the <see cref="ConfiguredTaskAwaiter"/>.</summary> /// <param name="task">The awaitable <see cref="System.Threading.Tasks.Task{TResult}"/>.</param> /// <param name="continueOnCapturedContext"> /// true to attempt to marshal the continuation back to the original context captured; otherwise, false. 
/// </param> internal ConfiguredTaskAwaiter(Task<TResult> task, bool continueOnCapturedContext) { Debug.Assert(task != null, "Constructing an awaiter requires a task to await."); m_task = task; m_continueOnCapturedContext = continueOnCapturedContext; } /// <summary>Gets whether the task being awaited is completed.</summary> /// <remarks>This property is intended for compiler user rather than use directly in code.</remarks> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> public bool IsCompleted { get { return m_task.IsCompleted; } } /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary> /// <param name="continuation">The action to invoke when the await operation completes.</param> /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> /// <remarks>This method is intended for compiler user rather than use directly in code.</remarks> public void OnCompleted(Action continuation) { TaskAwaiter.OnCompletedInternal(m_task, continuation, m_continueOnCapturedContext, flowExecutionContext: true); } /// <summary>Schedules the continuation onto the <see cref="System.Threading.Tasks.Task"/> associated with this <see cref="TaskAwaiter"/>.</summary> /// <param name="continuation">The action to invoke when the await operation completes.</param> /// <exception cref="System.ArgumentNullException">The <paramref name="continuation"/> argument is null (Nothing in Visual Basic).</exception> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> /// <remarks>This method is intended for compiler user rather than use directly in code.</remarks> public void UnsafeOnCompleted(Action continuation) { 
TaskAwaiter.OnCompletedInternal(m_task, continuation, m_continueOnCapturedContext, flowExecutionContext: false); } /// <summary>Ends the await on the completed <see cref="System.Threading.Tasks.Task{TResult}"/>.</summary> /// <returns>The result of the completed <see cref="System.Threading.Tasks.Task{TResult}"/>.</returns> /// <exception cref="System.NullReferenceException">The awaiter was not properly initialized.</exception> /// <exception cref="System.Threading.Tasks.TaskCanceledException">The task was canceled.</exception> /// <exception cref="System.Exception">The task completed in a Faulted state.</exception> [StackTraceHidden] public TResult GetResult() { TaskAwaiter.ValidateEnd(m_task); return m_task.ResultOnSuccess; } } } }
// Copyright 2011 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using NodaTime.Properties;
using NodaTime.Text;
using NUnit.Framework;

namespace NodaTime.Test.Text
{
    /// <summary>
    /// Data-driven tests for <c>LocalDatePattern</c> parsing and formatting,
    /// including cross-checks against the BCL's date formatting for all cultures.
    /// </summary>
    [TestFixture]
    public class LocalDatePatternTest : PatternTestBase<LocalDate>
    {
        // Fixed sample date used when comparing Noda Time output with the BCL.
        private static readonly LocalDate SampleLocalDate = new LocalDate(1976, 6, 19);

        // Pattern strings that should be rejected when creating the pattern itself.
        internal static readonly Data[] InvalidPatternData = {
            new Data { Pattern = "!", Message = Messages.Parse_UnknownStandardFormat, Parameters = {'!', typeof(LocalDate).FullName }},
            new Data { Pattern = "%", Message = Messages.Parse_UnknownStandardFormat, Parameters = { '%', typeof(LocalDate).FullName } },
            new Data { Pattern = "\\", Message = Messages.Parse_UnknownStandardFormat, Parameters = { '\\', typeof(LocalDate).FullName } },
            new Data { Pattern = "%%", Message = Messages.Parse_PercentDoubled },
            new Data { Pattern = "%\\", Message = Messages.Parse_EscapeAtEndOfString },
            new Data { Pattern = "MMMMM", Message = Messages.Parse_RepeatCountExceeded, Parameters = { 'M', 4 } },
            new Data { Pattern = "ddddd", Message = Messages.Parse_RepeatCountExceeded, Parameters = { 'd', 4 } },
            new Data { Pattern = "M%", Message = Messages.Parse_PercentAtEndOfString },
            new Data { Pattern = "yyyyy", Message = Messages.Parse_RepeatCountExceeded, Parameters = { 'y', 4 } },
            new Data { Pattern = "uuuuu", Message = Messages.Parse_RepeatCountExceeded, Parameters = { 'u', 4 } },
            new Data { Pattern = "ggg", Message = Messages.Parse_RepeatCountExceeded, Parameters = { 'g', 2 } },
            new Data { Pattern = "'qwe", Message = Messages.Parse_MissingEndQuote, Parameters = { '\'' } },
            new Data { Pattern = "'qwe\\", Message = Messages.Parse_EscapeAtEndOfString },
            new Data { Pattern = "'qwe\\'", Message = Messages.Parse_MissingEndQuote, Parameters = { '\'' } },
            // Note incorrect use of "u" (year) instead of "y" (year of era)
            new Data { Pattern = "dd MM uuuu gg", Message = Messages.Parse_EraWithoutYearOfEra },
            // Era specifier and calendar specifier in the same pattern.
            new Data { Pattern = "dd MM yyyy gg c", Message = Messages.Parse_CalendarAndEra },
            // Invalid patterns directly after the yyyy specifier. This will detect the issue early, but then
            // continue and reject it in the normal path.
            new Data { Pattern = "yyyy'", Message = Messages.Parse_MissingEndQuote, Parameters = { '\'' } },
            new Data { Pattern = "yyyy\\", Message = Messages.Parse_EscapeAtEndOfString },
            // Common typo, which is caught in 2.0...
            new Data { Pattern = "yyyy-mm-dd", Message = Messages.Parse_UnquotedLiteral, Parameters = { 'm' } },
            // T isn't valid in a date pattern
            new Data { Pattern = "yyyy-MM-ddT00:00:00", Message = Messages.Parse_UnquotedLiteral, Parameters = { 'T' } },
            // These became invalid in v2.0, when we decided that y and yyy weren't sensible.
            new Data { Pattern = "y M d", Message = Messages.Parse_InvalidRepeatCount, Parameters = { 'y', 1 } },
            new Data { Pattern = "yyy M d", Message = Messages.Parse_InvalidRepeatCount, Parameters = { 'y', 3 } },
        };

        // Valid patterns applied to text that should fail to parse, with the expected error.
        internal static Data[] ParseFailureData = {
            new Data { Pattern = "yyyy gg", Text = "2011 NodaEra", Message = Messages.Parse_MismatchedText, Parameters = {'g'} },
            new Data { Pattern = "yyyy uuuu gg", Text = "0010 0009 B.C.", Message = Messages.Parse_InconsistentValues2, Parameters = {'g', 'u', typeof(LocalDate)} },
            new Data { Pattern = "yyyy MM dd dddd", Text = "2011 10 09 Saturday", Message = Messages.Parse_InconsistentDayOfWeekTextValue },
            new Data { Pattern = "yyyy MM dd ddd", Text = "2011 10 09 Sat", Message = Messages.Parse_InconsistentDayOfWeekTextValue },
            new Data { Pattern = "yyyy MM dd ddd", Text = "2011 10 09 FooBar", Message = Messages.Parse_MismatchedText, Parameters = {'d'} },
            new Data { Pattern = "yyyy MM dd dddd", Text = "2011 10 09 FooBar", Message = Messages.Parse_MismatchedText, Parameters = {'d'} },
            new Data { Pattern = "yyyy/MM/dd", Text = "2011/02-29", Message = Messages.Parse_DateSeparatorMismatch },
            // Don't match a short name against a long pattern
            new Data { Pattern = "yyyy MMMM dd", Text = "2011 Oct 09", Message = Messages.Parse_MismatchedText, Parameters = {'M'} },
            // Or vice versa... although this time we match the "Oct" and then fail as we're expecting a space
            new Data { Pattern = "yyyy MMM dd", Text = "2011 October 09", Message = Messages.Parse_MismatchedCharacter, Parameters = {' '}},

            // Invalid year, year-of-era, month, day
            new Data { Pattern = "yyyy MM dd", Text = "0000 01 01", Message = Messages.Parse_FieldValueOutOfRange, Parameters = { 0, 'y', typeof(LocalDate) } },
            new Data { Pattern = "yyyy MM dd", Text = "2011 15 29", Message = Messages.Parse_MonthOutOfRange, Parameters = { 15, 2011 } },
            new Data { Pattern = "yyyy MM dd", Text = "2011 02 35", Message = Messages.Parse_DayOfMonthOutOfRange, Parameters = { 35, 2, 2011 } },
            // Year of era can't be negative...
            new Data { Pattern = "yyyy MM dd", Text = "-15 01 01", Message = Messages.Parse_UnexpectedNegative },

            // Invalid leap years
            new Data { Pattern = "yyyy MM dd", Text = "2011 02 29", Message = Messages.Parse_DayOfMonthOutOfRange, Parameters = { 29, 2, 2011 } },
            new Data { Pattern = "yyyy MM dd", Text = "1900 02 29", Message = Messages.Parse_DayOfMonthOutOfRange, Parameters = { 29, 2, 1900 } },

            // Year of era and two-digit year, but they don't match
            new Data { Pattern = "uuuu yy", Text = "2011 10", Message = Messages.Parse_InconsistentValues2, Parameters = { 'y', 'u', typeof(LocalDate) } },

            // Invalid calendar name
            new Data { Pattern = "c yyyy MM dd", Text = "2015 01 01", Message = Messages.Parse_NoMatchingCalendarSystem },

            // Invalid year
            new Data { Template = new LocalDate(1, 1, 1, CalendarSystem.IslamicBcl), Pattern = "uuuu", Text = "9999", Message = Messages.Parse_FieldValueOutOfRange, Parameters = { 9999, 'u', typeof(LocalDate) } },
            new Data { Template = new LocalDate(1, 1, 1, CalendarSystem.IslamicBcl), Pattern = "yyyy", Text = "9999", Message = Messages.Parse_YearOfEraOutOfRange, Parameters = { 9999, "EH", "Hijri" } },

            // https://github.com/nodatime/nodatime/issues/414
            new Data { Pattern = "yyyy-MM-dd", Text = "1984-00-15", Message = Messages.Parse_FieldValueOutOfRange, Parameters = { 0, 'M', typeof(LocalDate) } },
            new Data { Pattern = "M/d/yyyy", Text = "00/15/1984", Message = Messages.Parse_FieldValueOutOfRange, Parameters = { 0, 'M', typeof(LocalDate) } }
        };

        // Cases that should parse successfully but would not round-trip when formatting.
        internal static Data[] ParseOnlyData = {
            // Alternative era names
            new Data(0, 10, 3) { Pattern = "yyyy MM dd gg", Text = "0001 10 03 BCE" },

            // Valid leap years
            new Data(2000, 2, 29) { Pattern = "yyyy MM dd", Text = "2000 02 29" },
            new Data(2004, 2, 29) { Pattern = "yyyy MM dd", Text = "2004 02 29" },

            // Month parsing should be case-insensitive
            new Data(2011, 10, 3) { Pattern = "yyyy MMM dd", Text = "2011 OcT 03" },
            new Data(2011, 10, 3) { Pattern = "yyyy MMMM dd", Text = "2011 OcToBeR 03" },
            // Day-of-week parsing should be case-insensitive
            new Data(2011, 10, 9) { Pattern = "yyyy MM dd ddd", Text = "2011 10 09 sUN" },
            new Data(2011, 10, 9) { Pattern = "yyyy MM dd dddd", Text = "2011 10 09 SuNDaY" },

            // Genitive name is an extension of the non-genitive name; parse longer first.
            new Data(2011, 1, 10) { Pattern = "yyyy MMMM dd", Text = "2011 MonthName-Genitive 10", Culture = Cultures.GenitiveNameTestCultureWithLeadingNames },
            new Data(2011, 1, 10) { Pattern = "yyyy MMMM dd", Text = "2011 MonthName 10", Culture = Cultures.GenitiveNameTestCultureWithLeadingNames },
            new Data(2011, 1, 10) { Pattern = "yyyy MMM dd", Text = "2011 MN-Gen 10", Culture = Cultures.GenitiveNameTestCultureWithLeadingNames },
            new Data(2011, 1, 10) { Pattern = "yyyy MMM dd", Text = "2011 MN 10", Culture = Cultures.GenitiveNameTestCultureWithLeadingNames },
        };

        // Cases that format to the given text but would not parse back to the same value.
        internal static Data[] FormatOnlyData = {
            // Would parse back to 2011
            new Data(1811, 7, 3) { Pattern = "yy M d", Text = "11 7 3" },

            // Tests for the documented 2-digit formatting of BC years
            // (Less of an issue since yy became "year of era")
            new Data(-94, 7, 3) { Pattern = "yy M d", Text = "95 7 3" },
            new Data(-93, 7, 3) { Pattern = "yy M d", Text = "94 7 3" },
        };

        // Cases expected to round-trip in both directions.
        internal static Data[] FormatAndParseData = {
            // Standard patterns
            // Invariant culture uses the crazy MM/dd/yyyy format. Blech.
            new Data(2011, 10, 20) { Pattern = "d", Text = "10/20/2011" },
            new Data(2011, 10, 20) { Pattern = "D", Text = "Thursday, 20 October 2011" },

            // Custom patterns
            new Data(2011, 10, 3) { Pattern = "yyyy/MM/dd", Text = "2011/10/03" },
            new Data(2011, 10, 3) { Pattern = "yyyy/MM/dd", Text = "2011-10-03", Culture = Cultures.FrCa },
            new Data(2011, 10, 3) { Pattern = "yyyyMMdd", Text = "20111003" },
            new Data(2001, 7, 3) { Pattern = "yy M d", Text = "01 7 3" },
            new Data(2011, 7, 3) { Pattern = "yy M d", Text = "11 7 3" },
            new Data(2030, 7, 3) { Pattern = "yy M d", Text = "30 7 3" },
            // Cutoff defaults to 30 (at the moment...)
            new Data(1931, 7, 3) { Pattern = "yy M d", Text = "31 7 3" },
            new Data(1976, 7, 3) { Pattern = "yy M d", Text = "76 7 3" },

            // In the first century, we don't skip back a century for "high" two-digit year numbers.
            new Data(25, 7, 3) { Pattern = "yy M d", Text = "25 7 3", Template = new LocalDate(50, 1, 1) },
            new Data(35, 7, 3) { Pattern = "yy M d", Text = "35 7 3", Template = new LocalDate(50, 1, 1) },

            new Data(2000, 10, 3) { Pattern = "MM/dd", Text = "10/03"},
            new Data(1885, 10, 3) { Pattern = "MM/dd", Text = "10/03", Template = new LocalDate(1885, 10, 3) },

            // When we parse in all of the below tests, we'll use the month and day-of-month if it's provided;
            // the template value is specified to allow simple roundtripping. (Day of week doesn't affect what value is parsed; it just validates.)
            // Non-genitive month name when there's no "day of month", even if there's a "day of week"
            new Data(2011, 1, 3) { Pattern = "MMMM", Text = "FullNonGenName", Culture = Cultures.GenitiveNameTestCulture, Template = new LocalDate(2011, 5, 3)},
            new Data(2011, 1, 3) { Pattern = "MMMM dddd", Text = "FullNonGenName Monday", Culture = Cultures.GenitiveNameTestCulture, Template = new LocalDate(2011, 5, 3) },
            new Data(2011, 1, 3) { Pattern = "MMM", Text = "AbbrNonGenName", Culture = Cultures.GenitiveNameTestCulture, Template = new LocalDate(2011, 5, 3) },
            new Data(2011, 1, 3) { Pattern = "MMM ddd", Text = "AbbrNonGenName Mon", Culture = Cultures.GenitiveNameTestCulture, Template = new LocalDate(2011, 5, 3) },
            // Genitive month name when the pattern includes "day of month"
            new Data(2011, 1, 3) { Pattern = "MMMM dd", Text = "FullGenName 03", Culture = Cultures.GenitiveNameTestCulture, Template = new LocalDate(2011, 5, 3) },
            // TODO: Check whether or not this is actually appropriate
            new Data(2011, 1, 3) { Pattern = "MMM dd", Text = "AbbrGenName 03", Culture = Cultures.GenitiveNameTestCulture, Template = new LocalDate(2011, 5, 3) },

            // Era handling
            new Data(2011, 1, 3) { Pattern = "yyyy MM dd gg", Text = "2011 01 03 A.D." },
            new Data(2011, 1, 3) { Pattern = "uuuu yyyy MM dd gg", Text = "2011 2011 01 03 A.D." },
            new Data(-1, 1, 3) { Pattern = "yyyy MM dd gg", Text = "0002 01 03 B.C." },

            // Day of week handling
            new Data(2011, 10, 9) { Pattern = "yyyy MM dd dddd", Text = "2011 10 09 Sunday" },
            new Data(2011, 10, 9) { Pattern = "yyyy MM dd ddd", Text = "2011 10 09 Sun" },

            // Month handling
            new Data(2011, 10, 9) { Pattern = "yyyy MMMM dd", Text = "2011 October 09" },
            new Data(2011, 10, 9) { Pattern = "yyyy MMM dd", Text = "2011 Oct 09" },

            // Year and two-digit year-of-era in the same format. Note that the year
            // gives the full year information, so we're not stuck in the 20th/21st century
            new Data(1825, 10, 9) { Pattern = "uuuu yy MM/dd", Text = "1825 25 10/09" },

            // Negative years
            new Data(-43, 3, 15) { Pattern = "uuuu MM dd", Text = "-0043 03 15"},

            // Calendar handling
            new Data(2011, 10, 9) { Pattern = "c yyyy MM dd", Text = "ISO 2011 10 09" },
            new Data(2011, 10, 9) { Pattern = "yyyy MM dd c", Text = "2011 10 09 ISO" },
            new Data(2011, 10, 9, CalendarSystem.Coptic) { Pattern = "c uuuu MM dd", Text = "Coptic 2011 10 09" },
            new Data(2011, 10, 9, CalendarSystem.Coptic) { Pattern = "uuuu MM dd c", Text = "2011 10 09 Coptic" },

            // Awkward day-of-week handling
            // December 14th 2012 was a Friday. Friday is "Foo" or "FooBar" in AwkwardDayOfWeekCulture.
            new Data(2012, 12, 14) { Pattern = "ddd yyyy MM dd", Text = "Foo 2012 12 14", Culture = Cultures.AwkwardDayOfWeekCulture },
            new Data(2012, 12, 14) { Pattern = "dddd yyyy MM dd", Text = "FooBar 2012 12 14", Culture = Cultures.AwkwardDayOfWeekCulture },
            // December 13th 2012 was a Thursday. Thursday is "FooBaz" or "FooBa" in AwkwardDayOfWeekCulture.
            new Data(2012, 12, 13) { Pattern = "ddd yyyy MM dd", Text = "FooBaz 2012 12 13", Culture = Cultures.AwkwardDayOfWeekCulture },
            new Data(2012, 12, 13) { Pattern = "dddd yyyy MM dd", Text = "FooBa 2012 12 13", Culture = Cultures.AwkwardDayOfWeekCulture },
        };

        // Combined data sources consumed by the base class's parse/format tests.
        internal static IEnumerable<Data> ParseData = ParseOnlyData.Concat(FormatAndParseData);
        internal static IEnumerable<Data> FormatData = FormatOnlyData.Concat(FormatAndParseData);

        [Test]
        [TestCaseSource(typeof(Cultures), nameof(Cultures.AllCultures))]
        public void BclLongDatePatternGivesSameResultsInNoda(CultureInfo culture)
        {
            // See https://bugzilla.xamarin.com/show_bug.cgi?id=11363
            if (TestHelper.IsRunningOnMono && culture.IetfLanguageTag == "mt-MT")
            {
                return;
            }
            AssertBclNodaEquality(culture, culture.DateTimeFormat.LongDatePattern);
        }

        [Test]
        [TestCaseSource(typeof(Cultures), nameof(Cultures.AllCultures))]
        public void BclShortDatePatternGivesSameResultsInNoda(CultureInfo culture)
        {
            AssertBclNodaEquality(culture, culture.DateTimeFormat.ShortDatePattern);
        }

        [Test]
        public void WithCalendar()
        {
            // Parsing with a Coptic-calendar pattern should yield a Coptic-calendar date.
            var pattern = LocalDatePattern.IsoPattern.WithCalendar(CalendarSystem.Coptic);
            var value = pattern.Parse("0284-08-29").Value;
            Assert.AreEqual(new LocalDate(284, 8, 29, CalendarSystem.Coptic), value);
        }

        // Formats SampleLocalDate with both the BCL and Noda Time using the given pattern
        // and asserts the outputs match; skips cases the comparison cannot handle.
        private void AssertBclNodaEquality(CultureInfo culture, string patternText)
        {
            // The BCL never seems to use abbreviated month genitive names.
            // I think it's reasonable that we do. Hmm.
            // See https://github.com/nodatime/nodatime/issues/377
            if (patternText.Contains("MMM") && !patternText.Contains("MMMM") &&
                culture.DateTimeFormat.AbbreviatedMonthGenitiveNames[SampleLocalDate.Month - 1] !=
                culture.DateTimeFormat.AbbreviatedMonthNames[SampleLocalDate.Month - 1])
            {
                return;
            }
            var pattern = LocalDatePattern.Create(patternText, culture);
            var calendarSystem = CalendarSystemForCalendar(culture.Calendar);
            if (calendarSystem == null)
            {
                // We can't map this calendar system correctly yet; the test would be invalid.
                return;
            }

            var sampleDateInCalendar = SampleLocalDate.WithCalendar(calendarSystem);
            // To construct a DateTime, we need a time... let's give a non-midnight one to catch
            // any unexpected uses of time within the date patterns.
            DateTime sampleDateTime = (SampleLocalDate + new LocalTime(2, 3, 5)).ToDateTimeUnspecified();
            Assert.AreEqual(sampleDateTime.ToString(patternText, culture), pattern.Format(sampleDateInCalendar));
        }

        public sealed class Data : PatternTestData<LocalDate>
        {
            // Default to the start of the year 2000.
            protected override LocalDate DefaultTemplate => LocalDatePattern.DefaultTemplateValue;

            /// <summary>
            /// Initializes a new instance of the <see cref="Data" /> class.
            /// </summary>
            /// <param name="value">The value.</param>
            public Data(LocalDate value) : base(value)
            {
            }

            public Data(int year, int month, int day) : this(new LocalDate(year, month, day))
            {
            }

            public Data(int year, int month, int day, CalendarSystem calendar)
                : this(new LocalDate(year, month, day, calendar))
            {
            }

            public Data() : this(LocalDatePattern.DefaultTemplateValue)
            {
            }

            internal override IPattern<LocalDate> CreatePattern() =>
                LocalDatePattern.CreateWithInvariantCulture(Pattern)
                    .WithTemplateValue(Template)
                    .WithCulture(Culture);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;

namespace System.Net.WebSockets.Client.Tests
{
    /// <summary>
    /// ClientWebSocket unit tests that do not require a remote server.
    /// </summary>
    public class ClientWebSocketUnitTest
    {
        private readonly ITestOutputHelper _output;

        public ClientWebSocketUnitTest(ITestOutputHelper output)
        {
            _output = output;
        }

        // Gate referenced by name in [ConditionalFact("WebSocketsSupported")].
        private static bool WebSocketsSupported => WebSocketHelper.WebSocketsSupported;

        [ConditionalFact("WebSocketsSupported")]
        public void Ctor_Success()
        {
            // Creating and disposing without ever connecting must not throw.
            var socket = new ClientWebSocket();
            socket.Dispose();
        }

        [ConditionalFact("WebSocketsSupported")]
        public void Abort_CreateAndAbort_StateIsClosed()
        {
            using var socket = new ClientWebSocket();
            socket.Abort();
            Assert.Equal(WebSocketState.Closed, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_CreateAndClose_ThrowsInvalidOperationException()
        {
            // Closing a socket that never connected is invalid and must not change its state.
            using var socket = new ClientWebSocket();
            Assert.Throws<InvalidOperationException>(() =>
            {
                _ = socket.CloseAsync(WebSocketCloseStatus.Empty, "", new CancellationToken());
            });
            Assert.Equal(WebSocketState.None, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_CreateAndCloseOutput_ThrowsInvalidOperationExceptionWithMessage()
        {
            using var socket = new ClientWebSocket();
            AssertExtensions.Throws<InvalidOperationException>(
                () => socket.CloseOutputAsync(WebSocketCloseStatus.Empty, "", new CancellationToken()).GetAwaiter().GetResult(),
                ResourceHelper.GetExceptionMessage("net_WebSockets_NotConnected"));
            Assert.Equal(WebSocketState.None, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_CreateAndReceive_ThrowsInvalidOperationException()
        {
            using var socket = new ClientWebSocket();
            var payload = new ArraySegment<byte>(new byte[100]);
            var token = new CancellationToken();
            Assert.Throws<InvalidOperationException>(() =>
            {
                _ = socket.ReceiveAsync(payload, token);
            });
            Assert.Equal(WebSocketState.None, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_CreateAndReceive_ThrowsInvalidOperationExceptionWithMessage()
        {
            using var socket = new ClientWebSocket();
            var payload = new ArraySegment<byte>(new byte[100]);
            var token = new CancellationToken();
            AssertExtensions.Throws<InvalidOperationException>(
                () => socket.ReceiveAsync(payload, token).GetAwaiter().GetResult(),
                ResourceHelper.GetExceptionMessage("net_WebSockets_NotConnected"));
            Assert.Equal(WebSocketState.None, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_CreateAndSend_ThrowsInvalidOperationException()
        {
            using var socket = new ClientWebSocket();
            var payload = new ArraySegment<byte>(new byte[100]);
            var token = new CancellationToken();
            Assert.Throws<InvalidOperationException>(() =>
            {
                _ = socket.SendAsync(payload, WebSocketMessageType.Text, false, token);
            });
            Assert.Equal(WebSocketState.None, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_CreateAndSend_ThrowsInvalidOperationExceptionWithMessage()
        {
            using var socket = new ClientWebSocket();
            var payload = new ArraySegment<byte>(new byte[100]);
            var token = new CancellationToken();
            AssertExtensions.Throws<InvalidOperationException>(
                () => socket.SendAsync(payload, WebSocketMessageType.Text, false, token).GetAwaiter().GetResult(),
                ResourceHelper.GetExceptionMessage("net_WebSockets_NotConnected"));
            Assert.Equal(WebSocketState.None, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void Ctor_ExpectedPropertyValues()
        {
            // A freshly constructed socket exposes well-defined defaults.
            using var socket = new ClientWebSocket();
            Assert.Equal(null, socket.CloseStatus);
            Assert.Equal(null, socket.CloseStatusDescription);
            Assert.NotEqual(null, socket.Options);
            Assert.Equal(WebSocketState.None, socket.State);
            Assert.Equal(null, socket.SubProtocol);
            Assert.Equal("System.Net.WebSockets.ClientWebSocket", socket.ToString());
        }

        [ConditionalFact("WebSocketsSupported")]
        public void Abort_CreateAndDisposeAndAbort_StateIsClosedSuccess()
        {
            // Abort after Dispose is a no-op rather than an error.
            var socket = new ClientWebSocket();
            socket.Dispose();
            socket.Abort();
            Assert.Equal(WebSocketState.Closed, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_DisposeAndClose_ThrowsObjectDisposedException()
        {
            var socket = new ClientWebSocket();
            socket.Dispose();
            Assert.Throws<ObjectDisposedException>(() =>
            {
                _ = socket.CloseAsync(WebSocketCloseStatus.Empty, "", new CancellationToken());
            });
            Assert.Equal(WebSocketState.Closed, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void CloseAsync_DisposeAndCloseOutput_ThrowsObjectDisposedExceptionWithMessage()
        {
            var socket = new ClientWebSocket();
            socket.Dispose();
            var expectedException = new ObjectDisposedException(socket.GetType().FullName);
            AssertExtensions.Throws<ObjectDisposedException>(
                () => socket.CloseOutputAsync(WebSocketCloseStatus.Empty, "", new CancellationToken()).GetAwaiter().GetResult(),
                expectedException.Message);
            Assert.Equal(WebSocketState.Closed, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void ReceiveAsync_CreateAndDisposeAndReceive_ThrowsObjectDisposedExceptionWithMessage()
        {
            var socket = new ClientWebSocket();
            socket.Dispose();
            var payload = new ArraySegment<byte>(new byte[100]);
            var token = new CancellationToken();
            var expectedException = new ObjectDisposedException(socket.GetType().FullName);
            AssertExtensions.Throws<ObjectDisposedException>(
                () => socket.ReceiveAsync(payload, token).GetAwaiter().GetResult(),
                expectedException.Message);
            Assert.Equal(WebSocketState.Closed, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void SendAsync_CreateAndDisposeAndSend_ThrowsObjectDisposedExceptionWithMessage()
        {
            var socket = new ClientWebSocket();
            socket.Dispose();
            var payload = new ArraySegment<byte>(new byte[100]);
            var token = new CancellationToken();
            var expectedException = new ObjectDisposedException(socket.GetType().FullName);
            AssertExtensions.Throws<ObjectDisposedException>(
                () => socket.SendAsync(payload, WebSocketMessageType.Text, false, token).GetAwaiter().GetResult(),
                expectedException.Message);
            Assert.Equal(WebSocketState.Closed, socket.State);
        }

        [ConditionalFact("WebSocketsSupported")]
        public void Dispose_CreateAndDispose_ExpectedPropertyValues()
        {
            // Dispose moves the state to Closed but leaves the other defaults intact.
            var socket = new ClientWebSocket();
            socket.Dispose();
            Assert.Equal(null, socket.CloseStatus);
            Assert.Equal(null, socket.CloseStatusDescription);
            Assert.NotEqual(null, socket.Options);
            Assert.Equal(WebSocketState.Closed, socket.State);
            Assert.Equal(null, socket.SubProtocol);
            Assert.Equal("System.Net.WebSockets.ClientWebSocket", socket.ToString());
        }
    }
}
using Kitware.VTK;
using System;
// input file is C:\VTK\Graphics\Testing\Tcl\dataSetSurfaceFilter.tcl
// output file is AVdataSetSurfaceFilter.cs
/// <summary>
/// The testing class derived from AVdataSetSurfaceFilter.
/// Builds four parallel vtkDataSetSurfaceFilter pipelines (structured grid,
/// poly data, unstructured grid, rectilinear grid), each with a plain and a
/// triangle-strip variant, and renders all eight actors in one window.
/// </summary>
public class AVdataSetSurfaceFilterClass
{
    /// <summary>
    /// The main entry method called by the CSharp driver.
    /// </summary>
    /// <param name="argv">command-line arguments (unused)</param>
    public static void AVdataSetSurfaceFilter(String[] argv)
    {
        //Prefix Content is: ""

        // create pipeline - structured grid
        pl3d = new vtkMultiBlockPLOT3DReader();
        // NOTE: redundant "" + x.ToString() concatenation removed; plain string
        // concatenation behaves identically and does not NRE on a null root.
        pl3d.SetXYZFileName(VTK_DATA_ROOT + "/Data/combxyz.bin");
        pl3d.SetQFileName(VTK_DATA_ROOT + "/Data/combq.bin");
        pl3d.SetScalarFunctionNumber(100);
        pl3d.SetVectorFunctionNumber(202);
        pl3d.Update();

        // The first block of the PLOT3D output and its scalar range are used by
        // six of the eight pipelines below — evaluate them once instead of
        // re-walking GetOutput().GetBlock(0) for every mapper.
        vtkDataSet block0 = (vtkDataSet)pl3d.GetOutput().GetBlock(0);
        double[] scalarRange = block0.GetScalarRange();

        gf = new vtkDataSetSurfaceFilter();
        gf.SetInputData(block0);
        gMapper = vtkPolyDataMapper.New();
        gMapper.SetInputConnection(gf.GetOutputPort());
        gMapper.SetScalarRange(scalarRange[0], scalarRange[1]);
        gActor = new vtkActor();
        gActor.SetMapper(gMapper);

        gf2 = new vtkDataSetSurfaceFilter();
        gf2.SetInputData(block0);
        gf2.UseStripsOn(); // strip variant of the structured-grid pipeline
        g2Mapper = vtkPolyDataMapper.New();
        g2Mapper.SetInputConnection(gf2.GetOutputPort());
        g2Mapper.SetScalarRange(scalarRange[0], scalarRange[1]);
        g2Actor = new vtkActor();
        g2Actor.SetMapper(g2Mapper);
        g2Actor.AddPosition(0, 15, 0);

        // create pipeline - poly data
        gf3 = new vtkDataSetSurfaceFilter();
        gf3.SetInputConnection(gf.GetOutputPort());
        g3Mapper = vtkPolyDataMapper.New();
        g3Mapper.SetInputConnection(gf3.GetOutputPort());
        g3Mapper.SetScalarRange(scalarRange[0], scalarRange[1]);
        g3Actor = new vtkActor();
        g3Actor.SetMapper(g3Mapper);
        g3Actor.AddPosition(0, 0, 15);

        gf4 = new vtkDataSetSurfaceFilter();
        gf4.SetInputConnection(gf2.GetOutputPort());
        gf4.UseStripsOn();
        g4Mapper = vtkPolyDataMapper.New();
        g4Mapper.SetInputConnection(gf4.GetOutputPort());
        g4Mapper.SetScalarRange(scalarRange[0], scalarRange[1]);
        g4Actor = new vtkActor();
        g4Actor.SetMapper(g4Mapper);
        g4Actor.AddPosition(0, 15, 15);

        // create pipeline - unstructured grid
        s = new vtkSphere();
        s.SetCenter(block0.GetCenter()[0], block0.GetCenter()[1], block0.GetCenter()[2]);
        s.SetRadius(100.0);
        //everything
        eg = new vtkExtractGeometry();
        eg.SetInputData(block0);
        eg.SetImplicitFunction(s);

        gf5 = new vtkDataSetSurfaceFilter();
        gf5.SetInputConnection(eg.GetOutputPort());
        g5Mapper = vtkPolyDataMapper.New();
        g5Mapper.SetInputConnection(gf5.GetOutputPort());
        g5Mapper.SetScalarRange(scalarRange[0], scalarRange[1]);
        g5Actor = new vtkActor();
        g5Actor.SetMapper(g5Mapper);
        g5Actor.AddPosition(0, 0, 30);

        gf6 = new vtkDataSetSurfaceFilter();
        gf6.SetInputConnection(eg.GetOutputPort());
        gf6.UseStripsOn();
        g6Mapper = vtkPolyDataMapper.New();
        g6Mapper.SetInputConnection(gf6.GetOutputPort());
        g6Mapper.SetScalarRange(scalarRange[0], scalarRange[1]);
        g6Actor = new vtkActor();
        g6Actor.SetMapper(g6Mapper);
        g6Actor.AddPosition(0, 15, 30);

        // create pipeline - rectilinear grid
        rgridReader = new vtkRectilinearGridReader();
        rgridReader.SetFileName(VTK_DATA_ROOT + "/Data/RectGrid2.vtk");
        rgridReader.Update();
        double[] rgridRange = ((vtkDataSet)rgridReader.GetOutput()).GetScalarRange();

        gf7 = new vtkDataSetSurfaceFilter();
        gf7.SetInputConnection(rgridReader.GetOutputPort());
        g7Mapper = vtkPolyDataMapper.New();
        g7Mapper.SetInputConnection(gf7.GetOutputPort());
        g7Mapper.SetScalarRange(rgridRange[0], rgridRange[1]);
        g7Actor = new vtkActor();
        g7Actor.SetMapper(g7Mapper);
        g7Actor.SetScale(3, 3, 3);

        gf8 = new vtkDataSetSurfaceFilter();
        gf8.SetInputConnection(rgridReader.GetOutputPort());
        gf8.UseStripsOn();
        g8Mapper = vtkPolyDataMapper.New();
        g8Mapper.SetInputConnection(gf8.GetOutputPort());
        g8Mapper.SetScalarRange(rgridRange[0], rgridRange[1]);
        g8Actor = new vtkActor();
        g8Actor.SetMapper(g8Mapper);
        g8Actor.SetScale(3, 3, 3);
        g8Actor.AddPosition(0, 15, 0);

        // Create the RenderWindow, Renderer and both Actors
        ren1 = vtkRenderer.New();
        renWin = vtkRenderWindow.New();
        renWin.AddRenderer(ren1);
        iren = new vtkRenderWindowInteractor();
        iren.SetRenderWindow(renWin);
        ren1.AddActor(gActor);
        ren1.AddActor(g2Actor);
        ren1.AddActor(g3Actor);
        ren1.AddActor(g4Actor);
        ren1.AddActor(g5Actor);
        ren1.AddActor(g6Actor);
        ren1.AddActor(g7Actor);
        ren1.AddActor(g8Actor);
        renWin.SetSize(340, 550);

        cam1 = ren1.GetActiveCamera();
        cam1.SetClippingRange(84, 174);
        cam1.SetFocalPoint(5.22824, 6.09412, 35.9813);
        cam1.SetPosition(100.052, 62.875, 102.818);
        cam1.SetViewUp(-0.307455, -0.464269, 0.830617);
        iren.Initialize();
        // render the image
        // prevent the tk window from showing up then start the event loop
        //deleteAllVTKObjects();
    }

    // Pipeline objects are static so the generated accessors below can expose
    // them to the test driver.
    static string VTK_DATA_ROOT;
    static int threshold;
    static vtkMultiBlockPLOT3DReader pl3d;
    static vtkDataSetSurfaceFilter gf;
    static vtkPolyDataMapper gMapper;
    static vtkActor gActor;
    static vtkDataSetSurfaceFilter gf2;
    static vtkPolyDataMapper g2Mapper;
    static vtkActor g2Actor;
    static vtkDataSetSurfaceFilter gf3;
    static vtkPolyDataMapper g3Mapper;
    static vtkActor g3Actor;
    static vtkDataSetSurfaceFilter gf4;
    static vtkPolyDataMapper g4Mapper;
    static vtkActor g4Actor;
    static vtkSphere s;
    static vtkExtractGeometry eg;
    static vtkDataSetSurfaceFilter gf5;
    static vtkPolyDataMapper g5Mapper;
    static vtkActor g5Actor;
    static vtkDataSetSurfaceFilter gf6;
    static vtkPolyDataMapper g6Mapper;
    static vtkActor g6Actor;
    static vtkRectilinearGridReader rgridReader;
    static vtkDataSetSurfaceFilter gf7;
    static vtkPolyDataMapper g7Mapper;
    static vtkActor g7Actor;
    static vtkDataSetSurfaceFilter gf8;
    static vtkPolyDataMapper g8Mapper;
    static vtkActor g8Actor;
    static vtkRenderer ren1;
    static vtkRenderWindow renWin;
    static vtkRenderWindowInteractor iren;
    static vtkCamera cam1;

    ///<summary> A Get Method for Static Variables </summary>
    public static string GetVTK_DATA_ROOT() { return VTK_DATA_ROOT; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void SetVTK_DATA_ROOT(string toSet) { VTK_DATA_ROOT = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static int Getthreshold() { return threshold; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setthreshold(int toSet) { threshold = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkMultiBlockPLOT3DReader Getpl3d() { return pl3d; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setpl3d(vtkMultiBlockPLOT3DReader toSet) { pl3d = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf() { return gf; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf(vtkDataSetSurfaceFilter toSet) { gf = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper GetgMapper() { return gMapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void SetgMapper(vtkPolyDataMapper toSet) { gMapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor GetgActor() { return gActor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void SetgActor(vtkActor toSet) { gActor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf2() { return gf2; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf2(vtkDataSetSurfaceFilter toSet) { gf2 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg2Mapper() { return g2Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg2Mapper(vtkPolyDataMapper toSet) { g2Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg2Actor() { return g2Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg2Actor(vtkActor toSet) { g2Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf3() { return gf3; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf3(vtkDataSetSurfaceFilter toSet) { gf3 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg3Mapper() { return g3Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg3Mapper(vtkPolyDataMapper toSet) { g3Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg3Actor() { return g3Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg3Actor(vtkActor toSet) { g3Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf4() { return gf4; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf4(vtkDataSetSurfaceFilter toSet) { gf4 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg4Mapper() { return g4Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg4Mapper(vtkPolyDataMapper toSet) { g4Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg4Actor() { return g4Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg4Actor(vtkActor toSet) { g4Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkSphere Gets() { return s; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Sets(vtkSphere toSet) { s = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkExtractGeometry Geteg() { return eg; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Seteg(vtkExtractGeometry toSet) { eg = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf5() { return gf5; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf5(vtkDataSetSurfaceFilter toSet) { gf5 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg5Mapper() { return g5Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg5Mapper(vtkPolyDataMapper toSet) { g5Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg5Actor() { return g5Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg5Actor(vtkActor toSet) { g5Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf6() { return gf6; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf6(vtkDataSetSurfaceFilter toSet) { gf6 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg6Mapper() { return g6Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg6Mapper(vtkPolyDataMapper toSet) { g6Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg6Actor() { return g6Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg6Actor(vtkActor toSet) { g6Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkRectilinearGridReader GetrgridReader() { return rgridReader; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void SetrgridReader(vtkRectilinearGridReader toSet) { rgridReader = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf7() { return gf7; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf7(vtkDataSetSurfaceFilter toSet) { gf7 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg7Mapper() { return g7Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg7Mapper(vtkPolyDataMapper toSet) { g7Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg7Actor() { return g7Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg7Actor(vtkActor toSet) { g7Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkDataSetSurfaceFilter Getgf8() { return gf8; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setgf8(vtkDataSetSurfaceFilter toSet) { gf8 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkPolyDataMapper Getg8Mapper() { return g8Mapper; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg8Mapper(vtkPolyDataMapper toSet) { g8Mapper = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkActor Getg8Actor() { return g8Actor; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setg8Actor(vtkActor toSet) { g8Actor = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkRenderer Getren1() { return ren1; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setren1(vtkRenderer toSet) { ren1 = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkRenderWindow GetrenWin() { return renWin; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void SetrenWin(vtkRenderWindow toSet) { renWin = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkRenderWindowInteractor Getiren() { return iren; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setiren(vtkRenderWindowInteractor toSet) { iren = toSet; }
    ///<summary> A Get Method for Static Variables </summary>
    public static vtkCamera Getcam1() { return cam1; }
    ///<summary> A Set Method for Static Variables </summary>
    public static void Setcam1(vtkCamera toSet) { cam1 = toSet; }

    ///<summary>Deletes all static objects created</summary>
    public static void deleteAllVTKObjects()
    {
        //clean up vtk objects (disposed in creation order; unset objects are skipped)
        if (pl3d != null) { pl3d.Dispose(); }
        if (gf != null) { gf.Dispose(); }
        if (gMapper != null) { gMapper.Dispose(); }
        if (gActor != null) { gActor.Dispose(); }
        if (gf2 != null) { gf2.Dispose(); }
        if (g2Mapper != null) { g2Mapper.Dispose(); }
        if (g2Actor != null) { g2Actor.Dispose(); }
        if (gf3 != null) { gf3.Dispose(); }
        if (g3Mapper != null) { g3Mapper.Dispose(); }
        if (g3Actor != null) { g3Actor.Dispose(); }
        if (gf4 != null) { gf4.Dispose(); }
        if (g4Mapper != null) { g4Mapper.Dispose(); }
        if (g4Actor != null) { g4Actor.Dispose(); }
        if (s != null) { s.Dispose(); }
        if (eg != null) { eg.Dispose(); }
        if (gf5 != null) { gf5.Dispose(); }
        if (g5Mapper != null) { g5Mapper.Dispose(); }
        if (g5Actor != null) { g5Actor.Dispose(); }
        if (gf6 != null) { gf6.Dispose(); }
        if (g6Mapper != null) { g6Mapper.Dispose(); }
        if (g6Actor != null) { g6Actor.Dispose(); }
        if (rgridReader != null) { rgridReader.Dispose(); }
        if (gf7 != null) { gf7.Dispose(); }
        if (g7Mapper != null) { g7Mapper.Dispose(); }
        if (g7Actor != null) { g7Actor.Dispose(); }
        if (gf8 != null) { gf8.Dispose(); }
        if (g8Mapper != null) { g8Mapper.Dispose(); }
        if (g8Actor != null) { g8Actor.Dispose(); }
        if (ren1 != null) { ren1.Dispose(); }
        if (renWin != null) { renWin.Dispose(); }
        if (iren != null) { iren.Dispose(); }
        if (cam1 != null) { cam1.Dispose(); }
    }
}
//--- end of script --//
using System;
using System.Linq;
using System.Text;
using System.Runtime.InteropServices;
using System.Threading;
// ReSharper disable UnusedMember.Global
// ReSharper disable InconsistentNaming
// ReSharper disable IdentifierTypo
// ReSharper disable ArrangeTypeMemberModifiers
// ReSharper disable CommentTypo

namespace //~~NameInstance~~//
{
    // NOTE(review): these structs use C# 'long' (8 bytes) where the native Win32
    // POINT/DROPFILES use 32-bit fields, so the layout does NOT match the native
    // one. The code below is internally consistent with this layout (pFiles is
    // written from Marshal.SizeOf of THIS struct), so the field types are left
    // unchanged — confirm against consumers before "fixing" them.
    [StructLayout(LayoutKind.Sequential)]
    public struct POINTAPI
    {
        public long x;
        public long y;
    }

    [StructLayout(LayoutKind.Sequential)]
    public struct DROPFILES
    {
        public long pFiles; // offset of the file list from the start of the structure
        public POINTAPI pt;
        public long fNC;
        public long fWide;  // 0 => ANSI file list (matches the LPStr CopyMem below)
    }

    /// <summary>
    /// Minimal Win32-clipboard wrapper for console apps (text and CF_HDROP file
    /// lists) built on raw user32/kernel32/shell32 P/Invoke.
    /// </summary>
    public class ConsoleClipboard : IDisposable
    {
        // ---
        // Predefined Clipboard Formats
        public const int CF_TEXT = 1; // Supported
        public const int CF_BITMAP = 2;
        public const int CF_METAFILEPICT = 3;
        public const int CF_SYLK = 4;
        public const int CF_DIF = 5;
        public const int CF_TIFF = 6;
        public const int CF_OEMTEXT = 7;
        public const int CF_DIB = 8;
        public const int CF_PALETTE = 9;
        public const int CF_PENDATA = 10;
        public const int CF_RIFF = 11;
        public const int CF_WAVE = 12;
        public const int CF_UNICODETEXT = 13; // Supported
        public const int CF_ENHMETAFILE = 14;
        public const int CF_HDROP = 15; // Supported
        public const int CF_LOCALE = 16;
        public const int CF_MAX = 17;

        // GlobalAlloc flags
        public const int GMEM_FIXED = 0;
        public const int GMEM_MOVEABLE = 2;
        public const int GMEM_NOCOMPACT = 16;
        public const int GMEM_NODISCARD = 32;
        public const int GMEM_ZEROINIT = 64;
        public const int GMEM_MODIFY = 128;
        public const int GMEM_DISCARDABLE = 256;
        public const int GMEM_NOT_BANKED = 4096;
        public const int GMEM_SHARE = 8192;
        public const int GMEM_DDESHARE = 8192;
        public const int GMEM_NOTIFY = 16384;
        public const int GMEM_LOWER = GMEM_NOT_BANKED;
        public const int GMEM_VALID_FLAGS = 32626;
        public const int GMEM_INVALID_HANDLE = 32768;
        public const int GHND = GMEM_MOVEABLE | GMEM_ZEROINIT;
        public const int GPTR = GMEM_FIXED | GMEM_ZEROINIT;

        // New shell-oriented clipboard formats
        public const string CFSTR_SHELLIDLIST = "Shell IDList Array";
        public const string CFSTR_SHELLIDLISTOFFSET = "Shell Object Offsets";
        public const string CFSTR_NETRESOURCES = "Net Resource";
        public const string CFSTR_FILEDESCRIPTOR = "FileGroupDescriptor";
        public const string CFSTR_FILECONTENTS = "FileContents";
        public const string CFSTR_FILENAME = "FileName";
        public const string CFSTR_PRINTERGROUP = "PrinterFriendlyName";
        public const string CFSTR_FILENAMEMAP = "FileNameMap";

        public const int MAX_PATH = 260;

        // ---
        [DllImport("user32.dll", SetLastError=true)]
        static extern bool OpenClipboard(IntPtr hWndNewOwner);

        [DllImport("user32.dll")]
        static extern bool CloseClipboard();

        [DllImport("user32.dll")]
        static extern bool SetClipboardData(uint uFormat, IntPtr data);

        [DllImport("user32.dll")]
        static extern IntPtr GetClipboardData(uint uFormat);

        //[DllImport("user32.dll", SetLastError = true)]
        public static extern uint EnumClipboardFormats(uint format);

        [DllImport("kernel32.dll", SetLastError = true)]
        public static extern IntPtr GlobalLock(IntPtr hMem);

        [DllImport("kernel32.dll")]
        public static extern UIntPtr GlobalSize(IntPtr hMem);

        [DllImport("kernel32.dll")]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool GlobalUnlock(IntPtr hMem);

        // Clipboard Manager Functions
        [DllImport("user32.dll")]
        static extern long EmptyClipboard();

        [DllImport("user32.dll")]
        static extern long IsClipboardFormatAvailable(long wFormat);

        [DllImport("shell32.dll")]
        static extern uint DragQueryFile(IntPtr hDrop, uint iFile, [Out] StringBuilder lpszFile, uint cch);

        [DllImport("shell32.dll", EntryPoint = "DragQueryFileA")]
        static extern long DragQueryFile(IntPtr hDrop, long iFile, [MarshalAs(UnmanagedType.LPStr)] string lpStr, long ch);

        [DllImport("kernel32.dll")]
        static extern IntPtr GlobalAlloc(uint uFlags, UIntPtr dwBytes);

        [DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory")]
        static extern void CopyMem(IntPtr Destination, [MarshalAs(UnmanagedType.LPStr)] string Source, long Length);

        // ---
        /// <summary>
        /// Constructs the wrapper. Closes any clipboard handle this process may
        /// already hold so subsequent OpenClipboard calls start from a clean state.
        /// </summary>
        public ConsoleClipboard()
        {
            CloseClipboard();
        }

        /// <summary>
        /// Returns the CF_UNICODETEXT content of the clipboard, or "" when no
        /// text is available or the clipboard cannot be opened.
        /// </summary>
        public string GetText()
        {
            if (IsClipboardFormatAvailable(CF_UNICODETEXT) == 0) return "";
            if (!OpenClipboard()) return "";

            IntPtr pointer = IntPtr.Zero;
            try
            {
                // Get pointer to clipboard data in the selected format; the
                // memory stays owned by the system, so it is only read here.
                pointer = GetClipboardData(CF_UNICODETEXT);
                var length = GlobalSize(pointer);
                var @lock = GlobalLock(pointer);

                // Copy clipboard data to a managed buffer.
                byte[] buffer = new byte[(int)length];
                Marshal.Copy(@lock, buffer, 0, (int)length);
                if (buffer.Length > 0)
                    // BUGFIX: CF_UNICODETEXT is UTF-16LE, but this used Encoding.ASCII.
                    // That only appeared to work for ASCII text because the interleaved
                    // high bytes were stripped by the "\0" replace; any non-ASCII
                    // character was corrupted. Decode as UTF-16 and strip terminators.
                    return Encoding.Unicode.GetString(buffer).Replace("\0", "");
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
            finally
            {
                CloseClipboard();
                if (pointer != IntPtr.Zero) GlobalUnlock(pointer);
            }

            return "";
        }

        /// <summary>True when the clipboard holds a CF_HDROP file list.</summary>
        public bool AreFilenamesOnTheClipboard()
        {
            return IsClipboardFormatAvailable(CF_HDROP) != 0;
        }

        /// <summary>
        /// Returns the filenames of a CF_HDROP drop list, or an empty array when
        /// none are present or the clipboard cannot be opened.
        /// </summary>
        public string[] GetFilenames()
        {
            string[] files = Array.Empty<string>();

            // Insure desired format is there, and open clipboard.
            if (IsClipboardFormatAvailable(CF_HDROP) == 0) return files;
            if (!OpenClipboard()) return files;

            try
            {
                // Get handle to Dropped Filelist data, and number of files
                // (iFile == -1 makes DragQueryFile return the count).
                IntPtr handleToDroppedFilenames = GetClipboardData(CF_HDROP);
                var numberofFilenames = DragQueryFile(handleToDroppedFilenames, -1, "", 0);

                // Retrieve each filename in Dropped Filelist.
                files = new string[numberofFilenames];
                for (uint i = 0; i <= numberofFilenames - 1; i++)
                {
                    StringBuilder filename = new StringBuilder(new string(' ', MAX_PATH));
                    DragQueryFile(handleToDroppedFilenames, i, filename, MAX_PATH);
                    files[i] = ForStrings.BeforeNullOrTrim(filename.ToString());
                }
            }
            finally
            {
                // Clean up
                CloseClipboard();
            }

            return files;
        }

        /// <summary>
        /// Places <paramref name="text"/> on the clipboard as CF_UNICODETEXT.
        /// Returns false when the clipboard cannot be opened or the data cannot
        /// be transferred.
        /// </summary>
        public bool Set(string text)
        {
            if (!OpenClipboard()) return false;

            IntPtr pointer = IntPtr.Zero;
            try
            {
                pointer = Marshal.StringToHGlobalUni(text);
                // On success the system takes ownership of the memory and it must
                // NOT be freed here; on failure it stays ours.
                if (!SetClipboardData(CF_UNICODETEXT, pointer))
                {
                    Marshal.FreeHGlobal(pointer); // BUGFIX: was leaked when SetClipboardData failed
                    return false;
                }
            }
            catch
            {
                return false;
            }
            finally
            {
                CloseClipboard();
            }

            return true;
        }

        /// <summary>
        /// Places a CF_HDROP file list on the clipboard (DROPFILES header followed
        /// by a double-null-terminated ANSI filename list). Empty/blank entries are
        /// skipped; an empty input clears the clipboard and returns true.
        /// </summary>
        public bool Set(string[] files)
        {
            if (OpenClipboard())
            {
                IntPtr handleToDestinationMemory = IntPtr.Zero;
                bool clipboardOwnsMemory = false;
                try
                {
                    EmptyClipboard();
                    if (files.IsEmpty()) return true;

                    // Build double-null terminated list of files.
                    DROPFILES df = new DROPFILES();
                    string data = "";
                    string[] nonEmptyFilenames = files.Where(f => f.IsNotEmpty()).ToArray();
                    if (nonEmptyFilenames.Length == 0) return true;
                    for (int i = 0; i <= nonEmptyFilenames.Length - 1; i++)
                        data += nonEmptyFilenames[i] + '\0';
                    data += '\0';

                    // Allocate and get pointer to global memory,
                    // then copy file list to it.
                    var sizeOfDROPFILES = Marshal.SizeOf(typeof(DROPFILES));
                    int size = sizeOfDROPFILES + data.Length;
                    handleToDestinationMemory = Marshal.AllocHGlobal(size);
                    if (handleToDestinationMemory != IntPtr.Zero)
                    {
                        // Build DROPFILES structure in global memory. AllocHGlobal
                        // returns fixed memory, so GlobalLock is effectively an
                        // identity pointer conversion and no matching unlock is needed.
                        df.pFiles = sizeOfDROPFILES;
                        IntPtr pointerToDestinationMemory = GlobalLock(handleToDestinationMemory);
                        Marshal.StructureToPtr(df, pointerToDestinationMemory, false);
                        CopyMem(pointerToDestinationMemory + sizeOfDROPFILES, data, data.Length);

                        // Copy data to clipboard, and return success; the system
                        // now owns the memory.
                        if (SetClipboardData(CF_HDROP, handleToDestinationMemory))
                        {
                            clipboardOwnsMemory = true;
                            return true;
                        }
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e);
                    throw;
                }
                finally
                {
                    // BUGFIX: the allocation was leaked whenever SetClipboardData
                    // failed or threw; free it unless the clipboard took ownership.
                    if (!clipboardOwnsMemory && handleToDestinationMemory != IntPtr.Zero)
                        Marshal.FreeHGlobal(handleToDestinationMemory);
                    // Clean up
                    CloseClipboard();
                }
            }

            return false;
        }

        /// <summary>
        /// Opens the clipboard, retrying up to 5 times with an increasing delay
        /// (another process may hold it briefly). Returns false when it could not
        /// be opened.
        /// </summary>
        public bool OpenClipboard()
        {
            int attemptsLeft = 5;
            int millisecondsToSleep = 50;
            // BUGFIX: the original only decremented attemptsLeft inside the catch
            // block, but OpenClipboard(IntPtr.Zero) reports failure by returning
            // false (it does not throw), so a busy clipboard spun this loop forever.
            while (attemptsLeft-- > 0)
            {
                try
                {
                    CloseClipboard();
                    if (OpenClipboard(IntPtr.Zero))
                        return true;
                }
                catch
                {
                    CloseClipboard();
                }

                if (attemptsLeft > 0)
                {
                    Thread.Sleep(millisecondsToSleep);
                    millisecondsToSleep += 200;
                }
            }

            return false;
        }

        private void ReleaseUnmanagedResources()
        {
            // Releases the clipboard handle if this process still holds it.
            CloseClipboard();
        }

        public void Dispose()
        {
            ReleaseUnmanagedResources();
            GC.SuppressFinalize(this);
        }

        ~ConsoleClipboard()
        {
            ReleaseUnmanagedResources();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Orleans.Transactions.TestKit
{
    /// <summary>Attribution grain whose call carries no transaction attribute.</summary>
    public interface INoAttributionGrain : IGrainWithGuidKey
    {
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    /// <summary>Attribution grain using <see cref="TransactionOption.Suppress"/>.</summary>
    public interface ISuppressAttributionGrain : IGrainWithGuidKey
    {
        [Transaction(TransactionOption.Suppress)]
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    /// <summary>Attribution grain using <see cref="TransactionOption.CreateOrJoin"/>.</summary>
    public interface ICreateOrJoinAttributionGrain : IGrainWithGuidKey
    {
        [Transaction(TransactionOption.CreateOrJoin)]
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    /// <summary>Attribution grain using <see cref="TransactionOption.Create"/>.</summary>
    public interface ICreateAttributionGrain : IGrainWithGuidKey
    {
        [Transaction(TransactionOption.Create)]
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    /// <summary>Attribution grain using the Mandatory alias (equivalent to Join).</summary>
    public interface IJoinAttributionGrain : IGrainWithGuidKey
    {
        [Transaction(TransactionOptionAlias.Mandatory)]
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    /// <summary>Attribution grain using <see cref="TransactionOption.Supported"/>.</summary>
    public interface ISupportedAttributionGrain : IGrainWithGuidKey
    {
        [Transaction(TransactionOption.Supported)]
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    /// <summary>Attribution grain using <see cref="TransactionOption.NotAllowed"/>.</summary>
    public interface INotAllowedAttributionGrain : IGrainWithGuidKey
    {
        [Transaction(TransactionOption.NotAllowed)]
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    #region wrappers

    /// <summary>Option-agnostic view over the attribution grains above.</summary>
    public interface ITransactionAttributionGrain
    {
        Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers);
    }

    public static class TransactionAttributionGrainExtensions
    {
        /// <summary>
        /// Resolves the wrapper matching <paramref name="option"/>; a null option
        /// selects the grain whose method carries no transaction attribute.
        /// </summary>
        public static ITransactionAttributionGrain GetTransactionAttributionGrain(this IGrainFactory grainFactory, Guid id, TransactionOption? option = null)
        {
            if (option == null)
            {
                return new NoAttributionGrain(grainFactory.GetGrain<INoAttributionGrain>(id));
            }

            // One wrapper per TransactionOption value; the first arm is cast to the
            // interface so the switch expression has a common result type.
            return option.Value switch
            {
                TransactionOption.Suppress => (ITransactionAttributionGrain)new SuppressAttributionGrain(grainFactory.GetGrain<ISuppressAttributionGrain>(id)),
                TransactionOption.CreateOrJoin => new CreateOrJoinAttributionGrain(grainFactory.GetGrain<ICreateOrJoinAttributionGrain>(id)),
                TransactionOption.Create => new CreateAttributionGrain(grainFactory.GetGrain<ICreateAttributionGrain>(id)),
                TransactionOption.Join => new JoinAttributionGrain(grainFactory.GetGrain<IJoinAttributionGrain>(id)),
                TransactionOption.Supported => new SupportedAttributionGrain(grainFactory.GetGrain<ISupportedAttributionGrain>(id)),
                TransactionOption.NotAllowed => new NotAllowedAttributionGrain(grainFactory.GetGrain<INotAllowedAttributionGrain>(id)),
                _ => throw new NotSupportedException($"Transaction option {option.Value} is not supported."),
            };
        }

        /// <summary>Forwards to <see cref="INoAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class NoAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public INoAttributionGrain grain;

            public NoAttributionGrain(INoAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }

        /// <summary>Forwards to <see cref="ISuppressAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class SuppressAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public ISuppressAttributionGrain grain;

            public SuppressAttributionGrain(ISuppressAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }

        /// <summary>Forwards to <see cref="ICreateOrJoinAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class CreateOrJoinAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public ICreateOrJoinAttributionGrain grain;

            public CreateOrJoinAttributionGrain(ICreateOrJoinAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }

        /// <summary>Forwards to <see cref="ICreateAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class CreateAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public ICreateAttributionGrain grain;

            public CreateAttributionGrain(ICreateAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }

        /// <summary>Forwards to <see cref="IJoinAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class JoinAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public IJoinAttributionGrain grain;

            public JoinAttributionGrain(IJoinAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }

        /// <summary>Forwards to <see cref="ISupportedAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class SupportedAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public ISupportedAttributionGrain grain;

            public SupportedAttributionGrain(ISupportedAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }

        /// <summary>Forwards to <see cref="INotAllowedAttributionGrain"/>.</summary>
        [GenerateSerializer]
        public class NotAllowedAttributionGrain : ITransactionAttributionGrain
        {
            [Id(0)]
            public INotAllowedAttributionGrain grain;

            public NotAllowedAttributionGrain(INotAllowedAttributionGrain grain) => this.grain = grain;

            public Task<List<string>[]> GetNestedTransactionIds(int tier, List<ITransactionAttributionGrain>[] tiers)
                => grain.GetNestedTransactionIds(tier, tiers);
        }
    }

    #endregion wrappers
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace Soccer.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// Samples are resolved in priority order: explicitly registered action samples
    /// (<see cref="ActionSamples"/>), then samples serialized from a registered or
    /// generated sample object using each supported <see cref="MediaTypeFormatter"/>.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            // Start with empty registries; callers populate them (typically in HelpPageConfig).
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Explicit action samples added above win over generated ones.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters, or null when no registered sample matches.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try get sample provided for a specific mediaType, controllerName, actionName and parameterNames.
            // If not found, try get the sample provided for a specific mediaType, controllerName and actionName regardless of the parameterNames
            // If still not found, try get the sample provided for a specific type and mediaType
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // Try create a default sample object
                ObjectGenerator objectGenerator = new ObjectGenerator();
                sampleObject = objectGenerator.GenerateObject(type);
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        /// <returns>The resolved CLR type of the body, or null when none applies.</returns>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // A registered "actual" message type overrides whatever the action signature says.
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        // Only the [FromBody] parameter (if any) contributes a request body type.
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <see cref="TextSample"/> with the serialized content, or an <see cref="InvalidSample"/> describing the failure.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // Synchronous wait is acceptable here: help-page generation is not a hot path.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print known text formats for display.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    e.Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Pretty-prints a JSON string; returns the input unchanged if it cannot be parsed.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        // Pretty-prints an XML string; returns the input unchanged if it cannot be parsed.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter must be able to read request bodies and write response bodies of the given type.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Enumerates registered action samples matching the controller/action/direction,
        // where either the key uses the "*" parameter wildcard or the parameter name sets match.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped in TextSample so the help page renders them as text content.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using osuTK;
using osuTK.Input;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Rulesets.Mods;
using System;
using System.Linq;
using System.Collections.Generic;
using System.Threading;
using Humanizer;
using osu.Framework.Input.Events;
using osu.Game.Graphics;

namespace osu.Game.Overlays.Mods
{
    /// <summary>
    /// A section of the mod select overlay displaying the buttons for all mods of one
    /// <see cref="Mods.ModType"/>, with keyboard toggling and queued selection animations.
    /// </summary>
    public class ModSection : CompositeDrawable
    {
        // Section title drawable created via CreateHeader; faded in/out together with the section.
        private readonly Drawable header;

        public FillFlowContainer<ModButtonEmpty> ButtonsContainer { get; }

        // Only the real (non-placeholder) mod buttons; rebuilt whenever Mods is set.
        protected IReadOnlyList<ModButton> Buttons { get; private set; } = Array.Empty<ModButton>();

        // Invoked with the changed mod whenever a button's selection changes.
        public Action<Mod> Action;

        // Optional per-button hotkeys; index in this array maps to index in Buttons.
        public Key[] ToggleKeys;

        public readonly ModType ModType;

        public IEnumerable<Mod> SelectedMods => Buttons.Select(b => b.SelectedMod).Where(m => m != null);

        // Cancels an in-flight asynchronous button load when Mods is reassigned.
        private CancellationTokenSource modsLoadCts;

        protected bool SelectionAnimationRunning => pendingSelectionOperations.Count > 0;

        /// <summary>
        /// True when all mod icons have completed loading.
        /// </summary>
        public bool ModIconsLoaded { get; private set; } = true;

        public IEnumerable<Mod> Mods
        {
            set
            {
                // A null entry produces an empty placeholder button to keep layout spacing.
                var modContainers = value.Select(m =>
                {
                    if (m == null)
                        return new ModButtonEmpty();

                    return CreateModButton(m).With(b =>
                    {
                        b.SelectionChanged = mod =>
                        {
                            ModButtonStateChanged(mod);
                            Action?.Invoke(mod);
                        };
                    });
                }).ToArray();

                // Abort any previous asynchronous load before starting a new one.
                modsLoadCts?.Cancel();

                if (modContainers.Length == 0)
                {
                    ModIconsLoaded = true;
                    header.Hide();
                    Hide();
                    return;
                }

                ModIconsLoaded = false;

                LoadComponentsAsync(modContainers, c =>
                {
                    ModIconsLoaded = true;
                    ButtonsContainer.ChildrenEnumerable = c;
                }, (modsLoadCts = new CancellationTokenSource()).Token);

                Buttons = modContainers.OfType<ModButton>().ToArray();

                header.FadeIn(200);
                this.FadeIn(200);
            }
        }

        // Hook for subclasses; called before Action when any button's selection changes.
        protected virtual void ModButtonStateChanged(Mod mod)
        {
        }

        protected override bool OnKeyDown(KeyDownEvent e)
        {
            // Ignore key presses with Ctrl held (reserved for other shortcuts).
            if (e.ControlPressed) return false;

            if (ToggleKeys != null)
            {
                int index = Array.IndexOf(ToggleKeys, e.Key);
                // Shift reverses the cycling direction through a button's mods.
                if (index > -1 && index < Buttons.Count)
                    Buttons[index].SelectNext(e.ShiftPressed ? -1 : 1);
            }

            return base.OnKeyDown(e);
        }

        // Delay (ms) before the first queued selection operation runs.
        private const double initial_multiple_selection_delay = 120;

        private double selectionDelay = initial_multiple_selection_delay;
        private double lastSelection;

        // Selection/deselection operations queued to play out one-by-one in Update.
        private readonly Queue<Action> pendingSelectionOperations = new Queue<Action>();

        protected override void Update()
        {
            base.Update();

            if (selectionDelay == initial_multiple_selection_delay || Time.Current - lastSelection >= selectionDelay)
            {
                if (pendingSelectionOperations.TryDequeue(out var dequeuedAction))
                {
                    dequeuedAction();

                    // each time we play an animation, we decrease the time until the next animation (to ramp the visual and audible elements).
                    selectionDelay = Math.Max(30, selectionDelay * 0.8f);
                    lastSelection = Time.Current;
                }
                else
                {
                    // reset the selection delay after all animations have been completed.
                    // this will cause the next action to be immediately performed.
                    selectionDelay = initial_multiple_selection_delay;
                }
            }
        }

        /// <summary>
        /// Selects all mods.
        /// </summary>
        public void SelectAll()
        {
            pendingSelectionOperations.Clear();

            foreach (var button in Buttons.Where(b => !b.Selected))
                pendingSelectionOperations.Enqueue(() => button.SelectAt(0));
        }

        /// <summary>
        /// Deselects all mods.
        /// </summary>
        public void DeselectAll()
        {
            pendingSelectionOperations.Clear();
            DeselectTypes(Buttons.Select(b => b.SelectedMod?.GetType()).Where(t => t != null));
        }

        /// <summary>
        /// Deselect one or more mods in this section.
        /// </summary>
        /// <param name="modTypes">The types of <see cref="Mod"/>s which should be deselected.</param>
        /// <param name="immediate">Whether the deselection should happen immediately. Should only be used when required to ensure correct selection flow.</param>
        /// <param name="newSelection">If this deselection is triggered by a user selection, this should contain the newly selected type. This type will never be deselected, even if it matches one provided in <paramref name="modTypes"/>.</param>
        public void DeselectTypes(IEnumerable<Type> modTypes, bool immediate = false, Mod newSelection = null)
        {
            foreach (var button in Buttons)
            {
                if (button.SelectedMod == null) continue;

                // Never deselect the mod the user just picked.
                if (button.SelectedMod == newSelection)
                    continue;

                foreach (var type in modTypes)
                {
                    if (type.IsInstanceOfType(button.SelectedMod))
                    {
                        if (immediate)
                            button.Deselect();
                        else
                            pendingSelectionOperations.Enqueue(button.Deselect);
                    }
                }
            }
        }

        /// <summary>
        /// Updates all buttons with the given list of selected mods.
        /// </summary>
        /// <param name="newSelectedMods">The new list of selected mods to select.</param>
        public void UpdateSelectedButtons(IReadOnlyList<Mod> newSelectedMods)
        {
            foreach (var button in Buttons)
                updateButtonSelection(button, newSelectedMods);
        }

        // Selects the button's matching mod (by runtime type) or deselects it when none match.
        private void updateButtonSelection(ModButton button, IReadOnlyList<Mod> newSelectedMods)
        {
            foreach (var mod in newSelectedMods)
            {
                int index = Array.FindIndex(button.Mods, m1 => mod.GetType() == m1.GetType());
                if (index < 0)
                    continue;

                var buttonMod = button.Mods[index];

                // as this is likely coming from an external change, ensure the settings of the mod are in sync.
                buttonMod.CopyFrom(mod);

                button.SelectAt(index, false);
                return;
            }

            button.Deselect();
        }

        public ModSection(ModType type)
        {
            ModType = type;

            AutoSizeAxes = Axes.Y;
            RelativeSizeAxes = Axes.X;

            Origin = Anchor.TopCentre;
            Anchor = Anchor.TopCentre;

            InternalChildren = new[]
            {
                // Header text is derived from the ModType enum value (e.g. "Difficulty Reduction").
                header = CreateHeader(type.Humanize(LetterCasing.Title)),
                ButtonsContainer = new FillFlowContainer<ModButtonEmpty>
                {
                    AutoSizeAxes = Axes.Y,
                    RelativeSizeAxes = Axes.X,
                    Origin = Anchor.BottomLeft,
                    Anchor = Anchor.BottomLeft,
                    Spacing = new Vector2(50f, 0f),
                    Margin = new MarginPadding
                    {
                        Top = 20,
                    },
                    AlwaysPresent = true
                },
            };
        }

        // Factory for the section header; overridable by subclasses for custom styling.
        protected virtual Drawable CreateHeader(string text) => new OsuSpriteText
        {
            Font = OsuFont.GetFont(weight: FontWeight.Bold),
            Text = text
        };

        // Factory for a single mod button; overridable by subclasses.
        protected virtual ModButton CreateModButton(Mod mod) => new ModButton(mod);

        /// <summary>
        /// Play out all remaining animations immediately to leave mods in a good (final) state.
        /// </summary>
        public void FlushAnimation()
        {
            while (pendingSelectionOperations.TryDequeue(out var dequeuedAction))
                dequeuedAction();
        }
    }
}
#region -- License Terms -- // // MessagePack for CLI // // Copyright (C) 2017 FUJIWARA, Yusuke // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #endregion -- License Terms -- using System; using System.Globalization; #if !NET35 && !UNITY #if !WINDOWS_PHONE #if !UNITY || MSGPACK_UNITY_FULL using System.Numerics; #endif // !NET35 && !UNITY #endif // !WINDOWS_PHONE #endif // !UNITY || MSGPACK_UNITY_FULL #if !MSTEST using NUnit.Framework; #else using TestFixtureAttribute = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute; using TestAttribute = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute; using TimeoutAttribute = NUnit.Framework.TimeoutAttribute; using Assert = NUnit.Framework.Assert; using Is = NUnit.Framework.Is; #endif namespace MsgPack { partial class TimestampTest { [Test] public void TestAdd_TimeSpan_Same() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.Zero; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_TimeSpan_Same() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.Zero; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_TimeSpan_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( 1 ); 
var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_TimeSpan_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( 1 ); var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_TimeSpan_1Tick() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 101 ) ); } [Test] public void TestAdditionOperator_TimeSpan_1Tick() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 101 ) ); } [Test] public void TestAdd_TimeSpan_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( -1 ); var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_TimeSpan_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( -1 ); var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_TimeSpan_Minus1Tick() { var @base = new Timestamp( 1L, 101 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_TimeSpan_Minus1Tick() { var @base = new Timestamp( 1L, 101 ); var operand = TimeSpan.FromTicks( -1 ); 
var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_TimeSpan_MoveUp() { var @base = new Timestamp( 1L, 999999900 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestAdditionOperator_TimeSpan_MoveUp() { var @base = new Timestamp( 1L, 999999900 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestAdd_TimeSpan_MoveDown() { var @base = new Timestamp( 1L, 99 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999999 ) ); } [Test] public void TestAdditionOperator_TimeSpan_MoveDown() { var @base = new Timestamp( 1L, 99 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999999 ) ); } [Test] public void TestAdd_TimeSpan_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = TimeSpan.FromSeconds( 1 ); Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_TimeSpan_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = TimeSpan.FromSeconds( 1 ); Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestAdd_TimeSpan_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = TimeSpan.FromSeconds( -1 ); Assert.Throws<OverflowException>( () => @base.Add( operand 
) ); } [Test] public void TestAdditionOperator_TimeSpan_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = TimeSpan.FromSeconds( -1 ); Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestAdd_TimeSpan_MaxPlus1Tick_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999900 ); var operand = TimeSpan.FromTicks( 1 ); Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_TimeSpan_MaxPlus1Tick_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999900 ); var operand = TimeSpan.FromTicks( 1 ); Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestAdd_TimeSpan_MinMinus1ick_Overflow() { var @base = new Timestamp( -9223372036854775808L, 99 ); var operand = TimeSpan.FromTicks( -1 ); Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_TimeSpan_MinMinus1ick_Overflow() { var @base = new Timestamp( -9223372036854775808L, 99 ); var operand = TimeSpan.FromTicks( -1 ); Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestSubtract_TimeSpan_Same() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.Zero; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_Same() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.Zero; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_TimeSpan_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( 1 ); var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( 
result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( 1 ); var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_TimeSpan_1Tick() { var @base = new Timestamp( 1L, 101 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_1Tick() { var @base = new Timestamp( 1L, 101 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_TimeSpan_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( -1 ); var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromSeconds( -1 ); var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_TimeSpan_Minus1Tick() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 101 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_Minus1Tick() { var @base = new Timestamp( 1L, 1 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, 
Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 101 ) ); } [Test] public void TestSubtract_TimeSpan_MoveUp() { var @base = new Timestamp( 1L, 999999900 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_MoveUp() { var @base = new Timestamp( 1L, 999999900 ); var operand = TimeSpan.FromTicks( -1 ); var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestSubtract_TimeSpan_MoveDown() { var @base = new Timestamp( 1L, 99 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999999 ) ); } [Test] public void TestSubtractionOperator_TimeSpan_MoveDown() { var @base = new Timestamp( 1L, 99 ); var operand = TimeSpan.FromTicks( 1 ); var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999999 ) ); } [Test] public void TestSubtract_TimeSpan_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = TimeSpan.FromSeconds( -1 ); Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_TimeSpan_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = TimeSpan.FromSeconds( -1 ); Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_TimeSpan_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = TimeSpan.FromSeconds( 1 ); Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] 
public void TestSubtractionOperator_TimeSpan_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = TimeSpan.FromSeconds( 1 ); Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_TimeSpan_MaxPlus1Tick_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999900 ); var operand = TimeSpan.FromTicks( -1 ); Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_TimeSpan_MaxPlus1Tick_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999900 ); var operand = TimeSpan.FromTicks( -1 ); Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_TimeSpan_MinMinus1Tick_Overflow() { var @base = new Timestamp( -9223372036854775808L, 99 ); var operand = TimeSpan.FromTicks( 1 ); Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_TimeSpan_MinMinus1Tick_Overflow() { var @base = new Timestamp( -9223372036854775808L, 99 ); var operand = TimeSpan.FromTicks( 1 ); Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } #if !NET35 && !UNITY #if !WINDOWS_PHONE #if !UNITY || MSGPACK_UNITY_FULL [Test] public void TestAdd_BigInteger_Same() { var @base = new Timestamp( 1L, 1 ); var operand = BigInteger.Zero; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_BigInteger_Same() { var @base = new Timestamp( 1L, 1 ); var operand = BigInteger.Zero; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_BigInteger_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = 1000000000; var result = @base.Add( operand ); Assert.That( 
result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_BigInteger_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = 1000000000; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_BigInteger_1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = 1; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 2 ) ); } [Test] public void TestAdditionOperator_BigInteger_1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = 1; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 2 ) ); } [Test] public void TestAdd_BigInteger_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = -1000000000; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_BigInteger_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = -1000000000; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_BigInteger_Minus1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = -1; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestAdditionOperator_BigInteger_Minus1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = -1; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void 
TestAdd_BigInteger_MoveUp() { var @base = new Timestamp( 1L, 999999999 ); var operand = 2; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdditionOperator_BigInteger_MoveUp() { var @base = new Timestamp( 1L, 999999999 ); var operand = 2; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestAdd_BigInteger_MoveDown() { var @base = new Timestamp( 1L, 0 ); var operand = -2; var result = @base.Add( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999998 ) ); } [Test] public void TestAdditionOperator_BigInteger_MoveDown() { var @base = new Timestamp( 1L, 0 ); var operand = -2; var result = @base + operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999998 ) ); } [Test] public void TestAdd_BigInteger_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = 1000000000; Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_BigInteger_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = 1000000000; Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestAdd_BigInteger_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = -1000000000; Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_BigInteger_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = -1000000000; Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void 
TestAdd_BigInteger_MaxPlus1Nsec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999999 ); var operand = 1; Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_BigInteger_MaxPlus1Nsec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999999 ); var operand = 1; Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestAdd_BigInteger_MinMinus1Nsec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 0 ); var operand = -1; Assert.Throws<OverflowException>( () => @base.Add( operand ) ); } [Test] public void TestAdditionOperator_BigInteger_MinMinus1Nsec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 0 ); var operand = -1; Assert.Throws<OverflowException>( () => { var x = @base + operand; } ); } [Test] public void TestSubtract_BigInteger_Same() { var @base = new Timestamp( 1L, 1 ); var operand = BigInteger.Zero; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_BigInteger_Same() { var @base = new Timestamp( 1L, 1 ); var operand = BigInteger.Zero; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_BigInteger_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = 1000000000; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_BigInteger_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = 1000000000; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void 
TestSubtract_BigInteger_1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = 1; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestSubtractionOperator_BigInteger_1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = 1; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 0 ) ); } [Test] public void TestSubtract_BigInteger_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = -1000000000; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_BigInteger_Minus1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = -1000000000; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_BigInteger_Minus1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = -1; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 2 ) ); } [Test] public void TestSubtractionOperator_BigInteger_Minus1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = -1; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 1 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 2 ) ); } [Test] public void TestSubtract_BigInteger_MoveUp() { var @base = new Timestamp( 1L, 999999999 ); var operand = -2; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtractionOperator_BigInteger_MoveUp() { var @base = new Timestamp( 1L, 999999999 ); 
var operand = -2; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 2 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 1 ) ); } [Test] public void TestSubtract_BigInteger_MoveDown() { var @base = new Timestamp( 1L, 0 ); var operand = 2; var result = @base.Subtract( operand ); Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999998 ) ); } [Test] public void TestSubtractionOperator_BigInteger_MoveDown() { var @base = new Timestamp( 1L, 0 ); var operand = 2; var result = @base - operand; Assert.That( result.UnixEpochSecondsPart, Is.EqualTo( 0 ) ); Assert.That( result.NanosecondsPart, Is.EqualTo( 999999998 ) ); } [Test] public void TestSubtract_BigInteger_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = -1000000000; Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_BigInteger_MaxPlus1Sec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 0 ); var operand = -1000000000; Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_BigInteger_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = 1000000000; Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_BigInteger_MinMinus1Sec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 999999999 ); var operand = 1000000000; Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_BigInteger_MaxPlus1Nsec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 999999999 ); var operand = -1; Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_BigInteger_MaxPlus1Nsec_Overflow() { var @base = new Timestamp( 9223372036854775807L, 
999999999 ); var operand = -1; Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_BigInteger_MinMinus1Nsec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 0 ); var operand = 1; Assert.Throws<OverflowException>( () => @base.Subtract( operand ) ); } [Test] public void TestSubtractionOperator_BigInteger_MinMinus1Nsec_Overflow() { var @base = new Timestamp( -9223372036854775808L, 0 ); var operand = 1; Assert.Throws<OverflowException>( () => { var x = @base - operand; } ); } [Test] public void TestSubtract_Timestamp_Same() { var @base = new Timestamp( 1L, 1 ); var operand = new Timestamp( 1L, 1 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( 0 ) ) ); } [Test] public void TestSubtractionOperator_Timstamp_Same() { var @base = new Timestamp( 1, 1 ); var operand = new Timestamp( 1, 1 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( 0 ) ) ); } [Test] public void TestSubtract_Timestamp_1Sec() { var @base = new Timestamp( 1L, 1 ); var operand = new Timestamp( 1L, 0 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( 1 ) ) ); } [Test] public void TestSubtractionOperator_Timstamp_1Sec() { var @base = new Timestamp( 1, 1 ); var operand = new Timestamp( 1, 0 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( 1 ) ) ); } [Test] public void TestSubtract_Timestamp_1Nsec() { var @base = new Timestamp( 1L, 1 ); var operand = new Timestamp( 0L, 1 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( 1000000000 ) ) ); } [Test] public void TestSubtractionOperator_Timstamp_1Nsec() { var @base = new Timestamp( 1, 1 ); var operand = new Timestamp( 0, 1 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( 1000000000 ) ) ); } [Test] public void TestSubtract_Timestamp_MoveDown() { var @base = new Timestamp( 2L, 1 ); 
var operand = new Timestamp( 1L, 2 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( 999999999 ) ) ); } [Test] public void TestSubtractionOperator_Timstamp_MoveDown() { var @base = new Timestamp( 2, 1 ); var operand = new Timestamp( 1, 2 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( 999999999 ) ) ); } [Test] public void TestSubtract_Timestamp_PositiveNegative() { var @base = new Timestamp( 1L, 2 ); var operand = new Timestamp( -1L, 1 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( 2000000001 ) ) ); } [Test] public void TestSubtractionOperator_Timstamp_PositiveNegative() { var @base = new Timestamp( 1, 2 ); var operand = new Timestamp( -1, 1 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( 2000000001 ) ) ); } [Test] public void TestSubtract_Timestamp_MaxMin() { var @base = new Timestamp( 9223372036854775807L, 999999999 ); var operand = new Timestamp( -9223372036854775808L, 0 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( Int64.MaxValue ) * 1000000000 + 999999999 - new BigInteger( Int64.MinValue ) * 1000000000 ) ); } [Test] public void TestSubtractionOperator_Timstamp_MaxMin() { var @base = new Timestamp( 9223372036854775807, 999999999 ); var operand = new Timestamp( -9223372036854775808, 0 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( Int64.MaxValue ) * 1000000000 + 999999999 - new BigInteger( Int64.MinValue ) * 1000000000 ) ); } [Test] public void TestSubtract_Timestamp_MinMax() { var @base = new Timestamp( -9223372036854775808L, 0 ); var operand = new Timestamp( 9223372036854775807L, 999999999 ); var result = @base.Subtract( operand ); Assert.That( result, Is.EqualTo( new BigInteger( Int64.MinValue ) * 1000000000 - new BigInteger( Int64.MaxValue ) * 1000000000 - 999999999 ) ); } [Test] public void 
TestSubtractionOperator_Timstamp_MinMax() { var @base = new Timestamp( -9223372036854775808, 0 ); var operand = new Timestamp( 9223372036854775807, 999999999 ); var result = @base - operand; Assert.That( result, Is.EqualTo( new BigInteger( Int64.MinValue ) * 1000000000 - new BigInteger( Int64.MaxValue ) * 1000000000 - 999999999 ) ); } #endif // !NET35 && !UNITY #endif // !WINDOWS_PHONE #endif // !UNITY || MSGPACK_UNITY_FULL } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using System.Text.RegularExpressions;
using Xunit;

public class RegexUnicodeCharTests
{
    // Exclusive upper bound of the scan below: 2 << 15 == 0x10000, i.e. every
    // UTF-16 code unit in the Basic Multilingual Plane.
    private const int MaxUnicodeRange = 2 << 15;

    [Fact]
    public static void RegexUnicodeChar()
    {
        // The regex engine is Unicode-aware for the \w and \d character classes.
        // \s is not - it still only recognizes the ASCII space separators, not the Unicode ones.
        // \w is equivalent to the Unicode categories:
        // [\p{Ll}\p{Lu}\p{Lt}\p{Lm}\p{Lo}\p{Nd}\p{Mn}\p{Pc}]
        List<char> validChars = new List<char>();
        List<char> invalidChars = new List<char>();
        for (int i = 0; i < MaxUnicodeRange; i++)
        {
            char c = (char)i;
            switch (CharUnicodeInfo.GetUnicodeCategory(c))
            {
                case UnicodeCategory.UppercaseLetter:        // Lu
                case UnicodeCategory.LowercaseLetter:        // Ll
                case UnicodeCategory.TitlecaseLetter:        // Lt
                case UnicodeCategory.ModifierLetter:         // Lm
                case UnicodeCategory.OtherLetter:            // Lo
                case UnicodeCategory.DecimalDigitNumber:     // Nd
                // case UnicodeCategory.LetterNumber:        // excluded from \w
                // case UnicodeCategory.OtherNumber:         // excluded from \w
                case UnicodeCategory.NonSpacingMark:         // Mn
                // case UnicodeCategory.SpacingCombiningMark: // Mc - excluded from \w
                case UnicodeCategory.ConnectorPunctuation:   // Pc
                    validChars.Add(c);
                    break;
                default:
                    invalidChars.Add(c);
                    break;
            }
        }

        // \w - create strings from characters that match \w and make sure the engine catches them.
        // Build a random string with valid characters followed by invalid characters.
        Random random = new Random(-55);
        Regex regex = new Regex(@"\w*");

        int validCharLength = 10;
        int charCount = validChars.Count;
        int invalidCharCount = invalidChars.Count;
        int invalidCharLength = 15;

        for (int i = 0; i < 100; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();
            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(charCount)];
                builder1.Append(c);
                builder2.Append(c);
            }
            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidCharCount)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);

            Assert.True(match.Success);
            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(0, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            do
            {
                // \w* matches the empty string, so the engine reports an empty
                // Match at each of the remaining (non-matching) positions.
                Assert.Equal(string.Empty, match.Value);
                Assert.Equal(0, match.Length);
                match = match.NextMatch();
            } while (match.Success);
        }

        // Build a random string with invalid characters, followed by valid characters, then invalid again.
        random = new Random(-55);
        regex = new Regex(@"\w+");

        validCharLength = 10;
        charCount = validChars.Count;
        invalidCharCount = invalidChars.Count;
        invalidCharLength = 15;

        for (int i = 0; i < 500; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();
            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidCharCount)]);
            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(charCount)];
                builder1.Append(c);
                builder2.Append(c);
            }
            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidCharCount)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);

            Assert.True(match.Success);
            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(invalidCharLength, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            Assert.False(match.Success);
        }

        // Rebuild the partition: \d matches only DecimalDigitNumber (Nd).
        validChars = new List<char>();
        invalidChars = new List<char>();
        for (int i = 0; i < MaxUnicodeRange; i++)
        {
            char c = (char)i;
            switch (CharUnicodeInfo.GetUnicodeCategory(c))
            {
                case UnicodeCategory.DecimalDigitNumber: // Nd
                    validChars.Add(c);
                    break;
                default:
                    invalidChars.Add(c);
                    break;
            }
        }

        // \d - create strings from characters that match \d and make sure the engine catches them.
        // Build a random string with valid characters followed by invalid ones.
        regex = new Regex(@"\d+");

        validCharLength = 10;
        invalidCharLength = 15;
        charCount = validChars.Count;
        invalidCharCount = invalidChars.Count;

        for (int i = 0; i < 100; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();
            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(charCount)];
                builder1.Append(c);
                builder2.Append(c);
            }
            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidCharCount)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);

            Assert.True(match.Success); // consistency with the sibling loops above/below
            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(0, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            Assert.False(match.Success);
        }

        // Build a random string with invalid characters, then valid, then invalid again.
        regex = new Regex(@"\d+");

        validCharLength = 10;
        invalidCharLength = 15;
        charCount = validChars.Count;
        invalidCharCount = invalidChars.Count;

        for (int i = 0; i < 100; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();
            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidCharCount)]);
            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(charCount)];
                builder1.Append(c);
                builder2.Append(c);
            }
            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidCharCount)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);

            Assert.True(match.Success);
            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(invalidCharLength, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            Assert.False(match.Success);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Linq.Expressions;
using Xunit;

namespace System.Linq.Tests
{
    public class GroupJoinTests : EnumerableBasedTests
    {
        public struct CustomerRec
        {
            public string name;
            public int? custID;
        }

        public struct OrderRec
        {
            public int? orderID;
            public int? custID;
            public int? total;
        }

        public struct AnagramRec
        {
            public string name;
            public int? orderID;
            public int? total;
        }

        public struct JoinRec : IEquatable<JoinRec>
        {
            public string name;
            public int?[] orderID;
            public int?[] total;

            public override int GetHashCode()
            {
                // Not great, but it'll serve. Null-conditional access keeps a
                // default(JoinRec) (all fields null) from throwing; the value is
                // unchanged for instances whose fields are populated.
                return (name?.GetHashCode() ?? 0) ^ (orderID?.Length ?? 0) ^ ((total?.Length ?? 0) * 31);
            }

            // Element-wise value equality over name, orderID and total;
            // null arrays compare equal only to null arrays.
            public bool Equals(JoinRec other)
            {
                if (!string.Equals(name, other.name)) return false;

                if (orderID == null)
                {
                    if (other.orderID != null) return false;
                }
                else
                {
                    if (other.orderID == null) return false;
                    if (orderID.Length != other.orderID.Length) return false;
                    for (int i = 0; i != other.orderID.Length; ++i)
                        if (orderID[i] != other.orderID[i]) return false;
                }

                if (total == null)
                {
                    if (other.total != null) return false;
                }
                else
                {
                    if (other.total == null) return false;
                    if (total.Length != other.total.Length) return false;
                    for (int i = 0; i != other.total.Length; ++i)
                        if (total[i] != other.total[i]) return false;
                }

                return true;
            }

            public override bool Equals(object obj)
            {
                return obj is JoinRec && Equals((JoinRec)obj);
            }
        }

        [Fact]
        public void OuterEmptyInnerNonEmpty()
        {
            CustomerRec[] outer = { };
            OrderRec[] inner = new[]
            {
                new OrderRec { orderID = 45321, custID = 98022, total = 50 },
                new OrderRec { orderID = 97865, custID = 32103, total = 25 }
            };

            // An empty outer sequence must produce an empty result regardless of inner.
            Assert.Empty(outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.custID, e => e.custID,
                (cr, orIE) => new JoinRec { name = cr.name, orderID = orIE.Select(o => o.orderID).ToArray(), total = orIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void CustomComparer()
        {
            CustomerRec[] outer = new[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };
            JoinRec[] expected = new[]
            {
                new JoinRec { name = "Tim", orderID = new int?[] { 93489 }, total = new int?[] { 45 } },
                new JoinRec { name = "Bob", orderID = new int?[] { }, total = new int?[] { } },
                new JoinRec { name = "Robert", orderID = new int?[] { 93483 }, total = new int?[] { 19 } }
            };

            // "miT" matches "Tim" only under the anagram comparer.
            Assert.Equal(expected, outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name,
                (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() },
                new AnagramEqualityComparer()));
        }

        [Fact]
        public void OuterNull()
        {
            IQueryable<CustomerRec> outer = null;
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("outer", () =>
                outer.GroupJoin(inner.AsQueryable(), e => e.name, e => e.name,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() },
                    new AnagramEqualityComparer()));
        }

        [Fact]
        public void InnerNull()
        {
            CustomerRec[] outer = new[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            IQueryable<AnagramRec> inner = null;

            AssertExtensions.Throws<ArgumentNullException>("inner", () =>
                outer.AsQueryable().GroupJoin(inner, e => e.name, e => e.name,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() },
                    new AnagramEqualityComparer()));
        }

        [Fact]
        public void OuterKeySelectorNull()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("outerKeySelector", () =>
                outer.AsQueryable().GroupJoin(inner.AsQueryable(), null, e => e.name,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() },
                    new AnagramEqualityComparer()));
        }

        [Fact]
        public void InnerKeySelectorNull()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("innerKeySelector", () =>
                outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, null,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() },
                    new AnagramEqualityComparer()));
        }

        [Fact]
        public void ResultSelectorNull()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("resultSelector", () =>
                outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name,
                    (Expression<Func<CustomerRec, IEnumerable<AnagramRec>, JoinRec>>)null,
                    new AnagramEqualityComparer()));
        }

        [Fact]
        public void OuterNullNoComparer()
        {
            IQueryable<CustomerRec> outer = null;
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("outer", () =>
                outer.GroupJoin(inner.AsQueryable(), e => e.name, e => e.name,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void InnerNullNoComparer()
        {
            CustomerRec[] outer = new[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            IQueryable<AnagramRec> inner = null;

            AssertExtensions.Throws<ArgumentNullException>("inner", () =>
                outer.AsQueryable().GroupJoin(inner, e => e.name, e => e.name,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void OuterKeySelectorNullNoComparer()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("outerKeySelector", () =>
                outer.AsQueryable().GroupJoin(inner.AsQueryable(), null, e => e.name,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void InnerKeySelectorNullNoComparer()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("innerKeySelector", () =>
                outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, null,
                    (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() }));
        }

        [Fact]
        public void ResultSelectorNullNoComparer()
        {
            CustomerRec[] outer = new CustomerRec[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new AnagramRec[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };

            AssertExtensions.Throws<ArgumentNullException>("resultSelector", () =>
                outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name,
                    (Expression<Func<CustomerRec, IEnumerable<AnagramRec>, JoinRec>>)null));
        }

        [Fact]
        public void NullComparer()
        {
            CustomerRec[] outer = new[]
            {
                new CustomerRec { name = "Tim", custID = 1234 },
                new CustomerRec { name = "Bob", custID = 9865 },
                new CustomerRec { name = "Robert", custID = 9895 }
            };
            AnagramRec[] inner = new[]
            {
                new AnagramRec { name = "Robert", orderID = 93483, total = 19 },
                new AnagramRec { name = "miT", orderID = 93489, total = 45 }
            };
            // With a null comparer, the default ordinal comparison applies:
            // "miT" no longer matches "Tim".
            JoinRec[] expected = new[]
            {
                new JoinRec { name = "Tim", orderID = new int?[] { }, total = new int?[] { } },
                new JoinRec { name = "Bob", orderID = new int?[] { }, total = new int?[] { } },
                new JoinRec { name = "Robert", orderID = new int?[] { 93483 }, total = new int?[] { 19 } }
            };

            Assert.Equal(expected, outer.AsQueryable().GroupJoin(inner.AsQueryable(), e => e.name, e => e.name,
                (cr, arIE) => new JoinRec { name = cr.name, orderID = arIE.Select(o => o.orderID).ToArray(), total = arIE.Select(o => o.total).ToArray() },
                null));
        }

        [Fact]
        public void GroupJoin1()
        {
            var count = (new int[] { 0, 1, 2 }).AsQueryable().GroupJoin(new int[] { 1, 2, 3 }, n1 => n1, n2 => n2, (n1, n2) => n1).Count();

            // One result element per outer element, matched or not.
            Assert.Equal(3, count);
        }

        [Fact]
        public void GroupJoin2()
        {
            var count = (new int[] { 0, 1, 2 }).AsQueryable().GroupJoin(new int[] { 1, 2, 3 }, n1 => n1, n2 => n2, (n1, n2) => n1, EqualityComparer<int>.Default).Count();

            Assert.Equal(3, count);
        }
    }
}
using System;
using NBitcoin.BouncyCastle.Crypto.Utilities;

namespace NBitcoin.BouncyCastle.Crypto.Engines
{
    /**
    * A class that provides CAST6 key encryption operations,
    * such as encoding data and generating keys.
    *
    * All the algorithms herein are from the Internet RFC
    *
    * RFC2612 - CAST6 (128bit block, 128-256bit key)
    *
    * and implement a simplified cryptography interface.
    */
    // NOTE(review): the F1/F2/F3 round functions used throughout are not
    // defined in this file — presumably inherited from Cast5Engine; confirm
    // they match the CAST-256 f1/f2/f3 definitions in RFC 2612 section 2.2.
    public sealed class Cast6Engine
        : Cast5Engine
    {
        //====================================
        // Useful constants
        //====================================
        // CAST6 always performs 12 quad-rounds (6 forward, 6 reversed below).
        private const int ROUNDS = 12;
        private const int BLOCK_SIZE = 16;  // bytes = 128 bits

        /*
        * Put the round and mask keys into an array.
        * Kr0[i] => _Kr[i*4 + 0]
        */
        private int []_Kr = new int[ROUNDS*4];        // the rotating round key(s)
        private uint []_Km = new uint[ROUNDS*4];      // the masking round key(s)

        /*
        * Key setup
        */
        // Tr/Tm: rotation and masking constants for key-schedule generation
        // (RFC 2612 section 2.4); filled in by SetKey before use.
        private int []_Tr = new int[24 * 8];
        private uint []_Tm = new uint[24 * 8];
        // The eight 32-bit working-key words A..H (KAPPA in the RFC).
        private uint[] _workingKey = new uint[8];

        public Cast6Engine()
        {
        }

        public override string AlgorithmName
        {
            get { return "CAST6"; }
        }

        // No per-block state is kept outside the key schedule, so there is
        // nothing to clear here; the schedule persists until SetKey is called.
        public override void Reset()
        {
        }

        public override int GetBlockSize()
        {
            return BLOCK_SIZE;
        }

        //==================================
        // Private Implementation
        //==================================
        /*
        * Creates the subkeys using the same nomenclature
        * as described in RFC2612.
        *
        * See section 2.4
        */
        internal override void SetKey(
            byte[] key)
        {
            // Initial constants and deltas for the Tm/Tr tables (RFC 2612 §2.4).
            uint Cm = 0x5a827999;
            uint Mm = 0x6ed9eba1;
            int Cr = 19;
            int Mr = 17;
            /*
            * Determine the key size here, if required
            *
            * if keysize < 256 bytes, pad with 0
            *
            * Typical key sizes => 128, 160, 192, 224, 256
            */
            for (int i=0; i< 24; i++)
            {
                for (int j=0; j< 8; j++)
                {
                    _Tm[i*8 + j] = Cm;
                    Cm += Mm; //mod 2^32;
                    _Tr[i*8 + j] = Cr;
                    Cr = (Cr + Mr) & 0x1f;            // mod 32
                }
            }

            // Keys shorter than 32 bytes are implicitly zero-padded here.
            // NOTE(review): tmpKey is 64 bytes but only the first 32 are read
            // by the loop below — the extra space appears unused.
            byte[] tmpKey = new byte[64];
            key.CopyTo(tmpKey, 0);

            // now create ABCDEFGH
            for (int i = 0; i < 8; i++)
            {
                _workingKey[i] = Pack.BE_To_UInt32(tmpKey, i*4);
            }

            // Generate the key schedule
            for (int i = 0; i < 12; i++)
            {
                // KAPPA <- W2i(KAPPA)
                int i2 = i*2 *8;
                _workingKey[6] ^= F1(_workingKey[7], _Tm[i2], _Tr[i2]);
                _workingKey[5] ^= F2(_workingKey[6], _Tm[i2+1], _Tr[i2+1]);
                _workingKey[4] ^= F3(_workingKey[5], _Tm[i2+2], _Tr[i2+2]);
                _workingKey[3] ^= F1(_workingKey[4], _Tm[i2+3], _Tr[i2+3]);
                _workingKey[2] ^= F2(_workingKey[3], _Tm[i2+4], _Tr[i2+4]);
                _workingKey[1] ^= F3(_workingKey[2], _Tm[i2+5], _Tr[i2+5]);
                _workingKey[0] ^= F1(_workingKey[1], _Tm[i2+6], _Tr[i2+6]);
                _workingKey[7] ^= F2(_workingKey[0], _Tm[i2+7], _Tr[i2+7]);
                // KAPPA <- W2i+1(KAPPA)
                i2 = (i*2 + 1)*8;
                _workingKey[6] ^= F1(_workingKey[7], _Tm[i2], _Tr[i2]);
                _workingKey[5] ^= F2(_workingKey[6], _Tm[i2+1], _Tr[i2+1]);
                _workingKey[4] ^= F3(_workingKey[5], _Tm[i2+2], _Tr[i2+2]);
                _workingKey[3] ^= F1(_workingKey[4], _Tm[i2+3], _Tr[i2+3]);
                _workingKey[2] ^= F2(_workingKey[3], _Tm[i2+4], _Tr[i2+4]);
                _workingKey[1] ^= F3(_workingKey[2], _Tm[i2+5], _Tr[i2+5]);
                _workingKey[0] ^= F1(_workingKey[1], _Tm[i2+6], _Tr[i2+6]);
                _workingKey[7] ^= F2(_workingKey[0], _Tm[i2+7], _Tr[i2+7]);
                // Kr_(i) <- KAPPA  (only the low 5 bits are rotation amounts)
                _Kr[i*4] = (int)(_workingKey[0] & 0x1f);
                _Kr[i*4 + 1] = (int)(_workingKey[2] & 0x1f);
                _Kr[i*4 + 2] = (int)(_workingKey[4] & 0x1f);
                _Kr[i*4 + 3] = (int)(_workingKey[6] & 0x1f);
                // Km_(i) <- KAPPA
                _Km[i*4] = _workingKey[7];
                _Km[i*4 + 1] = _workingKey[5];
                _Km[i*4 + 2] = _workingKey[3];
                _Km[i*4 + 3] = _workingKey[1];
            }
        }

        /**
        * Encrypt the given input starting at the given offset and place
        * the result in the provided buffer starting at the given offset.
        *
        * @param src The plaintext buffer
        * @param srcIndex An offset into src
        * @param dst The ciphertext buffer
        * @param dstIndex An offset into dst
        */
        internal override int EncryptBlock(
            byte[] src,
            int srcIndex,
            byte[] dst,
            int dstIndex)
        {
            // process the input block
            // batch the units up into 4x32 bit chunks and go for it
            uint A = Pack.BE_To_UInt32(src, srcIndex);
            uint B = Pack.BE_To_UInt32(src, srcIndex + 4);
            uint C = Pack.BE_To_UInt32(src, srcIndex + 8);
            uint D = Pack.BE_To_UInt32(src, srcIndex + 12);
            uint[] result = new uint[4];
            CAST_Encipher(A, B, C, D, result);
            // now stuff them into the destination block
            Pack.UInt32_To_BE(result[0], dst, dstIndex);
            Pack.UInt32_To_BE(result[1], dst, dstIndex + 4);
            Pack.UInt32_To_BE(result[2], dst, dstIndex + 8);
            Pack.UInt32_To_BE(result[3], dst, dstIndex + 12);
            return BLOCK_SIZE;
        }

        /**
        * Decrypt the given input starting at the given offset and place
        * the result in the provided buffer starting at the given offset.
        *
        * @param src The ciphertext buffer
        * @param srcIndex An offset into src
        * @param dst The plaintext buffer
        * @param dstIndex An offset into dst
        */
        internal override int DecryptBlock(
            byte[] src,
            int srcIndex,
            byte[] dst,
            int dstIndex)
        {
            // process the input block
            // batch the units up into 4x32 bit chunks and go for it
            uint A = Pack.BE_To_UInt32(src, srcIndex);
            uint B = Pack.BE_To_UInt32(src, srcIndex + 4);
            uint C = Pack.BE_To_UInt32(src, srcIndex + 8);
            uint D = Pack.BE_To_UInt32(src, srcIndex + 12);
            uint[] result = new uint[4];
            CAST_Decipher(A, B, C, D, result);
            // now stuff them into the destination block
            Pack.UInt32_To_BE(result[0], dst, dstIndex);
            Pack.UInt32_To_BE(result[1], dst, dstIndex + 4);
            Pack.UInt32_To_BE(result[2], dst, dstIndex + 8);
            Pack.UInt32_To_BE(result[3], dst, dstIndex + 12);
            return BLOCK_SIZE;
        }

        /**
        * Does the 12 quad rounds rounds to encrypt the block.
        *
        * @param A the 00-31 bits of the plaintext block
        * @param B the 32-63 bits of the plaintext block
        * @param C the 64-95 bits of the plaintext block
        * @param D the 96-127 bits of the plaintext block
        * @param result the resulting ciphertext
        */
        private void CAST_Encipher(
            uint A,
            uint B,
            uint C,
            uint D,
            uint[] result)
        {
            // Rounds 0-5 apply Q; rounds 6-11 apply QBAR (same operations,
            // reversed order) — see RFC 2612 section 2.3.
            for (int i = 0; i < 6; i++)
            {
                int x = i*4;
                // BETA <- Qi(BETA)
                C ^= F1(D, _Km[x], _Kr[x]);
                B ^= F2(C, _Km[x + 1], _Kr[x + 1]);
                A ^= F3(B, _Km[x + 2], _Kr[x + 2]);
                D ^= F1(A, _Km[x + 3], _Kr[x + 3]);
            }
            for (int i = 6; i < 12; i++)
            {
                int x = i*4;
                // BETA <- QBARi(BETA)
                D ^= F1(A, _Km[x + 3], _Kr[x + 3]);
                A ^= F3(B, _Km[x + 2], _Kr[x + 2]);
                B ^= F2(C, _Km[x + 1], _Kr[x + 1]);
                C ^= F1(D, _Km[x], _Kr[x]);
            }
            result[0] = A;
            result[1] = B;
            result[2] = C;
            result[3] = D;
        }

        /**
        * Does the 12 quad rounds rounds to decrypt the block.
        *
        * @param A the 00-31 bits of the ciphertext block
        * @param B the 32-63 bits of the ciphertext block
        * @param C the 64-95 bits of the ciphertext block
        * @param D the 96-127 bits of the ciphertext block
        * @param result the resulting plaintext
        */
        private void CAST_Decipher(
            uint A,
            uint B,
            uint C,
            uint D,
            uint[] result)
        {
            // Decryption mirrors encryption with the round keys consumed in
            // reverse order: x runs 44,40,...,24 then 20,16,...,0.
            for (int i = 0; i < 6; i++)
            {
                int x = (11-i)*4;
                // BETA <- Qi(BETA)
                C ^= F1(D, _Km[x], _Kr[x]);
                B ^= F2(C, _Km[x + 1], _Kr[x + 1]);
                A ^= F3(B, _Km[x + 2], _Kr[x + 2]);
                D ^= F1(A, _Km[x + 3], _Kr[x + 3]);
            }
            for (int i=6; i<12; i++)
            {
                int x = (11-i)*4;
                // BETA <- QBARi(BETA)
                D ^= F1(A, _Km[x + 3], _Kr[x + 3]);
                A ^= F3(B, _Km[x + 2], _Kr[x + 2]);
                B ^= F2(C, _Km[x + 1], _Kr[x + 1]);
                C ^= F1(D, _Km[x], _Kr[x]);
            }
            result[0] = A;
            result[1] = B;
            result[2] = C;
            result[3] = D;
        }
    }
}
//
// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.Insights;
using Microsoft.Azure.Management.Insights.Models;

namespace Microsoft.Azure.Management.Insights
{
    /// <summary>
    /// Convenience extension methods over
    /// <c>IMonitoringConfigurationOperations</c>: for each operation a
    /// synchronous wrapper (which blocks on the async call) and an async
    /// overload that supplies <c>CancellationToken.None</c>.
    /// </summary>
    public static partial class MonitoringConfigurationOperationsExtensions
    {
        /// <summary>
        /// Creates or updates the monitoring configuration for the given
        /// resource. Synchronous wrapper: blocks until the async operation
        /// completes.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static AzureOperationResponse CreateOrUpdateConfiguration(this IMonitoringConfigurationOperations operations, string resourceUri, MonitoringConfigurationCreateOrUpdateParameters parameters)
        {
            // Generated sync-over-async pattern: run on the default scheduler
            // and block for the result.
            return Task.Factory.StartNew((object s) =>
            {
                return ((IMonitoringConfigurationOperations)s).CreateOrUpdateConfigurationAsync(resourceUri, parameters);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Creates or updates the monitoring configuration for the given
        /// resource, without cancellation support.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<AzureOperationResponse> CreateOrUpdateConfigurationAsync(this IMonitoringConfigurationOperations operations, string resourceUri, MonitoringConfigurationCreateOrUpdateParameters parameters)
        {
            return operations.CreateOrUpdateConfigurationAsync(resourceUri, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Creates or updates the storage configuration for the given
        /// resource. Synchronous wrapper: blocks until the async operation
        /// completes.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// The create or update monitoring setting response. It's marked as
        /// empty. We only pass it to ensure json error handling.
        /// </returns>
        public static MonitoringConfigurationCreateOrUpdateResponse CreateOrUpdateStorageConfiguration(this IMonitoringConfigurationOperations operations, string resourceUri, CreateOrUpdateStorageConfigurationParameters parameters)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IMonitoringConfigurationOperations)s).CreateOrUpdateStorageConfigurationAsync(resourceUri, parameters);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Creates or updates the storage configuration for the given
        /// resource, without cancellation support.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// The create or update monitoring setting response. It's marked as
        /// empty. We only pass it to ensure json error handling.
        /// </returns>
        public static Task<MonitoringConfigurationCreateOrUpdateResponse> CreateOrUpdateStorageConfigurationAsync(this IMonitoringConfigurationOperations operations, string resourceUri, CreateOrUpdateStorageConfigurationParameters parameters)
        {
            return operations.CreateOrUpdateStorageConfigurationAsync(resourceUri, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Gets the monitoring configuration for the given resource.
        /// Synchronous wrapper: blocks until the async operation completes.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static MonitoringConfigurationGetResponse GetConfiguration(this IMonitoringConfigurationOperations operations, string resourceUri)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IMonitoringConfigurationOperations)s).GetConfigurationAsync(resourceUri);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets the monitoring configuration for the given resource, without
        /// cancellation support.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<MonitoringConfigurationGetResponse> GetConfigurationAsync(this IMonitoringConfigurationOperations operations, string resourceUri)
        {
            return operations.GetConfigurationAsync(resourceUri, CancellationToken.None);
        }

        /// <summary>
        /// Gets the storage configuration for the given resource. Synchronous
        /// wrapper: blocks until the async operation completes.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static StorageConfigurationGetResponse GetStorageConfiguration(this IMonitoringConfigurationOperations operations, string resourceUri)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IMonitoringConfigurationOperations)s).GetStorageConfigurationAsync(resourceUri);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets the storage configuration for the given resource, without
        /// cancellation support.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<StorageConfigurationGetResponse> GetStorageConfigurationAsync(this IMonitoringConfigurationOperations operations, string resourceUri)
        {
            return operations.GetStorageConfigurationAsync(resourceUri, CancellationToken.None);
        }

        /// <summary>
        /// Updates the monitoring configuration for the given resource.
        /// Synchronous wrapper: blocks until the async operation completes.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier for the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static AzureOperationResponse UpdateConfiguration(this IMonitoringConfigurationOperations operations, string resourceUri, MonitoringConfigurationCreateOrUpdateParameters parameters)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IMonitoringConfigurationOperations)s).UpdateConfigurationAsync(resourceUri, parameters);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Updates the monitoring configuration for the given resource,
        /// without cancellation support.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier for the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<AzureOperationResponse> UpdateConfigurationAsync(this IMonitoringConfigurationOperations operations, string resourceUri, MonitoringConfigurationCreateOrUpdateParameters parameters)
        {
            return operations.UpdateConfigurationAsync(resourceUri, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Updates the storage configuration for the given resource.
        /// Synchronous wrapper: blocks until the async operation completes.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static AzureOperationResponse UpdateStorageConfiguration(this IMonitoringConfigurationOperations operations, string resourceUri, CreateOrUpdateStorageConfigurationParameters parameters)
        {
            return Task.Factory.StartNew((object s) =>
            {
                return ((IMonitoringConfigurationOperations)s).UpdateStorageConfigurationAsync(resourceUri, parameters);
            }
            , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Updates the storage configuration for the given resource, without
        /// cancellation support.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.Insights.IMonitoringConfigurationOperations.
        /// </param>
        /// <param name='resourceUri'>
        /// Required. The resource identifier of the configuration.
        /// </param>
        /// <param name='parameters'>
        /// Required. Parameters supplied to the operation.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<AzureOperationResponse> UpdateStorageConfigurationAsync(this IMonitoringConfigurationOperations operations, string resourceUri, CreateOrUpdateStorageConfigurationParameters parameters)
        {
            return operations.UpdateStorageConfigurationAsync(resourceUri, parameters, CancellationToken.None);
        }
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using FluentAssertions;
using Its.Log.Instrumentation;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using NUnit.Framework;

namespace Alluvial.Tests.Distributors
{
    /// <summary>
    /// Abstract contract tests for distributor implementations. Concrete
    /// fixtures supply the distributor under test via CreateDistributor and
    /// the timing characteristics via DefaultLeaseDuration and
    /// ClockDriftTolerance.
    /// </summary>
    /// <remarks>
    /// NOTE(review): many of these tests use real Task.Delay waits scaled by
    /// DefaultLeaseDuration, so they are timing-sensitive on slow or loaded
    /// machines.
    /// </remarks>
    [TestFixture]
    public abstract class DistributorTests
    {
        // Factory for the distributor under test; all parameters are optional
        // so individual tests only specify what they care about.
        protected abstract IDistributor<int> CreateDistributor(
            Func<Lease<int>, Task> onReceive = null,
            Leasable<int>[] leasables = null,
            int maxDegreesOfParallelism = 5,
            string pool = null,
            TimeSpan? defaultLeaseDuration = null);

        // How long a lease lasts unless extended or shortened.
        protected abstract TimeSpan DefaultLeaseDuration { get; }

        // Allowed skew when comparing timestamps recorded by the distributor.
        protected abstract TimeSpan ClockDriftTolerance { get; }

        // Ten leasable resources named "1" through "10", rebuilt per test.
        protected Leasable<int>[] DefaultLeasables;

        [SetUp]
        public void SetUp()
        {
            DefaultLeasables = Enumerable.Range(1, 10)
                                         .Select(i => new Leasable<int>(i, i.ToString()))
                                         .ToArray();
        }

        [Test]
        public async Task When_the_distributor_is_started_then_notifications_begin()
        {
            var mre = new AsyncManualResetEvent();
            var distributor = CreateDistributor(async lease => { mre.Set(); });

            await distributor.Start();
            await mre.WaitAsync().Timeout();
            await distributor.Stop();

            // no TimeoutException, success!
        }

        [Test]
        public async Task When_Stop_has_been_called_then_Distributor_can_be_resumed_using_Start()
        {
            var distributor = CreateDistributor(maxDegreesOfParallelism: 10)
                .ReleaseLeasesWhenWorkIsDone()
                .Trace();
            distributor.OnReceive(async lease => { });
            await distributor.Start();
            await Task.Delay(20);
            await distributor.Stop();

            Console.WriteLine("\n\n\n STOPPED \n\n\n");

            var wasCalled = false;
            // Re-registering the receiver after Stop and starting again should
            // resume distribution.
            distributor.OnReceive(async lease => wasCalled = true);
            await distributor.Start();
            await Task.Delay(20);
            await distributor.Stop();

            wasCalled.Should().BeTrue();
        }

        [Test]
        public async Task When_Stop_has_been_called_then_Distributor_can_be_resumed_using_Distribute()
        {
            var distributor = CreateDistributor(maxDegreesOfParallelism: 1)
                .ReleaseLeasesWhenWorkIsDone()
                .Trace();
            distributor.OnReceive(async lease => { });
            await distributor.Start();
            await Task.Delay(20);
            await distributor.Stop();

            Console.WriteLine("\n\n\n STOPPED \n\n\n");

            // wait out any leases still held from the first run
            await Task.Delay((int) (DefaultLeaseDuration.TotalMilliseconds*2));

            var wasCalled = false;
            distributor.OnReceive(async lease => wasCalled = true);
            await distributor.Distribute(1);
            await Task.Delay(DefaultLeaseDuration);
            await distributor.Stop();

            wasCalled.Should().BeTrue();
        }

        [Test]
        public async Task No_further_acquisitions_occur_after_Stop_is_called()
        {
            var received = 0;
            var mre = new AsyncManualResetEvent();
            var distributor = CreateDistributor(async lease =>
            {
                await Task.Delay(1);
                Interlocked.Increment(ref received);
                mre.Set();
            });

            await distributor.Start();
            await mre.WaitAsync().Timeout();
            await distributor.Stop();

            var receivedAsOfStop = received;

            // the count must not move after Stop, even after lease durations pass
            await Task.Delay((int) DefaultLeaseDuration.TotalMilliseconds*2);

            received.Should().Be(receivedAsOfStop);
        }

        [Test]
        public async Task Any_given_lease_is_never_handed_out_to_more_than_one_handler_at_a_time()
        {
            var random = new Random();
            var currentlyGranted = new HashSet<string>();
            var everGranted = new HashSet<string>();
            var leasedConcurrently = "";
            var distributor = CreateDistributor().Trace();
            var countDown = new AsyncCountdownEvent(10);

            distributor.OnReceive(async lease =>
            {
                lock (currentlyGranted)
                {
                    // seeing a resource that is already in-flight means a
                    // concurrent double-grant — record it as a failure
                    if (currentlyGranted.Contains(lease.ResourceName))
                    {
                        leasedConcurrently = lease.ResourceName;
                    }

                    currentlyGranted.Add(lease.ResourceName);
                    everGranted.Add(lease.ResourceName);
                }

                await Task.Delay((int) (1000*random.NextDouble()));

                lock (currentlyGranted)
                {
                    currentlyGranted.Remove(lease.ResourceName);
                }

                countDown.Signal();
            });

            Enumerable.Range(1, 10).ToList().ForEach(_ => { distributor.Distribute(1); });

            await countDown.WaitAsync().Timeout();

            leasedConcurrently.Should().BeEmpty();
            everGranted.Count.Should().Be(10);
        }

        [Test]
        public async Task The_least_recently_released_lease_is_granted_next()
        {
            foreach (var resource in DefaultLeasables)
            {
                resource.LeaseLastGranted = DateTimeOffset.UtcNow.Subtract(TimeSpan.FromMinutes(2));
                resource.LeaseLastReleased = DateTimeOffset.UtcNow.Subtract(TimeSpan.FromMinutes(2));
            }

            // make resource "5" slightly staler than the rest
            var stalestLease = DefaultLeasables.Single(l => l.Name == "5");
            stalestLease.LeaseLastGranted = DateTimeOffset.UtcNow.Subtract(TimeSpan.FromMinutes(2.1));
            stalestLease.LeaseLastReleased = DateTimeOffset.UtcNow.Subtract(TimeSpan.FromMinutes(2.1));

            var distributor = CreateDistributor(pool: Guid.NewGuid().ToString()).Trace();

            Lease<int> receivedLease = null;

            distributor.OnReceive(async lease => { receivedLease = lease; });

            await distributor.Distribute(1);

            receivedLease.ResourceName.Should().Be("5");
        }

        [Test]
        public async Task When_receiver_throws_then_work_distribution_continues()
        {
            var received = 0;
            var distributor = CreateDistributor(defaultLeaseDuration: 1.Seconds()).Trace().ReleaseLeasesWhenWorkIsDone();
            var countdown = new AsyncCountdownEvent(20);
            distributor.OnReceive(async lease =>
            {
                Interlocked.Increment(ref received);

                // the first several receives throw; distribution must recover
                if (received < 10)
                {
                    throw new Exception("dangit!");
                }

                countdown.Signal();
            });

            await distributor.Start();

            await countdown.WaitAsync().Timeout();

            await distributor.Stop();

            received.Should().BeGreaterOrEqualTo(20);
        }

        [Test]
        public async Task When_a_lease_expires_because_the_recipient_took_too_long_then_it_is_leased_out_again()
        {
            var blocked = false;
            var receiveCount = 0;
            var mre = new AsyncManualResetEvent();
            var distributor = CreateDistributor(
                leasables: DefaultLeasables.Take(1).ToArray())
                .Trace();
            distributor.OnReceive(async lease =>
            {
                // the first grant overstays its lease by 4x
                if (!blocked)
                {
                    blocked = true;
                    await Task.Delay((int) (DefaultLeaseDuration.TotalMilliseconds*4));
                }

                Interlocked.Increment(ref receiveCount);
                mre.Set();
            });

            await distributor.Start();
            await mre.WaitAsync().Timeout();
            await distributor.Stop();

            receiveCount.Should().Be(1);
        }

        [Test]
        public virtual async Task A_lease_can_be_extended_using_ExpireIn()
        {
            var tally = new ConcurrentDictionary<string, int>();
            var pool = DateTimeOffset.UtcNow.Ticks.ToString();
            // two distributors compete over the same pool
            var distributor1 = CreateDistributor(pool: pool)
                .ReleaseLeasesWhenWorkIsDone()
                .Trace();
            var distributor2 = CreateDistributor(pool: pool)
                .ReleaseLeasesWhenWorkIsDone()
                .Trace();

            Func<Lease<int>, Task> onReceive = async lease =>
            {
                tally.AddOrUpdate(lease.ResourceName,
                                  addValueFactory: s => 1,
                                  updateValueFactory: (s, v) => v + 1);

                if (lease.ResourceName == "5")
                {
                    Console.WriteLine($"GOT LEASE 5 @ {DateTime.Now}");

                    // extend the lease
                    await lease.ExpireIn(TimeSpan.FromHours(2));

                    // wait longer than the lease would normally last
                    await Task.Delay(5.Seconds());
                    Console.WriteLine($"DONE LEASE 5@ {DateTime.Now}");
                }
            };

            distributor1.OnReceive(onReceive);
            distributor2.OnReceive(onReceive);
            await Task.WhenAll(
                distributor1.Start(),
                distributor2.Start());
            await Task.Delay(2.Seconds());
            await Task.WhenAll(distributor1.Stop(), distributor2.Stop());

            Console.WriteLine(tally.ToLogString());

            // the extended lease must have been granted exactly once
            tally.Should()
                 .ContainKey("5")
                 .And
                 .Subject["5"].Should().Be(1);
        }

        [Test]
        public async Task A_lease_can_be_shortened_using_ExpireIn()
        {
            var receivedCount = 0;
            var distributor = CreateDistributor(
                defaultLeaseDuration: 1.Hours()).Trace();
            distributor.OnReceive(async lease =>
            {
                Interlocked.Increment(ref receivedCount);
                // shorten the 1-hour lease so it can be re-granted immediately
                await lease.ExpireIn(2.Milliseconds());
            });

            var numberOfLeases = DefaultLeasables.Length;
            await distributor.Distribute(numberOfLeases);
            await distributor.Distribute(numberOfLeases)
                             .Timeout(5.Seconds());

            Console.WriteLine(new {receivedCount});
            receivedCount.Should().Be(numberOfLeases * 2);
        }

        [Test]
        public async Task When_Start_is_called_before_OnReceive_it_throws()
        {
            var distributor = CreateDistributor();

            Action start = () => distributor.Start().Wait();

            start.ShouldThrow<InvalidOperationException>()
                 .And
                 .Message
                 .Should()
                 .Contain("You must call OnReceive before starting the distributor");
        }

        [Test]
        public async Task When_Distribute_is_called_before_OnReceive_it_throws()
        {
            var distributor = CreateDistributor();

            Action distribute = () => distributor.Distribute(1).Wait();

            distribute.ShouldThrow<InvalidOperationException>()
                      .And
                      .Message
                      .Should()
                      .Contain("You must call OnReceive");
        }

        [Test]
        public async Task Unless_work_is_completed_then_lease_is_not_reissued_before_its_duration_has_passed()
        {
            var leasesGranted = new ConcurrentBag<string>();

            var distributor = CreateDistributor(async l =>
            {
                Console.WriteLine("GRANTED: " + l);
                leasesGranted.Add(l.ResourceName);

                // resource "2" is held well past the normal lease duration
                if (l.ResourceName == "2")
                {
                    await Task.Delay(((int) DefaultLeaseDuration.TotalMilliseconds*6));
                }
            });

            await distributor.Start();
            await Task.Delay((int) (DefaultLeaseDuration.TotalMilliseconds*.5));
            await distributor.Stop();
            await Task.Delay(100);

            leasesGranted.Should().ContainSingle(l => l == "2");
        }

        [Test]
        public async Task The_received_lease_LastGranted_property_returns_the_time_of_the_previous_grant()
        {
            Lease<int> lease = null;
            var lastGranted = DateTimeOffset.Parse("8/10/2016");

            var distributor = CreateDistributor(
                async l => lease = l,
                new[]
                {
                    new Leasable<int>(1, "the only lease in the pool")
                    {
                        LeaseLastGranted = lastGranted
                    }
                });

            await distributor.Distribute(1);

            lease.LastGranted
                 .ToUniversalTime()
                 .Should()
                 .BeCloseTo(lastGranted,
                            precision: (int) ClockDriftTolerance.TotalMilliseconds);
        }

        [Test]
        public async Task The_received_lease_LastReleased_property_returns_the_time_of_the_previous_release()
        {
            Lease<int> lease = null;
            var lastReleased = DateTimeOffset.Parse("8/10/2016");

            var distributor = CreateDistributor(
                async l => lease = l,
                new[]
                {
                    new Leasable<int>(1, "the only lease in the pool")
                    {
                        LeaseLastReleased = lastReleased
                    }
                });

            await distributor.Distribute(1);

            lease.LastReleased
                 .ToUniversalTime()
                 .Should()
                 .BeCloseTo(lastReleased,
                            precision: (int) ClockDriftTolerance.TotalMilliseconds);
        }

        [Test]
        public async Task Distribute_returns_a_sequence_containing_the_leases_granted()
        {
            var received = new ConcurrentBag<int>();
            var distributor = CreateDistributor(async l => { received.Add(l.Resource); });

            var returned = (await distributor.Distribute(3)).ToArray();

            returned.Length.Should().Be(3);
            foreach (var leasable in received)
            {
                returned.Should().Contain(leasable);
            }
        }

        [Test]
        public async Task Distributor_rate_can_be_slowed_by_extending_leases_using_ExpireIn()
        {
            var receivedLeases = new ConcurrentBag<Lease<int>>();

            var distributor = CreateDistributor(
                async lease => receivedLeases.Add(lease),
                maxDegreesOfParallelism: 10);

            distributor.OnReceive(async lease => { await lease.ExpireIn(2.Seconds()); });

            await distributor.Start();
            await Task.Delay(1.Seconds());
            await distributor.Stop();

            // with 2s leases and only 1s of runtime, each of the 10 leasables
            // can be granted at most once
            receivedLeases.Count().Should().Be(10);
        }

        [Test]
        public async Task Distribute_will_not_distribute_more_leases_than_there_are_leasables()
        {
            var leasesDistributed = 0;
            var distributor = CreateDistributor(async lease => { Interlocked.Increment(ref leasesDistributed); });

            await distributor.Distribute(10000000);

            leasesDistributed.Should().Be(DefaultLeasables.Length);
        }

        [Test]
        public async Task A_lease_can_be_continuously_extended_as_work_is_being_done_using_KeepExtendingLeasesWhileWorking()
        {
            var distributor = CreateDistributor(defaultLeaseDuration: 1000.Milliseconds());
            bool? wasReleased = null;

            distributor.OnReceive(async (lease, next) =>
            {
                // work outlasts the 1s lease; the auto-extender must keep it alive
                await Task.Delay(1500.Milliseconds());
                wasReleased = lease.IsReleased;
            });

            distributor = distributor
                .KeepExtendingLeasesWhileWorking(frequency: 600.Milliseconds())
                .ReleaseLeasesWhenWorkIsDone();

            await distributor.Distribute(1);

            wasReleased.Should().BeFalse();
        }

        [Test]
        public async Task When_a_lease_has_been_released_and_ExpireIn_is_called_then_OnException_publishes_an_exception()
        {
            // arrange
            var distributor = CreateDistributor(async lease =>
            {
                // trigger a lease expiration exception
                await lease.Release();
                await lease.ExpireIn(1.Milliseconds());
            });

            Exception handledException = null;

            distributor.OnException((exception, lease) => { handledException = exception; });

            // act
            await distributor.Distribute(1);
            await Task.Delay(10);

            // assert
            handledException.Should().NotBeNull();
        }

        [Test]
        public async Task When_a_lease_has_expired_and_ExpireIn_is_called_then_OnException_publishes_an_exception()
        {
            // arrange
            var distributor = CreateDistributor(async lease =>
            {
                // trigger a lease expiration exception
                await lease.Expiration();
                await lease.ExpireIn(1.Milliseconds());
            });

            Exception handledException = null;

            distributor.OnException((exception, lease) => { handledException = exception; });

            // act
            await distributor.Distribute(1);
            await Task.Delay(10);

            // assert
            handledException.Should().NotBeNull();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using CoreGraphics;
using Foundation;
using UIKit;

namespace bugTrapKit
{
	/// <summary>
	/// Annotation screen: lets the user draw on the active snapshot with a
	/// selectable tool and colour, undo/redo edits, switch snapshots, and share
	/// or save the annotated result. The nav-bar button set depends on context
	/// (SDK, Action Extension, or the new-bug details flow).
	/// </summary>
	public partial class BtAnnotateImageViewController : BtViewController, IUIViewControllerTransitioningDelegate, IUIPopoverPresentationControllerDelegate
	{
		// Image-collection navigation controller owned by our custom nav controller;
		// null in the Action Extension context (see titleClicked).
		BtImageNavigationController imageNavigationController
		{
			get { return (NavigationController as BtAnnotateImageNavigationController)?.ImageNavigationController; }
		}

		// The root view doubles as the annotation canvas.
		BtAnnotateImageView AnnotateView
		{
			get { return View as BtAnnotateImageView; }
		}

		Annotate.Tool currentTool;

		// Currently selected drawing tool; setting it refreshes toolbar imagery
		// and persists the choice to Settings.
		Annotate.Tool CurrentTool
		{
			get { return currentTool; }
			set
			{
				currentTool = value;
				updateCurrentToolDependencies();
				Settings.AnnotateTool = currentTool;
			}
		}

		Annotate.Color currentColor;

		// Currently selected stroke colour; setting it refreshes toolbar imagery
		// and persists the choice to Settings.
		Annotate.Color CurrentColor
		{
			get { return currentColor; }
			set
			{
				currentColor = value;
				updateCurrentColorDependencies();
				// FIX: was "Settings.AnnotateTool = currentTool;" (copy/paste bug) —
				// the colour selection was never persisted, so ViewDidLoad's
				// "CurrentColor = Settings.AnnotateColor" reset it every launch.
				Settings.AnnotateColor = currentColor;
			}
		}

		public BtAnnotateImageViewController (IntPtr handle) : base(handle)
		{
		}

		public /*async*/ override void ViewDidLoad ()
		{
			base.ViewDidLoad();

#if DEBUG
			// addToolAccessibilityLabels();
			// addColorAccessibilityLabels();
#endif

			ScreenName = GAIKeys.ScreenNames.AnnotateSnapshot;

#if !UITEST
			TrapState.Shared.GetAlbumLocalIdentifier();
#endif
			// var tracker = TrackerService.Shared.CurrentTracker;

			ConfigureNavBarButtonsForContext();

			// Restore the last-used tool/colour (setters refresh the toolbar).
			CurrentTool = Settings.AnnotateTool;
			CurrentColor = Settings.AnnotateColor;

			noImageView.Frame = AnnotateView.Bounds;
			actionButton.Enabled = false;
		}

		public override void ViewWillAppear (bool animated)
		{
			base.ViewWillAppear(animated);

			// you just tap to add callout, so showing & hiding the bars each time is annoying
			NavigationController.HidesBarsOnTap = CurrentTool != Annotate.Tool.Callout;

			// initial view controller in the context of an extension
			if (TrapState.Shared.InActionExtension && NavigationController?.ExtensionContext != null)
			{
				TrapState.Shared.SetActiveSnapshotImage(0);
				AnnotateView.RefreshLayoutAndImage();
			}

			if (!TrapState.Shared.HasActiveSnapshotImageIdentifier)
			{
				SaveCurrentSnapshotAndResetState(null, null);
			}

			// Test Cloud restores the devices, so add our own image to the device, so we can choose it later
#if UITEST
			var image = UIImage.FromBundle("test_screenshot");
			image.SaveToPhotosAlbum((i, e) => { });
#endif
		}

		public override void ViewDidAppear (bool animated)
		{
			base.ViewDidAppear(animated);

			undoButton.Enabled = TrapState.Shared.HasActiveSnapshotImageIdentifier;
			redoButton.Enabled = TrapState.Shared.HasActiveSnapshotImageIdentifier;

			var hasImage = AnnotateView.IncrementImage != null;
			actionButton.Enabled = hasImage;

			if (!hasImage)
			{
				// Pulse the title button to draw attention when there is nothing to
				// annotate yet. Fire-and-forget by design: the repeating animation is
				// cancelled in ViewDidDisappear/titleClicked via RemoveAllAnimations.
				UIView.AnimateNotifyAsync(0.7, 1.8,
					UIViewAnimationOptions.Repeat | UIViewAnimationOptions.Autoreverse |
					UIViewAnimationOptions.CurveEaseInOut | UIViewAnimationOptions.AllowUserInteraction,
					() => titleButton.Alpha = titleButton.Alpha < 1 ? 1 : (nfloat)0.4
				);
			}
		}

		public override void ViewDidDisappear (bool animated)
		{
			base.ViewDidDisappear(animated);

			titleButton.Layer.RemoveAllAnimations();
			titleButton.Alpha = 1;
		}

		// Tapping the title either returns to the image collection (normal flow)
		// or segues to the popover image picker (extension flow), saving the
		// annotated image first when it was modified.
		async partial void titleClicked (UIButton sender)
		{
			titleButton.Layer.RemoveAllAnimations();
			titleButton.Alpha = 1;

			if (imageNavigationController != null)
			{
				// this will be null in Action Extension
				if (AnnotateView.ImageHasChanges)
				{
					// has the image been altered?
					var annotatedImage = AnnotateView.IncrementImage;
					if (annotatedImage != null)
					{
						await TrapState.Shared.UpdateActiveSnapshotImage(annotatedImage);
						if (TrapState.Shared.InSdk)
						{
							NavigationController.DismissViewController(true, null);
						}
						else
						{
							PresentViewController(imageNavigationController, true, null);
						}
					}
				}
				else
				{
					if (TrapState.Shared.InSdk)
					{
						NavigationController.DismissViewController(true, null);
					}
					else
					{
						PresentViewController(imageNavigationController, true, null);
					}
				}
			}
			else
			{
				if (AnnotateView.ImageHasChanges)
				{
					// has the image been altered?
					var annotatedImage = AnnotateView.IncrementImage;
					if (annotatedImage != null)
					{
						await TrapState.Shared.UpdateActiveSnapshotImage(annotatedImage);
						PerformSegue("BtPopupImageCollectionViewController", this);
					}
				}
				else
				{
					PerformSegue("BtPopupImageCollectionViewController", this);
				}
			}
		}

		partial void settingsClicked (UIBarButtonItem sender)
		{
			(NavigationController as BtAnnotateImageNavigationController)?.PresentSettingsNavController(true);
		}

		// this is only called in the context of selecting an image in the BtNewBugDetailsTableViewController
		async partial void saveClicked (UIBarButtonItem sender)
		{
			var annotatedImage = AnnotateView.IncrementImage;
			if (annotatedImage != null)
				await TrapState.Shared.UpdateActiveSnapshotImage(annotatedImage);
			DismissViewController(true, null);
		}

		async partial void cancelClicked (UIBarButtonItem sender)
		{
			if (TrapState.Shared.InActionExtension && NavigationController?.ExtensionContext != null)
			{
				(NavigationController as BtAnnotateImageNavigationController)?.CompleteExtensionForCancel();
			}
			else
			{
				await DismissViewControllerAsync(true);
				TrapState.Shared.DeactivateActiveSnapshotImage();
			}
		}

		// Presents the share sheet with the custom tracker activities attached.
		async partial void actionClicked (UIBarButtonItem sender)
		{
			var annotatedImage = AnnotateView.IncrementImage;
			if (annotatedImage != null)
			{
				var firstActivityItem = annotatedImage.AsJPEG(1);

				var applicationActivities = new List<UIActivity> ();
				// var donedoneActivity = BtTrackerActivity(TrackerType.DoneDone);
				applicationActivities.Add(new BtTrackerActivity (TrackerType.DoneDone));
				applicationActivities.Add(new BtTrackerActivity (TrackerType.PivotalTracker));
				applicationActivities.Add(new BtTrackerActivity (TrackerType.JIRA));

				var activityViewController = new UIActivityViewController (new [] { firstActivityItem }, applicationActivities.ToArray());
				activityViewController.ExcludedActivityTypes = new []
				{
					UIActivityType.PostToWeibo,
					UIActivityType.AddToReadingList,
					UIActivityType.PostToVimeo,
					UIActivityType.PostToTencentWeibo
				};

				// This doesn't get called for the custom activities (BtTrackerActivity)
				activityViewController.SetCompletionHandler((activityType, completed, returnedItems, error) =>
				{
					if (completed)
					{
						// if (activityType != null) Analytics.Shared.Activity(activityType.ToString(), true);
						Console.WriteLine("{0} -- Completed", activityType);
					}
					else if (error != null)
					{
						Console.WriteLine(error.LocalizedDescription);
						//Log.Error("UIActivityViewController", error.LocalizedDescription);
					}
					else if (activityType != null)
					{
						Console.WriteLine("{0} -- Cancelled", activityType);
						// Analytics.Shared.Activity(activityType.ToString(), false);
					}
				});

				// set the action but as the popover's anchor for ipad
				if (activityViewController.PopoverPresentationController != null)
				{
					activityViewController.PopoverPresentationController.BarButtonItem = actionButton;
				}

				// has the image been altered?
				if (AnnotateView.ImageHasChanges)
					await TrapState.Shared.UpdateActiveSnapshotImage(annotatedImage);

				NavigationController.PresentViewController(activityViewController, true, null);
			}
		}

		partial void undoClicked (UIBarButtonItem sender)
		{
			if (AnnotateView.AnnotateUndoManager.CanUndo)
				AnnotateView.AnnotateUndoManager.Undo();
			updateUndoRedoButtons();
		}

		partial void redoClicked (UIBarButtonItem sender)
		{
			if (AnnotateView.AnnotateUndoManager.CanRedo)
				AnnotateView.AnnotateUndoManager.Redo();
			updateUndoRedoButtons();
		}

		// Toolbar taps: the "Color" pseudo-tool swaps the toolbar for the colour
		// strip; any other tag selects that tool directly.
		partial void barToolClicked (UIBarButtonItem sender)
		{
			if (Convert.ToInt32(sender.Tag) != (int)Annotate.Tool.Color)
			{
				CurrentTool = (Annotate.Tool)Convert.ToInt32(sender.Tag);
			}
			else
			{
				SetToolbarItems(barColors.OrderBy(b => b.Tag).ToArray(), true);
			}
		}

		partial void barColorClicked (UIBarButtonItem sender)
		{
			// Colour buttons carry tags at 2x the enum value (interleaved with spacers).
			CurrentColor = (Annotate.Color)Convert.ToInt32(sender.Tag / 2);
			SetToolbarItems(barTools, true);
		}

		/// <summary>
		/// Persists the current snapshot (if altered) and switches the canvas to the
		/// snapshot identified either by index (Action Extension) or by Photos local
		/// identifier (app). Passing neither clears the canvas and shows the
		/// "no image" placeholder.
		/// </summary>
		public async void SaveCurrentSnapshotAndResetState (nint? indexOfActiveSnapshot, string localIdentifierOfActiveSnapshot)
		{
			var inExtension = TrapState.Shared.InActionExtension;

			// one of the two had a value
			if (inExtension ? indexOfActiveSnapshot.HasValue : !string.IsNullOrEmpty(localIdentifierOfActiveSnapshot))
			{
				if (!TrapState.Shared.HasActiveSnapshotImage)
				{
					AnnotateView.ResetContextAndState();
					if (inExtension)
					{
						TrapState.Shared.SetActiveSnapshotImage(indexOfActiveSnapshot.Value);
						// refresh to view with the new config / settings
						AnnotateView.RefreshLayoutAndImage();
					}
					else
					{
						await TrapState.Shared.SetActiveSnapshotImageAsync(localIdentifierOfActiveSnapshot);
						// refresh to view with the new config / settings
						AnnotateView.RefreshLayoutAndImage();
					}
					// the view was displaying the image at index TrapState.Shared.indexOfActiveSnapshot, so
					// save the state of that image and annotations, then switch to the index passed to this func
					// if TrapState.Shared.activeSnapshotImageLocalIdentifier != nil && TrapState.Shared.activeSnapshotImageLocalIdentifier != newLocalIdentifier {
				}
				else if (inExtension ? !TrapState.Shared.IsActiveSnapshot(indexOfActiveSnapshot.Value) : !TrapState.Shared.IsActiveSnapshot(localIdentifierOfActiveSnapshot))
				{
					var annotatedImage = AnnotateView.IncrementImage;
					if (annotatedImage != null)
					{
						AnnotateView.ResetContextAndState();
						if (inExtension)
						{
							TrapState.Shared.SetActiveSnapshotImage(indexOfActiveSnapshot.Value);
							AnnotateView.RefreshLayoutAndImage();
						}
						else
						{
							await TrapState.Shared.SetActiveSnapshotImageAsync(localIdentifierOfActiveSnapshot);
							AnnotateView.RefreshLayoutAndImage();
						}
					}
					else
					{
						AnnotateView.ResetContextAndState();
					}
					// the view was displaying the image at index indexOfActiveSnapshotCache, so
					// save the image and annotations, then switch to the index passed to this func (newIndex)
				}

				// lose the "No Image" view, we have something to show
				if (noImageView.IsDescendantOfView(AnnotateView))
					noImageView.RemoveFromSuperview();
			}
			else if (TrapState.Shared.HasActiveSnapshotImage)
			{
				if (noImageView.IsDescendantOfView(AnnotateView))
					noImageView.RemoveFromSuperview();
			}
			else
			{
				AnnotateView.ResetContextAndState();
				// no image index to display, so let the user know
				AnnotateView.AddSubview(noImageView);
				actionButton.Enabled = false;
			}
		}

		void updateUndoRedoButtons ()
		{
			// NOTE(review): the undo line was left commented out in the original —
			// undoButton stays in whatever state ViewDidAppear set. Confirm intent.
			// undoButton.Enabled = AnnotateView.AnnotateUndoManager.CanUndo;
			redoButton.Enabled = AnnotateView.AnnotateUndoManager.CanRedo;
		}

		// Re-renders every tool button image for the active tool/colour combination.
		void updateCurrentToolDependencies ()
		{
			AnnotateView.ConfigureTool(CurrentTool);
			NavigationController.HidesBarsOnTap = CurrentTool != Annotate.Tool.Callout;
			foreach (var tool in barTools)
			{
				if (tool.Tag >= 0 && Convert.ToInt32(tool.Tag) != (int)Annotate.Tool.Color)
				{
					var bt = (Annotate.Tool)Convert.ToInt32(tool.Tag);
					tool.Image = bt == CurrentTool ? bt.imageOn(CurrentColor) : bt.image(CurrentColor);
				}
			}
		}

		// Updates the colour swatch button and the canvas stroke colour.
		void updateCurrentColorDependencies ()
		{
			var colorTool = barTools.FirstOrDefault(t => Convert.ToInt32(t.Tag) == (int)Annotate.Tool.Color);
			if (colorTool != null)
				colorTool.Image = Annotate.Tool.Color.image(CurrentColor);
			AnnotateView.ConfigureStroke(CurrentColor.color());
		}

		public override void PrepareForSegue (UIStoryboardSegue segue, NSObject sender)
		{
			if (segue.Identifier == "BtPopupImageCollectionViewController")
			{
				var popoverController = segue.DestinationViewController as BtPopupImageCollectionViewController;
				if (popoverController != null)
				{
					popoverController.PopoverPresentationController.Delegate = this;
					popoverController.PopoverPresentationController.SourceView = titleButton;
					popoverController.PreferredContentSize = new CGSize (300, 142);
				}
			}
		}

		// Keep the image picker as a real popover on all size classes.
		UIModalPresentationStyle AdaptivePresentationStyleForPresentationController (UIPresentationController controller)
		{
			return UIModalPresentationStyle.None;
		}

		// Chooses the nav-bar button layout for the three presentation contexts.
		void ConfigureNavBarButtonsForContext ()
		{
			// extension uses the 'flipped' theme with white bars and red icons
			if (TrapState.Shared.InActionExtension)
			{
				titleButton.SetImage(UIImage.FromBundle("i_logo_web_red"), UIControlState.Normal);
			}

			// selected an existing screenshot from the new bug form - cancel, save
			var newBugDetailsNavController = NavigationController.PresentingViewController as BtNewBugDetailsNavigationController;
			if (newBugDetailsNavController != null)
			{
				NavigationItem.SetLeftBarButtonItems(new []{ cancelButton, undoButton }, false);
				NavigationItem.SetRightBarButtonItems(new []{ saveButton, redoButton }, false);
			}
			else if (TrapState.Shared.InActionExtension)
			{
				NavigationItem.SetLeftBarButtonItems(new []{ cancelButton, undoButton }, false);
				NavigationItem.SetRightBarButtonItems(new []{ actionButton, redoButton }, false);
			}
			else
			{
				NavigationItem.SetLeftBarButtonItems(new []{ settingsButton, undoButton }, false);
				NavigationItem.SetRightBarButtonItems(new []{ actionButton, redoButton }, false);
			}

			NavigationItem.TitleView = titleButton;
		}

		public override void TraitCollectionDidChange (UITraitCollection previousTraitCollection)
		{
			AnnotateView.RefreshLayoutAndImageForRotation();
			if (noImageView.IsDescendantOfView(AnnotateView))
			{
				noImageView.Frame = AnnotateView.Bounds;
			}
		}

		public override bool PrefersStatusBarHidden () => true;

		public override UIStatusBarStyle PreferredStatusBarStyle () => UIStatusBarStyle.LightContent;
	}
}
// /*
// * Copyright (c) 2016, Alachisoft. All Rights Reserved.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// */
using System;
using System.Diagnostics;
using Alachisoft.NosDB.Common.Util;
using Microsoft.Win32;
using System.Configuration;

namespace Alachisoft.NosDB.Common
{
    /// <summary>
    /// Utility class to help with common tasks: registry-backed settings,
    /// event-log writing, compact timestamps relative to a fixed epoch, and a
    /// platform-stable string hash used for data distribution.
    /// </summary>
    public class AppUtil
    {
        public static readonly string InstallDir = "";
        public readonly static string DeployedAssemblyDir = "deploy\\";

        // Event-log verbosity bitmask: 1 = errors only, 3 = +warnings, 7 = everything.
        static readonly int SLogLevel = 7;

        // Fallback configuration-server port when the registry holds no valid value.
        private static int _csPort = 9950;

        static AppUtil()
        {
            InstallDir = GetInstallDir();

            // FIX: ConfigurationSettings.AppSettings is obsolete (CS0618);
            // ConfigurationManager.AppSettings is the supported replacement and
            // reads the same <appSettings> section.
            string logLevel = ConfigurationManager.AppSettings["EventLogLevel"];
            if (!string.IsNullOrEmpty(logLevel))
            {
                logLevel = logLevel.ToLower();
                switch (logLevel)
                {
                    case "error":
                        SLogLevel = 1;
                        break;

                    case "warning":
                        SLogLevel = 3;
                        break;

                    case "all":
                        SLogLevel = 7;
                        break;
                }
            }
        }

        private static string GetInstallDir()
        {
            return GetAppSetting("InstallDir");
        }

        /// <summary>Reads a value from the product's root registry key.</summary>
        public static string GetAppSetting(string key)
        {
            return GetAppSetting("", key);
        }

        public static string GetClusterName(bool isLocal)
        {
            return isLocal ? "local" : "cluster";
        }

        /// <summary>Returns the banner printed by the command-line utilities.</summary>
        public static string GetUtilityLogo(string toolName)
        {
            string logo = "\n" + @"Alachisoft (R) NosDB Utility {0} Version 1.3.0.0" + "\n" + @"Copyright (C) Alachisoft 2016. All rights reserved.";
            return string.Format(logo, toolName);
        }

        /// <summary>
        /// Reads a value from a sub-section of the product's root registry key;
        /// non-string registry values are converted to their string form.
        /// </summary>
        public static string GetAppSetting(string section, string key)
        {
            section = RegHelper.ROOT_KEY + section;
            object tempVal = RegHelper.GetRegValue(section, key, 0);
            if (!(tempVal is String))
            {
                return Convert.ToString(tempVal);
            }
            return (String)tempVal;
        }

        public static bool IsRunningAsWow64
        {
            get { return false; }
        }

        public static bool IsNew
        {
            get { return true; }
        }

        public static string EventLogSource
        {
            get { return "NosDB"; }
        }

        /// <summary>
        /// Writes an error, warning, information, success audit, or failure audit
        /// entry with the given message text to the event log.
        /// </summary>
        /// <param name="source">Event source name recorded with the entry.</param>
        /// <param name="msg">The string to write to the event log.</param>
        /// <param name="type">One of the <c>EventLogEntryType</c> values; also used as the severity bit checked against the configured log level.</param>
        /// <param name="category">Unused by the current implementation; kept for interface compatibility.</param>
        /// <param name="eventId">Application-defined event identifier.</param>
        public static void LogEvent(string source, string msg, EventLogEntryType type, short category, int eventId)
        {
            try
            {
                var level = (int)type;
                // Only log entries whose severity bit is enabled in SLogLevel.
                if ((level & SLogLevel) == level)
                {
                    using (var nosdbLog = new EventLog("Application"))
                    {
                        nosdbLog.Source = source;
                        nosdbLog.WriteEntry(msg, type, eventId);
                    }
                }
            }
            catch (Exception)
            {
                // Best-effort logging: never let event-log failures take down the caller.
            }
        }

        /// <summary>
        /// Writes an error, warning, information, success audit, or failure audit
        /// entry with the given message text to the event log.
        /// </summary>
        /// <param name="msg">The string to write to the event log.</param>
        /// <param name="type">One of the <c>EventLogEntryType</c> values.</param>
        public static void LogEvent(string msg, EventLogEntryType type)
        {
            string source = EventLogSource;
            if (type == EventLogEntryType.Information)
                LogEvent(source, msg, type, EventCategories.Information, EventID.GeneralInformation);
            else
                LogEvent(source, msg, type, EventCategories.Warning, EventID.GeneralError);
        }

        /// <summary>
        /// Returns lg(Log2) of a number.
        /// </summary>
        /// <param name="val"></param>
        /// <returns>Floor of log2(val); 0 for values &lt;= 1.</returns>
        public static byte Lg(int val)
        {
            byte i = 0;
            while (val > 1)
            {
                val >>= 1;
                i++;
            }
            return i;
        }

        /// <summary>
        /// Store all date time values as a difference to this time
        /// </summary>
        private static DateTime _startDt = new DateTime(2004, 12, 31, 0, 0, 0, 0, DateTimeKind.Utc);

        /// <summary>
        /// Convert DateTime to integer taking 31-12-2004 as base
        /// and removing millisecond information
        /// </summary>
        /// <param name="dt"></param>
        /// <returns>Whole seconds elapsed since the 2004-12-31 UTC epoch.</returns>
        public static int DiffSeconds(DateTime dt)
        {
            dt = dt.ToUniversalTime();
            TimeSpan interval = dt - _startDt;
            return (int)interval.TotalSeconds;
        }

        // NOTE(review): unlike DiffSeconds, this returns only the millisecond
        // *component* (0-999) of the interval, not the total milliseconds —
        // TimeSpan.Milliseconds vs TimeSpan.TotalMilliseconds. Left unchanged
        // because callers may pair it with DiffSeconds; confirm before "fixing".
        public static int DiffMilliseconds(DateTime dt)
        {
            dt = dt.ToUniversalTime();
            TimeSpan interval = dt - _startDt;
            return interval.Milliseconds;
        }

        /// <summary>Ticks elapsed since the 2004-12-31 UTC epoch.</summary>
        public static long DiffTicks(DateTime dt)
        {
            dt = dt.ToUniversalTime();
            TimeSpan interval = dt - _startDt;
            return interval.Ticks;
        }

        /// <summary>
        /// Convert DateTime to integer taking 31-12-2004 as base
        /// and removing millisecond information
        /// </summary>
        /// <param name="absoluteTime">Seconds since the 2004-12-31 UTC epoch (as produced by <see cref="DiffSeconds"/>).</param>
        /// <returns>The corresponding UTC <c>DateTime</c>.</returns>
        public static DateTime GetDateTime(int absoluteTime)
        {
            var dt = new DateTime(_startDt.Ticks, DateTimeKind.Utc);
            return dt.AddSeconds(absoluteTime);
        }

        /// <summary>
        /// Checks environment to verify if there is 'Any' version of Visual Studio installed
        /// (VS 2005 / VS 2008 registry keys).
        /// </summary>
        public static bool IsVsIdeInstalled()
        {
            //Check VS.Net 2005
            RegistryKey rKey8 = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Microsoft\\VisualStudio\\8.0");
            RegistryKey rKey9 = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Microsoft\\VisualStudio\\9.0");
            if (rKey8 != null)
            {
                if (rKey8.GetValue("InstallDir", "").ToString().Length != 0)
                    return true;
            }
            if (rKey9 != null)
            {
                if (rKey9.GetValue("InstallDir", "").ToString().Length != 0)
                    return true;
            }
            return false;
        }

        /// <summary>
        /// Hashcode algorithm returning same hash code for both 32bit and 64 bit apps.
        /// Used for data distribution under por/partitioned topologies.
        /// </summary>
        /// <param name="strArg"></param>
        /// <returns></returns>
        public static unsafe int GetHashCode(string strArg)
        {
            fixed (void* str = strArg)
            {
                char* chPtr = (char*)str;
                int num = 0x15051505;
                int num2 = num;
                int* numPtr = (int*)chPtr;
                // Processes the string as 32-bit words, two per iteration.
                for (int i = strArg.Length; i > 0; i -= 4)
                {
                    num = (((num << 5) + num) + (num >> 0x1b)) ^ numPtr[0];
                    if (i <= 2)
                    {
                        break;
                    }
                    num2 = (((num2 << 5) + num2) + (num2 >> 0x1b)) ^ numPtr[1];
                    numPtr += 2;
                }
                return (num + (num2 * 0x5d588b65));
            }
        }

        /// <summary>
        /// Configuration-server port from the registry, validated against the legal
        /// IP port range; falls back to the built-in default on any bad value.
        /// </summary>
        public static int ConfigurationServerPort
        {
            get
            {
                try
                {
                    object v = RegHelper.GetRegValue(RegHelper.ROOT_KEY, "ConfigurationServerPort", 0);
                    if (v != null)
                    {
                        int port = Convert.ToInt32(v);
                        if (port >= System.Net.IPEndPoint.MinPort && port <= System.Net.IPEndPoint.MaxPort)
                            return port;
                    }
                }
                catch (FormatException)
                {
                }
                catch (OverflowException)
                {
                }
                return _csPort;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Contracts;
using CraftingGame.Controllers;
using CraftingGame.Physics;
using CraftingGame.Widgets;
using Terrain;
using VectorMath;
using CraftingGame.Actions;
using CraftingGame.State;
using System.IO;

namespace CraftingGame
{
    /// <summary>
    /// Main platformer scene: owns the game state, procedural terrain, physics,
    /// input controllers, and the render loop. Persists state to "state.json"
    /// via the injected stream provider on exit and reloads it on construction.
    /// </summary>
    public class PlatformerSceene : Sceene
    {
        public const int TerrainDepth = 100;
        public const int TerrainHeight = 100;
        public const int TerrainSeaLevel = 80;
        public const int BlockSize = 30;

        // Use a fixed update step size.
        private const float UpdateStep = 1.0f;

        private readonly IStreamProvider streamProvider;
        private readonly ActionQueue actionQueue;
        private readonly ITerrainFactory terrainFactory;
        private SpriteResolver spriteResolver;
        private PhysicsEngine physics;
        private IFont debugFont = null;
        private InputMask[] inputMasks;

        // Terrain
        private ProceduralLevel level;

        // Widgets
        private TerrainWidget terrainWidget;
        private DynamicGridWidget dynamicGridWidget;

        // Actions
        private CollectAction collectAction;

        // Input controllers
        private FreeCameraController freeCameraController;
        private PlayerController playerController;

        // One View per active player when split-screen is on; single view otherwise.
        public View[] ActiveViews { get; private set; }
        public Camera FirstCamera => ActiveViews.First().Camera;
        public IModifiableTerrain Terrain => State.Terrain;
        public ViewportProjection DisplayView { get; private set; }
        public Grid Grid { get; } = new Grid(BlockSize, BlockSize);
        public Plane Plane { get; } = new Plane(0);

        // Game state
        public GameState State { get; private set; }

        // Indirection so diagnostics can read sectors from whatever State is current.
        private Func<IEnumerable<TerrainSector>> SectorProbe;

        public PlatformerSceene(IStreamProvider streamProvider, IRenderer renderer, IUserInterface ui, ActionQueue actionQueue)
            : this(streamProvider, renderer, ui, actionQueue, new TerrainFactory(TerrainDepth, TerrainHeight, TerrainSeaLevel))
        {
        }

        public PlatformerSceene(IStreamProvider streamProvider, IRenderer renderer, IUserInterface ui, ActionQueue actionQueue, ITerrainFactory terrainFactory)
            : base("Platformer", renderer, ui, actionQueue)
        {
            this.streamProvider = streamProvider;
            this.actionQueue = actionQueue;
            this.terrainFactory = terrainFactory;

            // Load latest game state
            var loaded = false;
            if (this.streamProvider.FileExists("state.json"))
            {
                loaded = LoadGame(this.streamProvider.ReadFile("state.json"));
            }
            // Fall back to a fresh game when no save exists or loading failed.
            if (!loaded)
            {
                NewGame();
            }
        }

        /// <summary>Starts a brand-new game, discarding the current state.</summary>
        public void NewGame()
        {
            var state = GameState.Create(actionQueue, Grid, terrainFactory);
            ChangeGameState(state);
        }

        /// <summary>
        /// Attempts to load game state from the stream; returns false on any
        /// deserialization failure (best-effort — caller falls back to NewGame).
        /// </summary>
        public bool LoadGame(Stream stream)
        {
            try
            {
                var state = GameState.LoadFromStream(actionQueue, Grid, terrainFactory, stream);
                ChangeGameState(state);
                return true;
            }
            catch
            {
            }
            return false;
        }

        // Swaps in a new GameState and rebuilds everything derived from it
        // (level, display projection, camera views).
        private void ChangeGameState(GameState state)
        {
            State = state;
            SectorProbe = () => State.Terrain.Sectors;
            this.level = new ProceduralLevel(State.Terrain, Grid);

            var displaySize = Renderer.GetDisplaySize();
            DisplayView = new ViewportProjection(displaySize);
            DisplayView.Center(new Vector2(0, 0));
            ConfigureSingleScreen();
            //View.Scale(2.0f);
            //renderer.Scale(1, -1);
        }

        /// <summary>Persists the current game state on scene exit.</summary>
        public override void Exiting()
        {
            State.SaveToStream(this.streamProvider.WriteFile("state.json"));
        }

        /// <summary>
        /// Scene activation: resolves sprites, builds the physics engine and
        /// widgets, wires player/camera controllers, and enters the level.
        /// </summary>
        public override void Activate(InputMask uiInput, InputMask[] inputMasks)
        {
            base.Activate(uiInput, inputMasks);
            this.inputMasks = inputMasks;
            spriteResolver = new SpriteResolver(scope);
            spriteResolver.ResolveBindings(State.KnownPlayers.ToArray());

            debugFont = scope.LoadFont("ConsoleFont");

            var proceduralManager = new ProceduralObjectManager(State.Terrain, Grid, Plane);
            physics = new PhysicsEngine(proceduralManager, UpdateStep);
            level.Load(Plane);
            State.AddLevel(level.State);

            collectAction = new CollectAction();

            terrainWidget = new TerrainWidget(Renderer, State.Terrain, debugFont);
            dynamicGridWidget = new DynamicGridWidget(Renderer, debugFont, BlockSize);

            freeCameraController = new FreeCameraController(() => FirstCamera);
            playerController = new PlayerController(State, physics);
            // Re-evaluate single vs. split screen whenever a player joins or leaves.
            playerController.PlayerActivated += UpdateCameraFocus;
            playerController.PlayerDeactivated += UpdateCameraFocus;

            TransitionToLevel(level.Name);
        }

        private void UpdateCameraFocus(object sender, PlayerGameStateEvent args)
        {
            if(State.ActivePlayers.Count() <= 1)
            {
                ConfigureSingleScreen();
            }
            else
            {
                ConfigureSplitScreen();
            }
        }

        // One full-screen view; camera tracks the first active player if any.
        private void ConfigureSingleScreen()
        {
            ActiveViews = new[] { new View(DisplayView, new Camera(DisplayView)) };
            var player = State.ActivePlayers.FirstOrDefault();
            if(player != null)
            {
                ActiveViews[0].Camera.Track(player);
                ActiveViews[0].Camera.Follow();
            }
        }

        // Two vertically split views; cameras track the first two active players.
        private void ConfigureSplitScreen()
        {
            // Configure split screen
            ActiveViews = DisplayView.SplitVertically().Select(v => new View(v, new Camera(v))).ToArray();
            var player = State.ActivePlayers.FirstOrDefault();
            if(player != null)
            {
                ActiveViews[0].Camera.Track(player);
                ActiveViews[0].Camera.Follow();
                //ActiveViews[0].Viewport.Scale(2.0f);
                ActiveViews[1].Camera.Track(State.ActivePlayers.Skip(1).First());
                ActiveViews[1].Camera.Follow();
                //ActiveViews[1].Viewport.Scale(2.0f);
            }
        }

        /// <summary>
        /// Per-frame simulation: player joining (every 100th frame), terrain
        /// generation, camera tracking, input handling, and physics for crates,
        /// enemies, and projectiles.
        /// </summary>
        public override void Update(FrameCounter counter, float timestep)
        {
            if (counter.HundredFrame)
            {
                JoinPlayers();
            }

            // Update level to generate terrain
            this.level.Update();

            // Camera follows player
            foreach(var view in ActiveViews)
            {
                view.Camera.Update();
            }

            // Handle UI inputs
            freeCameraController.Update(UiInput);

            foreach (var player in State.BoundPlayers)
            {
                playerController.Update(player);
            }

            // Apply physics to crate.
            var zeroVector = Vector2.Zero;
            foreach (var crate in State.ActiveLevel.Crates)
            {
                physics.ApplyToObject(crate, zeroVector);
            }

            // Apply physics to enemies.
            foreach (var enemy in State.ActiveLevel.Enemies)
            {
                enemy.Velocity = new Vector2(enemy.Facing.X * Constants.ENEMY_VELOCITY, 0);
                physics.ApplyToObject(enemy, zeroVector);
            }

            foreach (var projectile in State.ActiveLevel.Projectiles)
            {
                physics.ApplyToProjectile(projectile);
            }

            State.ActiveLevel.CleanUp();
        }

        // Binds any unbound input device to a new player and spawns them.
        private void JoinPlayers()
        {
            // Check for new players joining the game
            var unboundControls = inputMasks.Where(i => i.Bound == false);
            if (unboundControls.Any())
            {
                foreach(var newPlayer in State.BindPlayers(unboundControls))
                {
                    spriteResolver.ResolveBindings(newPlayer);
                    playerController.SpawnPlayer(newPlayer);
                }
                /*var playerToFollow = State.BoundPlayers.FirstOrDefault(); if(playerToFollow != null) { Camera.Track(playerToFollow); Camera.Follow(); }*/
            }
        }

        private void TransitionToLevel(string levelName)
        {
            State.SetActiveLevel(levelName);
        }

        public override void Prerender(FrameCounter counter, double gameTimeMsec)
        {
            Renderer.ResetTransform();
            foreach (var splitview in ActiveViews)
            {
                var viewport = splitview.Viewport;
                terrainWidget.Prerender(Grid, viewport, Plane);
            }
        }

        /// <summary>
        /// Draws each active view: terrain, living players, collectables, and the
        /// debug grid, clipped to the view's scissor rectangle. Note the Y axis is
        /// flipped (Scale(1, -1)) so world-up renders as screen-up.
        /// </summary>
        public override void Render(FrameCounter counter, double gameTimeMsec)
        {
            Renderer.Clear(Color.Black);
            foreach (var splitview in ActiveViews)
            {
                var viewport = splitview.Viewport;
                Renderer.Begin();
                //Renderer.SetScissorRectangle(splitview.MapToViewport(Vector2.Zero), splitview.DisplaySize);
                Renderer.SetScissorRectangle(-viewport.Origin, viewport.DisplaySize);
                Renderer.ResetTransform();
                Renderer.Scale(1, -1);

                // Render terrain
                terrainWidget.Render(Grid, viewport, Plane);

                foreach (var obj in State.ActivePlayers.Where(p => !p.Dead))
                {
                    RenderObject(viewport, obj);
                }

                foreach (var obj in State.ActiveLevel.CollectableObjects)
                {
                    RenderRectangle(viewport, obj);
                }

                /* foreach (var obj in objectManager.RenderOrder) { Renderer.RenderOpagueSprite(obj.SpriteBinding.Object, obj.Position, obj.Size, obj.Facing.X < 0); } foreach (var projectile in State.ActiveLevel.Projectiles) { Renderer.RenderRectangle(projectile.Position, projectile.Size, projectile.Color); } */

                dynamicGridWidget.Render(viewport);
                Renderer.End();
            }
        }

        private void RenderObject(ViewportProjection view, IRenderableObject obj)
        {
            // The renderer expects to get the top left screen pixel and a positive size (after scale)
            // since we have flipped the y axis, we must correct by giving a negative height size
            // and add the height to the origin.
            var origin = new Vector2(obj.Position.X, obj.Position.Y + obj.Size.Y);
            var size = new Vector2(obj.Size.X, -obj.Size.Y);
            Renderer.RenderOpagueSprite(obj.SpriteBinding.Object, view.MapToViewport(origin), view.MapSizeToViewport(size), obj.Facing.X < 0);
        }

        private void RenderRectangle(ViewportProjection view, IRenderableObject obj)
        {
            // The renderer expects to get the top left screen pixel and a positive size (after scale)
            // since we have flipped the y axis, we must correct by giving a negative height size
            // and add the height to the origin.
            var origin = new Vector2(obj.Position.X, obj.Position.Y + obj.Size.Y);
            var size = new Vector2(obj.Size.X, -obj.Size.Y);
            Renderer.RenderRectangle(view.MapToViewport(origin), view.MapSizeToViewport(size), obj.Color);
        }

        /// <summary>
        /// Human-readable diagnostics: view origins, loaded sector counts, and a
        /// line per active player (binding, position, inventory, equipped item).
        /// </summary>
        public override string[] DiagnosticsString()
        {
            var lines = new List<string>();
            var sectors = SectorProbe().ToArray();
            lines.Add(string.Format("Views:"));
            foreach(var view in ActiveViews)
            {
                lines.Add(string.Format("{0}", view.Viewport.Projection.TopLeft));
            }
            lines.Add(string.Format("Sectors: {0}/{1}", sectors.Count(s => s.FullyLoaded), sectors.Count()));
            foreach(var player in State.ActivePlayers)
            {
                var itemName = player?.EquipedItem?.Name ?? "None";
                var itemQuantity = player?.EquipedItem?.Quantity ?? 0;
                lines.Add(
                    string.Format(
                        "{0}: {1}, {2} - {3} : {4}",
                        player?.PlayerBinding ?? "",
                        player?.Position ?? Vector2.Zero,
                        player?.Inventory?.TotalCount ?? 0,
                        itemName,
                        itemQuantity));
            }
            return lines.ToArray();
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using Nini.Config;
using log4net;
using System;
using System.Reflection;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml;
using System.Xml.Serialization;
using System.Collections.Generic;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenSim.Framework;
using OpenSim.Framework.Servers.HttpServer;
using OpenMetaverse;

namespace OpenSim.Server.Handlers.Authentication
{
    /// <summary>
    /// HTTP POST handler for the "/auth" endpoint.  Dispatches plain
    /// (key/value) authentication requests to an
    /// <see cref="IAuthenticationService"/> and renders the results as small
    /// XML documents.  The "crypt" protocol is accepted but not implemented.
    /// </summary>
    public class AuthenticationServerPostHandler : BaseStreamHandler
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>Maximum number of body bytes read for an encrypted request.</summary>
        private const int MaxCryptRequestLength = 16384;

        private IAuthenticationService m_AuthenticationService;

        // Optional capabilities; all disabled unless explicitly enabled in config.
        private bool m_AllowGetAuthInfo = false;
        private bool m_AllowSetAuthInfo = false;
        private bool m_AllowSetPassword = false;

        public AuthenticationServerPostHandler(IAuthenticationService service) :
                this(service, null) {}

        public AuthenticationServerPostHandler(IAuthenticationService service, IConfig config) :
                base("POST", "/auth")
        {
            m_AuthenticationService = service;

            if (config != null)
            {
                m_AllowGetAuthInfo = config.GetBoolean("AllowGetAuthInfo", m_AllowGetAuthInfo);
                m_AllowSetAuthInfo = config.GetBoolean("AllowSetAuthInfo", m_AllowSetAuthInfo);
                m_AllowSetPassword = config.GetBoolean("AllowSetPassword", m_AllowSetPassword);
            }
        }

        /// <summary>
        /// Entry point for POST requests.  Routes "/auth/plain" to the
        /// key/value protocol and "/auth/crypt" to the (unimplemented)
        /// encrypted protocol; anything else gets an empty response body.
        /// </summary>
        public override byte[] Handle(string path, Stream request,
                IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
        {
            string[] p = SplitParams(path);

            if (p.Length > 0)
            {
                switch (p[0])
                {
                    case "plain":
                        StreamReader sr = new StreamReader(request);
                        string body = sr.ReadToEnd();
                        sr.Close();

                        return DoPlainMethods(body);
                    case "crypt":
                        // Clamp BEFORE allocating so an oversized request body
                        // cannot force a huge buffer allocation (the original
                        // allocated request.Length bytes and then clamped).
                        long length = request.Length;
                        if (length > MaxCryptRequestLength)
                            length = MaxCryptRequestLength;
                        byte[] buffer = new byte[length];
                        request.Read(buffer, 0, (int)length);
                        return DoEncryptedMethods(buffer);
                }
            }

            return new byte[0];
        }

        /// <summary>
        /// Handles the plain (query-string style) protocol.  Expects at least
        /// METHOD and PRINCIPAL keys; per-method keys (PASSWORD, TOKEN, ...)
        /// are validated in each case.  Returns a Success/Failure XML blob.
        /// </summary>
        private byte[] DoPlainMethods(string body)
        {
            Dictionary<string, object> request = ServerUtils.ParseQueryString(body);

            // Token lifetime (passed through to the service); capped at 30
            // regardless of what the client asks for.
            int lifetime = 30;

            if (request.ContainsKey("LIFETIME"))
            {
                lifetime = Convert.ToInt32(request["LIFETIME"].ToString());
                if (lifetime > 30)
                    lifetime = 30;
            }

            if (!request.ContainsKey("METHOD"))
                return FailureResult();
            if (!request.ContainsKey("PRINCIPAL"))
                return FailureResult();

            string method = request["METHOD"].ToString();

            UUID principalID;
            string token;

            if (!UUID.TryParse(request["PRINCIPAL"].ToString(), out principalID))
                return FailureResult();

            switch (method)
            {
                case "authenticate":
                    if (!request.ContainsKey("PASSWORD"))
                        return FailureResult();

                    token = m_AuthenticationService.Authenticate(principalID, request["PASSWORD"].ToString(), lifetime);

                    if (token != String.Empty)
                        return SuccessResult(token);
                    return FailureResult();

                case "setpassword":
                    if (!m_AllowSetPassword)
                        return FailureResult();

                    if (!request.ContainsKey("PASSWORD"))
                        return FailureResult();

                    if (m_AuthenticationService.SetPassword(principalID, request["PASSWORD"].ToString()))
                        return SuccessResult();
                    else
                        return FailureResult();

                case "verify":
                    if (!request.ContainsKey("TOKEN"))
                        return FailureResult();

                    if (m_AuthenticationService.Verify(principalID, request["TOKEN"].ToString(), lifetime))
                        return SuccessResult();
                    return FailureResult();

                case "release":
                    if (!request.ContainsKey("TOKEN"))
                        return FailureResult();

                    if (m_AuthenticationService.Release(principalID, request["TOKEN"].ToString()))
                        return SuccessResult();
                    return FailureResult();

                case "getauthinfo":
                    if (m_AllowGetAuthInfo)
                        return GetAuthInfo(principalID);
                    break;

                case "setauthinfo":
                    if (m_AllowSetAuthInfo)
                        return SetAuthInfo(principalID, request);
                    break;
            }

            // Unknown method, or a capability that is not enabled.
            return FailureResult();
        }

        private byte[] DoEncryptedMethods(byte[] ciphertext)
        {
            // Encrypted protocol is not implemented; reply with an empty body.
            return new byte[0];
        }

        private byte[] SuccessResult()
        {
            XmlDocument doc = new XmlDocument();

            XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration, "", "");
            doc.AppendChild(xmlnode);

            XmlElement rootElement = doc.CreateElement("", "ServerResponse", "");
            doc.AppendChild(rootElement);

            XmlElement result = doc.CreateElement("", "Result", "");
            result.AppendChild(doc.CreateTextNode("Success"));
            rootElement.AppendChild(result);

            return DocToBytes(doc);
        }

        /// <summary>
        /// Returns the stored auth info for the principal as an XML key/value
        /// response, or a failure blob when no record exists.
        /// </summary>
        byte[] GetAuthInfo(UUID principalID)
        {
            AuthInfo info = m_AuthenticationService.GetAuthInfo(principalID);

            if (info != null)
            {
                Dictionary<string, object> result = new Dictionary<string, object>();
                result["result"] = info.ToKeyValuePairs();
                return ResultToBytes(result);
            }
            else
            {
                return FailureResult();
            }
        }

        /// <summary>
        /// Updates the stored auth info for the principal.  Only the fields
        /// present in the request are overwritten; the record must already
        /// exist.
        /// </summary>
        byte[] SetAuthInfo(UUID principalID, Dictionary<string, object> request)
        {
            AuthInfo existingInfo = m_AuthenticationService.GetAuthInfo(principalID);

            if (existingInfo == null)
                return FailureResult();

            if (request.ContainsKey("AccountType"))
                existingInfo.AccountType = request["AccountType"].ToString();
            if (request.ContainsKey("PasswordHash"))
                existingInfo.PasswordHash = request["PasswordHash"].ToString();
            if (request.ContainsKey("PasswordSalt"))
                existingInfo.PasswordSalt = request["PasswordSalt"].ToString();
            if (request.ContainsKey("WebLoginKey"))
                existingInfo.WebLoginKey = request["WebLoginKey"].ToString();

            if (!m_AuthenticationService.SetAuthInfo(existingInfo))
            {
                // BUGFIX: the format string previously had three placeholders
                // ({0} {1} {2}) but only one argument was supplied, which made
                // ErrorFormat throw a FormatException instead of logging.
                m_log.ErrorFormat(
                    "[AUTHENTICATION SERVER POST HANDLER]: Authentication info store failed for account {0}",
                    existingInfo.PrincipalID);
                return FailureResult();
            }

            return SuccessResult();
        }

        private byte[] FailureResult()
        {
            XmlDocument doc = new XmlDocument();

            XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration, "", "");
            doc.AppendChild(xmlnode);

            XmlElement rootElement = doc.CreateElement("", "ServerResponse", "");
            doc.AppendChild(rootElement);

            XmlElement result = doc.CreateElement("", "Result", "");
            result.AppendChild(doc.CreateTextNode("Failure"));
            rootElement.AppendChild(result);

            return DocToBytes(doc);
        }

        private byte[] SuccessResult(string token)
        {
            XmlDocument doc = new XmlDocument();

            XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration, "", "");
            doc.AppendChild(xmlnode);

            XmlElement rootElement = doc.CreateElement("", "ServerResponse", "");
            doc.AppendChild(rootElement);

            XmlElement result = doc.CreateElement("", "Result", "");
            result.AppendChild(doc.CreateTextNode("Success"));
            rootElement.AppendChild(result);

            XmlElement t = doc.CreateElement("", "Token", "");
            t.AppendChild(doc.CreateTextNode(token));
            rootElement.AppendChild(t);

            return DocToBytes(doc);
        }

        private byte[] DocToBytes(XmlDocument doc)
        {
            MemoryStream ms = new MemoryStream();
            XmlTextWriter xw = new XmlTextWriter(ms, null);
            xw.Formatting = Formatting.Indented;
            doc.WriteTo(xw);
            xw.Flush();

            // BUGFIX: GetBuffer() returns the stream's whole internal buffer,
            // which is normally larger than the written content and so appended
            // trailing NUL bytes to every response.  ToArray() copies exactly
            // the bytes that were written.
            return ms.ToArray();
        }

        private byte[] ResultToBytes(Dictionary<string, object> result)
        {
            string xmlString = ServerUtils.BuildXmlResponse(result);
            return Util.UTF8NoBomEncoding.GetBytes(xmlString);
        }
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *********************************************************************/ /*************************************************************************** Copyright (c) Microsoft Corporation. All rights reserved. This code is licensed under the Visual Studio SDK license terms. THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
***************************************************************************/

using System;
using System.CodeDom;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using EnvDTE;

namespace Microsoft.Samples.VisualStudio.CodeDomCodeModel
{
    /// <summary>
    /// CodeModel wrapper that exposes a CodeDom <see cref="CodeMemberProperty"/>
    /// through the EnvDTE <see cref="CodeProperty"/> interface.
    /// </summary>
    [ComVisible(true)]
    [SuppressMessage("Microsoft.Interoperability", "CA1409:ComVisibleTypesShouldBeCreatable")]
    [SuppressMessage("Microsoft.Interoperability", "CA1405:ComVisibleTypeBaseTypesShouldBeComVisible")]
    public class CodeDomCodeProperty : CodeDomCodeElement<CodeMemberProperty>, CodeProperty
    {
        private CodeClass parent;
        // Lazily created wrappers for the property accessors; see Getter/Setter.
        private CodeFunction getter;
        private CodeFunction setter;

        [SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "putName")]
        [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "0#dte")]
        public CodeDomCodeProperty(DTE dte, CodeClass parent, string name, string putName, object type, vsCMAccess access)
            : base(dte, name)
        {
            this.parent = parent;

            CodeMemberProperty prop = new CodeMemberProperty();
            prop.Name = name;
            // Back-pointer so the CodeDom node can be mapped back to this wrapper.
            prop.UserData[CodeKey] = this;
            CodeObject = prop;

            prop.Type = CodeDomCodeTypeRef.ToCodeTypeReference(ObjectToTypeRef(type));
            prop.Attributes = VSAccessToMemberAccess(access);
        }

        public CodeDomCodeProperty(CodeType parent, CodeMemberProperty property)
            : base((null == parent) ? null : parent.DTE, (null == property) ? null : property.Name)
        {
            //!!! need to set parent
            CodeObject = property;
        }

        #region CodeProperty Members

        public vsCMAccess Access
        {
            get
            {
                return MemberAccessToVSAccess(CodeObject.Attributes);
            }
            [SuppressMessage("Microsoft.Naming", "CA1725:ParameterNamesShouldMatchBaseDeclaration", MessageId = "0#")]
            set
            {
                CodeObject.Attributes = VSAccessToMemberAccess(value);
            }
        }

        public CodeAttribute AddAttribute(string Name, string Value, object Position)
        {
            return AddCustomAttribute(CodeObject.CustomAttributes, Name, Value, Position);
        }

        public CodeElements Attributes
        {
            get
            {
                return GetCustomAttributes(CodeObject.CustomAttributes);
            }
        }

        public string Comment
        {
            get
            {
                return GetComment(CodeObject.Comments, false);
            }
            [SuppressMessage("Microsoft.Naming", "CA1725:ParameterNamesShouldMatchBaseDeclaration", MessageId = "0#")]
            set
            {
                ReplaceComment(CodeObject.Comments, value, false);
                CommitChanges();
            }
        }

        public string DocComment
        {
            get
            {
                return GetComment(CodeObject.Comments, true);
            }
            [SuppressMessage("Microsoft.Naming", "CA1725:ParameterNamesShouldMatchBaseDeclaration", MessageId = "0#")]
            set
            {
                ReplaceComment(CodeObject.Comments, value, true);
                CommitChanges();
            }
        }

        public CodeFunction Getter
        {
            get
            {
                // Created on demand, only when the property actually has a
                // get accessor.
                if (getter == null)
                {
                    if (CodeObject.HasGet)
                    {
                        getter = new CodeDomCodeFunction(DTE,
                            (CodeElement)parent,
                            "get_" + Name,
                            vsCMFunction.vsCMFunctionPropertyGet,
                            CodeDomCodeTypeRef.FromCodeTypeReference(CodeObject.Type),
                            MemberAccessToVSAccess(CodeObject.Attributes));
                    }
                }
                return getter;
            }
            [SuppressMessage("Microsoft.Globalization", "CA1303:DoNotPassLiteralsAsLocalizedParameters", MessageId = "System.ArgumentException.#ctor(System.String)")]
            [SuppressMessage("Microsoft.Naming", "CA1725:ParameterNamesShouldMatchBaseDeclaration", MessageId = "0#")]
            set
            {
                CodeDomCodeFunction cdcf = value as CodeDomCodeFunction;
                if (cdcf == null && value != null)
                {
                    throw new ArgumentException("value must be CodeDomCodeFunction");
                }

                if (value == null)
                {
                    CodeObject.HasGet = false;
                }
                else
                {
                    CodeObject.GetStatements.AddRange(cdcf.CodeObject.Statements);
                    CodeObject.HasGet = true;
                }
            }
        }

        public CodeFunction Setter
        {
            get
            {
                if (setter == null)
                {
                    if (CodeObject.HasSet)
                    {
                        // BUGFIX: the setter wrapper was previously created with
                        // vsCMFunction.vsCMFunctionPropertyGet (copy/paste from
                        // Getter); it must identify itself as a property-set
                        // function.
                        setter = new CodeDomCodeFunction(DTE,
                            (CodeElement)parent,
                            "set_" + Name,
                            vsCMFunction.vsCMFunctionPropertySet,
                            CodeDomCodeTypeRef.FromCodeTypeReference(CodeObject.Type),
                            MemberAccessToVSAccess(CodeObject.Attributes));
                    }
                }
                return setter;
            }
            [SuppressMessage("Microsoft.Globalization", "CA1303:DoNotPassLiteralsAsLocalizedParameters", MessageId = "System.ArgumentException.#ctor(System.String)")]
            [SuppressMessage("Microsoft.Naming", "CA1725:ParameterNamesShouldMatchBaseDeclaration", MessageId = "0#")]
            set
            {
                CodeDomCodeFunction cdcf = value as CodeDomCodeFunction;
                if (cdcf == null && value != null)
                {
                    throw new ArgumentException("value must be CodeDomCodeFunction");
                }

                if (value == null)
                {
                    CodeObject.HasSet = false;
                }
                else
                {
                    CodeObject.SetStatements.AddRange(cdcf.CodeObject.Statements);
                    CodeObject.HasSet = true;
                }
            }
        }

        public CodeTypeRef Type
        {
            get
            {
                return CodeDomCodeTypeRef.FromCodeTypeReference(CodeObject.Type);
            }
            [SuppressMessage("Microsoft.Naming", "CA1725:ParameterNamesShouldMatchBaseDeclaration", MessageId = "0#")]
            set
            {
                CodeObject.Type = CodeDomCodeTypeRef.ToCodeTypeReference(value);
                CommitChanges();
            }
        }

        public CodeClass Parent
        {
            get
            {
                return parent;
            }
        }

        public string get_Prototype(int Flags)
        {
            throw new NotImplementedException();
        }

        #endregion

        public override object ParentElement
        {
            get { return parent; }
        }

        public override CodeElements Children
        {
            get { throw new NotImplementedException(); }
        }

        public override CodeElements Collection
        {
            get { return parent.Children; }
        }

        public override string FullName
        {
            get { return CodeObject.Name; }
        }

        public override vsCMElement Kind
        {
            get { return vsCMElement.vsCMElementProperty; }
        }

        public override ProjectItem ProjectItem
        {
            get { return parent.ProjectItem; }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Buffers;
using System.Diagnostics;
using System.Numerics;

namespace System.Security.Cryptography.Asn1
{
    // Partial: OBJECT IDENTIFIER (OID) writing.  Values are encoded per
    // ITU-T X.690 sec 8.19 (base-128 sub-identifiers, first two components
    // condensed into one).
    public sealed partial class AsnWriter
    {
        /// <summary>
        ///   Write an Object Identifier with tag UNIVERSAL 6.
        /// </summary>
        /// <param name="oid">The object identifier to write.</param>
        /// <exception cref="ArgumentNullException">
        ///   <paramref name="oid"/> is <c>null</c>
        /// </exception>
        /// <exception cref="CryptographicException">
        ///   <paramref name="oid"/>.<see cref="Oid.Value"/> is not a valid dotted decimal
        ///   object identifier
        /// </exception>
        /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
        public void WriteObjectIdentifier(Oid oid)
        {
            if (oid == null)
                throw new ArgumentNullException(nameof(oid));

            CheckDisposed();

            if (oid.Value == null)
                throw new CryptographicException(SR.Argument_InvalidOidValue);

            WriteObjectIdentifier(oid.Value);
        }

        /// <summary>
        ///   Write an Object Identifier with tag UNIVERSAL 6.
        /// </summary>
        /// <param name="oidValue">The object identifier to write.</param>
        /// <exception cref="ArgumentNullException">
        ///   <paramref name="oidValue"/> is <c>null</c>
        /// </exception>
        /// <exception cref="CryptographicException">
        ///   <paramref name="oidValue"/> is not a valid dotted decimal
        ///   object identifier
        /// </exception>
        /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
        public void WriteObjectIdentifier(string oidValue)
        {
            if (oidValue == null)
                throw new ArgumentNullException(nameof(oidValue));

            WriteObjectIdentifier(oidValue.AsSpan());
        }

        /// <summary>
        ///   Write an Object Identifier with tag UNIVERSAL 6.
        /// </summary>
        /// <param name="oidValue">The object identifier to write.</param>
        /// <exception cref="CryptographicException">
        ///   <paramref name="oidValue"/> is not a valid dotted decimal
        ///   object identifier
        /// </exception>
        /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
        public void WriteObjectIdentifier(ReadOnlySpan<char> oidValue)
        {
            WriteObjectIdentifierCore(Asn1Tag.ObjectIdentifier, oidValue);
        }

        /// <summary>
        ///   Write an Object Identifier with a specified tag.
        /// </summary>
        /// <param name="tag">The tag to write.</param>
        /// <param name="oid">The object identifier to write.</param>
        /// <exception cref="ArgumentException">
        ///   <paramref name="tag"/>.<see cref="Asn1Tag.TagClass"/> is
        ///   <see cref="TagClass.Universal"/>, but
        ///   <paramref name="tag"/>.<see cref="Asn1Tag.TagValue"/> is not correct for
        ///   the method
        /// </exception>
        /// <exception cref="ArgumentNullException">
        ///   <paramref name="oid"/> is <c>null</c>
        /// </exception>
        /// <exception cref="CryptographicException">
        ///   <paramref name="oid"/>.<see cref="Oid.Value"/> is not a valid dotted decimal
        ///   object identifier
        /// </exception>
        /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
        public void WriteObjectIdentifier(Asn1Tag tag, Oid oid)
        {
            if (oid == null)
                throw new ArgumentNullException(nameof(oid));

            CheckUniversalTag(tag, UniversalTagNumber.ObjectIdentifier);
            CheckDisposed();

            if (oid.Value == null)
                throw new CryptographicException(SR.Argument_InvalidOidValue);

            WriteObjectIdentifier(tag, oid.Value);
        }

        /// <summary>
        ///   Write an Object Identifier with a specified tag.
        /// </summary>
        /// <param name="tag">The tag to write.</param>
        /// <param name="oidValue">The object identifier to write.</param>
        /// <exception cref="ArgumentException">
        ///   <paramref name="tag"/>.<see cref="Asn1Tag.TagClass"/> is
        ///   <see cref="TagClass.Universal"/>, but
        ///   <paramref name="tag"/>.<see cref="Asn1Tag.TagValue"/> is not correct for
        ///   the method
        /// </exception>
        /// <exception cref="ArgumentNullException">
        ///   <paramref name="oidValue"/> is <c>null</c>
        /// </exception>
        /// <exception cref="CryptographicException">
        ///   <paramref name="oidValue"/> is not a valid dotted decimal
        ///   object identifier
        /// </exception>
        /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
        public void WriteObjectIdentifier(Asn1Tag tag, string oidValue)
        {
            if (oidValue == null)
                throw new ArgumentNullException(nameof(oidValue));

            WriteObjectIdentifier(tag, oidValue.AsSpan());
        }

        /// <summary>
        ///   Write an Object Identifier with a specified tag.
        /// </summary>
        /// <param name="tag">The tag to write.</param>
        /// <param name="oidValue">The object identifier to write.</param>
        /// <exception cref="ArgumentException">
        ///   <paramref name="tag"/>.<see cref="Asn1Tag.TagClass"/> is
        ///   <see cref="TagClass.Universal"/>, but
        ///   <paramref name="tag"/>.<see cref="Asn1Tag.TagValue"/> is not correct for
        ///   the method
        /// </exception>
        /// <exception cref="CryptographicException">
        ///   <paramref name="oidValue"/> is not a valid dotted decimal
        ///   object identifier
        /// </exception>
        /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
        public void WriteObjectIdentifier(Asn1Tag tag, ReadOnlySpan<char> oidValue)
        {
            CheckUniversalTag(tag, UniversalTagNumber.ObjectIdentifier);

            // OIDs are always primitive-encoded; strip any constructed bit.
            WriteObjectIdentifierCore(tag.AsPrimitive(), oidValue);
        }

        // T-REC-X.690-201508 sec 8.19
        // Validates the dotted-decimal string, encodes its sub-identifiers into
        // a pooled scratch buffer, then emits tag + length + contents.
        private void WriteObjectIdentifierCore(Asn1Tag tag, ReadOnlySpan<char> oidValue)
        {
            CheckDisposed();

            // T-REC-X.690-201508 sec 8.19.4
            // The first character is in { 0, 1, 2 }, the second will be a '.', and a third (digit)
            // will also exist.
            if (oidValue.Length < 3)
                throw new CryptographicException(SR.Argument_InvalidOidValue);
            if (oidValue[1] != '.')
                throw new CryptographicException(SR.Argument_InvalidOidValue);

            // The worst case is "1.1.1.1.1", which takes 4 bytes (5 components, with the first two condensed)
            // Longer numbers get smaller: "2.1.127" is only 2 bytes. (81d (0x51) and 127 (0x7F))
            // So length / 2 should prevent any reallocations.
            var localPool = ArrayPool<byte>.Shared;
            byte[] tmp = localPool.Rent(oidValue.Length / 2);
            int tmpOffset = 0;

            try
            {
                int firstComponent;

                switch (oidValue[0])
                {
                    case '0':
                        firstComponent = 0;
                        break;
                    case '1':
                        firstComponent = 1;
                        break;
                    case '2':
                        firstComponent = 2;
                        break;
                    default:
                        throw new CryptographicException(SR.Argument_InvalidOidValue);
                }

                // The first two components are special:
                // ITU X.690 8.19.4:
                //   The numerical value of the first subidentifier is derived from the values of the first two
                //   object identifier components in the object identifier value being encoded, using the formula:
                //       (X*40) + Y
                //   where X is the value of the first object identifier component and Y is the value of the
                //   second object identifier component.
                //       NOTE - This packing of the first two object identifier components recognizes that only
                //       three values are allocated from the root node, and at most 39 subsequent values from
                //       nodes reached by X = 0 and X = 1.

                // skip firstComponent and the trailing .
                ReadOnlySpan<char> remaining = oidValue.Slice(2);

                BigInteger subIdentifier = ParseSubIdentifier(ref remaining);
                subIdentifier += 40 * firstComponent;

                int localLen = EncodeSubIdentifier(tmp.AsSpan(tmpOffset), ref subIdentifier);
                tmpOffset += localLen;

                // Each remaining component encodes as its own sub-identifier.
                while (!remaining.IsEmpty)
                {
                    subIdentifier = ParseSubIdentifier(ref remaining);
                    localLen = EncodeSubIdentifier(tmp.AsSpan(tmpOffset), ref subIdentifier);
                    tmpOffset += localLen;
                }

                Debug.Assert(!tag.IsConstructed);
                WriteTag(tag);
                WriteLength(tmpOffset);
                Buffer.BlockCopy(tmp, 0, _buffer, _offset, tmpOffset);
                _offset += tmpOffset;
            }
            finally
            {
                // Clear the used portion before returning the buffer to the
                // shared pool so encoded data doesn't leak to other renters.
                Array.Clear(tmp, 0, tmpOffset);
                localPool.Return(tmp);
            }
        }

        // Parses the next dotted-decimal component from the front of oidValue
        // and advances the span past it (and the following '.', if any).
        private static BigInteger ParseSubIdentifier(ref ReadOnlySpan<char> oidValue)
        {
            int endIndex = oidValue.IndexOf('.');

            if (endIndex == -1)
            {
                endIndex = oidValue.Length;
            }
            else if (endIndex == 0 || endIndex == oidValue.Length - 1)
            {
                // Empty component ("..", leading or trailing '.') is invalid.
                throw new CryptographicException(SR.Argument_InvalidOidValue);
            }

            // The following code is equivalent to
            // BigInteger.TryParse(temp, NumberStyles.None, CultureInfo.InvariantCulture, out value)
            // TODO: Split this for netstandard vs netcoreapp for span-perf?.
            BigInteger value = BigInteger.Zero;

            for (int position = 0; position < endIndex; position++)
            {
                if (position > 0 && value == 0)
                {
                    // T-REC X.680-201508 sec 12.26
                    // Leading zeros (e.g. "05") are not permitted.
                    throw new CryptographicException(SR.Argument_InvalidOidValue);
                }

                value *= 10;
                value += AtoI(oidValue[position]);
            }

            oidValue = oidValue.Slice(Math.Min(oidValue.Length, endIndex + 1));
            return value;
        }

        // Converts a single ASCII digit to its numeric value; any other
        // character makes the OID string invalid.
        private static int AtoI(char c)
        {
            if (c >= '0' && c <= '9')
            {
                return c - '0';
            }

            throw new CryptographicException(SR.Argument_InvalidOidValue);
        }

        // ITU-T-X.690-201508 sec 8.19.5
        // Encodes one sub-identifier in base-128, most significant group
        // first, high bit set on every byte except the last.  Returns the
        // number of bytes written to dest.
        private static int EncodeSubIdentifier(Span<byte> dest, ref BigInteger subIdentifier)
        {
            Debug.Assert(dest.Length > 0);

            if (subIdentifier.IsZero)
            {
                dest[0] = 0;
                return 1;
            }

            BigInteger unencoded = subIdentifier;
            int idx = 0;

            do
            {
                BigInteger cur = unencoded & 0x7F;
                byte curByte = (byte)cur;

                // Every byte except the first one emitted (which becomes the
                // LAST byte after the reverse below) gets the continuation bit.
                if (subIdentifier != unencoded)
                {
                    curByte |= 0x80;
                }

                unencoded >>= 7;
                dest[idx] = curByte;
                idx++;
            }
            while (unencoded != BigInteger.Zero);

            // Bytes were produced least-significant group first; reverse into
            // the big-endian order the encoding requires.
            Reverse(dest.Slice(0, idx));
            return idx;
        }
    }
}
// lookup.cs
//
// Copyright 2010 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;

namespace Baker.Text
{
    // How a lookup name is used at its reference site.
    internal enum JsReferenceType
    {
        Variable,
        Function,
        Constructor
    }

    /// <summary>
    /// AST node for a bare identifier reference (a "lookup") in JavaScript
    /// source.  Holds the referenced name and (after resolution) its variable
    /// field, and provides assignment analysis and debug-lookup matching.
    /// </summary>
    internal sealed class JsLookup : JsExpression, IJsNameReference
    {
        // The resolved variable field, set during scope resolution; null until then.
        public JsVariableField VariableField { get; internal set; }

        public bool IsGenerated { get; set; }
        public JsReferenceType RefType { get; set; }
        public string Name { get; set; }

        /// <summary>
        /// True when this lookup is the target of an assignment: the left-hand
        /// side of an assignment operator, the operand of ++/--, or the
        /// iteration variable of a for-in statement.
        /// </summary>
        public bool IsAssignment
        {
            get
            {
                var isAssign = false;

                // see if our parent is a binary operator.
                var binaryOp = Parent as JsBinaryOperator;
                if (binaryOp != null)
                {
                    // if we are, we are an assignment lookup if the binary operator parent is an assignment
                    // and we are the left-hand side.
                    isAssign = binaryOp.IsAssign && binaryOp.Operand1 == this;
                }
                else
                {
                    // not a binary op -- but we might still be an "assignment" if we are an increment or decrement operator.
                    var unaryOp = Parent as JsUnaryOperator;
                    isAssign = unaryOp != null
                        && (unaryOp.OperatorToken == JsToken.Increment || unaryOp.OperatorToken == JsToken.Decrement);

                    if (!isAssign)
                    {
                        // AND if we are the variable of a for-in statement, we are an "assignment".
                        // (if the forIn variable is a var, then it wouldn't be a lookup, so we don't have to worry about
                        // going up past a var-decl intermediate node)
                        var forIn = Parent as JsForIn;
                        isAssign = forIn != null && this == forIn.Variable;
                    }
                }

                return isAssign;
            }
        }

        /// <summary>
        /// The value expression being assigned to this lookup when it is the
        /// left-hand side of a simple '=' assignment; otherwise null.  Note
        /// that op-assigns (+=, etc.) intentionally return null because the
        /// resulting value also depends on the lookup's prior value.
        /// </summary>
        public JsAstNode AssignmentValue
        {
            get
            {
                JsAstNode value = null;

                // see if our parent is a binary operator.
                var binaryOp = Parent as JsBinaryOperator;
                if (binaryOp != null)
                {
                    // the parent is a binary operator. If it is an assignment operator
                    // (not including any of the op-assign which depend on an initial value)
                    // then the value we are assigning is the right-hand side of the = operator.
                    value = binaryOp.OperatorToken == JsToken.Assign && binaryOp.Operand1 == this ? binaryOp.Operand2 : null;
                }

                return value;
            }
        }

        public JsLookup(JsContext context, JsParser parser)
            : base(context, parser)
        {
            // Default to a plain variable reference until the parser says otherwise.
            RefType = JsReferenceType.Variable;
        }

        public override void Accept(IJsVisitor visitor)
        {
            if (visitor != null)
            {
                visitor.Visit(this);
            }
        }

        /// <summary>
        /// Two lookups are equivalent when they resolve to the same variable
        /// field, or (when unresolved) when their names match exactly.
        /// </summary>
        public override bool IsEquivalentTo(JsAstNode otherNode)
        {
            // this one is tricky. If we have a field assigned, then we are equivalent if the
            // field is the same as the other one. If there is no field, then just check the name
            var otherLookup = otherNode as JsLookup;
            if (otherLookup != null)
            {
                if (VariableField != null)
                {
                    // the variable fields should be the same
                    return VariableField.IsSameField(otherLookup.VariableField);
                }
                else
                {
                    // otherwise the names should be identical
                    return string.CompareOrdinal(Name, otherLookup.Name) == 0;
                }
            }

            // if we get here, we're not equivalent
            return false;
        }

        internal override string GetFunctionGuess(JsAstNode target)
        {
            // return the source name
            return Name;
        }

        // True when 'node' is a member access whose name equals the
        // lookup-string segment [startIndex, endIndex).
        private static bool MatchMemberName(JsAstNode node, string lookup, int startIndex, int endIndex)
        {
            // the node needs to be a Member node, and if it is, the appropriate portion of the lookup
            // string should match the name of the member.
            var member = node as JsMember;
            return member != null
                && string.CompareOrdinal(member.Name, 0, lookup, startIndex, endIndex - startIndex) == 0;
        }

        // Walks up the parent chain matching each dot-separated segment of the
        // lookup string (starting at startIndex) against successive member nodes.
        private static bool MatchesMemberChain(JsAstNode parent, string lookup, int startIndex)
        {
            // get the NEXT period
            var period = lookup.IndexOf('.', startIndex);

            // loop until we run out of periods
            while (period > 0)
            {
                // if the parent isn't a member, or if the name of the parent doesn't match
                // the current identifier in the chain, then we're no match and can bail
                if (!MatchMemberName(parent, lookup, startIndex, period))
                {
                    return false;
                }

                // next parent, next segment, and find the next period
                parent = parent.Parent;
                startIndex = period + 1;
                period = lookup.IndexOf('.', startIndex);
            }

            // now check the last segment, from start to the end of the string
            return MatchMemberName(parent, lookup, startIndex, lookup.Length);
        }

        /// <summary>
        /// True when this lookup (possibly as the root of a member chain, e.g.
        /// "Debug.WriteLine") matches one of the parser's configured debug
        /// lookups and debug-statement stripping is enabled.
        /// </summary>
        internal override bool IsDebuggerStatement
        {
            get
            {
                // if we don't want to strip debug statements, then nothing is a debug statement
                if (Parser.Settings.StripDebugStatements)
                {
                    // we want to look through the parser's debug lookup list (if there is one)
                    // and see if we match any of the debug lookups specified therein.
                    foreach (var lookup in Parser.DebugLookups)
                    {
                        // see if there's a period in this lookup
                        var firstPeriod = lookup.IndexOf('.');
                        if (firstPeriod > 0)
                        {
                            // this lookup is a member chain, so check our name against that
                            // first part before the period; if it matches, we need to walk up the parent tree
                            if (string.CompareOrdinal(Name, 0, lookup, 0, firstPeriod) == 0)
                            {
                                // we matched the first one; test the rest of the chain
                                if (MatchesMemberChain(Parent, lookup, firstPeriod + 1))
                                {
                                    return true;
                                }
                            }
                        }
                        else
                        {
                            // just a straight comparison
                            if (string.CompareOrdinal(Name, lookup) == 0)
                            {
                                // we found a match
                                return true;
                            }
                        }
                    }
                }

                // if we get here, we didn't find a match
                return false;
            }
        }

        //code in parser relies on this.name being returned from here
        public override String ToString()
        {
            return Name;
        }

        #region INameReference Members

        /// <summary>
        /// The nearest enclosing VARIABLE scope (function or global),
        /// skipping over any intervening block scopes.
        /// </summary>
        public JsActivationObject VariableScope
        {
            get
            {
                // get the enclosing scope from the node, but that might be
                // a block scope -- we only want variable scopes: functions or global.
                // so walk up until we find one.
                var enclosingScope = this.EnclosingScope;
                while (enclosingScope is JsBlockScope)
                {
                    enclosingScope = enclosingScope.Parent;
                }

                return enclosingScope;
            }
        }

        #endregion
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Numerics;
using System.Linq;
using GlmSharp.Swizzle;

// ReSharper disable InconsistentNaming

namespace GlmSharp
{
    /// <summary>
    /// A matrix of type bool with 2 columns and 3 rows (column-major field layout).
    /// </summary>
    [Serializable]
    [DataContract(Namespace = "mat")]
    [StructLayout(LayoutKind.Sequential)]
    public struct bmat2x3 : IReadOnlyList<bool>, IEquatable<bmat2x3>
    {

        #region Fields

        /// <summary>
        /// Column 0, Rows 0
        /// </summary>
        [DataMember]
        public bool m00;

        /// <summary>
        /// Column 0, Rows 1
        /// </summary>
        [DataMember]
        public bool m01;

        /// <summary>
        /// Column 0, Rows 2
        /// </summary>
        [DataMember]
        public bool m02;

        /// <summary>
        /// Column 1, Rows 0
        /// </summary>
        [DataMember]
        public bool m10;

        /// <summary>
        /// Column 1, Rows 1
        /// </summary>
        [DataMember]
        public bool m11;

        /// <summary>
        /// Column 1, Rows 2
        /// </summary>
        [DataMember]
        public bool m12;

        #endregion


        #region Constructors

        /// <summary>
        /// Component-wise constructor
        /// </summary>
        public bmat2x3(bool m00, bool m01, bool m02, bool m10, bool m11, bool m12)
        {
            this.m00 = m00;
            this.m01 = m01;
            this.m02 = m02;
            this.m10 = m10;
            this.m11 = m11;
            this.m12 = m12;
        }

        /// <summary>
        /// Constructs this matrix from a bmat2. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat2 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = false;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = false;
        }

        /// <summary>
        /// Constructs this matrix from a bmat3x2. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat3x2 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = false;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = false;
        }

        /// <summary>
        /// Constructs this matrix from a bmat4x2. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat4x2 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = false;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = false;
        }

        /// <summary>
        /// Constructs this matrix from a bmat2x3. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat2x3 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = m.m02;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = m.m12;
        }

        /// <summary>
        /// Constructs this matrix from a bmat3. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat3 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = m.m02;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = m.m12;
        }

        /// <summary>
        /// Constructs this matrix from a bmat4x3. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat4x3 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = m.m02;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = m.m12;
        }

        /// <summary>
        /// Constructs this matrix from a bmat2x4. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat2x4 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = m.m02;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = m.m12;
        }

        /// <summary>
        /// Constructs this matrix from a bmat3x4. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat3x4 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = m.m02;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = m.m12;
        }

        /// <summary>
        /// Constructs this matrix from a bmat4. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bmat4 m)
        {
            this.m00 = m.m00;
            this.m01 = m.m01;
            this.m02 = m.m02;
            this.m10 = m.m10;
            this.m11 = m.m11;
            this.m12 = m.m12;
        }

        /// <summary>
        /// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bvec2 c0, bvec2 c1)
        {
            this.m00 = c0.x;
            this.m01 = c0.y;
            this.m02 = false;
            this.m10 = c1.x;
            this.m11 = c1.y;
            this.m12 = false;
        }

        /// <summary>
        /// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
        /// </summary>
        public bmat2x3(bvec3 c0, bvec3 c1)
        {
            this.m00 = c0.x;
            this.m01 = c0.y;
            this.m02 = c0.z;
            this.m10 = c1.x;
            this.m11 = c1.y;
            this.m12 = c1.z;
        }

        #endregion


        #region Properties

        /// <summary>
        /// Creates a 2D array with all values (address: Values[x, y])
        /// </summary>
        public bool[,] Values => new[,] { { m00, m01, m02 }, { m10, m11, m12 } };

        /// <summary>
        /// Creates a 1D array with all values (internal order)
        /// </summary>
        public bool[] Values1D => new[] { m00, m01, m02, m10, m11, m12 };

        /// <summary>
        /// Gets or sets the column nr 0
        /// </summary>
        public bvec3 Column0
        {
            get
            {
                return new bvec3(m00, m01, m02);
            }
            set
            {
                m00 = value.x;
                m01 = value.y;
                m02 = value.z;
            }
        }

        /// <summary>
        /// Gets or sets the column nr 1
        /// </summary>
        public bvec3 Column1
        {
            get
            {
                return new bvec3(m10, m11, m12);
            }
            set
            {
                m10 = value.x;
                m11 = value.y;
                m12 = value.z;
            }
        }

        /// <summary>
        /// Gets or sets the row nr 0
        /// </summary>
        public bvec2 Row0
        {
            get
            {
                return new bvec2(m00, m10);
            }
            set
            {
                m00 = value.x;
                m10 = value.y;
            }
        }

        /// <summary>
        /// Gets or sets the row nr 1
        /// </summary>
        public bvec2 Row1
        {
            get
            {
                return new bvec2(m01, m11);
            }
            set
            {
                m01 = value.x;
                m11 = value.y;
            }
        }

        /// <summary>
        /// Gets or sets the row nr 2
        /// </summary>
        public bvec2 Row2
        {
            get
            {
                return new bvec2(m02, m12);
            }
            set
            {
                m02 = value.x;
                m12 = value.y;
            }
        }

        #endregion


        #region Static Properties

        /// <summary>
        /// Predefined all-zero matrix
        /// </summary>
        public static bmat2x3 Zero { get; } = new bmat2x3(false, false, false, false, false, false);

        /// <summary>
        /// Predefined all-ones matrix
        /// </summary>
        public static bmat2x3 Ones { get; } = new bmat2x3(true, true, true, true, true, true);

        /// <summary>
        /// Predefined identity matrix
        /// </summary>
        public static bmat2x3 Identity { get; } = new bmat2x3(true, false, false, false, true, false);

        #endregion


        #region Functions

        /// <summary>
        /// Returns an enumerator that iterates through all fields.
        /// </summary>
        public IEnumerator<bool> GetEnumerator()
        {
            yield return m00;
            yield return m01;
            yield return m02;
            yield return m10;
            yield return m11;
            yield return m12;
        }

        /// <summary>
        /// Returns an enumerator that iterates through all fields.
        /// </summary>
        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();

        #endregion


        /// <summary>
        /// Returns the number of Fields (2 x 3 = 6).
        /// </summary>
        public int Count => 6;

        /// <summary>
        /// Gets/Sets a specific indexed component (a bit slower than direct access).
        /// </summary>
        public bool this[int fieldIndex]
        {
            get
            {
                switch (fieldIndex)
                {
                    case 0: return m00;
                    case 1: return m01;
                    case 2: return m02;
                    case 3: return m10;
                    case 4: return m11;
                    case 5: return m12;
                    // FIX: nameof is refactor-safe (the file already relies on C# 6 features)
                    default: throw new ArgumentOutOfRangeException(nameof(fieldIndex));
                }
            }
            set
            {
                switch (fieldIndex)
                {
                    case 0: this.m00 = value; break;
                    case 1: this.m01 = value; break;
                    case 2: this.m02 = value; break;
                    case 3: this.m10 = value; break;
                    case 4: this.m11 = value; break;
                    case 5: this.m12 = value; break;
                    // FIX: nameof is refactor-safe (the file already relies on C# 6 features)
                    default: throw new ArgumentOutOfRangeException(nameof(fieldIndex));
                }
            }
        }

        /// <summary>
        /// Gets/Sets a specific 2D-indexed component (a bit slower than direct access).
        /// </summary>
        public bool this[int col, int row]
        {
            get
            {
                return this[col * 3 + row];
            }
            set
            {
                this[col * 3 + row] = value;
            }
        }

        /// <summary>
        /// Returns true iff this equals rhs component-wise.
        /// </summary>
        public bool Equals(bmat2x3 rhs) => (((m00.Equals(rhs.m00) && m01.Equals(rhs.m01)) && m02.Equals(rhs.m02)) && ((m10.Equals(rhs.m10) && m11.Equals(rhs.m11)) && m12.Equals(rhs.m12)));

        /// <summary>
        /// Returns true iff this equals rhs type- and component-wise.
        /// </summary>
        public override bool Equals(object obj)
        {
            if (ReferenceEquals(null, obj)) return false;
            return obj is bmat2x3 && Equals((bmat2x3) obj);
        }

        /// <summary>
        /// Returns true iff this equals rhs component-wise.
        /// </summary>
        public static bool operator ==(bmat2x3 lhs, bmat2x3 rhs) => lhs.Equals(rhs);

        /// <summary>
        /// Returns true iff this does not equal rhs (component-wise).
        /// </summary>
        public static bool operator !=(bmat2x3 lhs, bmat2x3 rhs) => !lhs.Equals(rhs);

        /// <summary>
        /// Returns a hash code for this instance.
        /// </summary>
        public override int GetHashCode()
        {
            unchecked
            {
                return ((((((((((m00.GetHashCode()) * 2) ^ m01.GetHashCode()) * 2) ^ m02.GetHashCode()) * 2) ^ m10.GetHashCode()) * 2) ^ m11.GetHashCode()) * 2) ^ m12.GetHashCode();
            }
        }

        /// <summary>
        /// Returns a transposed version of this matrix.
        /// </summary>
        public bmat3x2 Transposed => new bmat3x2(m00, m10, m01, m11, m02, m12);

        /// <summary>
        /// Returns the minimal component of this matrix.
        /// </summary>
        public bool MinElement => (((m00 && m01) && m02) && ((m10 && m11) && m12));

        /// <summary>
        /// Returns the maximal component of this matrix.
        /// </summary>
        public bool MaxElement => (((m00 || m01) || m02) || ((m10 || m11) || m12));

        /// <summary>
        /// Returns true if all component are true.
        /// </summary>
        public bool All => (((m00 && m01) && m02) && ((m10 && m11) && m12));

        /// <summary>
        /// Returns true if any component is true.
        /// </summary>
        public bool Any => (((m00 || m01) || m02) || ((m10 || m11) || m12));

        /// <summary>
        /// Executes a component-wise &amp;&amp;. (sorry for different overload but &amp;&amp; cannot be overloaded directly)
        /// </summary>
        public static bmat2x3 operator&(bmat2x3 lhs, bmat2x3 rhs) => new bmat2x3(lhs.m00 && rhs.m00, lhs.m01 && rhs.m01, lhs.m02 && rhs.m02, lhs.m10 && rhs.m10, lhs.m11 && rhs.m11, lhs.m12 && rhs.m12);

        /// <summary>
        /// Executes a component-wise ||. (sorry for different overload but || cannot be overloaded directly)
        /// </summary>
        public static bmat2x3 operator|(bmat2x3 lhs, bmat2x3 rhs) => new bmat2x3(lhs.m00 || rhs.m00, lhs.m01 || rhs.m01, lhs.m02 || rhs.m02, lhs.m10 || rhs.m10, lhs.m11 || rhs.m11, lhs.m12 || rhs.m12);
    }
}
using System;
using System.Collections;
using System.Drawing;
using System.Text.RegularExpressions;

namespace GuruComponents.Netrix.UserInterface.StyleParser
{
    /// <summary>
    /// This is the StyleParser. It parses a stylesheet and creates the style objects from
    /// the found strings.
    /// </summary>
    public class Parser
    {
        /// <summary>
        /// Matches repeated "selector { css-text }" rules in a stylesheet.
        /// </summary>
        private static readonly Regex StyleSelectorRegex = new Regex(
            "\\G(\\s*" +                // any leading spaces
            "(?(selectors){\\s*)" +     // if selector was already matched, match a { and spaces
            "(?<selectors>[^{]+?)" +    // match selector - chars up to the {
            "\\s*{\\s*" +               // spaces, opening brace, more spaces
            "(?<csstext>[^}]+)" +       // match rule body up to the closing bracket
            ")*\\s*}?\\s*",             // match a trailing } and trailing spaces
            RegexOptions.Singleline | RegexOptions.Multiline);

        /// <summary>
        /// Matches repeated "name : value ;" declarations inside a rule body.
        /// </summary>
        private static readonly Regex StyleAttribRegex = new Regex(
            "\\G(\\s*" +                // any leading spaces
            "(?(stylename);\\s*)" +     // if stylename was already matched, match a semicolon and spaces
            "(?<stylename>[^:]+?)" +    // match stylename - chars up to the colon
            "\\s*:\\s*" +               // spaces, then the colon, then more spaces
            "(?<styleval>[^;]+?)" +     // now match styleval
            ")*\\s*;?\\s*$",            // match a trailing semicolon and trailing spaces
            RegexOptions.Singleline | RegexOptions.Multiline);

        /// <summary>
        /// Splits a shorthand ("group") value into three whitespace-separated parts.
        /// </summary>
        private static readonly Regex rxGroups = new Regex("(.*)\\s+(.*)\\s+(.*)");

        /// <summary>
        /// Matches an rgb(r, g, b) color and captures the three channels separately.
        /// FIX: the previous pattern did not escape the parentheses (they acted as a
        /// single capture group around the whole argument list), so Groups[2] and
        /// Groups[3] were always empty. It is also hoisted to a static field so it is
        /// compiled once instead of on every GetStyleColor call.
        /// </summary>
        private static readonly Regex rxRgb = new Regex(@"rgb\s*\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*\)");

        // Style object currently being populated by GetStyleObject.
        private StyleObject so;
        // All style objects produced by the most recent ParseStylesheet run.
        private ArrayList styles;

        /// <summary>
        /// Parses the given string and fires the selector event handler for every rule found.
        /// </summary>
        /// <param name="main">Main parser class whose <c>OnSelectorReady</c> is raised per rule.</param>
        /// <param name="source">Stylesheet text to parse; a null source is ignored.</param>
        public void ParseStylesheet(CssParser main, string source)
        {
            if (source != null)
            {
                styles = new ArrayList();
                Match match;
                if ((match = StyleSelectorRegex.Match(source, 0)).Success)
                {
                    do
                    {
                        CaptureCollection selectors = match.Groups["selectors"].Captures;
                        CaptureCollection cssText = match.Groups["csstext"].Captures;
                        for (int i = 0; i < selectors.Count; i++)
                        {
                            so = new StyleObject();
                            // create StyleObject from the { ... } content
                            GetStyleObject(cssText[i].ToString());
                            String sn = selectors[i].ToString();
                            // NOTE: leading '#'/'.' signs are intentionally kept in the name
                            so.SelectorName = sn;
                            so.SelectorType = GetSelectorType(sn);
                            styles.Add(so);
                            main.OnSelectorReady(so);
                        }
                        match = match.NextMatch();
                    } while (match.Success);
                }
            }
        }

        /// <summary>
        /// The list of StyleObject instances produced by the last ParseStylesheet call.
        /// </summary>
        internal IList ParsedStyles
        {
            get
            {
                return styles;
            }
        }

        /// <summary>
        /// Placeholder for comment stripping; currently a no-op.
        /// </summary>
        private string ReplaceComments(string source)
        {
            // TODO: strip /* ... */ comments before parsing; currently returns source unchanged.
            return source;
        }

        /// <summary>
        /// Fills the current StyleObject with the name/value declarations found in a rule body.
        /// Duplicate style names are ignored (the first occurrence wins).
        /// </summary>
        private void GetStyleObject(string cssText)
        {
            if (cssText != null)
            {
                Match match;
                if ((match = StyleAttribRegex.Match(cssText, 0)).Success)
                {
                    CaptureCollection stylenames = match.Groups["stylename"].Captures;
                    CaptureCollection stylevalues = match.Groups["styleval"].Captures;
                    for (int i = 0; i < stylenames.Count; i++)
                    {
                        String styleName = stylenames[i].ToString();
                        String styleValue = stylevalues[i].ToString();
                        StyleType st = GetStyleType(styleName, styleValue);
                        // do not add style that already exists (ignore furthermore ones)
                        if (so.ContainsKey(styleName)) continue;
                        switch (st)
                        {
                            case StyleType.Color:
                                so.Add(styleName, GetStyleColor(styleName, styleValue));
                                break;
                            case StyleType.List:
                                so.Add(styleName, GetStyleList(styleName, styleValue));
                                break;
                            case StyleType.Property:
                                so.Add(styleName, GetStyleProperty(styleName, styleValue));
                                break;
                            case StyleType.Unit:
                                so.Add(styleName, GetStyleUnit(styleName, styleValue));
                                break;
                            case StyleType.Group:
                                so.Add(styleName, GetStyleGroup(styleName, styleValue));
                                break;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Classifies a CSS attribute value into one of the StyleType categories,
        /// first by well-known attribute name, then heuristically by value shape.
        /// </summary>
        private StyleType GetStyleType(string StyleName, string cssAttribute)
        {
            // TODO: return fixed type for elements which have only one type of style, then check for others
            switch (StyleName.ToLower())
            {
                case "background-image":
                case "list-style-image":
                    return StyleType.Property;
                case "border-color":
                case "border-left-color":
                case "border-right-color":
                case "border-top-color":
                case "border-bottom-color":
                case "color":
                case "background-color":
                    return StyleType.Color;
                default:
                    cssAttribute = cssAttribute.Trim();
                    if (cssAttribute.IndexOf(',') != -1)
                    {
                        // comma separated is a list
                        return StyleType.List;
                    }
                    else if (cssAttribute.IndexOf(' ') != -1)
                    {
                        // contains a whitespace, maybe list or group
                        if (Util.ExtractNumberString(cssAttribute) != String.Empty)
                        {
                            // contains a number, so it must be a group
                            return StyleType.Group;
                        }
                        else
                        {
                            return StyleType.List;
                        }
                    }
                    else
                    {
                        // property, color or unit; first, check for color ( #XXXXXX or rgb(0,0,0) )
                        if (cssAttribute.IndexOf('#') == 0 || cssAttribute.StartsWith("rgb"))
                        {
                            return StyleType.Color;
                        }
                        Color c = Color.FromName(cssAttribute);
                        if (c.A == 0 && c.R == 0 && c.G == 0 && c.B == 0)
                        {
                            // no color, must be property or unit
                            if (Util.ExtractNumberString(cssAttribute) == String.Empty)
                            {
                                // no number, so it must be a property
                                return StyleType.Property;
                            }
                            else
                            {
                                // with number it can be a unit or a simple number
                                if (cssAttribute.Length > Util.ExtractNumberString(cssAttribute).Length)
                                {
                                    // more than a number, must be a unit
                                    return StyleType.Unit;
                                }
                                else
                                {
                                    // only the number, use property to avoid searching for units
                                    return StyleType.Property;
                                }
                            }
                        }
                        else
                        {
                            // known color name
                            return StyleType.Color;
                        }
                    }
            }
        }

        /// <summary>
        /// Creates a StyleColor from a value. An rgb(r,g,b) notation is converted to
        /// hexadecimal #RRGGBB form; any other value is stored verbatim.
        /// </summary>
        private StyleColor GetStyleColor(string styleName, string val)
        {
            StyleColor sc = new StyleColor();
            Match m;
            if ((m = rxRgb.Match(val)).Success)
            {
                // FIX: the old code passed Group objects to String.Format with "{0:X2}" - the
                // numeric format is ignored for non-numeric arguments - and the format string
                // contained a stray ':' ("{0:X2}{1:X2}:{2:X2}"). Parse the channels and emit
                // a proper #RRGGBB value instead.
                int r = Int32.Parse(m.Groups[1].Value, System.Globalization.CultureInfo.InvariantCulture);
                int g = Int32.Parse(m.Groups[2].Value, System.Globalization.CultureInfo.InvariantCulture);
                int b = Int32.Parse(m.Groups[3].Value, System.Globalization.CultureInfo.InvariantCulture);
                val = String.Format("#{0:X2}{1:X2}{2:X2}", r, g, b);
            }
            sc.Name = val;
            return sc;
        }

        /// <summary>
        /// Creates a StyleList by splitting the value at ';' and ',' and dropping empty parts.
        /// </summary>
        private StyleList GetStyleList(string styleName, string val)
        {
            StyleList sl = new StyleList();
            string[] arr = val.Split(new char[] { ';', ',' });
            foreach (string element in arr)
            {
                if (element.Trim().Length > 0)
                {
                    sl.Add(element);
                }
            }
            return sl;
        }

        /// <summary>
        /// Creates a StyleProperty; for image attributes the url(...) wrapper is removed
        /// so only the real URL is stored internally.
        /// </summary>
        private StyleProperty GetStyleProperty(string styleName, string val)
        {
            StyleProperty sp;
            if (styleName.ToLower().IndexOf("image") > 0 && val.Trim().ToLower().IndexOf("url") == 0)
            {
                // remove url() from val; internally we store only the real URL
                Regex rx = new Regex(@"url\s*\(\s*(.*)\s*\)");
                Match m;
                if ((m = rx.Match(val)).Success)
                {
                    sp = new StyleProperty(m.Groups[1].Value);
                }
                else
                {
                    sp = new StyleProperty(val);
                }
            }
            else
            {
                sp = new StyleProperty(val);
            }
            return sp;
        }

        /// <summary>
        /// Creates a StyleUnit by separating the numeric part from the unit suffix.
        /// </summary>
        private StyleUnit GetStyleUnit(string styleName, string val)
        {
            string numString = Util.ExtractNumberString(val);
            // FIX: parse with the invariant culture; CSS numbers always use '.' as the
            // decimal separator, independent of the current UI culture.
            float num = Single.Parse(numString, System.Globalization.CultureInfo.InvariantCulture);
            string unit = val.Substring(numString.Length);
            return new StyleUnit(num, unit);
        }

        /// <summary>
        /// Creates a StyleGroup from a shorthand value with three whitespace-separated parts,
        /// classifying each part as color, list or unit.
        /// </summary>
        private StyleGroup GetStyleGroup(string styleName, string val)
        {
            StyleGroup sg = new StyleGroup();
            Match m;
            if ((m = rxGroups.Match(val)).Success)
            {
                // FIX: start at 1 - Groups[0] is the entire match, not a component, and
                // classifying it overwrote the real parts.
                for (int i = 1; i < m.Groups.Count; i++)
                {
                    StyleType st = GetStyleType(styleName, m.Groups[i].Value);
                    switch (st)
                    {
                        case StyleType.Color:
                            sg.Color = GetStyleColor(styleName, m.Groups[i].Value);
                            break;
                        case StyleType.Property:
                            // in groups properties will always appear as lists
                            sg.List = GetStyleList(styleName, m.Groups[i].Value);
                            break;
                        case StyleType.Unit:
                            sg.Unit = GetStyleUnit(styleName, m.Groups[i].Value);
                            break;
                    }
                }
            }
            return sg;
        }

        /// <summary>
        /// Determines the selector category from the selector text (ID, class, child,
        /// pseudo, attribute, descendant or plain element selector).
        /// </summary>
        private SelectorType GetSelectorType(string selector)
        {
            selector = selector.Trim().ToLower();
            if (selector.Length == 0)
            {
                // defensive: an empty selector cannot be classified
                return SelectorType.ELEMENT_NODE_SELECTOR;
            }
            if (selector[0] == '#')
            {
                return SelectorType.ID_SELECTOR;
            }
            if (selector[0] == '.')
            {
                return SelectorType.CLASS_SELECTOR;
            }
            if (selector.IndexOf('>') != -1)
            {
                return SelectorType.CHILD_SELECTOR;
            }
            if (selector.IndexOf(',') != -1)
            {
                return SelectorType.ANY_NODE_SELECTOR;
            }
            if (selector.IndexOf("a:") != -1)
            {
                return SelectorType.PSEUDO_CLASS_SELECTOR;
            }
            if (selector.IndexOf(':') != -1)
            {
                return SelectorType.PSEUDO_ELEMENT_SELECTOR;
            }
            if (selector.IndexOf('[') > 0)
            {
                return SelectorType.ATTRIBUTE_SELECTOR;
            }
            if (selector.IndexOf(' ') > 0)
            {
                return SelectorType.DESCENDANT_SELECTOR;
            }
            return SelectorType.ELEMENT_NODE_SELECTOR;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.Azure.AcceptanceTestsPaging { using System.Threading.Tasks; using Microsoft.Rest.Azure; using Models; /// <summary> /// Extension methods for PagingOperations. /// </summary> public static partial class PagingOperationsExtensions { /// <summary> /// A paging operation that finishes on the first call without a nextlink /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetSinglePages(this IPagingOperations operations) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetSinglePagesAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that finishes on the first call without a nextlink /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetSinglePagesAsync(this IPagingOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetSinglePagesWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that includes a nextLink that has 10 pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. 
/// </param> /// <param name='clientRequestId'> /// </param> /// <param name='pagingGetMultiplePagesOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePages(this IPagingOperations operations, string clientRequestId = default(string), PagingGetMultiplePagesOptions pagingGetMultiplePagesOptions = default(PagingGetMultiplePagesOptions)) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesAsync(clientRequestId, pagingGetMultiplePagesOptions), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that includes a nextLink that has 10 pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='clientRequestId'> /// </param> /// <param name='pagingGetMultiplePagesOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesAsync(this IPagingOperations operations, string clientRequestId = default(string), PagingGetMultiplePagesOptions pagingGetMultiplePagesOptions = default(PagingGetMultiplePagesOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetMultiplePagesWithHttpMessagesAsync(clientRequestId, pagingGetMultiplePagesOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that includes a nextLink in odata format that has 10 /// pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. 
/// </param> /// <param name='clientRequestId'> /// </param> /// <param name='pagingGetOdataMultiplePagesOptions'> /// Additional parameters for the operation /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetOdataMultiplePages(this IPagingOperations operations, string clientRequestId = default(string), PagingGetOdataMultiplePagesOptions pagingGetOdataMultiplePagesOptions = default(PagingGetOdataMultiplePagesOptions)) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetOdataMultiplePagesAsync(clientRequestId, pagingGetOdataMultiplePagesOptions), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that includes a nextLink in odata format that has 10 /// pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='clientRequestId'> /// </param> /// <param name='pagingGetOdataMultiplePagesOptions'> /// Additional parameters for the operation /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetOdataMultiplePagesAsync(this IPagingOperations operations, string clientRequestId = default(string), PagingGetOdataMultiplePagesOptions pagingGetOdataMultiplePagesOptions = default(PagingGetOdataMultiplePagesOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetOdataMultiplePagesWithHttpMessagesAsync(clientRequestId, pagingGetOdataMultiplePagesOptions, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that includes a nextLink that has 10 pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='pagingGetMultiplePagesWithOffsetOptions'> /// Additional parameters for the operation /// </param> /// <param name='clientRequestId'> /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesWithOffset(this IPagingOperations operations, PagingGetMultiplePagesWithOffsetOptions pagingGetMultiplePagesWithOffsetOptions, string clientRequestId = default(string)) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesWithOffsetAsync(pagingGetMultiplePagesWithOffsetOptions, clientRequestId), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that includes a nextLink that has 10 pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='pagingGetMultiplePagesWithOffsetOptions'> /// Additional parameters for the operation /// </param> /// <param name='clientRequestId'> /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesWithOffsetAsync(this IPagingOperations operations, PagingGetMultiplePagesWithOffsetOptions pagingGetMultiplePagesWithOffsetOptions, string clientRequestId = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetMultiplePagesWithOffsetWithHttpMessagesAsync(pagingGetMultiplePagesWithOffsetOptions, clientRequestId, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that fails on the first call with 500 and then retries /// and then get a response including a nextLink that has 10 pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesRetryFirst(this IPagingOperations operations) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesRetryFirstAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that fails on the first call with 500 and then retries /// and then get a response including a nextLink that has 10 pages /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesRetryFirstAsync(this IPagingOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetMultiplePagesRetryFirstWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that includes a nextLink that has 10 pages, of which /// the 2nd call fails first with 500. The client should retry and finish all /// 10 pages eventually. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesRetrySecond(this IPagingOperations operations) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesRetrySecondAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that includes a nextLink that has 10 pages, of which /// the 2nd call fails first with 500. The client should retry and finish all /// 10 pages eventually. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesRetrySecondAsync(this IPagingOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetMultiplePagesRetrySecondWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that receives a 400 on the first call /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetSinglePagesFailure(this IPagingOperations operations) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetSinglePagesFailureAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that receives a 400 on the first call /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetSinglePagesFailureAsync(this IPagingOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetSinglePagesFailureWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that receives a 400 on the second call /// </summary> /// <param name='operations'> /// The operations group for this extension method. 
/// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesFailure(this IPagingOperations operations) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesFailureAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that receives a 400 on the second call /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesFailureAsync(this IPagingOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetMultiplePagesFailureWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that receives an invalid nextLink /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesFailureUri(this IPagingOperations operations) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesFailureUriAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that receives an invalid nextLink /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesFailureUriAsync(this IPagingOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { using (var _result = await operations.GetMultiplePagesFailureUriWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// A paging operation that doesn't return a full URL, just a fragment /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='apiVersion'> /// Sets the api version to use. /// </param> /// <param name='tenant'> /// Sets the tenant to use. /// </param> public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesFragmentNextLink(this IPagingOperations operations, string apiVersion, string tenant) { return System.Threading.Tasks.Task.Factory.StartNew(s => ((IPagingOperations)s).GetMultiplePagesFragmentNextLinkAsync(apiVersion, tenant), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// A paging operation that doesn't return a full URL, just a fragment /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='apiVersion'> /// Sets the api version to use. /// </param> /// <param name='tenant'> /// Sets the tenant to use. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesFragmentNextLinkAsync(this IPagingOperations operations, string apiVersion, string tenant, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesFragmentNextLinkWithHttpMessagesAsync(apiVersion, tenant, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that doesn't return a full URL, just a fragment.
/// Synchronous wrapper; blocks on the async call via the generated
/// StartNew/Unwrap pattern (avoids capturing the caller's sync context).
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='apiVersion'>Sets the api version to use.</param>
/// <param name='tenant'>Sets the tenant to use.</param>
/// <param name='nextLink'>Next link for list operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> NextFragment(this IPagingOperations operations, string apiVersion, string tenant, string nextLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).NextFragmentAsync(apiVersion, tenant, nextLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that doesn't return a full URL, just a fragment.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='apiVersion'>Sets the api version to use.</param>
/// <param name='tenant'>Sets the tenant to use.</param>
/// <param name='nextLink'>Next link for list operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> NextFragmentAsync(this IPagingOperations operations, string apiVersion, string tenant, string nextLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.NextFragmentWithHttpMessagesAsync(apiVersion, tenant, nextLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that finishes on the first call without a nextlink.
/// Synchronous wrapper over <see cref="GetSinglePagesNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetSinglePagesNext(this IPagingOperations operations, string nextPageLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetSinglePagesNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that finishes on the first call without a nextlink.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetSinglePagesNextAsync(this IPagingOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetSinglePagesNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that includes a nextLink that has 10 pages.
/// Synchronous wrapper over <see cref="GetMultiplePagesNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='clientRequestId'>Optional client request id header value.</param>
/// <param name='pagingGetMultiplePagesOptions'>Additional parameters for the operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesNext(this IPagingOperations operations, string nextPageLink, string clientRequestId = default(string), PagingGetMultiplePagesOptions pagingGetMultiplePagesOptions = default(PagingGetMultiplePagesOptions))
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetMultiplePagesNextAsync(nextPageLink, clientRequestId, pagingGetMultiplePagesOptions),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that includes a nextLink that has 10 pages.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='clientRequestId'>Optional client request id header value.</param>
/// <param name='pagingGetMultiplePagesOptions'>Additional parameters for the operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesNextAsync(this IPagingOperations operations, string nextPageLink, string clientRequestId = default(string), PagingGetMultiplePagesOptions pagingGetMultiplePagesOptions = default(PagingGetMultiplePagesOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesNextWithHttpMessagesAsync(nextPageLink, clientRequestId, pagingGetMultiplePagesOptions, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that includes a nextLink in odata format that has 10 pages.
/// Synchronous wrapper over <see cref="GetOdataMultiplePagesNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='clientRequestId'>Optional client request id header value.</param>
/// <param name='pagingGetOdataMultiplePagesOptions'>Additional parameters for the operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetOdataMultiplePagesNext(this IPagingOperations operations, string nextPageLink, string clientRequestId = default(string), PagingGetOdataMultiplePagesOptions pagingGetOdataMultiplePagesOptions = default(PagingGetOdataMultiplePagesOptions))
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetOdataMultiplePagesNextAsync(nextPageLink, clientRequestId, pagingGetOdataMultiplePagesOptions),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that includes a nextLink in odata format that has 10 pages.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='clientRequestId'>Optional client request id header value.</param>
/// <param name='pagingGetOdataMultiplePagesOptions'>Additional parameters for the operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetOdataMultiplePagesNextAsync(this IPagingOperations operations, string nextPageLink, string clientRequestId = default(string), PagingGetOdataMultiplePagesOptions pagingGetOdataMultiplePagesOptions = default(PagingGetOdataMultiplePagesOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetOdataMultiplePagesNextWithHttpMessagesAsync(nextPageLink, clientRequestId, pagingGetOdataMultiplePagesOptions, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that includes a nextLink that has 10 pages.
/// Synchronous wrapper over <see cref="GetMultiplePagesWithOffsetNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='clientRequestId'>Optional client request id header value.</param>
/// <param name='pagingGetMultiplePagesWithOffsetNextOptions'>Additional parameters for the operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesWithOffsetNext(this IPagingOperations operations, string nextPageLink, string clientRequestId = default(string), PagingGetMultiplePagesWithOffsetNextOptions pagingGetMultiplePagesWithOffsetNextOptions = default(PagingGetMultiplePagesWithOffsetNextOptions))
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetMultiplePagesWithOffsetNextAsync(nextPageLink, clientRequestId, pagingGetMultiplePagesWithOffsetNextOptions),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that includes a nextLink that has 10 pages.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='clientRequestId'>Optional client request id header value.</param>
/// <param name='pagingGetMultiplePagesWithOffsetNextOptions'>Additional parameters for the operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesWithOffsetNextAsync(this IPagingOperations operations, string nextPageLink, string clientRequestId = default(string), PagingGetMultiplePagesWithOffsetNextOptions pagingGetMultiplePagesWithOffsetNextOptions = default(PagingGetMultiplePagesWithOffsetNextOptions), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesWithOffsetNextWithHttpMessagesAsync(nextPageLink, clientRequestId, pagingGetMultiplePagesWithOffsetNextOptions, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that fails on the first call with 500 and then retries
/// and then get a response including a nextLink that has 10 pages.
/// Synchronous wrapper over <see cref="GetMultiplePagesRetryFirstNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesRetryFirstNext(this IPagingOperations operations, string nextPageLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetMultiplePagesRetryFirstNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that fails on the first call with 500 and then retries
/// and then get a response including a nextLink that has 10 pages.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesRetryFirstNextAsync(this IPagingOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesRetryFirstNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that includes a nextLink that has 10 pages, of which
/// the 2nd call fails first with 500. The client should retry and finish all
/// 10 pages eventually. Synchronous wrapper over
/// <see cref="GetMultiplePagesRetrySecondNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesRetrySecondNext(this IPagingOperations operations, string nextPageLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetMultiplePagesRetrySecondNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that includes a nextLink that has 10 pages, of which
/// the 2nd call fails first with 500. The client should retry and finish all
/// 10 pages eventually.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesRetrySecondNextAsync(this IPagingOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesRetrySecondNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that receives a 400 on the first call.
/// Synchronous wrapper over <see cref="GetSinglePagesFailureNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetSinglePagesFailureNext(this IPagingOperations operations, string nextPageLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetSinglePagesFailureNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that receives a 400 on the first call.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetSinglePagesFailureNextAsync(this IPagingOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetSinglePagesFailureNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that receives a 400 on the second call.
/// Synchronous wrapper over <see cref="GetMultiplePagesFailureNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesFailureNext(this IPagingOperations operations, string nextPageLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetMultiplePagesFailureNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that receives a 400 on the second call.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesFailureNextAsync(this IPagingOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesFailureNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}

/// <summary>
/// A paging operation that receives an invalid nextLink.
/// Synchronous wrapper over <see cref="GetMultiplePagesFailureUriNextAsync"/>.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
public static Microsoft.Rest.Azure.IPage<Product> GetMultiplePagesFailureUriNext(this IPagingOperations operations, string nextPageLink)
{
    return System.Threading.Tasks.Task.Factory.StartNew(
        state => ((IPagingOperations)state).GetMultiplePagesFailureUriNextAsync(nextPageLink),
        operations,
        System.Threading.CancellationToken.None,
        System.Threading.Tasks.TaskCreationOptions.None,
        System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// A paging operation that receives an invalid nextLink.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<Microsoft.Rest.Azure.IPage<Product>> GetMultiplePagesFailureUriNextAsync(this IPagingOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    using (var httpResponse = await operations.GetMultiplePagesFailureUriNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        return httpResponse.Body;
    }
}
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/

using System;
using System.Collections.Generic;
using NUnit.Framework;
using QuantConnect.Algorithm;
using QuantConnect.Configuration;
using QuantConnect.Data;
using QuantConnect.Interfaces;
using QuantConnect.Logging;
using QuantConnect.Packets;
using QuantConnect.Indicators;
using QuantConnect.Tests.Engine.DataFeeds;
using Python.Runtime;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Lean.Engine.DataFeeds;
using QuantConnect.Lean.Engine.HistoricalData;
using QuantConnect.Util;

namespace QuantConnect.Tests.Algorithm
{
    /// <summary>
    /// Regression/unit tests for algorithm warm-up behavior: verifies warm-up data
    /// volume per resolution/security type, warm-up history requests for internal
    /// subscriptions, and warming up Python-wrapped indicators.
    /// </summary>
    [TestFixture]
    public class AlgorithmWarmupTests
    {
        // Algorithm instance shared between the test and the custom setup handler.
        private TestWarmupAlgorithm _algorithm;

        [TearDown]
        public void TearDown()
        {
            // Tests mutate global configuration (setup handler name); restore defaults.
            Config.Reset();
        }

        [TestCase(Resolution.Tick, SecurityType.Forex)]
        [TestCase(Resolution.Second, SecurityType.Forex)]
        [TestCase(Resolution.Hour, SecurityType.Forex)]
        [TestCase(Resolution.Minute, SecurityType.Forex)]
        [TestCase(Resolution.Daily, SecurityType.Forex)]
        [TestCase(Resolution.Tick, SecurityType.Equity)]
        [TestCase(Resolution.Second, SecurityType.Equity)]
        [TestCase(Resolution.Hour, SecurityType.Equity)]
        [TestCase(Resolution.Minute, SecurityType.Equity)]
        [TestCase(Resolution.Daily, SecurityType.Equity)]
        [TestCase(Resolution.Minute, SecurityType.Crypto)]
        [TestCase(Resolution.Daily, SecurityType.Crypto)]
        public void WarmupDifferentResolutions(Resolution resolution, SecurityType securityType)
        {
            // The setup handler hands this exact instance to the backtest runner.
            _algorithm = TestSetupHandler.TestAlgorithm = new TestWarmupAlgorithm(resolution);

            _algorithm.SecurityType = securityType;
            // One-day backtest windows chosen per asset class (dates with known sample data).
            if (securityType == SecurityType.Forex)
            {
                _algorithm.StartDateToUse = new DateTime(2014, 05, 03);
                _algorithm.EndDateToUse = new DateTime(2014, 05, 04);
            }
            else if (securityType == SecurityType.Equity)
            {
                _algorithm.StartDateToUse = new DateTime(2013, 10, 09);
                _algorithm.EndDateToUse = new DateTime(2013, 10, 10);
            }
            else if (securityType == SecurityType.Crypto)
            {
                _algorithm.StartDateToUse = new DateTime(2018, 04, 06);
                _algorithm.EndDateToUse = new DateTime(2018, 04, 07);
            }

            AlgorithmRunner.RunLocalBacktest(nameof(TestWarmupAlgorithm),
                new Dictionary<string, string> { { "Total Trades", "1" } },
                null,
                Language.CSharp,
                AlgorithmStatus.Completed,
                setupHandler: "TestSetupHandler");

            // Lower bound on expected warm-up data points: 2 warm-up days times the
            // approximate trading hours per day for the asset class (19h forex, 4-6h equity)
            // times the bars per hour for the resolution. These are estimates, hence
            // the GreaterOrEqual assertion below rather than an exact match.
            int estimateExpectedDataCount;
            switch (resolution)
            {
                case Resolution.Tick:
                    estimateExpectedDataCount = 2 * (securityType == SecurityType.Forex ? 19 : 4) * 60;
                    break;
                case Resolution.Second:
                    estimateExpectedDataCount = 2 * (securityType == SecurityType.Forex ? 19 : 6) * 60 * 60;
                    break;
                case Resolution.Minute:
                    estimateExpectedDataCount = 2 * (securityType == SecurityType.Forex ? 19 : 6) * 60;
                    break;
                case Resolution.Hour:
                    estimateExpectedDataCount = 2 * (securityType == SecurityType.Forex ? 19 : 6);
                    break;
                case Resolution.Daily:
                    estimateExpectedDataCount = 2;
                    break;
                default:
                    throw new ArgumentOutOfRangeException(nameof(resolution), resolution, null);
            }

            Log.Trace($"WarmUpDataCount: {_algorithm.WarmUpDataCount}. Resolution {resolution}. SecurityType {securityType}");
            Assert.GreaterOrEqual(_algorithm.WarmUpDataCount, estimateExpectedDataCount);
        }

        [Test]
        public void WarmUpInternalSubscriptionsHistoryRequest()
        {
            var algo = new AlgorithmStub(new MockDataFeed())
            {
                HistoryProvider = new SubscriptionDataReaderHistoryProvider()
            };

            algo.SetStartDate(2013, 10, 08);
            // CFD in EUR triggers an internal currency-conversion subscription.
            algo.AddCfd("DE30EUR", Resolution.Second, Market.Oanda);
            algo.SetWarmup(10);
            algo.PostInitialize();
            algo.OnEndOfTimeStep();
            algo.DataManager.UniverseSelection.EnsureCurrencyDataFeeds(SecurityChanges.None);

            var result = algo.GetWarmupHistoryRequests();

            // Every warm-up history request (including internal ones) should honor the
            // user-selected resolution and the 10-bar warm-up span.
            foreach (var historyRequest in result)
            {
                Assert.AreEqual(Resolution.Second, historyRequest.Resolution);
                Assert.AreEqual(TimeSpan.FromSeconds(10), historyRequest.EndTimeUtc - historyRequest.StartTimeUtc);
            }
        }

        [Test]
        public void WarmUpPythonIndicatorProperly()
        {
            var algo = new AlgorithmStub
            {
                HistoryProvider = new SubscriptionDataReaderHistoryProvider()
            };
            var zipCacheProvider = new ZipDataCacheProvider(TestGlobals.DataProvider);
            algo.HistoryProvider.Initialize(new HistoryProviderInitializeParameters(
                null,
                null,
                TestGlobals.DataProvider,
                zipCacheProvider,
                TestGlobals.MapFileProvider,
                TestGlobals.FactorFileProvider,
                null,
                false,
                new DataPermissionManager()));
            algo.SetStartDate(2013, 10, 08);
            algo.AddEquity("SPY", Resolution.Minute);

            // Different types of indicators: IndicatorDataPoint-, IBaseDataBar- and
            // TradeBar-based, to cover each WarmUpIndicator overload resolution path.
            var indicatorDataPoint = new SimpleMovingAverage("SPY", 10);
            var indicatorDataBar = new AverageTrueRange("SPY", 10);
            var indicatorTradeBar = new VolumeWeightedAveragePriceIndicator("SPY", 10);

            using (Py.GIL())
            {
                // NOTE(review): 'atr' wraps the VWAP indicator and 'vwapi' wraps ATR —
                // the local names look swapped relative to the indicators they hold.
                // Behavior is unaffected because every wrapper is warmed and checked
                // identically below, but confirm intent before relying on the names.
                var sma = indicatorDataPoint.ToPython();
                var atr = indicatorTradeBar.ToPython();
                var vwapi = indicatorDataBar.ToPython();

                Assert.DoesNotThrow(() => algo.WarmUpIndicator("SPY", sma, Resolution.Minute));
                Assert.DoesNotThrow(() => algo.WarmUpIndicator("SPY", atr, Resolution.Minute));
                Assert.DoesNotThrow(() => algo.WarmUpIndicator("SPY", vwapi, Resolution.Minute));

                var smaIsReady = ((dynamic)sma).IsReady;
                var atrIsReady = ((dynamic)atr).IsReady;
                var vwapiIsReady = ((dynamic)vwapi).IsReady;

                Assert.IsTrue(smaIsReady.IsTrue());
                Assert.IsTrue(atrIsReady.IsTrue());
                Assert.IsTrue(vwapiIsReady.IsTrue());
            }
            zipCacheProvider.DisposeSafely();
        }

        /// <summary>
        /// Setup handler that injects the pre-built <see cref="TestWarmupAlgorithm"/>
        /// instance into the backtest instead of compiling one from source.
        /// </summary>
        private class TestSetupHandler : AlgorithmRunner.RegressionSetupHandlerWrapper
        {
            // Instance to hand to the runner; assigned by the test before the backtest starts.
            public static TestWarmupAlgorithm TestAlgorithm { get; set; }

            public override IAlgorithm CreateAlgorithmInstance(AlgorithmNodePacket algorithmNodePacket, string assemblyPath)
            {
                Algorithm = TestAlgorithm;
                return Algorithm;
            }
        }

        /// <summary>
        /// Minimal algorithm that counts data points received while warming up and
        /// takes a single position afterwards (so the "Total Trades: 1" statistic holds).
        /// </summary>
        private class TestWarmupAlgorithm : QCAlgorithm
        {
            private readonly Resolution _resolution;
            private Symbol _symbol;
            // Asset class to subscribe to; set by the test before the backtest runs.
            public SecurityType SecurityType { get; set; }

            public DateTime StartDateToUse { get; set; }

            public DateTime EndDateToUse { get; set; }

            // Number of data points observed during the warm-up period.
            public int WarmUpDataCount { get; set; }

            public TestWarmupAlgorithm(Resolution resolution)
            {
                _resolution = resolution;
            }

            public override void Initialize()
            {
                SetStartDate(StartDateToUse);
                SetEndDate(EndDateToUse);

                if (SecurityType == SecurityType.Forex)
                {
                    // Seed NZD so currency-conversion feeds are exercised.
                    SetCash("NZD", 1);
                    _symbol = AddForex("EURUSD", _resolution).Symbol;
                }
                else if (SecurityType == SecurityType.Equity)
                {
                    _symbol = AddEquity("SPY", _resolution).Symbol;
                }
                else if (SecurityType == SecurityType.Crypto)
                {
                    _symbol = AddCrypto("BTCUSD", _resolution).Symbol;
                }
                // Two calendar days of warm-up; the test's expected-count estimate assumes this.
                SetWarmUp(TimeSpan.FromDays(2));
            }

            public override void OnData(Slice data)
            {
                if (IsWarmingUp)
                {
                    WarmUpDataCount += data.Count;
                }
                else
                {
                    if (!Portfolio.Invested)
                    {
                        SetHoldings(_symbol, 1);
                    }
                }
            }
        }
    }
}
#region Copyright
/*Copyright (C) 2015 Konstantin Udilovich

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
#endregion

using System;
using System.Collections.Generic;
using System.Windows.Controls;
using Dynamo.Controls;
using Dynamo.Models;
using Dynamo.Wpf;
using ProtoCore.AST.AssociativeAST;
using Kodestruct.Common.CalculationLogger;
using Kodestruct.Dynamo.Common;
using Dynamo.Nodes;
using System.Windows.Input;
using System.Windows;
using System.Xml;
using Kodestruct.Dynamo.Common.Infra.TreeItems;
using Kodestruct.Dynamo.UI.Views.Analysis.Beam.Flexure;
using GalaSoft.MvvmLight.CommandWpf;
using Kodestruct.Dynamo.UI.Common.TreeItems;
using Dynamo.Graph;
using Dynamo.Graph.Nodes;

namespace Kodestruct.Analysis.Beam.Flexure
{
    /// <summary>
    /// Selection of beam load and boundary condition case for force calculation.
    /// Presents a tree of predefined load/boundary cases and outputs the selected
    /// case id on the node's output port.
    /// </summary>
    [NodeName("Beam forces case selection")]
    [NodeCategory("Kodestruct.Analysis.Beam.Flexure")]
    [NodeDescription("Selection of beam load and boundary condition case for force calculation")]
    [IsDesignScriptCompatible]
    public class BeamForceCaseSelection : UiNodeBase
    {
        public BeamForceCaseSelection()
        {
            ReportEntry = "";
            // Default case: simply supported beam under a full-span uniform load.
            BeamForcesCaseId = "C1B_1";
            BeamForcesCaseDescription = "Simply supported. Uniform load on full span. Uniformly distributed load";
            //OutPortData.Add(new PortData("ReportEntry", "Calculation log entries (for reporting)"));
            OutPortData.Add(new PortData("BeamForcesCaseId", "Case ID used in calculation of the beam forces"));
            RegisterAllPorts();
            //PropertyChanged += NodePropertyChanged;
        }

        /// <summary>
        /// Gets the type of this class, to be used in base class for reflection.
        /// </summary>
        protected override Type GetModelType()
        {
            return GetType();
        }

        #region Properties

        #region InputProperties
        #endregion

        #region OutputProperties

        #region BeamForcesCaseIdProperty
        /// <summary>
        /// BeamForcesCaseId property.
        /// </summary>
        /// <value>Case ID used in calculation of the beam forces</value>
        // NOTE(review): public backing field kept for backward compatibility;
        // prefer writing through the BeamForcesCaseId property so change
        // notification and node re-evaluation fire.
        public string _BeamForcesCaseId;

        public string BeamForcesCaseId
        {
            get { return _BeamForcesCaseId; }
            set
            {
                _BeamForcesCaseId = value;
                RaisePropertyChanged("BeamForcesCaseId");
                // Marks the node dirty so downstream nodes re-evaluate.
                OnNodeModified(true);
            }
        }
        #endregion

        #region BeamForcesCaseDescription Property
        // Human-readable description of the currently selected case (UI only;
        // not serialized and not exposed on an output port).
        private string _BeamForcesCaseDescription;

        public string BeamForcesCaseDescription
        {
            get { return _BeamForcesCaseDescription; }
            set
            {
                _BeamForcesCaseDescription = value;
                RaisePropertyChanged("BeamForcesCaseDescription");
            }
        }
        #endregion

        #region ReportEntryProperty
        /// <summary>
        /// Log property.
        /// </summary>
        /// <value>Calculation entries that can be converted into a report.</value>
        // NOTE(review): public backing field kept for backward compatibility;
        // prefer the ReportEntry property.
        public string reportEntry;

        public string ReportEntry
        {
            get { return reportEntry; }
            set
            {
                reportEntry = value;
                RaisePropertyChanged("ReportEntry");
                OnNodeModified(true);
            }
        }
        #endregion

        #endregion
        #endregion

        #region Serialization

        /// <summary>
        /// Saves property values to be retained when opening the node.
        /// </summary>
        protected override void SerializeCore(XmlElement nodeElement, SaveContext context)
        {
            base.SerializeCore(nodeElement, context);
            nodeElement.SetAttribute("BeamForcesCaseId", BeamForcesCaseId);
        }

        /// <summary>
        /// Retrieves property values when opening the node.
        /// </summary>
        protected override void DeserializeCore(XmlElement nodeElement, SaveContext context)
        {
            base.DeserializeCore(nodeElement, context);
            var attrib = nodeElement.Attributes["BeamForcesCaseId"];
            if (attrib == null)
                return;

            BeamForcesCaseId = attrib.Value;
            // Description is not serialized; look it up from the case tree data.
            SetCaseDescription();
        }

        // Re-derives the case description from the embedded case-tree XML resource.
        private void SetCaseDescription()
        {
            Uri uri = new Uri("pack://application:,,,/KodestructDynamoUI;component/Views/Analysis/Beam/Flexure/BeamForceCaseTreeData.xml");
            XmlTreeHelper treeHelper = new XmlTreeHelper();
            treeHelper.ExamineXmlTreeFile(uri, new EvaluateXmlNodeDelegate(FindCaseDescription));
        }

        // Visitor callback: copies the Description of the node matching the current case id.
        private void FindCaseDescription(XmlNode node)
        {
            if (null != node.Attributes["Id"])
            {
                if (node.Attributes["Id"].Value == BeamForcesCaseId)
                {
                    BeamForcesCaseDescription = node.Attributes["Description"].Value;
                }
            }
        }

        public void UpdateSelectionEvents()
        {
            if (TreeViewControl != null)
            {
                // Fix: unsubscribe first so repeated calls (e.g. the view being
                // customized more than once) cannot double-subscribe the handler.
                TreeViewControl.SelectedItemChanged -= OnTreeViewSelectionChanged;
                TreeViewControl.SelectedItemChanged += OnTreeViewSelectionChanged;
            }
        }

        private void OnTreeViewSelectionChanged(object sender, RoutedPropertyChangedEventArgs<object> e)
        {
            OnSelectedItemChanged(e.NewValue);
        }

        #endregion

        #region TreeView elements

        public TreeView TreeViewControl { get; set; }

        private ICommand selectedItemChanged;
        public ICommand SelectedItemChanged
        {
            get
            {
                selectedItemChanged = new RelayCommand<object>((i) => { OnSelectedItemChanged(i); });
                return selectedItemChanged;
            }
        }

        public void DisplayComponentUI(XTreeItem selectedComponent)
        {
        }

        private XTreeItem selectedItem;
        public XTreeItem SelectedItem
        {
            get { return selectedItem; }
            set { selectedItem = value; }
        }

        private void OnSelectedItemChanged(object i)
        {
            XmlElement item = i as XmlElement;
            // Fix: guard BEFORE dereferencing. The original read item.GetAttribute(...)
            // first and only null-checked afterwards, which threw a
            // NullReferenceException whenever the selected tree item was not an
            // XmlElement (e.g. a non-XML-backed node or a cleared selection).
            if (item == null)
            {
                return;
            }

            XTreeItem xtreeItem = new XTreeItem()
            {
                Header = item.GetAttribute("Header"),
                Description = item.GetAttribute("Description"),
                Id = item.GetAttribute("Id"),
                ResourcePath = item.GetAttribute("ResourcePath"),
                Tag = item.GetAttribute("Tag"),
                TemplateName = item.GetAttribute("TemplateName")
            };

            string id = xtreeItem.Id;
            // "X" marks non-selectable grouping nodes in the case tree.
            if (id != "X")
            {
                BeamForcesCaseId = xtreeItem.Id;
                SelectedItem = xtreeItem;
                BeamForcesCaseDescription = xtreeItem.Description;
            }
        }

        #endregion

        /// <summary>
        /// Customization of WPF view in Dynamo UI.
        /// </summary>
        public class BeamForceCaseSelectionViewCustomization : UiNodeBaseViewCustomization,
            INodeViewCustomization<BeamForceCaseSelection>
        {
            public void CustomizeView(BeamForceCaseSelection model, NodeView nodeView)
            {
                base.CustomizeView(model, nodeView);

                BeamForceCaseView control = new BeamForceCaseView();
                control.DataContext = model;

                // Remove this part if control does not contain tree.
                TreeView tv = control.FindName("selectionTree") as TreeView;
                if (tv != null)
                {
                    model.TreeViewControl = tv;
                    model.UpdateSelectionEvents();
                }
                nodeView.inputGrid.Children.Add(control);
                // Fix: removed a second, redundant base.CustomizeView(model, nodeView)
                // call that re-ran the base customization after the view was built.
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.IO;
using System.Text;
using System.Diagnostics;
using System.Globalization;

namespace System.Xml
{
    // This is mostly just a copy of code in SqlTypes.SqlDecimal
    //
    // Decodes the SQL Server binary-XML wire representation of a SQL decimal:
    // a length marker, precision, scale and sign bytes, followed by up to four
    // little-endian 32-bit words holding the magnitude (m_data1 = least significant).
    internal struct BinXmlSqlDecimal
    {
        internal byte m_bLen;    // number of 32-bit words in use (1..4)
        internal byte m_bPrec;   // declared precision
        internal byte m_bScale;  // declared scale (digits right of the decimal point)
        internal byte m_bSign;   // internal sign flag: 0 = positive (see IsPositive)
        internal uint m_data1;   // least significant 32 bits of the magnitude
        internal uint m_data2;
        internal uint m_data3;
        internal uint m_data4;   // most significant 32 bits of the magnitude

        public bool IsPositive
        {
            get { return (m_bSign == 0); }
        }

        private static readonly byte s_NUMERIC_MAX_PRECISION = 38;            // Maximum precision of numeric
        private static readonly byte s_maxPrecision = s_NUMERIC_MAX_PRECISION; // max SS precision
        private static readonly byte s_maxScale = s_NUMERIC_MAX_PRECISION;     // max SS scale
        private static readonly int s_cNumeMax = 4;                            // max # of 32-bit words in the magnitude

        private static readonly long s_lInt32Base = ((long)1) << 32;           // 2**32
        private static readonly ulong s_ulInt32Base = ((ulong)1) << 32;        // 2**32
        private static readonly ulong s_ulInt32BaseForMod = s_ulInt32Base - 1; // 2**32 - 1 (0xFFF...FF)
        internal static readonly ulong x_llMax = long.MaxValue;                // Max of Int64

        //private static readonly uint x_ulBase10 = 10;
        private static readonly double s_DUINT_BASE = (double)s_lInt32Base;    // 2**32
        private static readonly double s_DUINT_BASE2 = s_DUINT_BASE * s_DUINT_BASE;  // 2**64
        private static readonly double s_DUINT_BASE3 = s_DUINT_BASE2 * s_DUINT_BASE; // 2**96
        //private static readonly double DMAX_NUME = 1.0e+38;                  // Max value of numeric
        //private static readonly uint DBL_DIG = 17;                           // Max decimal digits of double
        //private static readonly byte x_cNumeDivScaleMin = 6;                 // Minimum result scale of numeric division

        // Array of multipliers for lAdjust and Ceiling/Floor.
        // Powers of ten (10^1 .. 10^9): multipliers used when shifting a
        // multi-precision value by whole decimal digits.
        private static readonly uint[] s_rgulShiftBase = new uint[9] {
            10,
            10 * 10,
            10 * 10 * 10,
            10 * 10 * 10 * 10,
            10 * 10 * 10 * 10 * 10,
            10 * 10 * 10 * 10 * 10 * 10,
            10 * 10 * 10 * 10 * 10 * 10 * 10,
            10 * 10 * 10 * 10 * 10 * 10 * 10 * 10,
            10 * 10 * 10 * 10 * 10 * 10 * 10 * 10 * 10
        };

        // Decodes a SQL decimal from its binary-XML wire layout starting at 'offset':
        //   [0] total payload length marker (7/11/15/19 => 1..4 magnitude words)
        //   [1] precision, [2] scale, [3] sign byte (0 on the wire => negative here),
        //   [4..] little-endian 32-bit magnitude words.
        // When 'trim' is set, trailing decimal zeros are removed from the value.
        public BinXmlSqlDecimal(byte[] data, int offset, bool trim)
        {
            byte b = data[offset];
            // Length marker encodes how many 32-bit magnitude words follow.
            switch (b)
            {
                case 7: m_bLen = 1; break;
                case 11: m_bLen = 2; break;
                case 15: m_bLen = 3; break;
                case 19: m_bLen = 4; break;
                default: throw new XmlException(SR.XmlBinary_InvalidSqlDecimal, (string[])null);
            }
            m_bPrec = data[offset + 1];
            m_bScale = data[offset + 2];
            // Map wire sign byte (0 = negative) to the internal flag (0 = positive).
            m_bSign = 0 == data[offset + 3] ? (byte)1 : (byte)0;

            m_data1 = UIntFromByteArray(data, offset + 4);
            m_data2 = (m_bLen > 1) ? UIntFromByteArray(data, offset + 8) : 0;
            m_data3 = (m_bLen > 2) ? UIntFromByteArray(data, offset + 12) : 0;
            m_data4 = (m_bLen > 3) ? UIntFromByteArray(data, offset + 16) : 0;

            // Drop high-order words that are zero so m_bLen reflects the true length.
            if (m_bLen == 4 && m_data4 == 0)
                m_bLen = 3;
            if (m_bLen == 3 && m_data3 == 0)
                m_bLen = 2;
            if (m_bLen == 2 && m_data2 == 0)
                m_bLen = 1;

            AssertValid();
            if (trim)
            {
                TrimTrailingZeros();
                AssertValid();
            }
        }

        // Reads a little-endian unsigned 32-bit integer from 'data' at 'offset'.
        private static uint UIntFromByteArray(byte[] data, int offset)
        {
            int val = (data[offset]) << 0;
            val |= (data[offset + 1]) << 8;
            val |= (data[offset + 2]) << 16;
            val |= (data[offset + 3]) << 24;
            return unchecked((uint)val);
        }

        // Multi-precision one super-digit divide in place.
// U = U / D, // R = U % D // Length of U can decrease private static void MpDiv1(uint[] rgulU, // InOut| U ref int ciulU, // InOut| # of digits in U uint iulD, // In | D out uint iulR // Out | R ) { Debug.Assert(rgulU.Length == s_cNumeMax); uint ulCarry = 0; ulong dwlAccum; ulong ulD = (ulong)iulD; int idU = ciulU; Debug.Assert(iulD != 0, "iulD != 0", "Divided by zero!"); Debug.Assert(iulD > 0, "iulD > 0", "Invalid data: less than zero"); Debug.Assert(ciulU > 0, "ciulU > 0", "No data in the array"); while (idU > 0) { idU--; dwlAccum = (((ulong)ulCarry) << 32) + (ulong)(rgulU[idU]); rgulU[idU] = (uint)(dwlAccum / ulD); ulCarry = (uint)(dwlAccum - (ulong)rgulU[idU] * ulD); // (ULONG) (dwlAccum % iulD) } iulR = ulCarry; MpNormalize(rgulU, ref ciulU); } // Normalize multi-precision number - remove leading zeroes private static void MpNormalize(uint[] rgulU, // In | Number ref int ciulU // InOut| # of digits ) { while (ciulU > 1 && rgulU[ciulU - 1] == 0) ciulU--; } //Determine the number of uints needed for a numeric given a precision //Precision Length // 0 invalid // 1-9 1 // 10-19 2 // 20-28 3 // 29-38 4 // The array in Shiloh. Listed here for comparison. 
//private static readonly byte[] rgCLenFromPrec = new byte[] {5,5,5,5,5,5,5,5,5,9,9,9,9,9, // 9,9,9,9,9,13,13,13,13,13,13,13,13,13,17,17,17,17,17,17,17,17,17,17}; private static readonly byte[] s_rgCLenFromPrec = new byte[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 }; private static byte CLenFromPrec(byte bPrec) { Debug.Assert(bPrec <= s_maxPrecision && bPrec > 0, "bPrec <= MaxPrecision && bPrec > 0", "Invalid numeric precision"); return s_rgCLenFromPrec[bPrec - 1]; } private static char ChFromDigit(uint uiDigit) { Debug.Assert(uiDigit < 10); return (char)(uiDigit + '0'); } public decimal ToDecimal() { if ((int)m_data4 != 0 || m_bScale > 28) throw new XmlException(SR.SqlTypes_ArithOverflow, (string)null); return new decimal((int)m_data1, (int)m_data2, (int)m_data3, !IsPositive, m_bScale); } private void TrimTrailingZeros() { uint[] rgulNumeric = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; int culLen = m_bLen; uint ulRem; //Remainder of a division by x_ulBase10, i.e.,least significant digit // special-case 0 if (culLen == 1 && rgulNumeric[0] == 0) { m_bScale = 0; return; } while (m_bScale > 0 && (culLen > 1 || rgulNumeric[0] != 0)) { MpDiv1(rgulNumeric, ref culLen, 10, out ulRem); if (ulRem == 0) { m_data1 = rgulNumeric[0]; m_data2 = rgulNumeric[1]; m_data3 = rgulNumeric[2]; m_data4 = rgulNumeric[3]; m_bScale--; } else { break; } } if (m_bLen == 4 && m_data4 == 0) m_bLen = 3; if (m_bLen == 3 && m_data3 == 0) m_bLen = 2; if (m_bLen == 2 && m_data2 == 0) m_bLen = 1; } public override string ToString() { AssertValid(); // Make local copy of data to avoid modifying input. 
uint[] rgulNumeric = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; int culLen = m_bLen; char[] pszTmp = new char[s_NUMERIC_MAX_PRECISION + 1]; //Local Character buffer to hold //the decimal digits, from the //lowest significant to highest significant int iDigits = 0;//Number of significant digits uint ulRem; //Remainder of a division by x_ulBase10, i.e.,least significant digit // Build the final numeric string by inserting the sign, reversing // the order and inserting the decimal number at the correct position //Retrieve each digit from the lowest significant digit while (culLen > 1 || rgulNumeric[0] != 0) { MpDiv1(rgulNumeric, ref culLen, 10, out ulRem); //modulo x_ulBase10 is the lowest significant digit pszTmp[iDigits++] = ChFromDigit(ulRem); } // if scale of the number has not been // reached pad remaining number with zeros. while (iDigits <= m_bScale) { pszTmp[iDigits++] = ChFromDigit(0); } bool fPositive = IsPositive; // Increment the result length if negative (need to add '-') int uiResultLen = fPositive ? iDigits : iDigits + 1; // Increment the result length if scale > 0 (need to add '.') if (m_bScale > 0) uiResultLen++; char[] szResult = new char[uiResultLen]; int iCurChar = 0; if (!fPositive) szResult[iCurChar++] = '-'; while (iDigits > 0) { if (iDigits-- == m_bScale) szResult[iCurChar++] = '.'; szResult[iCurChar++] = pszTmp[iDigits]; } AssertValid(); return new string(szResult); } // Is this RE numeric valid? 
[System.Diagnostics.Conditional("DEBUG")] private void AssertValid() { // Scale,Prec in range Debug.Assert(m_bScale <= s_NUMERIC_MAX_PRECISION, "m_bScale <= NUMERIC_MAX_PRECISION", "In AssertValid"); Debug.Assert(m_bScale <= m_bPrec, "m_bScale <= m_bPrec", "In AssertValid"); Debug.Assert(m_bScale >= 0, "m_bScale >= 0", "In AssertValid"); Debug.Assert(m_bPrec > 0, "m_bPrec > 0", "In AssertValid"); Debug.Assert(CLenFromPrec(m_bPrec) >= m_bLen, "CLenFromPrec(m_bPrec) >= m_bLen", "In AssertValid"); Debug.Assert(m_bLen <= s_cNumeMax, "m_bLen <= x_cNumeMax", "In AssertValid"); uint[] rglData = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; // highest UI4 is non-0 unless value "zero" if (rglData[m_bLen - 1] == 0) { Debug.Assert(m_bLen == 1, "m_bLen == 1", "In AssertValid"); } // All UI4s from length to end are 0 for (int iulData = m_bLen; iulData < s_cNumeMax; iulData++) Debug.Assert(rglData[iulData] == 0, "rglData[iulData] == 0", "In AssertValid"); } } internal struct BinXmlSqlMoney { private long _data; public BinXmlSqlMoney(int v) { _data = v; } public BinXmlSqlMoney(long v) { _data = v; } public decimal ToDecimal() { bool neg; ulong v; if (_data < 0) { neg = true; v = (ulong)unchecked(-_data); } else { neg = false; v = (ulong)_data; } // SQL Server stores money8 as ticks of 1/10000. 
const byte MoneyScale = 4; return new decimal(unchecked((int)v), unchecked((int)(v >> 32)), 0, neg, MoneyScale); } public override string ToString() { decimal money = ToDecimal(); // Formatting of SqlMoney: At least two digits after decimal point return money.ToString("#0.00##", CultureInfo.InvariantCulture); } } internal abstract class BinXmlDateTime { private const int MaxFractionDigits = 7; internal static int[] KatmaiTimeScaleMultiplicator = new int[8] { 10000000, 1000000, 100000, 10000, 1000, 100, 10, 1, }; private static void Write2Dig(StringBuilder sb, int val) { Debug.Assert(val >= 0 && val < 100); sb.Append((char)('0' + (val / 10))); sb.Append((char)('0' + (val % 10))); } private static void Write4DigNeg(StringBuilder sb, int val) { Debug.Assert(val > -10000 && val < 10000); if (val < 0) { val = -val; sb.Append('-'); } Write2Dig(sb, val / 100); Write2Dig(sb, val % 100); } private static void Write3Dec(StringBuilder sb, int val) { Debug.Assert(val >= 0 && val < 1000); int c3 = val % 10; val /= 10; int c2 = val % 10; val /= 10; int c1 = val; sb.Append('.'); sb.Append((char)('0' + c1)); sb.Append((char)('0' + c2)); sb.Append((char)('0' + c3)); } private static void WriteDate(StringBuilder sb, int yr, int mnth, int day) { Write4DigNeg(sb, yr); sb.Append('-'); Write2Dig(sb, mnth); sb.Append('-'); Write2Dig(sb, day); } private static void WriteTime(StringBuilder sb, int hr, int min, int sec, int ms) { Write2Dig(sb, hr); sb.Append(':'); Write2Dig(sb, min); sb.Append(':'); Write2Dig(sb, sec); if (ms != 0) { Write3Dec(sb, ms); } } private static void WriteTimeFullPrecision(StringBuilder sb, int hr, int min, int sec, int fraction) { Write2Dig(sb, hr); sb.Append(':'); Write2Dig(sb, min); sb.Append(':'); Write2Dig(sb, sec); if (fraction != 0) { int fractionDigits = MaxFractionDigits; while (fraction % 10 == 0) { fractionDigits--; fraction /= 10; } char[] charArray = new char[fractionDigits]; while (fractionDigits > 0) { fractionDigits--; charArray[fractionDigits] = 
(char)(fraction % 10 + '0'); fraction /= 10; } sb.Append('.'); sb.Append(charArray); } } private static void WriteTimeZone(StringBuilder sb, TimeSpan zone) { bool negTimeZone = true; if (zone.Ticks < 0) { negTimeZone = false; zone = zone.Negate(); } WriteTimeZone(sb, negTimeZone, zone.Hours, zone.Minutes); } private static void WriteTimeZone(StringBuilder sb, bool negTimeZone, int hr, int min) { if (hr == 0 && min == 0) { sb.Append('Z'); } else { sb.Append(negTimeZone ? '+' : '-'); Write2Dig(sb, hr); sb.Append(':'); Write2Dig(sb, min); } } private static void BreakDownXsdDateTime(long val, out int yr, out int mnth, out int day, out int hr, out int min, out int sec, out int ms) { if (val < 0) goto Error; long date = val / 4; // trim indicator bits ms = (int)(date % 1000); date /= 1000; sec = (int)(date % 60); date /= 60; min = (int)(date % 60); date /= 60; hr = (int)(date % 24); date /= 24; day = (int)(date % 31) + 1; date /= 31; mnth = (int)(date % 12) + 1; date /= 12; yr = (int)(date - 9999); if (yr < -9999 || yr > 9999) goto Error; return; Error: throw new XmlException(SR.SqlTypes_ArithOverflow, (string)null); } private static void BreakDownXsdDate(long val, out int yr, out int mnth, out int day, out bool negTimeZone, out int hr, out int min) { if (val < 0) goto Error; val = val / 4; // trim indicator bits int totalMin = (int)(val % (29 * 60)) - 60 * 14; long totalDays = val / (29 * 60); if (negTimeZone = (totalMin < 0)) totalMin = -totalMin; min = totalMin % 60; hr = totalMin / 60; day = (int)(totalDays % 31) + 1; totalDays /= 31; mnth = (int)(totalDays % 12) + 1; yr = (int)(totalDays / 12) - 9999; if (yr < -9999 || yr > 9999) goto Error; return; Error: throw new XmlException(SR.SqlTypes_ArithOverflow, (string)null); } private static void BreakDownXsdTime(long val, out int hr, out int min, out int sec, out int ms) { if (val < 0) goto Error; val = val / 4; // trim indicator bits ms = (int)(val % 1000); val /= 1000; sec = (int)(val % 60); val /= 60; min = 
(int)(val % 60); hr = (int)(val / 60); if (0 > hr || hr > 23) goto Error; return; Error: throw new XmlException(SR.SqlTypes_ArithOverflow, (string)null); } public static string XsdDateTimeToString(long val) { int yr; int mnth; int day; int hr; int min; int sec; int ms; BreakDownXsdDateTime(val, out yr, out mnth, out day, out hr, out min, out sec, out ms); StringBuilder sb = new StringBuilder(20); WriteDate(sb, yr, mnth, day); sb.Append('T'); WriteTime(sb, hr, min, sec, ms); sb.Append('Z'); return sb.ToString(); } public static DateTime XsdDateTimeToDateTime(long val) { int yr; int mnth; int day; int hr; int min; int sec; int ms; BreakDownXsdDateTime(val, out yr, out mnth, out day, out hr, out min, out sec, out ms); return new DateTime(yr, mnth, day, hr, min, sec, ms, DateTimeKind.Utc); } public static string XsdDateToString(long val) { int yr; int mnth; int day; int hr; int min; bool negTimeZ; BreakDownXsdDate(val, out yr, out mnth, out day, out negTimeZ, out hr, out min); StringBuilder sb = new StringBuilder(20); WriteDate(sb, yr, mnth, day); WriteTimeZone(sb, negTimeZ, hr, min); return sb.ToString(); } public static DateTime XsdDateToDateTime(long val) { int yr; int mnth; int day; int hr; int min; bool negTimeZ; BreakDownXsdDate(val, out yr, out mnth, out day, out negTimeZ, out hr, out min); DateTime d = new DateTime(yr, mnth, day, 0, 0, 0, DateTimeKind.Utc); // adjust for timezone int adj = (negTimeZ ? 
-1 : 1) * ((hr * 60) + min); return TimeZoneInfo.ConvertTime(d.AddMinutes(adj), TimeZoneInfo.Local); } public static string XsdTimeToString(long val) { int hr; int min; int sec; int ms; BreakDownXsdTime(val, out hr, out min, out sec, out ms); StringBuilder sb = new StringBuilder(16); WriteTime(sb, hr, min, sec, ms); sb.Append('Z'); return sb.ToString(); } public static DateTime XsdTimeToDateTime(long val) { int hr; int min; int sec; int ms; BreakDownXsdTime(val, out hr, out min, out sec, out ms); return new DateTime(1, 1, 1, hr, min, sec, ms, DateTimeKind.Utc); } public static string SqlDateTimeToString(int dateticks, uint timeticks) { DateTime dateTime = SqlDateTimeToDateTime(dateticks, timeticks); string format = (dateTime.Millisecond != 0) ? "yyyy/MM/dd\\THH:mm:ss.ffff" : "yyyy/MM/dd\\THH:mm:ss"; return dateTime.ToString(format, CultureInfo.InvariantCulture); } public static DateTime SqlDateTimeToDateTime(int dateticks, uint timeticks) { DateTime SQLBaseDate = new DateTime(1900, 1, 1); //long millisecond = (long)(((ulong)timeticks * 20 + (ulong)3) / (ulong)6); long millisecond = (long)(timeticks / s_SQLTicksPerMillisecond + 0.5); return SQLBaseDate.Add(new TimeSpan(dateticks * TimeSpan.TicksPerDay + millisecond * TimeSpan.TicksPerMillisecond)); } // Number of (100ns) ticks per time unit private static readonly double s_SQLTicksPerMillisecond = 0.3; public static readonly int SQLTicksPerSecond = 300; public static readonly int SQLTicksPerMinute = SQLTicksPerSecond * 60; public static readonly int SQLTicksPerHour = SQLTicksPerMinute * 60; private static readonly int s_SQLTicksPerDay = SQLTicksPerHour * 24; public static string SqlSmallDateTimeToString(short dateticks, ushort timeticks) { DateTime dateTime = SqlSmallDateTimeToDateTime(dateticks, timeticks); return dateTime.ToString("yyyy/MM/dd\\THH:mm:ss", CultureInfo.InvariantCulture); } public static DateTime SqlSmallDateTimeToDateTime(short dateticks, ushort timeticks) { return 
SqlDateTimeToDateTime((int)dateticks, (uint)(timeticks * SQLTicksPerMinute)); } // Conversions of the Katmai date & time types to DateTime public static DateTime XsdKatmaiDateToDateTime(byte[] data, int offset) { // Katmai SQL type "DATE" long dateTicks = GetKatmaiDateTicks(data, ref offset); DateTime dt = new DateTime(dateTicks); return dt; } public static DateTime XsdKatmaiDateTimeToDateTime(byte[] data, int offset) { // Katmai SQL type "DATETIME2" long timeTicks = GetKatmaiTimeTicks(data, ref offset); long dateTicks = GetKatmaiDateTicks(data, ref offset); DateTime dt = new DateTime(dateTicks + timeTicks); return dt; } public static DateTime XsdKatmaiTimeToDateTime(byte[] data, int offset) { // TIME without zone is stored as DATETIME2 return XsdKatmaiDateTimeToDateTime(data, offset); } public static DateTime XsdKatmaiDateOffsetToDateTime(byte[] data, int offset) { // read the timezoned value into DateTimeOffset and then convert to local time return XsdKatmaiDateOffsetToDateTimeOffset(data, offset).LocalDateTime; } public static DateTime XsdKatmaiDateTimeOffsetToDateTime(byte[] data, int offset) { // read the timezoned value into DateTimeOffset and then convert to local time return XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset).LocalDateTime; } public static DateTime XsdKatmaiTimeOffsetToDateTime(byte[] data, int offset) { // read the timezoned value into DateTimeOffset and then convert to local time return XsdKatmaiTimeOffsetToDateTimeOffset(data, offset).LocalDateTime; } public static DateTimeOffset XsdKatmaiDateOffsetToDateTimeOffset(byte[] data, int offset) { // DATE with zone is stored as DATETIMEOFFSET return XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset); } public static DateTimeOffset XsdKatmaiDateTimeOffsetToDateTimeOffset(byte[] data, int offset) { // Katmai SQL type "DATETIMEOFFSET" long timeTicks = GetKatmaiTimeTicks(data, ref offset); long dateTicks = GetKatmaiDateTicks(data, ref offset); long zoneTicks = GetKatmaiTimeZoneTicks(data, 
offset); // The DATETIMEOFFSET values are serialized in UTC, but DateTimeOffset takes adjusted time -> we need to add zoneTicks DateTimeOffset dto = new DateTimeOffset(dateTicks + timeTicks + zoneTicks, new TimeSpan(zoneTicks)); return dto; } public static DateTimeOffset XsdKatmaiTimeOffsetToDateTimeOffset(byte[] data, int offset) { // TIME with zone is stored as DATETIMEOFFSET return XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset); } // Conversions of the Katmai date & time types to string public static string XsdKatmaiDateToString(byte[] data, int offset) { DateTime dt = XsdKatmaiDateToDateTime(data, offset); StringBuilder sb = new StringBuilder(10); WriteDate(sb, dt.Year, dt.Month, dt.Day); return sb.ToString(); } public static string XsdKatmaiDateTimeToString(byte[] data, int offset) { DateTime dt = XsdKatmaiDateTimeToDateTime(data, offset); StringBuilder sb = new StringBuilder(33); WriteDate(sb, dt.Year, dt.Month, dt.Day); sb.Append('T'); WriteTimeFullPrecision(sb, dt.Hour, dt.Minute, dt.Second, GetFractions(dt)); return sb.ToString(); } public static string XsdKatmaiTimeToString(byte[] data, int offset) { DateTime dt = XsdKatmaiTimeToDateTime(data, offset); StringBuilder sb = new StringBuilder(16); WriteTimeFullPrecision(sb, dt.Hour, dt.Minute, dt.Second, GetFractions(dt)); return sb.ToString(); } public static string XsdKatmaiDateOffsetToString(byte[] data, int offset) { DateTimeOffset dto = XsdKatmaiDateOffsetToDateTimeOffset(data, offset); StringBuilder sb = new StringBuilder(16); WriteDate(sb, dto.Year, dto.Month, dto.Day); WriteTimeZone(sb, dto.Offset); return sb.ToString(); } public static string XsdKatmaiDateTimeOffsetToString(byte[] data, int offset) { DateTimeOffset dto = XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset); StringBuilder sb = new StringBuilder(39); WriteDate(sb, dto.Year, dto.Month, dto.Day); sb.Append('T'); WriteTimeFullPrecision(sb, dto.Hour, dto.Minute, dto.Second, GetFractions(dto)); WriteTimeZone(sb, dto.Offset); return 
sb.ToString(); } public static string XsdKatmaiTimeOffsetToString(byte[] data, int offset) { DateTimeOffset dto = XsdKatmaiTimeOffsetToDateTimeOffset(data, offset); StringBuilder sb = new StringBuilder(22); WriteTimeFullPrecision(sb, dto.Hour, dto.Minute, dto.Second, GetFractions(dto)); WriteTimeZone(sb, dto.Offset); return sb.ToString(); } // Helper methods for the Katmai date & time types private static long GetKatmaiDateTicks(byte[] data, ref int pos) { int p = pos; pos = p + 3; return (data[p] | data[p + 1] << 8 | data[p + 2] << 16) * TimeSpan.TicksPerDay; } private static long GetKatmaiTimeTicks(byte[] data, ref int pos) { int p = pos; byte scale = data[p]; long timeTicks; p++; if (scale <= 2) { timeTicks = data[p] | (data[p + 1] << 8) | (data[p + 2] << 16); pos = p + 3; } else if (scale <= 4) { timeTicks = data[p] | (data[p + 1] << 8) | (data[p + 2] << 16); timeTicks |= ((long)data[p + 3] << 24); pos = p + 4; } else if (scale <= 7) { timeTicks = data[p] | (data[p + 1] << 8) | (data[p + 2] << 16); timeTicks |= ((long)data[p + 3] << 24) | ((long)data[p + 4] << 32); pos = p + 5; } else { throw new XmlException(SR.SqlTypes_ArithOverflow, (string)null); } return timeTicks * KatmaiTimeScaleMultiplicator[scale]; } private static long GetKatmaiTimeZoneTicks(byte[] data, int pos) { return (short)(data[pos] | data[pos + 1] << 8) * TimeSpan.TicksPerMinute; } private static int GetFractions(DateTime dt) { return (int)(dt.Ticks - new DateTime(dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second).Ticks); } private static int GetFractions(DateTimeOffset dt) { return (int)(dt.Ticks - new DateTime(dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second).Ticks); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#pragma warning disable CS0067 // events are declared but not used

using System.IO;
using System.Reflection;
using System.Runtime.ExceptionServices;
using System.Runtime.Loader;
using System.Runtime.Remoting;
using System.Security;
using System.Security.Permissions;
using System.Security.Principal;
using System.Threading;

namespace System
{
#if PROJECTN
    [Internal.Runtime.CompilerServices.RelocatedType("System.Runtime.Extensions")]
#endif
    // Compatibility shim: on this runtime there is exactly one AppDomain, so
    // most members either delegate to AppContext/AssemblyLoadContext/Assembly,
    // return fixed values, or throw PlatformNotSupportedException.
    public sealed partial class AppDomain : MarshalByRefObject
    {
        // The single process-wide domain instance.
        private static readonly AppDomain s_domain = new AppDomain();
        // Guards one-time assignment of _defaultPrincipal in SetThreadPrincipal.
        private readonly object _forLock = new object();
        private IPrincipal _defaultPrincipal;
        private PrincipalPolicy _principalPolicy = PrincipalPolicy.NoPrincipal;
        // Lazily-bound factories for the principal objects; resolved via
        // reflection so the principal assemblies are not hard dependencies.
        // NOTE(review): these carry an s_ (static) prefix but are declared as
        // instance fields; harmless on a singleton, but the naming is off.
        private Func<IPrincipal> s_getWindowsPrincipal;
        private Func<IPrincipal> s_getUnauthenticatedPrincipal;

        private AppDomain() { }

        public static AppDomain CurrentDomain => s_domain;

        public string BaseDirectory => AppContext.BaseDirectory;

        public string RelativeSearchPath => null;

        public AppDomainSetup SetupInformation => new AppDomainSetup();

        public PermissionSet PermissionSet => new PermissionSet(PermissionState.Unrestricted);

        public event UnhandledExceptionEventHandler UnhandledException
        {
            add { AppContext.UnhandledException += value; }
            remove { AppContext.UnhandledException -= value; }
        }

        public string DynamicDirectory => null;

        [ObsoleteAttribute("AppDomain.SetDynamicBase has been deprecated. Please investigate the use of AppDomainSetup.DynamicBase instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void SetDynamicBase(string path) { }

        // Derived from the entry assembly's simple name; falls back to a fixed
        // name when there is no entry assembly (e.g. when hosted).
        public string FriendlyName
        {
            get
            {
                Assembly assembly = Assembly.GetEntryAssembly();
                return assembly != null ? assembly.GetName().Name : "DefaultDomain";
            }
        }

        public int Id => 1;

        public bool IsFullyTrusted => true;

        public bool IsHomogenous => true;

        public event EventHandler DomainUnload;

        public event EventHandler<FirstChanceExceptionEventArgs> FirstChanceException
        {
            add { AppContext.FirstChanceException += value; }
            remove { AppContext.FirstChanceException -= value; }
        }

        public event EventHandler ProcessExit
        {
            add { AppContext.ProcessExit += value; }
            remove { AppContext.ProcessExit -= value; }
        }

        // No policy to apply on this runtime; validates and echoes the name.
        public string ApplyPolicy(string assemblyName)
        {
            if (assemblyName == null)
            {
                throw new ArgumentNullException(nameof(assemblyName));
            }
            if (assemblyName.Length == 0 || assemblyName[0] == '\0')
            {
                throw new ArgumentException(SR.Argument_StringZeroLength, nameof(assemblyName));
            }
            return assemblyName;
        }

        // Secondary AppDomains are not supported; argument check still applies.
        public static AppDomain CreateDomain(string friendlyName)
        {
            if (friendlyName == null)
                throw new ArgumentNullException(nameof(friendlyName));

            throw new PlatformNotSupportedException(SR.PlatformNotSupported_AppDomains);
        }

        public int ExecuteAssembly(string assemblyFile) => ExecuteAssembly(assemblyFile, null);

        // Loads the assembly from disk and runs its entry point with 'args'.
        public int ExecuteAssembly(string assemblyFile, string[] args)
        {
            if (assemblyFile == null)
            {
                throw new ArgumentNullException(nameof(assemblyFile));
            }
            string fullPath = Path.GetFullPath(assemblyFile);
            Assembly assembly = Assembly.LoadFile(fullPath);
            return ExecuteAssembly(assembly, args);
        }

        public int ExecuteAssembly(string assemblyFile, string[] args, byte[] hashValue, Configuration.Assemblies.AssemblyHashAlgorithm hashAlgorithm)
        {
            throw new PlatformNotSupportedException(SR.PlatformNotSupported_CAS); // This api is only meaningful for very specific partial trust/CAS scenarios
        }

        // Invokes the assembly's entry point; passes 'args' only when the entry
        // point declares a parameter, and maps a void/null return to exit code 0.
        private int ExecuteAssembly(Assembly assembly, string[] args)
        {
            MethodInfo entry = assembly.EntryPoint;
            if (entry == null)
            {
                throw new MissingMethodException(SR.Arg_EntryPointNotFoundException);
            }

            object result = entry.Invoke(
                obj: null,
                invokeAttr: BindingFlags.DoNotWrapExceptions,
                binder: null,
                parameters: entry.GetParameters().Length > 0 ? new object[] { args } : null,
                culture: null);

            return result != null ? (int)result : 0;
        }

        public int ExecuteAssemblyByName(AssemblyName assemblyName, params string[] args) =>
            ExecuteAssembly(Assembly.Load(assemblyName), args);

        public int ExecuteAssemblyByName(string assemblyName) =>
            ExecuteAssemblyByName(assemblyName, null);

        public int ExecuteAssemblyByName(string assemblyName, params string[] args) =>
            ExecuteAssembly(Assembly.Load(assemblyName), args);

        public object GetData(string name) => AppContext.GetData(name);

        public void SetData(string name, object data) => AppContext.SetData(name, data);

        // Returns null (not false) when the switch has never been set.
        public bool? IsCompatibilitySwitchSet(string value)
        {
            bool result;
            return AppContext.TryGetSwitch(value, out result) ? result : default(bool?);
        }

        public bool IsDefaultAppDomain() => true;

        public bool IsFinalizingForUnload() => false;

        public override string ToString() =>
            SR.AppDomain_Name + FriendlyName + Environment.NewLine + SR.AppDomain_NoContextPolicies;

        // Unloading is impossible (single domain); null check still applies.
        public static void Unload(AppDomain domain)
        {
            if (domain == null)
            {
                throw new ArgumentNullException(nameof(domain));
            }

            throw new CannotUnloadAppDomainException(SR.Arg_PlatformNotSupported);
        }

        public Assembly Load(byte[] rawAssembly) => Assembly.Load(rawAssembly);

        public Assembly Load(byte[] rawAssembly, byte[] rawSymbolStore) => Assembly.Load(rawAssembly, rawSymbolStore);

        public Assembly Load(AssemblyName assemblyRef) => Assembly.Load(assemblyRef);

        public Assembly Load(string assemblyString) => Assembly.Load(assemblyString);

        public Assembly[] ReflectionOnlyGetAssemblies() => Array.Empty<Assembly>();

        // Resource monitoring is not supported; the setter throws for 'true'
        // and rejects 'false' outright (it can never be meaningfully disabled).
        public static bool MonitoringIsEnabled
        {
            get { return false; }
            set
            {
                if (!value)
                {
                    throw new ArgumentException(SR.Arg_MustBeTrue);
                }
                throw new PlatformNotSupportedException(SR.PlatformNotSupported_AppDomain_ResMon);
            }
        }

        public long MonitoringSurvivedMemorySize { get { throw CreateResMonNotAvailException(); } }

        public static long MonitoringSurvivedProcessMemorySize { get { throw CreateResMonNotAvailException(); } }

        public long MonitoringTotalAllocatedMemorySize { get { throw CreateResMonNotAvailException(); } }

        public TimeSpan MonitoringTotalProcessorTime { get { throw CreateResMonNotAvailException(); } }

        private static Exception CreateResMonNotAvailException() => new InvalidOperationException(SR.PlatformNotSupported_AppDomain_ResMon);

        [ObsoleteAttribute("AppDomain.GetCurrentThreadId has been deprecated because it does not provide a stable Id when managed threads are running on fibers (aka lightweight threads). To get a stable identifier for a managed thread, use the ManagedThreadId property on Thread. https://go.microsoft.com/fwlink/?linkid=14202", false)]
        public static int GetCurrentThreadId() => Environment.CurrentManagedThreadId;

        public bool ShadowCopyFiles => false;

        [ObsoleteAttribute("AppDomain.AppendPrivatePath has been deprecated. Please investigate the use of AppDomainSetup.PrivateBinPath instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void AppendPrivatePath(string path) { }

        [ObsoleteAttribute("AppDomain.ClearPrivatePath has been deprecated. Please investigate the use of AppDomainSetup.PrivateBinPath instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void ClearPrivatePath() { }

        [ObsoleteAttribute("AppDomain.ClearShadowCopyPath has been deprecated. Please investigate the use of AppDomainSetup.ShadowCopyDirectories instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void ClearShadowCopyPath() { }

        [ObsoleteAttribute("AppDomain.SetCachePath has been deprecated. Please investigate the use of AppDomainSetup.CachePath instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void SetCachePath(string path) { }

        [ObsoleteAttribute("AppDomain.SetShadowCopyFiles has been deprecated. Please investigate the use of AppDomainSetup.ShadowCopyFiles instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void SetShadowCopyFiles() { }

        [ObsoleteAttribute("AppDomain.SetShadowCopyPath has been deprecated. Please investigate the use of AppDomainSetup.ShadowCopyDirectories instead. https://go.microsoft.com/fwlink/?linkid=14202")]
        public void SetShadowCopyPath(string path) { }

        public Assembly[] GetAssemblies() => AssemblyLoadContext.GetLoadedAssemblies();

        public event AssemblyLoadEventHandler AssemblyLoad
        {
            add { AssemblyLoadContext.AssemblyLoad += value; }
            remove { AssemblyLoadContext.AssemblyLoad -= value; }
        }

        public event ResolveEventHandler AssemblyResolve
        {
            add { AssemblyLoadContext.AssemblyResolve += value; }
            remove { AssemblyLoadContext.AssemblyResolve -= value; }
        }

        // Never raised on this runtime (no reflection-only loads) — kept for
        // source compatibility; CS0067 is suppressed at the top of the file.
        public event ResolveEventHandler ReflectionOnlyAssemblyResolve;

        public event ResolveEventHandler TypeResolve
        {
            add { AssemblyLoadContext.TypeResolve += value; }
            remove { AssemblyLoadContext.TypeResolve -= value; }
        }

        public event ResolveEventHandler ResourceResolve
        {
            add { AssemblyLoadContext.ResourceResolve += value; }
            remove { AssemblyLoadContext.ResourceResolve -= value; }
        }

        public void SetPrincipalPolicy(PrincipalPolicy policy)
        {
            _principalPolicy = policy;
        }

        // Sets the fallback principal returned by GetThreadPrincipal; may be
        // set at most once for the lifetime of the domain.
        public void SetThreadPrincipal(IPrincipal principal)
        {
            if (principal == null)
            {
                throw new ArgumentNullException(nameof(principal));
            }

            lock (_forLock)
            {
                // Check that principal has not been set previously.
                if (_defaultPrincipal != null)
                {
                    throw new SystemException(SR.AppDomain_Policy_PrincipalTwice);
                }

                _defaultPrincipal = principal;
            }
        }

        public ObjectHandle CreateInstance(string assemblyName, string typeName)
        {
            if (assemblyName == null)
            {
                throw new ArgumentNullException(nameof(assemblyName));
            }

            return Activator.CreateInstance(assemblyName, typeName);
        }

        public ObjectHandle CreateInstance(string assemblyName, string typeName, bool ignoreCase, BindingFlags bindingAttr, Binder binder, object[] args, System.Globalization.CultureInfo culture, object[] activationAttributes)
        {
            if (assemblyName == null)
            {
                throw new ArgumentNullException(nameof(assemblyName));
            }

            return Activator.CreateInstance(assemblyName,
                                            typeName,
                                            ignoreCase,
                                            bindingAttr,
                                            binder,
                                            args,
                                            culture,
                                            activationAttributes);
        }

        public ObjectHandle CreateInstance(string assemblyName, string typeName, object[] activationAttributes)
        {
            if (assemblyName == null)
            {
                throw new ArgumentNullException(nameof(assemblyName));
            }

            return Activator.CreateInstance(assemblyName, typeName, activationAttributes);
        }

        // *AndUnwrap variants unwrap the ObjectHandle; null-propagates when
        // activation returned no handle.
        public object CreateInstanceAndUnwrap(string assemblyName, string typeName)
        {
            ObjectHandle oh = CreateInstance(assemblyName, typeName);
            return oh?.Unwrap();
        }

        public object CreateInstanceAndUnwrap(string assemblyName, string typeName, bool ignoreCase, BindingFlags bindingAttr, Binder binder, object[] args, System.Globalization.CultureInfo culture, object[] activationAttributes)
        {
            ObjectHandle oh = CreateInstance(assemblyName,
                                             typeName,
                                             ignoreCase,
                                             bindingAttr,
                                             binder,
                                             args,
                                             culture,
                                             activationAttributes);
            return oh?.Unwrap();
        }

        public object CreateInstanceAndUnwrap(string assemblyName, string typeName, object[] activationAttributes)
        {
            ObjectHandle oh = CreateInstance(assemblyName, typeName, activationAttributes);
            return oh?.Unwrap();
        }

        public ObjectHandle CreateInstanceFrom(string assemblyFile, string typeName)
        {
            return Activator.CreateInstanceFrom(assemblyFile, typeName);
        }

        public ObjectHandle CreateInstanceFrom(string assemblyFile, string typeName, bool ignoreCase, BindingFlags bindingAttr, Binder binder, object[] args, System.Globalization.CultureInfo culture, object[] activationAttributes)
        {
            return Activator.CreateInstanceFrom(assemblyFile,
                                                typeName,
                                                ignoreCase,
                                                bindingAttr,
                                                binder,
                                                args,
                                                culture,
                                                activationAttributes);
        }

        public ObjectHandle CreateInstanceFrom(string assemblyFile, string typeName, object[] activationAttributes)
        {
            return Activator.CreateInstanceFrom(assemblyFile, typeName, activationAttributes);
        }

        public object CreateInstanceFromAndUnwrap(string assemblyFile, string typeName)
        {
            ObjectHandle oh = CreateInstanceFrom(assemblyFile, typeName);
            return oh?.Unwrap();
        }

        public object CreateInstanceFromAndUnwrap(string assemblyFile, string typeName, bool ignoreCase, BindingFlags bindingAttr, Binder binder, object[] args, System.Globalization.CultureInfo culture, object[] activationAttributes)
        {
            ObjectHandle oh = CreateInstanceFrom(assemblyFile,
                                                 typeName,
                                                 ignoreCase,
                                                 bindingAttr,
                                                 binder,
                                                 args,
                                                 culture,
                                                 activationAttributes);
            return oh?.Unwrap();
        }

        public object CreateInstanceFromAndUnwrap(string assemblyFile, string typeName, object[] activationAttributes)
        {
            ObjectHandle oh = CreateInstanceFrom(assemblyFile, typeName, activationAttributes);
            return oh?.Unwrap();
        }

        // Returns the explicitly-set principal if any, otherwise constructs one
        // according to the current PrincipalPolicy, binding the factory method
        // lazily by reflection so the principal assemblies load on demand.
        public IPrincipal GetThreadPrincipal()
        {
            IPrincipal principal = _defaultPrincipal;
            if (principal == null)
            {
                switch (_principalPolicy)
                {
                    case PrincipalPolicy.UnauthenticatedPrincipal:
                        if (s_getUnauthenticatedPrincipal == null)
                        {
                            Type type = Type.GetType("System.Security.Principal.GenericPrincipal, System.Security.Claims", throwOnError: true);
                            // Don't throw PNSE if null like for WindowsPrincipal as UnauthenticatedPrincipal should
                            // be available on all platforms.
                            Volatile.Write(ref s_getUnauthenticatedPrincipal,
                                (Func<IPrincipal>)Delegate.CreateDelegate(typeof(Func<IPrincipal>), type, "GetDefaultInstance"));
                        }

                        principal = s_getUnauthenticatedPrincipal();
                        break;

                    case PrincipalPolicy.WindowsPrincipal:
                        if (s_getWindowsPrincipal == null)
                        {
                            Type type = Type.GetType("System.Security.Principal.WindowsPrincipal, System.Security.Principal.Windows", throwOnError: true);
                            Volatile.Write(ref s_getWindowsPrincipal,
                                (Func<IPrincipal>)Delegate.CreateDelegate(typeof(Func<IPrincipal>), type, "GetDefaultInstance", ignoreCase: false, throwOnBindFailure: false) ??
                                throw new PlatformNotSupportedException(SR.PlatformNotSupported_Principal));
                        }

                        principal = s_getWindowsPrincipal();
                        break;
                }
            }

            return principal;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
** Purpose: Capture execution context for a thread
**
===========================================================*/

using System.Diagnostics;
using System.Runtime.ExceptionServices;
using System.Runtime.Serialization;

using Thread = Internal.Runtime.Augments.RuntimeThread;

namespace System.Threading
{
    // Signature of the user callback invoked under a captured ExecutionContext.
    public delegate void ContextCallback(object state);

    /// <summary>
    /// Immutable snapshot of the ambient AsyncLocal state for a thread.
    /// A null Thread.ExecutionContext internally means "the default context";
    /// <see cref="Default"/> is only handed out publicly by <see cref="Capture"/>.
    /// </summary>
    public sealed class ExecutionContext : IDisposable, ISerializable
    {
        // Sentinel instances: Default represents the empty context; DefaultFlowSuppressed is the
        // empty context with flow suppression turned on (shared so SuppressFlow on a clean thread allocates nothing new).
        internal static readonly ExecutionContext Default = new ExecutionContext(isDefault: true);
        internal static readonly ExecutionContext DefaultFlowSuppressed = new ExecutionContext(AsyncLocalValueMap.Empty, Array.Empty<IAsyncLocal>(), isFlowSuppressed: true);

        private readonly IAsyncLocalValueMap m_localValues;            // AsyncLocal -> value map (null only for the Default sentinel)
        private readonly IAsyncLocal[] m_localChangeNotifications;     // locals that asked for change notifications
        private readonly bool m_isFlowSuppressed;
        private readonly bool m_isDefault;                             // true only for the Default sentinel

        // Constructor used only for the Default sentinel.
        private ExecutionContext(bool isDefault)
        {
            m_isDefault = isDefault;
        }

        private ExecutionContext(
            IAsyncLocalValueMap localValues,
            IAsyncLocal[] localChangeNotifications,
            bool isFlowSuppressed)
        {
            m_localValues = localValues;
            m_localChangeNotifications = localChangeNotifications;
            m_isFlowSuppressed = isFlowSuppressed;
        }

        // ISerializable is implemented for API compatibility only; serialization is not supported.
        public void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            throw new PlatformNotSupportedException();
        }

        /// <summary>
        /// Captures the current context. Returns <see cref="Default"/> when the thread has none,
        /// and null when flow is suppressed (callers treat null as "do not flow").
        /// </summary>
        public static ExecutionContext Capture()
        {
            ExecutionContext executionContext = Thread.CurrentThread.ExecutionContext;
            return executionContext == null ? Default :
                executionContext.m_isFlowSuppressed ? null :
                executionContext;
        }

        // Returns a copy of this context with the flow-suppression bit flipped.
        // Empty contexts collapse to the shared sentinels to avoid allocation.
        private ExecutionContext ShallowClone(bool isFlowSuppressed)
        {
            Debug.Assert(isFlowSuppressed != m_isFlowSuppressed);

            if (m_localValues == null || AsyncLocalValueMap.IsEmpty(m_localValues))
            {
                return isFlowSuppressed ?
                    DefaultFlowSuppressed :
                    null; // implies the default context
            }
            return new ExecutionContext(m_localValues, m_localChangeNotifications, isFlowSuppressed);
        }

        /// <summary>
        /// Suppresses ExecutionContext flow on the current thread. Throws if flow is already
        /// suppressed. Returns an <see cref="AsyncFlowControl"/> whose Undo/Dispose restores flow.
        /// </summary>
        public static AsyncFlowControl SuppressFlow()
        {
            Thread currentThread = Thread.CurrentThread;
            ExecutionContext executionContext = currentThread.ExecutionContext ?? Default;
            if (executionContext.m_isFlowSuppressed)
            {
                throw new InvalidOperationException(SR.InvalidOperation_CannotSupressFlowMultipleTimes);
            }

            executionContext = executionContext.ShallowClone(isFlowSuppressed: true);
            var asyncFlowControl = new AsyncFlowControl();
            currentThread.ExecutionContext = executionContext;
            asyncFlowControl.Initialize(currentThread);
            return asyncFlowControl;
        }

        /// <summary>
        /// Restores flow after a prior <see cref="SuppressFlow"/>. Throws if flow is not currently suppressed.
        /// </summary>
        public static void RestoreFlow()
        {
            Thread currentThread = Thread.CurrentThread;
            ExecutionContext executionContext = currentThread.ExecutionContext;
            if (executionContext == null || !executionContext.m_isFlowSuppressed)
            {
                throw new InvalidOperationException(SR.InvalidOperation_CannotRestoreUnsupressedFlow);
            }

            currentThread.ExecutionContext = executionContext.ShallowClone(isFlowSuppressed: false);
        }

        // True when the current thread's context has flow suppressed.
        public static bool IsFlowSuppressed()
        {
            ExecutionContext executionContext = Thread.CurrentThread.ExecutionContext;
            return executionContext != null && executionContext.m_isFlowSuppressed;
        }

        // True when any AsyncLocal in this context registered for change notifications.
        internal bool HasChangeNotifications => m_localChangeNotifications != null;

        internal bool IsDefault => m_isDefault;

        /// <summary>
        /// Runs <paramref name="callback"/> under <paramref name="executionContext"/>,
        /// restoring the caller's ExecutionContext and SynchronizationContext afterwards.
        /// </summary>
        public static void Run(ExecutionContext executionContext, ContextCallback callback, object state)
        {
            // Note: ExecutionContext.Run is an extremely hot function and used by every await, ThreadPool execution, etc.
            if (executionContext == null)
            {
                ThrowNullContext();
            }

            RunInternal(executionContext, callback, state);
        }

        internal static void RunInternal(ExecutionContext executionContext, ContextCallback callback, object state)
        {
            // Note: ExecutionContext.RunInternal is an extremely hot function and used by every await, ThreadPool execution, etc.
            // Note: Manual enregistering may be addressed by "Exception Handling Write Through Optimization"
            //       https://github.com/dotnet/coreclr/blob/master/Documentation/design-docs/eh-writethru.md
            // Enregister variables with 0 post-fix so they can be used in registers without EH forcing them to stack

            // Capture references to Thread Contexts
            Thread currentThread0 = Thread.CurrentThread;
            Thread currentThread = currentThread0;
            ExecutionContext previousExecutionCtx0 = currentThread0.ExecutionContext;

            // Store current ExecutionContext and SynchronizationContext as "previousXxx".
            // This allows us to restore them and undo any Context changes made in callback.Invoke
            // so that they won't "leak" back into caller.
            // These variables will cross EH so be forced to stack
            ExecutionContext previousExecutionCtx = previousExecutionCtx0;
            SynchronizationContext previousSyncCtx = currentThread0.SynchronizationContext;

            if (executionContext != null && executionContext.m_isDefault)
            {
                // Default is a null ExecutionContext internally
                executionContext = null;
            }

            if (previousExecutionCtx0 != executionContext)
            {
                // Restore changed ExecutionContext
                currentThread0.ExecutionContext = executionContext;
                if ((executionContext != null && executionContext.HasChangeNotifications) ||
                    (previousExecutionCtx0 != null && previousExecutionCtx0.HasChangeNotifications))
                {
                    // There are change notifications; trigger any affected
                    OnValuesChanged(previousExecutionCtx0, executionContext);
                }
            }

            ExceptionDispatchInfo edi = null;
            try
            {
                callback.Invoke(state);
            }
            catch (Exception ex)
            {
                // Note: we have a "catch" rather than a "finally" because we want
                // to stop the first pass of EH here.  That way we can restore the previous
                // context before any of our callers' EH filters run.
                edi = ExceptionDispatchInfo.Capture(ex);
            }

            // Re-enregister variables post EH with 1 post-fix so they can be used in registers rather than from stack
            SynchronizationContext previousSyncCtx1 = previousSyncCtx;
            Thread currentThread1 = currentThread;
            // The common case is that these have not changed, so avoid the cost of a write barrier if not needed.
            if (currentThread1.SynchronizationContext != previousSyncCtx1)
            {
                // Restore changed SynchronizationContext back to previous
                currentThread1.SynchronizationContext = previousSyncCtx1;
            }

            ExecutionContext previousExecutionCtx1 = previousExecutionCtx;
            ExecutionContext currentExecutionCtx1 = currentThread1.ExecutionContext;
            if (currentExecutionCtx1 != previousExecutionCtx1)
            {
                // Restore changed ExecutionContext back to previous
                currentThread1.ExecutionContext = previousExecutionCtx1;
                if ((currentExecutionCtx1 != null && currentExecutionCtx1.HasChangeNotifications) ||
                    (previousExecutionCtx1 != null && previousExecutionCtx1.HasChangeNotifications))
                {
                    // There are change notifications; trigger any affected
                    OnValuesChanged(currentExecutionCtx1, previousExecutionCtx1);
                }
            }

            // If exception was thrown by callback, rethrow it now original contexts are restored
            edi?.Throw();
        }

        /// <summary>
        /// Fires <see cref="IAsyncLocal.OnValueChanged"/> for every registered local whose value
        /// differs between <paramref name="previousExecutionCtx"/> and <paramref name="nextExecutionCtx"/>
        /// (either side may be null, meaning the default/empty context).
        /// A throwing notification callback fail-fasts the process: the contexts have already been
        /// swapped, so there is no safe way to continue.
        /// </summary>
        internal static void OnValuesChanged(ExecutionContext previousExecutionCtx, ExecutionContext nextExecutionCtx)
        {
            Debug.Assert(previousExecutionCtx != nextExecutionCtx);

            // Collect Change Notifications
            IAsyncLocal[] previousChangeNotifications = previousExecutionCtx?.m_localChangeNotifications;
            IAsyncLocal[] nextChangeNotifications = nextExecutionCtx?.m_localChangeNotifications;

            // At least one side must have notifications
            Debug.Assert(previousChangeNotifications != null || nextChangeNotifications != null);

            // Fire Change Notifications
            try
            {
                if (previousChangeNotifications != null && nextChangeNotifications != null)
                {
                    // Notifications can't exist without values
                    Debug.Assert(previousExecutionCtx.m_localValues != null);
                    Debug.Assert(nextExecutionCtx.m_localValues != null);

                    // Both contexts have change notifications, check previousExecutionCtx first
                    foreach (IAsyncLocal local in previousChangeNotifications)
                    {
                        previousExecutionCtx.m_localValues.TryGetValue(local, out object previousValue);
                        nextExecutionCtx.m_localValues.TryGetValue(local, out object currentValue);

                        if (previousValue != currentValue)
                        {
                            local.OnValueChanged(previousValue, currentValue, contextChanged: true);
                        }
                    }

                    if (nextChangeNotifications != previousChangeNotifications)
                    {
                        // Check for additional notifications in nextExecutionCtx
                        foreach (IAsyncLocal local in nextChangeNotifications)
                        {
                            // If the local has a value in the previous context, we already fired the event
                            // for that local in the code above.
                            if (!previousExecutionCtx.m_localValues.TryGetValue(local, out object previousValue))
                            {
                                nextExecutionCtx.m_localValues.TryGetValue(local, out object currentValue);
                                if (previousValue != currentValue)
                                {
                                    local.OnValueChanged(previousValue, currentValue, contextChanged: true);
                                }
                            }
                        }
                    }
                }
                else if (previousChangeNotifications != null)
                {
                    // Notifications can't exist without values
                    Debug.Assert(previousExecutionCtx.m_localValues != null);

                    // No current values, so just check previous against null
                    foreach (IAsyncLocal local in previousChangeNotifications)
                    {
                        previousExecutionCtx.m_localValues.TryGetValue(local, out object previousValue);
                        if (previousValue != null)
                        {
                            local.OnValueChanged(previousValue, null, contextChanged: true);
                        }
                    }
                }
                else // Implied: nextChangeNotifications != null
                {
                    // Notifications can't exist without values
                    Debug.Assert(nextExecutionCtx.m_localValues != null);

                    // No previous values, so just check current against null
                    foreach (IAsyncLocal local in nextChangeNotifications)
                    {
                        nextExecutionCtx.m_localValues.TryGetValue(local, out object currentValue);
                        if (currentValue != null)
                        {
                            local.OnValueChanged(null, currentValue, contextChanged: true);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Environment.FailFast(
                    SR.ExecutionContext_ExceptionInAsyncLocalNotification,
                    ex);
            }
        }

        // Kept out-of-line so Run's hot path stays small; hidden from stack traces.
        [StackTraceHidden]
        private static void ThrowNullContext()
        {
            throw new InvalidOperationException(SR.InvalidOperation_NullContext);
        }

        // Reads the value of an AsyncLocal from the current thread's context (null when absent).
        internal static object GetLocalValue(IAsyncLocal local)
        {
            ExecutionContext current = Thread.CurrentThread.ExecutionContext;
            if (current == null)
            {
                return null;
            }

            current.m_localValues.TryGetValue(local, out object value);
            return value;
        }

        /// <summary>
        /// Sets an AsyncLocal's value by publishing a NEW immutable context on the current thread.
        /// No-ops if the value is reference-equal to the existing one. Fires a (non-context-change)
        /// notification afterwards when <paramref name="needChangeNotifications"/> is true.
        /// </summary>
        internal static void SetLocalValue(IAsyncLocal local, object newValue, bool needChangeNotifications)
        {
            ExecutionContext current = Thread.CurrentThread.ExecutionContext;

            object previousValue = null;
            bool hadPreviousValue = false;
            if (current != null)
            {
                hadPreviousValue = current.m_localValues.TryGetValue(local, out previousValue);
            }

            if (previousValue == newValue)
            {
                return;
            }

            // Regarding 'treatNullValueAsNonexistent: !needChangeNotifications' below:
            // - When change notifications are not necessary for this IAsyncLocal, there is no observable difference between
            //   storing a null value and removing the IAsyncLocal from 'm_localValues'
            // - When change notifications are necessary for this IAsyncLocal, the IAsyncLocal's absence in 'm_localValues'
            //   indicates that this is the first value change for the IAsyncLocal and it needs to be registered for change
            //   notifications. So in this case, a null value must be stored in 'm_localValues' to indicate that the IAsyncLocal
            //   is already registered for change notifications.
            IAsyncLocal[] newChangeNotifications = null;
            IAsyncLocalValueMap newValues;
            bool isFlowSuppressed = false;
            if (current != null)
            {
                isFlowSuppressed = current.m_isFlowSuppressed;
                newValues = current.m_localValues.Set(local, newValue, treatNullValueAsNonexistent: !needChangeNotifications);
                newChangeNotifications = current.m_localChangeNotifications;
            }
            else
            {
                // First AsyncLocal
                newValues = AsyncLocalValueMap.Create(local, newValue, treatNullValueAsNonexistent: !needChangeNotifications);
            }

            //
            // Either copy the change notification array, or create a new one, depending on whether we need to add a new item.
            //
            if (needChangeNotifications)
            {
                if (hadPreviousValue)
                {
                    Debug.Assert(newChangeNotifications != null);
                    Debug.Assert(Array.IndexOf(newChangeNotifications, local) >= 0);
                }
                else if (newChangeNotifications == null)
                {
                    newChangeNotifications = new IAsyncLocal[1] { local };
                }
                else
                {
                    int newNotificationIndex = newChangeNotifications.Length;
                    Array.Resize(ref newChangeNotifications, newNotificationIndex + 1);
                    newChangeNotifications[newNotificationIndex] = local;
                }
            }

            Thread.CurrentThread.ExecutionContext =
                (!isFlowSuppressed && AsyncLocalValueMap.IsEmpty(newValues)) ?
                null : // No values, return to Default context
                new ExecutionContext(newValues, newChangeNotifications, isFlowSuppressed);

            if (needChangeNotifications)
            {
                local.OnValueChanged(previousValue, newValue, contextChanged: false);
            }
        }

        public ExecutionContext CreateCopy()
        {
            return this; // since CoreCLR's ExecutionContext is immutable, we don't need to create copies.
        }

        public void Dispose()
        {
            // For CLR compat only
        }
    }

    /// <summary>
    /// Handle returned by <see cref="ExecutionContext.SuppressFlow"/>; Undo/Dispose restores flow.
    /// Must be undone exactly once, on the same thread that suppressed flow.
    /// </summary>
    public struct AsyncFlowControl : IDisposable
    {
        private Thread _thread; // thread that suppressed flow; null once undone

        internal void Initialize(Thread currentThread)
        {
            Debug.Assert(currentThread == Thread.CurrentThread);
            _thread = currentThread;
        }

        public void Undo()
        {
            if (_thread == null)
            {
                throw new InvalidOperationException(SR.InvalidOperation_CannotUseAFCMultiple);
            }
            if (Thread.CurrentThread != _thread)
            {
                throw new InvalidOperationException(SR.InvalidOperation_CannotUseAFCOtherThread);
            }

            // An async flow control cannot be undone when a different execution context is applied. The desktop framework
            // mutates the execution context when its state changes, and only changes the instance when an execution context
            // is applied (for instance, through ExecutionContext.Run). The framework prevents a suppressed-flow execution
            // context from being applied by returning null from ExecutionContext.Capture, so the only type of execution
            // context that can be applied is one whose flow is not suppressed. After suppressing flow and changing an async
            // local's value, the desktop framework verifies that a different execution context has not been applied by
            // checking the execution context instance against the one saved from when flow was suppressed. In .NET Core,
            // since the execution context instance will change after changing the async local's value, it verifies that a
            // different execution context has not been applied, by instead ensuring that the current execution context's
            // flow is suppressed.
            if (!ExecutionContext.IsFlowSuppressed())
            {
                throw new InvalidOperationException(SR.InvalidOperation_AsyncFlowCtrlCtxMismatch);
            }

            _thread = null;
            ExecutionContext.RestoreFlow();
        }

        public void Dispose()
        {
            Undo();
        }

        public override bool Equals(object obj)
        {
            return obj is AsyncFlowControl && Equals((AsyncFlowControl)obj);
        }

        public bool Equals(AsyncFlowControl obj)
        {
            return _thread == obj._thread;
        }

        public override int GetHashCode()
        {
            return _thread?.GetHashCode() ?? 0;
        }

        public static bool operator ==(AsyncFlowControl a, AsyncFlowControl b)
        {
            return a.Equals(b);
        }

        public static bool operator !=(AsyncFlowControl a, AsyncFlowControl b)
        {
            return !(a == b);
        }
    }
}
// Contains code from bplusdotnet project (BSD License) by Aaron Watters, Copyright 2004: http://bplusdotnet.sourceforge.net/
using System.IO;
using System.Text;

namespace McBits.LanguageLib.BPTree
{
    /// <summary>
    /// Bplustree with unlimited length strings (but only a fixed prefix is indexed in the tree directly).
    /// Keys sharing the same UTF-8 prefix are grouped into an <see cref="XBucket"/>, which is serialized
    /// as the byte[] payload stored under that prefix in the inner <see cref="BPTreeBytes"/> tree.
    /// </summary>
    public class XBPTreeBytes : BPTreeBase<byte[]>
    {
        // Maximum serialized bucket size; -1 means unlimited. NOTE(review): only set via LimitBucketSize
        // here — presumably enforced inside XBucket; confirm.
        public int BucketSizeLimit = -1;
        // Number of UTF-8 BYTES of each key used as the inner-tree index key.
        public int PrefixLength;
        // Inner fixed-key-length tree that maps prefix -> serialized bucket.
        protected readonly BPTreeBytes Tree;

        /// <summary>
        /// Wraps an existing inner tree. The prefix must be at least 3 bytes and must fit
        /// within the inner tree's maximum key length.
        /// </summary>
        public XBPTreeBytes(BPTreeBytes tree, int prefixLength)
        {
            if (prefixLength < 3)
                throw new BPTreeException("Prefix must be at least 3: " + prefixLength);
            if (prefixLength > tree.MaxKeyLength())
                throw new BPTreeException("Prefix length cannot exceed internal tree key length");
            Tree = tree;
            PrefixLength = prefixLength;
        }

        // Convenience constructors: build the inner BPTreeBytes from files/streams, then delegate
        // to the primary constructor above.
        public XBPTreeBytes(string treefileName, string blockfileName, int prefixLength, int cultureId, int nodesize, int buffersize)
            : this(new BPTreeBytes(treefileName, blockfileName, prefixLength, cultureId, nodesize, buffersize), prefixLength)
        {}

        public XBPTreeBytes(Stream treefile, Stream blockfile, int prefixLength, int cultureId, int nodesize, int buffersize)
            : this(new BPTreeBytes(treefile, blockfile, prefixLength, cultureId, nodesize, buffersize), prefixLength)
        {}

        public XBPTreeBytes(string treefileName, string blockfileName, int prefixLength, int cultureId)
            : this(new BPTreeBytes(treefileName, blockfileName, prefixLength, cultureId), prefixLength)
        {}

        public XBPTreeBytes(string treefileName, string blockfileName, int prefixLength)
            : this(new BPTreeBytes(treefileName, blockfileName, prefixLength), prefixLength)
        {}

        public XBPTreeBytes(Stream treefile, Stream blockfile, int prefixLength, int cultureId)
            : this(new BPTreeBytes(treefile, blockfile, prefixLength, cultureId), prefixLength)
        {}

        public XBPTreeBytes(Stream treefile, Stream blockfile, int prefixLength)
            : this(new BPTreeBytes(treefile, blockfile, prefixLength), prefixLength)
        {}

        /// <summary>
        /// Reopens an existing tree from streams; the prefix length is recovered from the
        /// inner tree's stored key length rather than passed in.
        /// </summary>
        public static XBPTreeBytes ReOpen(Stream treefile, Stream blockfile)
        {
            var tree = new BPTreeBytes(treefile, blockfile);
            int prefixLength = tree.MaxKeyLength();
            return new XBPTreeBytes(tree, prefixLength);
        }

        /// <summary>Reopens an existing tree from files (read-write).</summary>
        public static XBPTreeBytes ReOpen(string treefileName, string blockfileName)
        {
            var tree = new BPTreeBytes(treefileName, blockfileName);
            int prefixLength = tree.MaxKeyLength();
            return new XBPTreeBytes(tree, prefixLength);
        }

        /// <summary>Opens an existing tree from files in read-only mode.</summary>
        public static XBPTreeBytes ReadOnly(string treefileName, string blockfileName)
        {
            var tree = BPTreeBytes.ReadOnly(treefileName, blockfileName);
            int prefixLength = tree.MaxKeyLength();
            return new XBPTreeBytes(tree, prefixLength);
        }

        /// <summary>Sets the bucket size limit (-1 for unlimited).</summary>
        public void LimitBucketSize(int limit)
        {
            BucketSizeLimit = limit;
        }

        /// <summary>
        /// Returns the longest leading substring of <paramref name="key"/> whose UTF-8 encoding
        /// fits within <paramref name="maxByteCount"/> bytes (never splits a char; empty key yields "").
        /// Starts from an optimistic char count and shrinks until the byte count fits.
        /// </summary>
        public virtual string PrefixForByteCount(string key, int maxByteCount)
        {
            if (key.Length < 1)
                return "";
            // A char encodes to at least 1 byte, so maxByteCount chars is an upper bound.
            int prefixCharCount = maxByteCount;
            if (prefixCharCount > key.Length)
                prefixCharCount = key.Length;
            var encoder = Encoding.UTF8.GetEncoder();
            var chars = key.ToCharArray(0, prefixCharCount);
            long length = encoder.GetByteCount(chars, 0, prefixCharCount, true);
            while (length > maxByteCount)
            {
                --prefixCharCount;
                length = encoder.GetByteCount(chars, 0, prefixCharCount, true);
            }
            return key.Substring(0, prefixCharCount);
        }

        /// <summary>
        /// Looks up the bucket holding <paramref name="key"/>. When <paramref name="keyIsPrefix"/>
        /// is true the key is used as the inner-tree key directly; otherwise its prefix is computed.
        /// <paramref name="prefix"/> is always assigned, even when no bucket is found.
        /// </summary>
        public bool FindBucketForPrefix(string key, out XBucket bucket, out string prefix, bool keyIsPrefix)
        {
            prefix = keyIsPrefix ? key : PrefixForByteCount(key, PrefixLength);
            byte[] bytes;
            if (!Tree.TryGetValue(prefix, out bytes))
            {
                bucket = null;
                return false;
            }
            bucket = new XBucket(this);
            bucket.Load(bytes);
            // A stored bucket must contain at least one key; an empty one indicates corruption.
            if (bucket.Count < 1)
                throw new BPTreeException("Empty bucket loaded");
            return true;
        }

        #region ITreeIndex Members

        // Key comparison is delegated to the inner tree (which owns the collation/culture).
        public override int Compare(string left, string right)
        {
            return Tree.Compare(left, right);
        }

        public override void Recover(bool correctErrors)
        {
            Tree.Recover(correctErrors);
        }

        /// <summary>
        /// Removes a key; drops the whole bucket entry when it becomes empty,
        /// otherwise rewrites the bucket payload. Throws <see cref="BPTreeKeyMissing"/> if absent.
        /// </summary>
        public override void RemoveKey(string key)
        {
            XBucket bucket;
            string prefix;
            bool found = FindBucketForPrefix(key, out bucket, out prefix, false);
            if (!found)
                throw new BPTreeKeyMissing("Key not found");
            bucket.Remove(key);
            if (bucket.Count < 1)
                Tree.RemoveKey(prefix);
            else
                Tree.Set(prefix, bucket.Dump());
        }

        /// <summary>Smallest key in the structure, or null when empty.</summary>
        public override string FirstKey()
        {
            string prefix = Tree.FirstKey();
            if (prefix == null)
                return null;
            XBucket bucket;
            string noPrefix;
            // prefix came from the inner tree, so pass keyIsPrefix: true.
            bool found = FindBucketForPrefix(prefix, out bucket, out noPrefix, true);
            if (!found)
                throw new BPTreeException("Internal tree gave bad first key");
            return bucket.FirstKey();
        }

        /// <summary>
        /// Next key strictly after <paramref name="currentKey"/>: first within the same bucket,
        /// then the first key of the following bucket; null at the end.
        /// </summary>
        public override string NextKey(string currentKey)
        {
            XBucket bucket;
            string prefix;
            bool found = FindBucketForPrefix(currentKey, out bucket, out prefix, false);
            if (found)
            {
                string result = bucket.NextKey(currentKey);
                if (result != null)
                    return result;
            }
            // otherwise look in the next bucket
            string nextPrefix = Tree.NextKey(prefix);
            if (nextPrefix == null)
                return null;
            var databytes = Tree[nextPrefix];
            bucket = new XBucket(this);
            bucket.Load(databytes);
            if (bucket.Count < 1)
                throw new BPTreeException("empty bucket loaded");
            return bucket.FirstKey();
        }

        public override bool ContainsKey(string key)
        {
            XBucket bucket;
            string prefix;
            bool found = FindBucketForPrefix(key, out bucket, out prefix, false);
            if (!found)
                return false;
            byte[] map;
            return bucket.TryGetValue(key, out map);
        }

        /// <summary>Non-throwing lookup; value is null when the key is absent.</summary>
        public override bool TryGetValue(string key, out byte[] value)
        {
            XBucket bucket;
            string prefix;
            if (FindBucketForPrefix(key, out bucket, out prefix, false))
            {
                byte[] map;
                if (bucket.TryGetValue(key, out map))
                {
                    value = map;
                    return true;
                }
            }
            value = null;
            return false;
        }

        /// <summary>
        /// Indexer. Get throws <see cref="BPTreeKeyMissing"/> for absent keys; set adds the key to
        /// its bucket (creating the bucket if needed) and rewrites the bucket payload.
        /// NOTE(review): set calls bucket.Add — presumably XBucket.Add replaces an existing key's
        /// value rather than duplicating it; confirm.
        /// </summary>
        public override byte[] this[string key]
        {
            get
            {
                XBucket bucket;
                string prefix;
                if (FindBucketForPrefix(key, out bucket, out prefix, false))
                {
                    byte[] map;
                    if (bucket.TryGetValue(key, out map))
                        return map;
                }
                throw new BPTreeKeyMissing("Key not found: " + key);
            }
            set
            {
                XBucket bucket;
                string prefix;
                if (!FindBucketForPrefix(key, out bucket, out prefix, false))
                    bucket = new XBucket(this);
                bucket.Add(key, value);
                Tree[prefix] = bucket.Dump();
            }
        }

        // Transaction / lifecycle operations are delegated to the inner tree unchanged.
        public override void Commit()
        {
            Tree.Commit();
        }

        public override void Abort()
        {
            Tree.Abort();
        }

        public override void SetFootprintLimit(int limit)
        {
            Tree.SetFootprintLimit(limit);
        }

        public override void Shutdown()
        {
            Tree.Shutdown();
        }

        #endregion
    }
}
using System; using System.Collections.Generic; using System.Linq; using Anemonis.JsonRpc.UnitTests.Resources; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace Anemonis.JsonRpc.UnitTests { [TestClass] public sealed class JsonRpcSerializerTestsV2Spec { #region Example T01: RPC call with positional parameters [TestMethod] public void DeserializeRequestDataT010() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.0_req.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); jrcr.AddRequestContract("subtract", new JsonRpcRequestContract(new[] { typeof(long), typeof(long) })); var jrd = jrs.DeserializeRequestData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; Assert.AreEqual(1L, jrm.Id); Assert.AreEqual("subtract", jrm.Method); Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm.ParametersType); CollectionAssert.AreEqual(new object[] { 42L, 23L }, jrm.ParametersByPosition?.ToArray()); } [TestMethod] public void SerializeRequestT010() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.0_req.json"); var jrs = new JsonRpcSerializer(); var jrm = new JsonRpcRequest(1L, "subtract", new object[] { 42L, 23L }); var jsonr = jrs.SerializeRequest(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } [TestMethod] public void DeserializeResponseDataT010() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.0_res.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); jrcr.AddResponseContract("subtract", new JsonRpcResponseContract(typeof(long))); jrcr.AddResponseBinding(1L, "subtract"); var jrd = jrs.DeserializeResponseData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; Assert.AreEqual(1L, jrm.Id); Assert.IsInstanceOfType(jrm.Result, typeof(long)); Assert.AreEqual(19L, jrm.Result); } [TestMethod] public void 
SerializeResponseT010() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.0_res.json"); var jrs = new JsonRpcSerializer(); var jrm = new JsonRpcResponse(1L, 19L); var jsonr = jrs.SerializeResponse(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } [TestMethod] public void DeserializeRequestDataT011() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.1_req.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); jrcr.AddRequestContract("subtract", new JsonRpcRequestContract(new[] { typeof(long), typeof(long) })); var jrd = jrs.DeserializeRequestData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; Assert.AreEqual(2L, jrm.Id); Assert.AreEqual("subtract", jrm.Method); Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm.ParametersType); CollectionAssert.AreEqual(new object[] { 23L, 42L }, jrm.ParametersByPosition?.ToArray()); } [TestMethod] public void SerializeRequestT011() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.1_req.json"); var jrs = new JsonRpcSerializer(); var jrm = new JsonRpcRequest(2L, "subtract", new object[] { 23L, 42L }); var jsonr = jrs.SerializeRequest(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } [TestMethod] public void DeserializeResponseDataT011() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_01.1_res.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); jrcr.AddResponseContract("subtract", new JsonRpcResponseContract(typeof(long))); jrcr.AddResponseBinding(2L, "subtract"); var jrd = jrs.DeserializeResponseData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; Assert.AreEqual(2L, jrm.Id); Assert.IsInstanceOfType(jrm.Result, typeof(long)); Assert.AreEqual(-19L, jrm.Result); } [TestMethod] public void SerializeResponseT011() { var jsont = 
EmbeddedResourceManager.GetString("Assets.v2_spec_01.1_res.json"); var jrs = new JsonRpcSerializer(); var jrm = new JsonRpcResponse(2L, -19L); var jsonr = jrs.SerializeResponse(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } #endregion #region Example T02: RPC call with named parameters [TestMethod] public void DeserializeRequestDataT020() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.0_req.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); var jrmp = new Dictionary<string, Type> { ["subtrahend"] = typeof(long), ["minuend"] = typeof(long) }; jrcr.AddRequestContract("subtract", new JsonRpcRequestContract(jrmp)); var jrd = jrs.DeserializeRequestData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; Assert.AreEqual(3L, jrm.Id); Assert.AreEqual("subtract", jrm.Method); Assert.AreEqual(JsonRpcParametersType.ByName, jrm.ParametersType); Assert.AreEqual(23L, jrm.ParametersByName["subtrahend"]); Assert.AreEqual(42L, jrm.ParametersByName["minuend"]); } [TestMethod] public void SerializeRequestT020() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.0_req.json"); var jrs = new JsonRpcSerializer(); var jrmp = new Dictionary<string, object> { ["subtrahend"] = 23L, ["minuend"] = 42L }; var jrm = new JsonRpcRequest(3L, "subtract", jrmp); var jsonr = jrs.SerializeRequest(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } [TestMethod] public void DeserializeResponseDataT020() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.0_res.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); jrcr.AddResponseContract("subtract", new JsonRpcResponseContract(typeof(long))); jrcr.AddResponseBinding(3L, "subtract"); var jrd = jrs.DeserializeResponseData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; 
Assert.AreEqual(3L, jrm.Id); Assert.IsInstanceOfType(jrm.Result, typeof(long)); Assert.AreEqual(19L, jrm.Result); } [TestMethod] public void SerializeResponseT020() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.0_res.json"); var jrs = new JsonRpcSerializer(); var jrm = new JsonRpcResponse(3L, 19L); var jsonr = jrs.SerializeResponse(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } [TestMethod] public void DeserializeRequestDataT021() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.1_req.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); var jrmp = new Dictionary<string, Type> { ["subtrahend"] = typeof(long), ["minuend"] = typeof(long) }; jrcr.AddRequestContract("subtract", new JsonRpcRequestContract(jrmp)); var jrd = jrs.DeserializeRequestData(jsont); Assert.IsFalse(jrd.IsBatch); var jrmi = jrd.Item; Assert.IsTrue(jrmi.IsValid); var jrm = jrmi.Message; Assert.AreEqual(4L, jrm.Id); Assert.AreEqual("subtract", jrm.Method); Assert.AreEqual(JsonRpcParametersType.ByName, jrm.ParametersType); Assert.AreEqual(23L, jrm.ParametersByName["subtrahend"]); Assert.AreEqual(42L, jrm.ParametersByName["minuend"]); } [TestMethod] public void SerializeRequestT021() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.1_req.json"); var jrs = new JsonRpcSerializer(); var jrmp = new Dictionary<string, object> { ["subtrahend"] = 23L, ["minuend"] = 42L }; var jrm = new JsonRpcRequest(4L, "subtract", jrmp); var jsonr = jrs.SerializeRequest(jrm); JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr); } [TestMethod] public void DeserializeResponseDataT021() { var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.1_res.json"); var jrcr = new JsonRpcContractResolver(); var jrs = new JsonRpcSerializer(jrcr); jrcr.AddResponseContract("subtract", new JsonRpcResponseContract(typeof(long))); jrcr.AddResponseBinding(4L, "subtract"); var jrd = jrs.DeserializeResponseData(jsont); 
// NOTE(review): the statements below are the tail of a deserialize-response test whose
// opening lines are above this chunk; they verify a single (non-batch) response with
// id 4 and a long result of 19.
            Assert.IsFalse(jrd.IsBatch);
            var jrmi = jrd.Item;
            Assert.IsTrue(jrmi.IsValid);
            var jrm = jrmi.Message;
            Assert.AreEqual(4L, jrm.Id);
            Assert.IsInstanceOfType(jrm.Result, typeof(long));
            Assert.AreEqual(19L, jrm.Result);
        }

        // Serializes a success response (id 4, result 19) and compares it against the spec sample.
        [TestMethod]
        public void SerializeResponseT021()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_02.1_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse(4L, 19L);
            var jsonr = jrs.SerializeResponse(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T03: RPC notification

        // A notification has no id (default) and positional parameters.
        [TestMethod]
        public void DeserializeRequestDataT030()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_03.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);
            var jrrcp = new[] { typeof(long), typeof(long), typeof(long), typeof(long), typeof(long) };

            jrcr.AddRequestContract("update", new JsonRpcRequestContract(jrrcp));

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual(default, jrm.Id);
            Assert.AreEqual("update", jrm.Method);
            Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm.ParametersType);
            CollectionAssert.AreEqual(new object[] { 1L, 2L, 3L, 4L, 5L }, jrm.ParametersByPosition?.ToArray());
        }

        // Serializes the "update" notification and compares with the spec sample.
        [TestMethod]
        public void SerializeRequestT030()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_03.0_req.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcRequest(default, "update", new object[] { 1L, 2L, 3L, 4L, 5L });
            var jsonr = jrs.SerializeRequest(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        // Notification without parameters ("foobar").
        [TestMethod]
        public void DeserializeRequestDataT031()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_03.1_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            jrcr.AddRequestContract("foobar", new JsonRpcRequestContract());

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual(default, jrm.Id);
            Assert.AreEqual("foobar", jrm.Method);
            Assert.AreEqual(JsonRpcParametersType.None, jrm.ParametersType);
        }

        [TestMethod]
        public void SerializeRequestT031()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_03.1_req.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcRequest(default, "foobar");
            var jsonr = jrs.SerializeRequest(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T04: RPC call of non-existent method

        // The request itself is well-formed; only the server-side method is missing.
        [TestMethod]
        public void DeserializeRequestDataT040()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_04.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            jrcr.AddRequestContract("foobar", new JsonRpcRequestContract());

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual("1", jrm.Id);
            Assert.AreEqual("foobar", jrm.Method);
            Assert.AreEqual(JsonRpcParametersType.None, jrm.ParametersType);
        }

        [TestMethod]
        public void SerializeRequestT040()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_04.0_req.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcRequest("1", "foobar");
            var jsonr = jrs.SerializeRequest(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        // The response carries a "method not found" error object.
        [TestMethod]
        public void DeserializeResponseDataT040()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_04.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual("1", jrm.Id);
            Assert.IsFalse(jrm.Success);

            var jre = jrm.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidMethod, jre.Code);
            Assert.IsNotNull(jre.Message);
            Assert.AreEqual("Method not found", jre.Message);
            Assert.IsFalse(jre.HasData);
        }

        [TestMethod]
        public void SerializeResponseT040()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_04.0_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse("1", new JsonRpcError(JsonRpcErrorCode.InvalidMethod, "Method not found"));
            var jsonr = jrs.SerializeResponse(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T05: RPC call with invalid JSON

        // Malformed JSON in a request must surface as an exception, not a message item.
        [TestMethod]
        public void DeserializeRequestDataT050()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_05.0_req.json");
            var jrs = new JsonRpcSerializer();

            Assert.ThrowsException<InvalidOperationException>(() =>
                jrs.DeserializeRequestData(jsont));
        }

        // The matching error response has a null id and a "parse error" code.
        [TestMethod]
        public void DeserializeResponseDataT050()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_05.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual(default, jrm.Id);
            Assert.IsFalse(jrm.Success);

            var jre = jrm.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidFormat, jre.Code);
            Assert.IsFalse(jre.HasData);
        }

        [TestMethod]
        public void SerializeResponseT050()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_05.0_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidFormat, "Parse error"));
            var jsonr = jrs.SerializeResponse(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T06: RPC call with invalid request object

        // Structurally invalid request: the item is returned as invalid with an exception attached.
        [TestMethod]
        public void DeserializeRequestDataT060()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_06.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            jrcr.AddRequestContract("subtract", new JsonRpcRequestContract());

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsFalse(jrmi.IsValid);
            Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jrmi.Exception.ErrorCode);
        }

        [TestMethod]
        public void DeserializeResponseDataT060()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_06.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual(default, jrm.Id);
            Assert.IsFalse(jrm.Success);

            var jre = jrm.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jre.Code);
            Assert.IsFalse(jre.HasData);
        }

        [TestMethod]
        public void SerializeResponseT060()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_06.0_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request"));
            var jsonr = jrs.SerializeResponse(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T07: RPC call batch, invalid JSON

        [TestMethod]
        public void DeserializeRequestDataT070()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_07.0_req.json");
            var jrs = new JsonRpcSerializer();

            Assert.ThrowsException<InvalidOperationException>(() =>
                jrs.DeserializeRequestData(jsont));
        }

        [TestMethod]
        public void DeserializeResponseDataT070()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_07.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual(default, jrm.Id);
            Assert.IsFalse(jrm.Success);

            var jre = jrm.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidFormat, jre.Code);
            Assert.IsFalse(jre.HasData);
        }

        [TestMethod]
        public void SerializeResponseT070()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_07.0_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidFormat, "Parse error"));
            var jsonr = jrs.SerializeResponse(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T08: RPC call with an empty array

        // An empty batch array is rejected outright.
        [TestMethod]
        public void DeserializeRequestDataT080()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_08.0_req.json");
            var jrs = new JsonRpcSerializer();

            Assert.ThrowsException<InvalidOperationException>(() =>
                jrs.DeserializeRequestData(jsont));
        }

        [TestMethod]
        public void DeserializeResponseDataT080()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_08.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsFalse(jrd.IsBatch);

            var jrmi = jrd.Item;

            Assert.IsTrue(jrmi.IsValid);

            var jrm = jrmi.Message;

            Assert.AreEqual(default, jrm.Id);
            Assert.IsFalse(jrm.Success);

            var jre = jrm.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jre.Code);
            Assert.IsFalse(jre.HasData);
        }

        [TestMethod]
        public void SerializeResponseT080()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_08.0_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request"));
            var jsonr = jrs.SerializeResponse(jrm);
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T09: RPC call with an invalid batch (but not empty)

        // A one-element batch whose single element is invalid.
        [TestMethod]
        public void DeserializeRequestDataT090()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_09.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(1, jrd.Items.Count);

            var jrmi0 = jrd.Items[0];

            Assert.IsFalse(jrmi0.IsValid);
            Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jrmi0.Exception.ErrorCode);
        }

        [TestMethod]
        public void DeserializeResponseDataT090()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_09.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(1, jrd.Items.Count);

            var jrmi0 = jrd.Items[0];

            Assert.IsTrue(jrmi0.IsValid);

            var jrm0 = jrmi0.Message;

            Assert.AreEqual(default, jrm0.Id);
            Assert.IsFalse(jrm0.Success);

            var jre0 = jrm0.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jre0.Code);
            Assert.IsFalse(jre0.HasData);
        }

        // Note: serialized via SerializeResponses (batch form), unlike the single-response tests above.
        [TestMethod]
        public void SerializeResponseT090()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_09.0_res.json");
            var jrs = new JsonRpcSerializer();
            var jrm = new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request"));
            var jsonr = jrs.SerializeResponses(new[] { jrm });
            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T10: RPC call with invalid batch

        // Three invalid elements in one batch: all items are reported invalid.
        [TestMethod]
        public void DeserializeRequestDataT100()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_10.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(3, jrd.Items.Count);

            foreach (var jrmi in jrd.Items)
            {
                Assert.IsFalse(jrmi.IsValid);
            }
        }

        [TestMethod]
        public void DeserializeResponseDataT100()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_10.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(3, jrd.Items.Count);

            foreach (var jrmi in jrd.Items)
            {
                Assert.IsTrue(jrmi.IsValid);

                var jrm = jrmi.Message;

                Assert.AreEqual(default, jrm.Id);
                Assert.IsFalse(jrm.Success);

                var jre = jrm.Error;

                Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jre.Code);
                Assert.IsFalse(jre.HasData);
            }
        }

        [TestMethod]
        public void SerializeResponseT100()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_10.0_res.json");
            var jrs = new JsonRpcSerializer();

            var jrms = new[]
            {
                new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request")),
                new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request")),
                new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request"))
            };

            var result = jrs.SerializeResponses(jrms);

            JsonRpcSerializerTests.CompareJsonStrings(jsont, result);
        }

        #endregion

        #region Example T11: RPC call batch

        // Mixed batch: valid calls, a notification, two invalid elements, and a call with no params.
        [TestMethod]
        public void DeserializeRequestDataT110()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_11.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            jrcr.AddRequestContract("sum", new JsonRpcRequestContract(new[] { typeof(long), typeof(long), typeof(long) }));
            jrcr.AddRequestContract("notify_hello", new JsonRpcRequestContract(new[] { typeof(long) }));
            jrcr.AddRequestContract("subtract", new JsonRpcRequestContract(new[] { typeof(long), typeof(long) }));
            jrcr.AddRequestContract("get_data", new JsonRpcRequestContract());

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(6, jrd.Items.Count);

            var jrmi0 = jrd.Items[0];

            Assert.IsTrue(jrmi0.IsValid);

            var jrm0 = jrmi0.Message;

            Assert.AreEqual("1", jrm0.Id);
            Assert.AreEqual("sum", jrm0.Method);
            Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm0.ParametersType);
            CollectionAssert.AreEqual(new object[] { 1L, 2L, 4L }, jrm0.ParametersByPosition?.ToArray());

            var jrmi1 = jrd.Items[1];

            Assert.IsTrue(jrmi1.IsValid);

            var jrm1 = jrmi1.Message;

            Assert.AreEqual(default, jrm1.Id);
            Assert.AreEqual("notify_hello", jrm1.Method);
            Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm1.ParametersType);
            CollectionAssert.AreEqual(new object[] { 7L }, jrm1.ParametersByPosition?.ToArray());

            var jrmi2 = jrd.Items[2];

            Assert.IsTrue(jrmi2.IsValid);

            var jrm2 = jrmi2.Message;

            Assert.AreEqual("2", jrm2.Id);
            Assert.AreEqual("subtract", jrm2.Method);
            Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm2.ParametersType);
            CollectionAssert.AreEqual(new object[] { 42L, 23L }, jrm2.ParametersByPosition?.ToArray());

            var jrmi3 = jrd.Items[3];

            Assert.IsFalse(jrmi3.IsValid);

            var jrmi4 = jrd.Items[4];

            Assert.IsFalse(jrmi4.IsValid);

            var jrmi5 = jrd.Items[5];

            Assert.IsTrue(jrmi5.IsValid);

            var jrm5 = jrmi5.Message;

            Assert.AreEqual("9", jrm5.Id);
            Assert.AreEqual("get_data", jrm5.Method);
            Assert.AreEqual(JsonRpcParametersType.None, jrm5.ParametersType);
        }

        // Response batch: bindings map each id back to the method that produced it.
        [TestMethod]
        public void DeserializeResponseDataT110()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_11.0_res.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            jrcr.AddResponseContract("sum", new JsonRpcResponseContract(typeof(long)));
            jrcr.AddResponseContract("subtract", new JsonRpcResponseContract(typeof(long)));
            jrcr.AddResponseContract("get_data", new JsonRpcResponseContract(typeof(object[])));
            jrcr.AddResponseBinding("1", "sum");
            jrcr.AddResponseBinding("2", "subtract");
            jrcr.AddResponseBinding("5", "foo.get");
            jrcr.AddResponseBinding("9", "get_data");

            var jrd = jrs.DeserializeResponseData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(5, jrd.Items.Count);

            var jrmi0 = jrd.Items[0];

            Assert.IsTrue(jrmi0.IsValid);

            var jrm0 = jrmi0.Message;

            Assert.AreEqual("1", jrm0.Id);
            Assert.IsInstanceOfType(jrm0.Result, typeof(long));
            Assert.AreEqual(7L, jrm0.Result);

            var jrmi1 = jrd.Items[1];

            Assert.IsTrue(jrmi1.IsValid);

            var jrm1 = jrmi1.Message;

            Assert.AreEqual("2", jrm1.Id);
            Assert.IsInstanceOfType(jrm1.Result, typeof(long));
            Assert.AreEqual(19L, jrm1.Result);

            var jrmi2 = jrd.Items[2];

            Assert.IsTrue(jrmi2.IsValid);

            var jrm2 = jrmi2.Message;

            Assert.AreEqual(default, jrm2.Id);
            Assert.IsFalse(jrm2.Success);

            var jre2 = jrm2.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidMessage, jre2.Code);
            Assert.IsFalse(jre2.HasData);

            var jrmi3 = jrd.Items[3];

            Assert.IsTrue(jrmi3.IsValid);

            var jrm3 = jrmi3.Message;

            Assert.AreEqual("5", jrm3.Id);
            Assert.IsFalse(jrm3.Success);

            var jre3 = jrm3.Error;

            Assert.AreEqual(JsonRpcErrorCode.InvalidMethod, jre3.Code);
            Assert.IsFalse(jre3.HasData);

            var jrmi4 = jrd.Items[4];

            Assert.IsTrue(jrmi4.IsValid);

            var jrm4 = jrmi4.Message;

            Assert.AreEqual("9", jrm4.Id);
            Assert.IsInstanceOfType(jrm4.Result, typeof(object[]));
            CollectionAssert.AreEqual(new object[] { "hello", 5L }, (object[])jrm4.Result);
        }

        [TestMethod]
        public void SerializeResponseT110()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_11.0_res.json");
            var jrs = new JsonRpcSerializer();

            var jrms = new[]
            {
                new JsonRpcResponse("1", 7L),
                new JsonRpcResponse("2", 19L),
                new JsonRpcResponse(default, new JsonRpcError(JsonRpcErrorCode.InvalidMessage, "Invalid Request")),
                new JsonRpcResponse("5", new JsonRpcError(JsonRpcErrorCode.InvalidMethod, "Method not found")),
                new JsonRpcResponse("9", new object[] { "hello", 5L })
            };

            var jsonr = jrs.SerializeResponses(jrms);

            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion

        #region Example T12: RPC call batch (all notifications)

        // A batch of notifications only: every item has the default (absent) id.
        [TestMethod]
        public void DeserializeRequestDataT120()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_12.0_req.json");
            var jrcr = new JsonRpcContractResolver();
            var jrs = new JsonRpcSerializer(jrcr);

            jrcr.AddRequestContract("notify_sum", new JsonRpcRequestContract(new[] { typeof(long), typeof(long), typeof(long) }));
            jrcr.AddRequestContract("notify_hello", new JsonRpcRequestContract(new[] { typeof(long) }));

            var jrd = jrs.DeserializeRequestData(jsont);

            Assert.IsTrue(jrd.IsBatch);
            Assert.AreEqual(2, jrd.Items.Count);

            var jrmi0 = jrd.Items[0];

            Assert.IsTrue(jrmi0.IsValid);

            var jrm0 = jrmi0.Message;

            Assert.AreEqual(default, jrm0.Id);
            Assert.AreEqual("notify_sum", jrm0.Method);
            Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm0.ParametersType);
            CollectionAssert.AreEqual(new object[] { 1L, 2L, 4L }, jrm0.ParametersByPosition?.ToArray());

            var jrmi1 = jrd.Items[1];

            Assert.IsTrue(jrmi1.IsValid);

            var jrm1 = jrmi1.Message;

            Assert.AreEqual(default, jrm1.Id);
            Assert.AreEqual("notify_hello", jrm1.Method);
            Assert.AreEqual(JsonRpcParametersType.ByPosition, jrm1.ParametersType);
            CollectionAssert.AreEqual(new object[] { 7L }, jrm1.ParametersByPosition?.ToArray());
        }

        [TestMethod]
        public void SerializeRequestT120()
        {
            var jsont = EmbeddedResourceManager.GetString("Assets.v2_spec_12.0_req.json");
            var jrs = new JsonRpcSerializer();

            var jrms = new[]
            {
                new JsonRpcRequest(default, "notify_sum", new object[] { 1L, 2L, 4L }),
                new JsonRpcRequest(default, "notify_hello", new object[] { 7L })
            };

            var jsonr = jrs.SerializeRequests(jrms);

            JsonRpcSerializerTests.CompareJsonStrings(jsont, jsonr);
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Diagnostics;
using System.Runtime.InteropServices;

namespace System.Security
{
    // TODO: Issue #1387.
    // This implementation lacks encryption. We need to investigate adding such encryption support, at which point
    // we could potentially remove the current implementation's reliance on mlock and mprotect (mlock places additional
    // constraints on non-privileged processes due to RLIMIT_MEMLOCK), neither of which provides a guarantee that the
    // data-at-rest in memory can't be accessed; they just make it more difficult. If we don't encrypt, at least on Linux
    // we should consider also using madvise to set MADV_DONTDUMP and MADV_DONTFORK for the allocated pages. And we
    // should ensure the documentation gets updated appropriately.

    /// <summary>
    /// Unix portion of SecureString: character data lives in page-aligned, mlock'ed memory
    /// that is kept PROT_NONE except during explicit Unprotect/Protect windows.
    /// </summary>
    public sealed partial class SecureString
    {
        // Page-protected native buffer holding the (unencrypted) UTF-16 characters.
        private ProtectedBuffer _buffer;

        /// <summary>Copy constructor: duplicates another SecureString's contents into a fresh buffer.</summary>
        [System.Security.SecurityCritical] // auto-generated
        internal unsafe SecureString(SecureString str)
        {
            // Allocate enough space to store the provided string
            EnsureCapacity(str._decryptedLength);
            _decryptedLength = str._decryptedLength;

            // Copy the string into the newly allocated space.
            // The source buffer must be temporarily unprotected to be readable.
            if (_decryptedLength > 0)
                using (str._buffer.Unprotect())
                    ProtectedBuffer.Copy(str._buffer, _buffer, (ulong)(str._decryptedLength * sizeof(char)));

            // Protect the buffer
            _buffer.Protect();
        }

        /// <summary>Initializes the buffer from a raw char pointer of the given length.</summary>
        [System.Security.SecurityCritical] // auto-generated
        private unsafe void InitializeSecureString(char* value, int length)
        {
            // Allocate enough space to store the provided string
            EnsureCapacity(length);
            _decryptedLength = length;
            if (length == 0)
                return;

            // Copy the string into the newly allocated space
            byte* ptr = null;
            try
            {
                _buffer.AcquirePointer(ref ptr);
                Buffer.MemoryCopy(value, ptr, _buffer.ByteLength, (ulong)(length * sizeof(char)));
            }
            finally
            {
                if (ptr != null)
                    _buffer.ReleasePointer();
            }

            // Protect the buffer
            _buffer.Protect();
        }

        /// <summary>Releases the native buffer; subsequent use throws ObjectDisposedException.</summary>
        [System.Security.SecuritySafeCritical] // auto-generated
        private void DisposeCore()
        {
            if (_buffer != null && !_buffer.IsInvalid)
            {
                _buffer.Dispose();
                _buffer = null;
            }
        }

        [System.Security.SecurityCritical] // auto-generated
        private void EnsureNotDisposed()
        {
            if (_buffer == null)
                throw new ObjectDisposedException(GetType().Name);
        }

        // clears the current contents. Only available if writable
        [System.Security.SecuritySafeCritical] // auto-generated
        private void ClearCore()
        {
            _decryptedLength = 0;
            using (_buffer.Unprotect())
                _buffer.Clear();
        }

        [System.Security.SecuritySafeCritical] // auto-generated
        private unsafe void AppendCharCore(char c)
        {
            // Make sure we have enough space for the new character,
            // then write it at the end.
            EnsureCapacity(_decryptedLength + 1);
            using (_buffer.Unprotect())
                _buffer.Write((ulong)(_decryptedLength * sizeof(char)), c);
            _decryptedLength++;
        }

        [System.Security.SecuritySafeCritical] // auto-generated
        private unsafe void InsertAtCore(int index, char c)
        {
            // Make sure we have enough space for the new character,
            // then shift all of the characters above it and insert it.
            EnsureCapacity(_decryptedLength + 1);
            byte* ptr = null;
            try
            {
                _buffer.AcquirePointer(ref ptr);
                char* charPtr = (char*)ptr;
                using (_buffer.Unprotect())
                {
                    // Shift right-to-left so no character is overwritten before it is moved.
                    for (int i = _decryptedLength; i > index; i--)
                        charPtr[i] = charPtr[i - 1];
                    charPtr[index] = c;
                }
                ++_decryptedLength;
            }
            finally
            {
                if (ptr != null)
                    _buffer.ReleasePointer();
            }
        }

        [System.Security.SecuritySafeCritical] // auto-generated
        private unsafe void RemoveAtCore(int index)
        {
            // Shift down all values above the specified index,
            // then null out the empty space at the end.
            byte* ptr = null;
            try
            {
                _buffer.AcquirePointer(ref ptr);
                char* charPtr = (char*)ptr;
                using (_buffer.Unprotect())
                {
                    for (int i = index; i < _decryptedLength - 1; i++)
                        charPtr[i] = charPtr[i + 1];
                    // Zero the vacated slot so the removed character does not linger in memory.
                    charPtr[--_decryptedLength] = (char)0;
                }
            }
            finally
            {
                if (ptr != null)
                    _buffer.ReleasePointer();
            }
        }

        [System.Security.SecuritySafeCritical] // auto-generated
        private void SetAtCore(int index, char c)
        {
            // Overwrite the character at the specified index
            using (_buffer.Unprotect())
                _buffer.Write((ulong)(index * sizeof(char)), c);
        }

        /// <summary>
        /// Copies the contents into newly allocated CoTaskMem as a null-terminated Unicode string.
        /// The caller owns (and must free) the returned pointer.
        /// </summary>
        [System.Security.SecurityCritical] // auto-generated
        internal unsafe IntPtr ToUniStrCore()
        {
            int length = _decryptedLength;
            byte* bufferPtr = null;
            IntPtr stringPtr = IntPtr.Zero, result = IntPtr.Zero;
            try
            {
                // Allocate space for the string to be returned, including space for a null terminator
                stringPtr = Marshal.AllocCoTaskMem((length + 1) * sizeof(char));

                _buffer.AcquirePointer(ref bufferPtr);

                // Copy all of our data into it
                using (_buffer.Unprotect())
                    Buffer.MemoryCopy(
                        source: bufferPtr,
                        destination: (byte*)stringPtr.ToPointer(),
                        destinationSizeInBytes: ((length + 1) * sizeof(char)),
                        sourceBytesToCopy: length * sizeof(char));

                // Add the null termination
                *(length + (char*)stringPtr.ToPointer()) = '\0';

                // Finally store the string pointer into our result. We maintain
                // a separate result variable to make clean up in the finally easier.
                result = stringPtr;
            }
            finally
            {
                // If there was a failure, such that result isn't initialized,
                // release the string if we had one. The copied data is zeroed first
                // so the plaintext never outlives this call on failure.
                if (stringPtr != IntPtr.Zero && result == IntPtr.Zero)
                {
                    ProtectedBuffer.ZeroMemory((byte*)stringPtr, (ulong)(length * sizeof(char)));
                    Marshal.FreeCoTaskMem(stringPtr);
                }

                if (bufferPtr != null)
                    _buffer.ReleasePointer();
            }
            return result;
        }

        // -----------------------------
        // ---- PAL layer ends here ----
        // -----------------------------

        /// <summary>Grows the buffer (page-rounded) if needed; existing contents are preserved.</summary>
        private void EnsureCapacity(int capacity)
        {
            // Make sure the requested capacity doesn't exceed SecureString's defined limit
            if (capacity > MaxLength)
                throw new ArgumentOutOfRangeException("capacity", SR.ArgumentOutOfRange_Capacity);

            // If we already have enough space allocated, we're done
            if (_buffer != null && (capacity * sizeof(char)) <= (int)_buffer.ByteLength)
                return;

            // We need more space, so allocate a new buffer, copy all our data into it,
            // and then swap the new for the old.
            ProtectedBuffer newBuffer = ProtectedBuffer.Allocate(capacity * sizeof(char));
            if (_buffer != null)
            {
                using (_buffer.Unprotect())
                    ProtectedBuffer.Copy(_buffer, newBuffer, _buffer.ByteLength);
                newBuffer.Protect();
                _buffer.Dispose();
            }
            _buffer = newBuffer;
        }

        /// <summary>SafeBuffer for managing memory meant to be kept confidential.</summary>
        private sealed class ProtectedBuffer : SafeBuffer
        {
            // System page size; allocations and protection changes operate on whole pages.
            private static readonly int s_pageSize = Interop.libc.sysconf(Interop.libc.SysConfNames._SC_PAGESIZE);

            internal ProtectedBuffer() : base(true) { }

            /// <summary>mmap's and mlock's a page-rounded anonymous region of at least <paramref name="bytes"/> bytes.</summary>
            internal static ProtectedBuffer Allocate(int bytes)
            {
                Debug.Assert(bytes >= 0);

                // Round the number of bytes up to the next page size boundary. mmap
                // is going to allocate pages, anyway, and we lock/protect entire pages,
                // so we might as well benefit from being able to use all of that space,
                // rather than allocating it and having it be unusable. As a SecureString
                // grows, this will significantly help in avoiding unnecessary recreations
                // of the buffer.
                Debug.Assert(s_pageSize > 0);
                bytes = RoundUpToPageSize(bytes);
                Debug.Assert(bytes % s_pageSize == 0);

                ProtectedBuffer buffer = new ProtectedBuffer();
                IntPtr ptr = IntPtr.Zero;
                try
                {
                    // Allocate the page(s) for the buffer.
                    // NOTE(review): POSIX mmap signals failure with MAP_FAILED ((void*)-1),
                    // not NULL — confirm the Interop wrapper normalizes failure to zero.
                    ptr = Interop.libc.mmap(
                        IntPtr.Zero, (IntPtr)bytes,
                        Interop.libc.MemoryMappedProtections.PROT_READ | Interop.libc.MemoryMappedProtections.PROT_WRITE,
                        Interop.libc.MemoryMappedFlags.MAP_ANONYMOUS | Interop.libc.MemoryMappedFlags.MAP_PRIVATE,
                        0, 0);
                    if (ptr == IntPtr.Zero)
                        throw CreateExceptionFromErrno();

                    // Lock the pages into memory to minimize the chances that the pages get
                    // swapped out, making the contents available on disk.
                    if (Interop.libc.mlock(ptr, (IntPtr)bytes) != 0)
                        throw CreateExceptionFromErrno();
                }
                catch
                {
                    // Something failed; release the allocation
                    if (ptr != IntPtr.Zero)
                        Interop.libc.munmap(ptr, (IntPtr)bytes); // ignore any errors
                    throw;
                }

                // The memory was allocated; initialize the buffer with it.
                buffer.SetHandle(ptr);
                buffer.Initialize((ulong)bytes);
                return buffer;
            }

            internal void Protect()
            {
                // Make the pages unreadable/writable; attempts to read/write this memory will result in seg faults.
                ChangeProtection(Interop.libc.MemoryMappedProtections.PROT_NONE);
            }

            internal ProtectOnDispose Unprotect()
            {
                // Make the pages readable/writable; attempts to read/write this memory will succeed.
                // Then return a disposable that will re-protect the memory when done with it.
                ChangeProtection(Interop.libc.MemoryMappedProtections.PROT_READ | Interop.libc.MemoryMappedProtections.PROT_WRITE);
                return new ProtectOnDispose(this);
            }

            /// <summary>Disposable token that re-protects the buffer when the using-scope ends.</summary>
            internal struct ProtectOnDispose : IDisposable
            {
                private readonly ProtectedBuffer _buffer;

                internal ProtectOnDispose(ProtectedBuffer buffer)
                {
                    Debug.Assert(buffer != null);
                    _buffer = buffer;
                }

                public void Dispose()
                {
                    _buffer.Protect();
                }
            }

            private unsafe void ChangeProtection(Interop.libc.MemoryMappedProtections prots)
            {
                byte* ptr = null;
                try
                {
                    AcquirePointer(ref ptr);
                    if (Interop.libc.mprotect((IntPtr)ptr, (IntPtr)ByteLength, prots) != 0)
                        throw CreateExceptionFromErrno();
                }
                finally
                {
                    if (ptr != null)
                        ReleasePointer();
                }
            }

            internal unsafe void Clear()
            {
                byte* ptr = null;
                try
                {
                    AcquirePointer(ref ptr);
                    ZeroMemory(ptr, ByteLength);
                }
                finally
                {
                    if (ptr != null)
                        ReleasePointer();
                }
            }

            /// <summary>Copies bytes between two protected buffers; both must currently be unprotected by the caller.</summary>
            internal static unsafe void Copy(ProtectedBuffer source, ProtectedBuffer destination, ulong bytesLength)
            {
                if (bytesLength == 0)
                    return;

                byte* srcPtr = null, dstPtr = null;
                try
                {
                    source.AcquirePointer(ref srcPtr);
                    destination.AcquirePointer(ref dstPtr);
                    Buffer.MemoryCopy(srcPtr, dstPtr, destination.ByteLength, bytesLength);
                }
                finally
                {
                    if (dstPtr != null)
                        destination.ReleasePointer();
                    if (srcPtr != null)
                        source.ReleasePointer();
                }
            }

            protected override unsafe bool ReleaseHandle()
            {
                bool success = true;
                IntPtr h = handle;
                if (h != IntPtr.Zero)
                {
                    IntPtr len = (IntPtr)ByteLength;

                    // The pages may be PROT_NONE at this point; make them writable so we can zero them.
                    success &= Interop.libc.mprotect(h, len, Interop.libc.MemoryMappedProtections.PROT_READ | Interop.libc.MemoryMappedProtections.PROT_WRITE) == 0;
                    if (success)
                    {
                        // Scrub contents before unmapping so the plaintext does not remain in freed pages.
                        ZeroMemory((byte*)h, ByteLength);
                        success &= (Interop.libc.munlock(h, len) == 0);
                    }
                    success &= (Interop.libc.munmap(h, len) == 0);
                }
                return success;
            }

            // Byte-by-byte zeroing; kept simple so the write is performed as written.
            internal static unsafe void ZeroMemory(byte* ptr, ulong len)
            {
                for (ulong i = 0; i < len; i++)
                    *ptr++ = 0;
            }

            private static Exception CreateExceptionFromErrno()
            {
                Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
                // ENOMEM/EPERM typically indicate resource limits (e.g. RLIMIT_MEMLOCK) — map to OOM.
                return (errorInfo.Error == Interop.Error.ENOMEM || errorInfo.Error == Interop.Error.EPERM) ?
                    (Exception)new OutOfMemoryException(SR.OutOfMemory_MemoryResourceLimits) :
                    (Exception)new InvalidOperationException(errorInfo.GetErrorMessage());
            }

            // Rounds up to a whole number of pages; zero bytes still gets one page.
            private static int RoundUpToPageSize(int bytes)
            {
                return bytes > 0 ?
                    (bytes + (s_pageSize - 1)) & ~(s_pageSize - 1) :
                    s_pageSize;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Net;
using System.Net.Sockets;
using System.Security.Cryptography;
using System.Text;
using Orleans.Core.Abstractions.Internal;

namespace Orleans.Runtime
{
    /// <summary>
    /// Data class encapsulating the details of silo addresses.
    /// </summary>
    [Serializable]
    [DebuggerDisplay("SiloAddress {ToString()}")]
    public class SiloAddress : IEquatable<SiloAddress>, IComparable<SiloAddress>, IComparable
    {
        internal static readonly int SizeBytes = 24; // 16 for the address, 4 for the port, 4 for the generation

        /// <summary> Special constant value to indicate an empty SiloAddress. </summary>
        public static SiloAddress Zero { get; private set; }

        private const int INTERN_CACHE_INITIAL_SIZE = InternerConstants.SIZE_MEDIUM;
        private static readonly TimeSpan internCacheCleanupInterval = TimeSpan.Zero;

        // Lazily computed consistent hash (see GetConsistentHashCode).
        // NOTE(review): this lazy caching is not synchronized — benign if concurrent
        // callers compute the same value, but confirm that's the intent.
        private int hashCode = 0;
        private bool hashCodeSet = false;

        // Cached uniform hash codes; excluded from serialization and recomputed on demand.
        [NonSerialized]
        private List<uint> uniformHashCache;

        public IPEndPoint Endpoint { get; private set; }
        public int Generation { get; private set; }

        private const char SEPARATOR = '@';

        // Reference point for generation numbers (seconds since this instant).
        private static readonly DateTime epoch = new DateTime(2010, 1, 1, 0, 0, 0, DateTimeKind.Utc);

        private static readonly Interner<SiloAddress, SiloAddress> siloAddressInterningCache;

        static SiloAddress()
        {
            siloAddressInterningCache = new Interner<SiloAddress, SiloAddress>(INTERN_CACHE_INITIAL_SIZE, internCacheCleanupInterval);
            var sa = new SiloAddress(new IPEndPoint(0, 0), 0);
            Zero = siloAddressInterningCache.Intern(sa, sa);
        }

        /// <summary>
        /// Factory for creating new SiloAddresses with specified IP endpoint address and silo generation number.
        /// </summary>
        /// <param name="ep">IP endpoint address of the silo.</param>
        /// <param name="gen">Generation number of the silo.</param>
        /// <returns>SiloAddress object initialized with specified address and silo generation.</returns>
        public static SiloAddress New(IPEndPoint ep, int gen)
        {
            var sa = new SiloAddress(ep, gen);
            return siloAddressInterningCache.Intern(sa, sa);
        }

        private SiloAddress(IPEndPoint endpoint, int gen)
        {
            Endpoint = endpoint;
            Generation = gen;
        }

        // Negative generations are reserved for clients.
        public bool IsClient { get { return Generation < 0; } }

        /// <summary> Allocate a new silo generation number. </summary>
        /// <returns>A new silo generation number.</returns>
        public static int AllocateNewGeneration()
        {
            long elapsed = (DateTime.UtcNow.Ticks - epoch.Ticks) / TimeSpan.TicksPerSecond;
            return unchecked((int)elapsed); // Unchecked to truncate any bits beyond the lower 32
        }

        /// <summary>
        /// Return this SiloAddress in a standard string form, suitable for later use with the <c>FromParsableString</c> method.
        /// </summary>
        /// <returns>SiloAddress in a standard string format.</returns>
        public string ToParsableString()
        {
            // This must be the "inverse" of FromParsableString, and must be the same across all silos in a deployment.
            // Basically, this should never change unless the data content of SiloAddress changes
            return String.Format("{0}:{1}@{2}", Endpoint.Address, Endpoint.Port, Generation);
        }

        /// <summary>
        /// Create a new SiloAddress object by parsing string in a standard form returned from <c>ToParsableString</c> method.
        /// </summary>
        /// <param name="addr">String containing the SiloAddress info to be parsed.</param>
        /// <returns>New SiloAddress object created from the input data.</returns>
        public static SiloAddress FromParsableString(string addr)
        {
            // This must be the "inverse" of ToParsableString, and must be the same across all silos in a deployment.
            // Basically, this should never change unless the data content of SiloAddress changes

            // First is the IPEndpoint; then '@'; then the generation
            int atSign = addr.LastIndexOf(SEPARATOR);
            if (atSign < 0)
            {
                throw new FormatException("Invalid string SiloAddress: " + addr);
            }
            var epString = addr.Substring(0, atSign);
            var genString = addr.Substring(atSign + 1);

            // IPEndpoint is the host, then ':', then the port.
            // LastIndexOf is used so IPv6 host strings (which contain colons) parse correctly.
            int lastColon = epString.LastIndexOf(':');
            if (lastColon < 0) throw new FormatException("Invalid string SiloAddress: " + addr);

            var hostString = epString.Substring(0, lastColon);
            var portString = epString.Substring(lastColon + 1);
            var host = IPAddress.Parse(hostString);
            int port = Int32.Parse(portString);
            return New(new IPEndPoint(host, port), Int32.Parse(genString));
        }

        /// <summary> Object.ToString method override. </summary>
        public override string ToString()
        {
            return String.Format("{0}{1}:{2}", (IsClient ? "C" : "S"), Endpoint, Generation);
        }

        /// <summary>
        /// Return a long string representation of this SiloAddress.
        /// </summary>
        /// <remarks>
        /// Note: This string value is not comparable with the <c>FromParsableString</c> method -- use the <c>ToParsableString</c> method for that purpose.
        /// </remarks>
        /// <returns>String representation of this SiloAddress.</returns>
        public string ToLongString()
        {
            return ToString();
        }

        /// <summary>
        /// Return a long string representation of this SiloAddress, including its consistent hash value.
        /// </summary>
        /// <remarks>
        /// Note: This string value is not comparable with the <c>FromParsableString</c> method -- use the <c>ToParsableString</c> method for that purpose.
        /// </remarks>
        /// <returns>String representation of this SiloAddress.</returns>
        public string ToStringWithHashCode()
        {
            return String.Format("{0}/x{1, 8:X8}", ToString(), GetConsistentHashCode());
        }

        /// <summary> Object.Equals method override. </summary>
        public override bool Equals(object obj)
        {
            return Equals(obj as SiloAddress);
        }

        /// <summary> Object.GetHashCode method override. </summary>
        public override int GetHashCode()
        {
            // Note that Port cannot be used because Port==0 matches any non-zero Port value for .Equals
            return Endpoint.GetHashCode() ^ Generation.GetHashCode();
        }

        /// <summary>Get a consistent hash value for this silo address.</summary>
        /// <returns>Consistent hash value for this silo address.</returns>
        public int GetConsistentHashCode()
        {
            if (hashCodeSet) return hashCode;

            // Note that Port cannot be used because Port==0 matches any non-zero Port value for .Equals
            string siloAddressInfoToHash = Endpoint + Generation.ToString(CultureInfo.InvariantCulture);
            hashCode = CalculateIdHash(siloAddressInfoToHash);
            hashCodeSet = true;
            return hashCode;
        }

        // This is the same method as Utils.CalculateIdHash.
        // Hashes the UTF-16 bytes of the text with SHA-256 and XOR-folds the digest into 32 bits.
        private static int CalculateIdHash(string text)
        {
            SHA256 sha = SHA256.Create();
            int hash = 0;
            try
            {
                byte[] data = Encoding.Unicode.GetBytes(text);
                byte[] result = sha.ComputeHash(data);
                // Fold the 256-bit digest into an int, 4 big-endian bytes at a time.
                for (int i = 0; i < result.Length; i += 4)
                {
                    int tmp = (result[i] << 24) | (result[i + 1] << 16) | (result[i + 2] << 8) | (result[i + 3]);
                    hash = hash ^ tmp;
                }
            }
            finally
            {
                sha.Dispose();
            }
            return hash;
        }

        // Returns (and caches) numHashes uniform hash codes for this address.
        // NOTE(review): the cache ignores numHashes — a later call with a different
        // count returns the cached list; confirm callers always pass the same value.
        public List<uint> GetUniformHashCodes(int numHashes)
        {
            if (uniformHashCache != null) return uniformHashCache;

            uniformHashCache = GetUniformHashCodesImpl(numHashes);
            return uniformHashCache;
        }

        private List<uint> GetUniformHashCodesImpl(int numHashes)
        {
            var hashes = new List<uint>();
            var bytes = new byte[16 + sizeof(int) + sizeof(int) + sizeof(int)]; // ip + port + generation + extraBit
            var tmpInt = new int[1];

            // Pre-fill with a fixed filler byte so unused positions are deterministic.
            for (int i = 0; i < bytes.Length; i++)
            {
                bytes[i] = 9;
            }

            // Endpoint IP Address
            if (this.Endpoint.AddressFamily == AddressFamily.InterNetwork) // IPv4
            {
                // IPv4 occupies the last 4 of the 16 address bytes; zero the leading 12.
                for (int i = 0; i < 12; i++)
                {
                    bytes[i] = 0;
                }
                Buffer.BlockCopy(this.Endpoint.Address.GetAddressBytes(), 0, bytes, 12, 4);
            }
            else // IPv6
            {
                Buffer.BlockCopy(this.Endpoint.Address.GetAddressBytes(), 0, bytes, 0, 16);
            }
            var offset = 16;
            // Port
            tmpInt[0] = this.Endpoint.Port;
            Buffer.BlockCopy(tmpInt, 0, bytes, offset, sizeof(int));
            offset += sizeof(int);
            // Generation
            tmpInt[0] = this.Generation;
            Buffer.BlockCopy(tmpInt, 0, bytes, offset, sizeof(int));
            offset += sizeof(int);

            // Vary only the trailing extraBit field to derive numHashes distinct hashes.
            for (int extraBit = 0; extraBit < numHashes; extraBit++)
            {
                // extraBit
                tmpInt[0] = extraBit;
                Buffer.BlockCopy(tmpInt, 0, bytes, offset, sizeof(int));
                hashes.Add(JenkinsHash.ComputeHash(bytes));
            }
            return hashes;
        }

        /// <summary>
        /// Two silo addresses match if they are equal or if one generation or the other is 0
        /// </summary>
        /// <param name="other"> The other SiloAddress to compare this one with. </param>
        /// <returns> Returns <c>true</c> if the two SiloAddresses are considered to match -- if they are equal or if one generation or the other is 0. </returns>
        internal bool Matches(SiloAddress other)
        {
            return other != null && Endpoint.Address.Equals(other.Endpoint.Address) && (Endpoint.Port == other.Endpoint.Port) &&
                ((Generation == other.Generation) || (Generation == 0) || (other.Generation == 0));
        }

        #region IEquatable<SiloAddress> Members

        /// <summary> IEquatable.Equals method override. </summary>
        public bool Equals(SiloAddress other)
        {
            return other != null && Endpoint.Address.Equals(other.Endpoint.Address) && (Endpoint.Port == other.Endpoint.Port) &&
                ((Generation == other.Generation));
        }

        #endregion

        // non-generic version of CompareTo is needed by some contexts. Just calls generic version.
        public int CompareTo(object obj)
        {
            return CompareTo((SiloAddress)obj);
        }

        public int CompareTo(SiloAddress other)
        {
            if (other == null) return 1;
            // Compare Generation first. It gives a cheap and fast way to compare, avoiding allocations
            // and is also semantically meaningful - older silos (with smaller Generation) will appear first in the comparison order.
            // Only if Generations are the same, go on to compare Ports and IPAddress (which is more expensive to compare).
            // Alternatively, we could compare ConsistentHashCode or UniformHashCode.
            int comp = Generation.CompareTo(other.Generation);
            if (comp != 0) return comp;

            comp = Endpoint.Port.CompareTo(other.Endpoint.Port);
            if (comp != 0) return comp;

            return CompareIpAddresses(Endpoint.Address, other.Endpoint.Address);
        }

        // The comparison code is taken from: http://www.codeproject.com/Articles/26550/Extending-the-IPAddress-object-to-allow-relative-c
        // Also note that this comparison does not handle semantic equivalence of IPv4 and IPv6 addresses.
        // In particular, 127.0.0.1 and ::1 are semantically the same, but not syntactically.
// For more information refer to: http://stackoverflow.com/questions/16618810/compare-ipv4-addresses-in-ipv6-notation // and http://stackoverflow.com/questions/22187690/ip-address-class-getaddressbytes-method-putting-octets-in-odd-indices-of-the-byt // and dual stack sockets, described at https://msdn.microsoft.com/en-us/library/system.net.ipaddress.maptoipv6(v=vs.110).aspx private static int CompareIpAddresses(IPAddress one, IPAddress two) { int returnVal = 0; if (one.AddressFamily == two.AddressFamily) { byte[] b1 = one.GetAddressBytes(); byte[] b2 = two.GetAddressBytes(); for (int i = 0; i < b1.Length; i++) { if (b1[i] < b2[i]) { returnVal = -1; break; } else if (b1[i] > b2[i]) { returnVal = 1; break; } } } else { returnVal = one.AddressFamily.CompareTo(two.AddressFamily); } return returnVal; } } }
namespace Rhino.Etl.Core.Operations
{
    using System;
    using System.Collections.Generic;
    using System.Configuration;
    using System.Data;
    using System.Data.SqlClient;
    using DataReaders;
    using Rhino.Commons;

    /// <summary>
    /// Allows to execute an operation that performs a bulk insert into a sql server database
    /// </summary>
    public abstract class SqlBulkInsertOperation : AbstractDatabaseOperation
    {
        /// <summary>
        /// The schema of the destination table
        /// </summary>
        private IDictionary<string, Type> _schema = new Dictionary<string, Type>();

        /// <summary>
        /// The mapping of columns from the row to the database schema.
        /// Important: The column name in the database is case sensitive!
        /// </summary>
        public IDictionary<string, string> Mappings = new Dictionary<string, string>();

        // Schema of the rows fed to SqlBulkCopy; derived from Mappings + _schema in CreateInputSchema().
        private IDictionary<string, Type> _inputSchema = new Dictionary<string, Type>();

        private SqlBulkCopy sqlBulkCopy;
        private string targetTable;
        private int timeout;
        private SqlBulkCopyOptions bulkCopyOptions = SqlBulkCopyOptions.Default;

        /// <summary>
        /// Initializes a new instance of the <see cref="SqlBulkInsertOperation"/> class.
        /// </summary>
        /// <param name="connectionStringName">Name of the connection string.</param>
        /// <param name="targetTable">The target table.</param>
        protected SqlBulkInsertOperation(string connectionStringName, string targetTable)
            : this(connectionStringName, targetTable, 600)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SqlBulkInsertOperation"/> class.
        /// </summary>
        /// <param name="connectionStringName">Name of the connection string.</param>
        /// <param name="targetTable">The target table.</param>
        /// <param name="timeout">The timeout.</param>
        protected SqlBulkInsertOperation(string connectionStringName, string targetTable, int timeout)
            : base(connectionStringName)
        {
            Guard.Against(string.IsNullOrEmpty(targetTable), "TargetTable was not set, but it is mandatory");
            this.targetTable = targetTable;
            this.timeout = timeout;
        }

        /// <summary>The timeout value of the bulk insert operation</summary>
        public virtual int Timeout
        {
            get { return timeout; }
            set { timeout = value; }
        }

        /// <summary>The table or view to bulk load the data into.</summary>
        public string TargetTable
        {
            get { return targetTable; }
            set { targetTable = value; }
        }

        /// <summary><c>true</c> to turn the <see cref="SqlBulkCopyOptions.TableLock"/> option on, otherwise <c>false</c>.</summary>
        public virtual bool LockTable
        {
            get { return IsOptionOn(SqlBulkCopyOptions.TableLock); }
            set { ToggleOption(SqlBulkCopyOptions.TableLock, value); }
        }

        /// <summary><c>true</c> to turn the <see cref="SqlBulkCopyOptions.KeepIdentity"/> option on, otherwise <c>false</c>.</summary>
        public virtual bool KeepIdentity
        {
            get { return IsOptionOn(SqlBulkCopyOptions.KeepIdentity); }
            set { ToggleOption(SqlBulkCopyOptions.KeepIdentity, value); }
        }

        /// <summary><c>true</c> to turn the <see cref="SqlBulkCopyOptions.KeepNulls"/> option on, otherwise <c>false</c>.</summary>
        public virtual bool KeepNulls
        {
            get { return IsOptionOn(SqlBulkCopyOptions.KeepNulls); }
            set { ToggleOption(SqlBulkCopyOptions.KeepNulls, value); }
        }

        /// <summary><c>true</c> to turn the <see cref="SqlBulkCopyOptions.CheckConstraints"/> option on, otherwise <c>false</c>.</summary>
        public virtual bool CheckConstraints
        {
            get { return IsOptionOn(SqlBulkCopyOptions.CheckConstraints); }
            set { ToggleOption(SqlBulkCopyOptions.CheckConstraints, value); }
        }

        /// <summary><c>true</c> to turn the <see cref="SqlBulkCopyOptions.FireTriggers"/> option on, otherwise <c>false</c>.</summary>
        public virtual bool FireTriggers
        {
            get { return IsOptionOn(SqlBulkCopyOptions.FireTriggers); }
            set { ToggleOption(SqlBulkCopyOptions.FireTriggers, value); }
        }

        /// <summary>Turns a <see cref="bulkCopyOptions"/> on or off depending on the value of <paramref name="on"/></summary>
        /// <param name="option">The <see cref="SqlBulkCopyOptions"/> to turn on or off.</param>
        /// <param name="on"><c>true</c> to set the <see cref="SqlBulkCopyOptions"/> <paramref name="option"/> on otherwise <c>false</c> to turn the <paramref name="option"/> off.</param>
        protected void ToggleOption(SqlBulkCopyOptions option, bool on)
        {
            if (on)
            {
                TurnOptionOn(option);
            }
            else
            {
                TurnOptionOff(option);
            }
        }

        /// <summary>Returns <c>true</c> if the <paramref name="option"/> is turned on, otherwise <c>false</c></summary>
        /// <param name="option">The <see cref="SqlBulkCopyOptions"/> option to test for.</param>
        /// <returns></returns>
        protected bool IsOptionOn(SqlBulkCopyOptions option)
        {
            return (bulkCopyOptions & option) == option;
        }

        /// <summary>Turns the <paramref name="option"/> on.</summary>
        /// <param name="option"></param>
        protected void TurnOptionOn(SqlBulkCopyOptions option)
        {
            bulkCopyOptions |= option;
        }

        /// <summary>Turns the <paramref name="option"/> off.</summary>
        /// <param name="option"></param>
        protected void TurnOptionOff(SqlBulkCopyOptions option)
        {
            // XOR only flips the bit when it is known to be set, so this never turns the option on.
            if (IsOptionOn(option))
                bulkCopyOptions ^= option;
        }

        /// <summary>The table or view's schema information.</summary>
        public IDictionary<string, Type> Schema
        {
            get { return _schema; }
            set { _schema = value; }
        }

        /// <summary>
        /// Prepares the mapping for use, by default, it uses the schema mapping.
        /// This is the preferred approach
        /// </summary>
        public virtual void PrepareMapping()
        {
            // Default: identity mapping -- every destination column maps to a row key of the same name.
            foreach (KeyValuePair<string, Type> pair in _schema)
            {
                Mappings[pair.Key] = pair.Key;
            }
        }

        /// <summary>Use the destination Schema and Mappings to create the
        /// operations input schema so it can build the adapter for sending
        /// to the WriteToServer method.</summary>
        public virtual void CreateInputSchema()
        {
            foreach (KeyValuePair<string, string> pair in Mappings)
            {
                _inputSchema.Add(pair.Key, _schema[pair.Value]);
            }
        }

        /// <summary>
        /// Executes this operation
        /// </summary>
        public override IEnumerable<Row> Execute(IEnumerable<Row> rows)
        {
            Guard.Against<ArgumentException>(rows == null, "SqlBulkInsertOperation cannot accept a null enumerator");
            PrepareSchema();
            PrepareMapping();
            CreateInputSchema();
            using (SqlConnection connection = (SqlConnection)Use.Connection(ConnectionStringName))
            using (SqlTransaction transaction = connection.BeginTransaction())
            {
                sqlBulkCopy = CreateSqlBulkCopy(connection, transaction);
                // Stream the rows straight into SqlBulkCopy through an IDataReader adapter.
                DictionaryEnumeratorDataReader adapter = new DictionaryEnumeratorDataReader(_inputSchema, rows);
                sqlBulkCopy.WriteToServer(adapter);

                // Commit only if the pipeline as a whole reported no errors.
                if (PipelineExecuter.HasErrors)
                {
                    Warn("Rolling back transaction in {0}", Name);
                    transaction.Rollback();
                    Warn("Rolled back transaction in {0}", Name);
                }
                else
                {
                    Debug("Committing {0}", Name);
                    transaction.Commit();
                    Debug("Committed {0}", Name);
                }
            }
            // Bulk insert is a terminal operation: it yields no rows downstream.
            yield break;
        }

        /// <summary>
        /// Prepares the schema of the target table
        /// </summary>
        protected abstract void PrepareSchema();

        /// <summary>
        /// Creates the SQL bulk copy instance
        /// </summary>
        private SqlBulkCopy CreateSqlBulkCopy(SqlConnection connection, SqlTransaction transaction)
        {
            SqlBulkCopy copy = new SqlBulkCopy(connection, bulkCopyOptions, transaction);
            foreach (KeyValuePair<string, string> pair in Mappings)
            {
                copy.ColumnMappings.Add(pair.Key, pair.Value);
            }
            copy.DestinationTableName = TargetTable;
            copy.BulkCopyTimeout = Timeout;
            return copy;
        }
    }
}
using System;
using Lucene.Net.Attributes;
using Lucene.Net.Documents;
using Lucene.Net.Index;

namespace Lucene.Net.Analysis
{
    using Lucene.Net.Randomized.Generators;
    using Lucene.Net.Support;
    using NUnit.Framework;
    using System.IO;
    using AtomicReader = Lucene.Net.Index.AtomicReader;
    using Automaton = Lucene.Net.Util.Automaton.Automaton;
    using AutomatonTestUtil = Lucene.Net.Util.Automaton.AutomatonTestUtil;
    using BasicAutomata = Lucene.Net.Util.Automaton.BasicAutomata;
    using BasicOperations = Lucene.Net.Util.Automaton.BasicOperations;
    using BytesRef = Lucene.Net.Util.BytesRef;
    using CharacterRunAutomaton = Lucene.Net.Util.Automaton.CharacterRunAutomaton;
    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;

    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements. See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License. You may obtain a copy of the License at
     *
     * http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Document = Documents.Document;
    using Field = Field;
    using Fields = Lucene.Net.Index.Fields;
    using FieldType = FieldType;
    using IOUtils = Lucene.Net.Util.IOUtils;
    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
    using RegExp = Lucene.Net.Util.Automaton.RegExp;
    using Terms = Lucene.Net.Index.Terms;
    using TermsEnum = Lucene.Net.Index.TermsEnum;
    using TestUtil = Lucene.Net.Util.TestUtil;

    // Unit tests for MockAnalyzer / MockTokenizer: each test configures the mock
    // to emulate a well-known analyzer and asserts the exact token stream produced.
    [TestFixture]
    public class TestMockAnalyzer : BaseTokenStreamTestCase
    {
        /// <summary>
        /// Test a configuration that behaves a lot like WhitespaceAnalyzer </summary>
        [Test]
        public virtual void TestWhitespace()
        {
            Analyzer a = new MockAnalyzer(Random());
            AssertAnalyzesTo(a, "A bc defg hiJklmn opqrstuv wxy z ", new string[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
            AssertAnalyzesTo(a, "aba cadaba shazam", new string[] { "aba", "cadaba", "shazam" });
            AssertAnalyzesTo(a, "break on whitespace", new string[] { "break", "on", "whitespace" });
        }

        /// <summary>
        /// Test a configuration that behaves a lot like SimpleAnalyzer </summary>
        [Test]
        public virtual void TestSimple()
        {
            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
            AssertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ", new string[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
            AssertAnalyzesTo(a, "aba4cadaba-Shazam", new string[] { "aba", "cadaba", "shazam" });
            AssertAnalyzesTo(a, "break+on/Letters", new string[] { "break", "on", "letters" });
        }

        /// <summary>
        /// Test a configuration that behaves a lot like KeywordAnalyzer </summary>
        [Test]
        public virtual void TestKeyword()
        {
            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false);
            AssertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ", new string[] { "a-bc123 defg+hijklmn567opqrstuv78wxy_z " });
            AssertAnalyzesTo(a, "aba4cadaba-Shazam", new string[] { "aba4cadaba-Shazam" });
            AssertAnalyzesTo(a, "break+on/Nothing", new string[] { "break+on/Nothing" });
            // currently though emits no tokens for empty string: maybe we can do it,
            // but we don't want to emit tokens infinitely...
            AssertAnalyzesTo(a, "", new string[0]);
        }

        // Test some regular expressions as tokenization patterns

        /// <summary>
        /// Test a configuration where each character is a term </summary>
        [Test]
        public virtual void TestSingleChar()
        {
            var single = new CharacterRunAutomaton((new RegExp(".")).ToAutomaton());
            Analyzer a = new MockAnalyzer(Random(), single, false);
            AssertAnalyzesTo(a, "foobar", new[] { "f", "o", "o", "b", "a", "r" }, new[] { 0, 1, 2, 3, 4, 5 }, new[] { 1, 2, 3, 4, 5, 6 });
            CheckRandomData(Random(), a, 100);
        }

        /// <summary>
        /// Test a configuration where two characters makes a term </summary>
        [Test]
        public virtual void TestTwoChars()
        {
            CharacterRunAutomaton single = new CharacterRunAutomaton((new RegExp("..")).ToAutomaton());
            Analyzer a = new MockAnalyzer(Random(), single, false);
            AssertAnalyzesTo(a, "foobar", new string[] { "fo", "ob", "ar" }, new int[] { 0, 2, 4 }, new int[] { 2, 4, 6 });
            // make sure when last term is a "partial" match that End() is correct
            AssertTokenStreamContents(a.GetTokenStream("bogus", new StringReader("fooba")), new string[] { "fo", "ob" }, new int[] { 0, 2 }, new int[] { 2, 4 }, new int[] { 1, 1 }, new int?(5));
            CheckRandomData(Random(), a, 100);
        }

        /// <summary>
        /// Test a configuration where three characters makes a term </summary>
        [Test]
        public virtual void TestThreeChars()
        {
            CharacterRunAutomaton single = new CharacterRunAutomaton((new RegExp("...")).ToAutomaton());
            Analyzer a = new MockAnalyzer(Random(), single, false);
            AssertAnalyzesTo(a, "foobar", new string[] { "foo", "bar" }, new int[] { 0, 3 }, new int[] { 3, 6 });
            // make sure when last term is a "partial" match that End() is correct
            AssertTokenStreamContents(a.GetTokenStream("bogus", new StringReader("fooba")), new string[] { "foo" }, new int[] { 0 }, new int[] { 3 }, new int[] { 1 }, new int?(5));
            CheckRandomData(Random(), a, 100);
        }

        /// <summary>
        /// Test a configuration where word starts with one uppercase </summary>
        [Test]
        public virtual void TestUppercase()
        {
            CharacterRunAutomaton single = new CharacterRunAutomaton((new RegExp("[A-Z][a-z]*")).ToAutomaton());
            Analyzer a = new MockAnalyzer(Random(), single, false);
            AssertAnalyzesTo(a, "FooBarBAZ", new string[] { "Foo", "Bar", "B", "A", "Z" }, new int[] { 0, 3, 6, 7, 8 }, new int[] { 3, 6, 7, 8, 9 });
            AssertAnalyzesTo(a, "aFooBar", new string[] { "Foo", "Bar" }, new int[] { 1, 4 }, new int[] { 4, 7 });
            CheckRandomData(Random(), a, 100);
        }

        /// <summary>
        /// Test a configuration that behaves a lot like StopAnalyzer </summary>
        [Test]
        public virtual void TestStop()
        {
            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
            AssertAnalyzesTo(a, "the quick brown a fox", new string[] { "quick", "brown", "fox" }, new int[] { 2, 1, 2 });
        }

        /// <summary>
        /// Test a configuration that behaves a lot like KeepWordFilter </summary>
        [Test]
        public virtual void TestKeep()
        {
            CharacterRunAutomaton keepWords = new CharacterRunAutomaton(BasicOperations.Complement(Automaton.Union(Arrays.AsList(BasicAutomata.MakeString("foo"), BasicAutomata.MakeString("bar")))));
            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, keepWords);
            AssertAnalyzesTo(a, "quick foo brown bar bar fox foo", new string[] { "foo", "bar", "bar", "foo" }, new int[] { 2, 2, 1, 2 });
        }

        /// <summary>
        /// Test a configuration that behaves a lot like LengthFilter </summary>
        [Test]
        public virtual void TestLength()
        {
            CharacterRunAutomaton length5 = new CharacterRunAutomaton((new RegExp(".{5,}")).ToAutomaton());
            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true, length5);
            AssertAnalyzesTo(a, "ok toolong fine notfine", new string[] { "ok", "fine" }, new int[] { 1, 2 });
        }

        /// <summary>
        /// Test MockTokenizer encountering a too long token </summary>
        [Test]
        public virtual void TestTooLongToken()
        {
            Analyzer whitespace = new AnalyzerAnonymousInnerClassHelper(this);
AssertTokenStreamContents(whitespace.GetTokenStream("bogus", new StringReader("test 123 toolong ok ")), new string[] { "test", "123", "toolo", "ng", "ok" }, new int[] { 0, 5, 9, 14, 17 }, new int[] { 4, 8, 14, 16, 19 }, new int?(20));

AssertTokenStreamContents(whitespace.GetTokenStream("bogus", new StringReader("test 123 toolo")), new string[] { "test", "123", "toolo" }, new int[] { 0, 5, 9 }, new int[] { 4, 8, 14 }, new int?(14));
}

// Whitespace tokenizer with a 5-char max token length, used by TestTooLongToken above.
private class AnalyzerAnonymousInnerClassHelper : Analyzer
{
    private readonly TestMockAnalyzer OuterInstance;

    public AnalyzerAnonymousInnerClassHelper(TestMockAnalyzer outerInstance)
    {
        this.OuterInstance = outerInstance;
    }

    protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
    {
        Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false, 5);
        return new TokenStreamComponents(t, t);
    }
}

[Test]
public virtual void TestLUCENE_3042()
{
    string testString = "t";

    Analyzer analyzer = new MockAnalyzer(Random());
    Exception priorException = null;
    TokenStream stream = analyzer.GetTokenStream("dummy", new StringReader(testString));
    try
    {
        stream.Reset();
        while (stream.IncrementToken())
        {
            // consume
        }
        stream.End();
    }
    catch (Exception e)
    {
        priorException = e;
    }
    finally
    {
        IOUtils.DisposeWhileHandlingException(priorException, stream);
    }

    // Rerunning the analysis after the stream was consumed must still work (LUCENE-3042).
    AssertAnalyzesTo(analyzer, testString, new string[] { "t" });
}

/// <summary>
/// blast some random strings through the analyzer </summary>
[Test]
public virtual void TestRandomStrings()
{
    CheckRandomData(Random(), new MockAnalyzer(Random()), AtLeast(1000));
}

/// <summary>
/// blast some random strings through differently configured tokenizers </summary>
[Test, LongRunningTest]
public virtual void TestRandomRegexps()
{
    int iters = AtLeast(30);
    for (int i = 0; i < iters; i++)
    {
        CharacterRunAutomaton dfa = new CharacterRunAutomaton(AutomatonTestUtil.RandomAutomaton(Random()));
        bool lowercase = Random().NextBoolean();
        int limit = TestUtil.NextInt(Random(), 0, 500);
        Analyzer a = new AnalyzerAnonymousInnerClassHelper2(this, dfa, lowercase, limit);
        CheckRandomData(Random(), a, 100);
        a.Dispose();
    }
}

// Tokenizer driven by a random DFA, with configurable lowercasing and max token length.
private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
{
    private readonly TestMockAnalyzer OuterInstance;

    private CharacterRunAutomaton Dfa;
    private bool Lowercase;
    private int Limit;

    public AnalyzerAnonymousInnerClassHelper2(TestMockAnalyzer outerInstance, CharacterRunAutomaton dfa, bool lowercase, int limit)
    {
        this.OuterInstance = outerInstance;
        this.Dfa = dfa;
        this.Lowercase = lowercase;
        this.Limit = limit;
    }

    protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
    {
        Tokenizer t = new MockTokenizer(reader, Dfa, Lowercase, Limit);
        return new TokenStreamComponents(t, t);
    }
}

[Test]
public virtual void TestForwardOffsets()
{
    int num = AtLeast(10000);
    for (int i = 0; i < num; i++)
    {
        string s = TestUtil.RandomHtmlishString(Random(), 20);
        StringReader reader = new StringReader(s);
        MockCharFilter charfilter = new MockCharFilter(reader, 2);
        MockAnalyzer analyzer = new MockAnalyzer(Random());
        Exception priorException = null;
        // NOTE(review): this passes charfilter.m_input (the wrapped reader), which bypasses the
        // char filter itself; the upstream Java test feeds the charfilter -- confirm whether
        // this divergence is intentional in the port.
        TokenStream ts = analyzer.GetTokenStream("bogus", charfilter.m_input);
        try
        {
            ts.Reset();
            while (ts.IncrementToken())
            {
                ;
            }
            ts.End();
        }
        catch (Exception e)
        {
            priorException = e;
        }
        finally
        {
            IOUtils.DisposeWhileHandlingException(priorException, ts);
        }
    }
}

[Test]
public virtual void TestWrapReader()
{
    // LUCENE-5153: test that wrapping an analyzer's reader is allowed
    Random random = Random();

    Analyzer @delegate = new MockAnalyzer(random);
    Analyzer a = new AnalyzerWrapperAnonymousInnerClassHelper(this, @delegate.Strategy, @delegate);

    CheckOneTerm(a, "abc", "aabc");
}

// Wrapper that injects a MockCharFilter around the delegate analyzer's reader.
private class AnalyzerWrapperAnonymousInnerClassHelper : AnalyzerWrapper
{
    private readonly TestMockAnalyzer OuterInstance;

    private Analyzer @delegate;

    public AnalyzerWrapperAnonymousInnerClassHelper(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, Analyzer @delegate) : base(getReuseStrategy)
    {
        this.OuterInstance = outerInstance;
        this.@delegate = @delegate;
    }

    protected override TextReader WrapReader(string fieldName, TextReader reader)
    {
        return new MockCharFilter(reader, 7);
    }

    protected override TokenStreamComponents WrapComponents(string fieldName, TokenStreamComponents components)
    {
        return components;
    }

    protected override Analyzer GetWrappedAnalyzer(string fieldName)
    {
        return @delegate;
    }
}

[Test]
public virtual void TestChangeGaps()
{
    // LUCENE-5324: check that it is possible to change the wrapper's gaps
    int positionGap = Random().Next(1000);
    int offsetGap = Random().Next(1000);
    Analyzer @delegate = new MockAnalyzer(Random());
    Analyzer a = new AnalyzerWrapperAnonymousInnerClassHelper2(this, @delegate.Strategy, positionGap, offsetGap, @delegate);

    RandomIndexWriter writer = new RandomIndexWriter(Random(), NewDirectory(), Similarity, TimeZone);
    Document doc = new Document();
    FieldType ft = new FieldType();
    ft.IsIndexed = true;
    ft.IndexOptions = IndexOptions.DOCS_ONLY;
    ft.IsTokenized = true;
    ft.StoreTermVectors = true;
    ft.StoreTermVectorPositions = true;
    ft.StoreTermVectorOffsets = true;
    doc.Add(new Field("f", "a", ft));
    doc.Add(new Field("f", "a", ft));
    writer.AddDocument(doc, a);
    AtomicReader reader = GetOnlySegmentReader(writer.Reader);
    Fields fields = reader.GetTermVectors(0);
    Terms terms = fields.GetTerms("f");
    TermsEnum te = terms.GetIterator(null);
    Assert.AreEqual(new BytesRef("a"), te.Next());
    DocsAndPositionsEnum dpe = te.DocsAndPositions(null, null);
    Assert.AreEqual(0, dpe.NextDoc());
    Assert.AreEqual(2, dpe.Freq);
    Assert.AreEqual(0, dpe.NextPosition());
    Assert.AreEqual(0, dpe.StartOffset);
    int endOffset = dpe.EndOffset;
    // The second value of the same field must be shifted by the configured gaps.
    Assert.AreEqual(1 + positionGap, dpe.NextPosition());
    Assert.AreEqual(1 + endOffset + offsetGap, dpe.EndOffset);
    Assert.AreEqual(null, te.Next());
    reader.Dispose();
    writer.Dispose();
    // NOTE(review): accesses writer.w after writer.Dispose(); kept as-is -- verify the
    // directory is still reachable post-dispose in this Lucene.Net version.
    writer.w.Directory.Dispose();
}

// Wrapper exposing configurable position-increment and offset gaps around the delegate.
private class AnalyzerWrapperAnonymousInnerClassHelper2 : AnalyzerWrapper
{
    private readonly TestMockAnalyzer OuterInstance;

    private int PositionGap;
    private int OffsetGap;
    private Analyzer @delegate;

    public AnalyzerWrapperAnonymousInnerClassHelper2(TestMockAnalyzer outerInstance, ReuseStrategy getReuseStrategy, int positionGap, int offsetGap, Analyzer @delegate) : base(getReuseStrategy)
    {
        this.OuterInstance = outerInstance;
        this.PositionGap = positionGap;
        this.OffsetGap = offsetGap;
        this.@delegate = @delegate;
    }

    protected override Analyzer GetWrappedAnalyzer(string fieldName)
    {
        return @delegate;
    }

    public override int GetPositionIncrementGap(string fieldName)
    {
        return PositionGap;
    }

    public override int GetOffsetGap(string fieldName)
    {
        return OffsetGap;
    }
}
}
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.Linq;
using NHibernate;
using NHibernate.Collection;
using NHibernate.DebugHelpers;
using NHibernate.Engine;
using NHibernate.Loader;
using NHibernate.Persister.Collection;
using NHibernate.Type;
using NHibernate.Util;

namespace Bieb.DataAccess
{
    /// <summary>
    /// Collection type factory that maps NHibernate set roles onto the
    /// .NET 4 <see cref="ISet{T}"/>-based generic collection types below.
    /// </summary>
    public class Net4CollectionTypeFactory : DefaultCollectionTypeFactory
    {
        public override CollectionType Set<T>(string role, string propertyRef, bool embedded)
        {
            return new GenericSetType<T>(role, propertyRef);
        }

        public override CollectionType SortedSet<T>(string role, string propertyRef, bool embedded, IComparer<T> comparer)
        {
            return new GenericSortedSetType<T>(role, propertyRef, comparer);
        }
    }

    /// <summary>
    /// Set type whose instances are <see cref="SortedSet{T}"/> ordered by a supplied comparer.
    /// </summary>
    [Serializable]
    public class GenericSortedSetType<T> : GenericSetType<T>
    {
        private readonly IComparer<T> comparer;

        public GenericSortedSetType(string role, string propertyRef, IComparer<T> comparer)
            : base(role, propertyRef)
        {
            this.comparer = comparer;
        }

        // Note: anticipatedSize is ignored; SortedSet has no capacity constructor.
        public override object Instantiate(int anticipatedSize)
        {
            return new SortedSet<T>(this.comparer);
        }

        /// <summary>The comparer used to order instantiated sets.</summary>
        public IComparer<T> Comparer
        {
            get { return this.comparer; }
        }
    }

    /// <summary>
    /// An <see cref="IType"/> that maps an <see cref="ISet{T}"/> collection
    /// to the database.
    /// </summary>
    [Serializable]
    public class GenericSetType<T> : SetType
    {
        /// <summary>
        /// Initializes a new instance of a <see cref="GenericSetType{T}"/> class for
        /// a specific role.
        /// </summary>
        /// <param name="role">The role the persistent collection is in.</param>
        /// <param name="propertyRef">The name of the property in the
        /// owner object containing the collection ID, or <see langword="null" /> if it is
        /// the primary key.</param>
        public GenericSetType(string role, string propertyRef)
            : base(role, propertyRef, false)
        {
        }

        public override Type ReturnedClass
        {
            get { return typeof (ISet<T>); }
        }

        /// <summary>
        /// Instantiates a new <see cref="IPersistentCollection"/> for the set.
        /// </summary>
        /// <param name="session">The current <see cref="ISessionImplementor"/> for the set.</param>
        /// <param name="persister">The current <see cref="ICollectionPersister" /> for the set.</param>
        /// <param name="key"></param>
        public override IPersistentCollection Instantiate(ISessionImplementor session, ICollectionPersister persister, object key)
        {
            return new PersistentGenericSet<T>(session);
        }

        /// <summary>
        /// Wraps an <see cref="IList{T}"/> in a <see cref="PersistentGenericSet&lt;T&gt;"/>.
        /// </summary>
        /// <param name="session">The <see cref="ISessionImplementor"/> for the collection to be a part of.</param>
        /// <param name="collection">The unwrapped <see cref="IList{T}"/>.</param>
        /// <returns>
        /// An <see cref="PersistentGenericSet&lt;T&gt;"/> that wraps the non NHibernate <see cref="IList{T}"/>.
/// </returns> public override IPersistentCollection Wrap(ISessionImplementor session, object collection) { var set = collection as ISet<T>; if (set == null) { var stronglyTypedCollection = collection as ICollection<T>; if (stronglyTypedCollection == null) throw new HibernateException(Role + " must be an implementation of ISet<T> or ICollection<T>"); set = new HashSet<T>(stronglyTypedCollection); } return new PersistentGenericSet<T>(session, set); } public override object Instantiate(int anticipatedSize) { return new HashSet<T>(); } protected override void Clear(object collection) { ((ISet<T>)collection).Clear(); } protected override void Add(object collection, object element) { ((ISet<T>)collection).Add((T)element); } } /// <summary> /// A persistent wrapper for an <see cref="ISet{T}"/> /// </summary> [Serializable] [DebuggerTypeProxy(typeof (CollectionProxy<>))] public class PersistentGenericSet<T> : AbstractPersistentCollection, ISet<T> { /// <summary> /// The <see cref="ISet{T}"/> that NHibernate is wrapping. /// </summary> protected ISet<T> set; /// <summary> /// A temporary list that holds the objects while the PersistentSet is being /// populated from the database. /// </summary> /// <remarks> /// This is necessary to ensure that the object being added to the PersistentSet doesn't /// have its' <c>GetHashCode()</c> and <c>Equals()</c> methods called during the load /// process. /// </remarks> [NonSerialized] private IList<T> tempList; public PersistentGenericSet() { } // needed for serialization /// <summary> /// Constructor matching super. /// Instantiates a lazy set (the underlying set is un-initialized). /// </summary> /// <param name="session">The session to which this set will belong. </param> public PersistentGenericSet(ISessionImplementor session) : base(session) { } /// <summary> /// Instantiates a non-lazy set (the underlying set is constructed /// from the incoming set reference). 
/// </summary> /// <param name="session">The session to which this set will belong. </param> /// <param name="original">The underlying set data. </param> public PersistentGenericSet(ISessionImplementor session, ISet<T> original) : base(session) { // Sets can be just a view of a part of another collection. // do we need to copy it to be sure it won't be changing // underneath us? // ie. this.set.addAll(set); set = original; SetInitialized(); IsDirectlyAccessible = true; } public override bool RowUpdatePossible { get { return false; } } public override bool Empty { get { return set.Count == 0; } } public bool IsEmpty { get { return ReadSize() ? CachedSize == 0 : (set.Count == 0); } } public object SyncRoot { get { return this; } } public bool IsSynchronized { get { return false; } } #region ISet<T> Members IEnumerator<T> IEnumerable<T>.GetEnumerator() { Read(); return set.GetEnumerator(); } public bool Contains(T o) { bool? exists = ReadElementExistence(o); return exists == null ? set.Contains(o) : exists.Value; } public void CopyTo(T[] array, int arrayIndex) { Read(); Array.Copy(set.ToArray(), 0, array, arrayIndex, Count); } //public bool ContainsAll(ICollection c) //{ // Read(); // return set.ContainsAll(c); //} public bool Add(T o) { bool? exists = IsOperationQueueEnabled ? 
ReadElementExistence(o) : null; if (!exists.HasValue) { Initialize(true); if (set.Add(o)) { Dirty(); return true; } return false; } if (exists.Value) { return false; } QueueOperation(new SimpleAddDelayedOperation(this, o)); return true; } public void UnionWith(IEnumerable<T> other) { Read(); set.UnionWith(other); } public void IntersectWith(IEnumerable<T> other) { Read(); set.IntersectWith(other); } public void ExceptWith(IEnumerable<T> other) { Read(); set.ExceptWith(other); } public void SymmetricExceptWith(IEnumerable<T> other) { Read(); set.SymmetricExceptWith(other); } public bool IsSubsetOf(IEnumerable<T> other) { Read(); return set.IsProperSupersetOf(other); } public bool IsSupersetOf(IEnumerable<T> other) { Read(); return set.IsSupersetOf(other); } public bool IsProperSupersetOf(IEnumerable<T> other) { Read(); return set.IsProperSupersetOf(other); } public bool IsProperSubsetOf(IEnumerable<T> other) { Read(); return set.IsProperSubsetOf(other); } public bool Overlaps(IEnumerable<T> other) { Read(); return set.Overlaps(other); } public bool SetEquals(IEnumerable<T> other) { Read(); return set.SetEquals(other); } public bool Remove(T o) { bool? exists = PutQueueEnabled ? ReadElementExistence(o) : null; if (!exists.HasValue) { Initialize(true); if (set.Remove(o)) { Dirty(); return true; } return false; } if (exists.Value) { QueueOperation(new SimpleRemoveDelayedOperation(this, o)); return true; } return false; } void ICollection<T>.Add(T item) { Add(item); } public void Clear() { if (ClearQueueEnabled) { QueueOperation(new ClearDelayedOperation(this)); } else { Initialize(true); if (set.Count != 0) { set.Clear(); Dirty(); } } } public int Count { get { return ReadSize() ? 
				CachedSize : set.Count;
		}
	}

	public bool IsReadOnly
	{
		get { return false; }
	}

	public IEnumerator GetEnumerator()
	{
		Read();
		return set.GetEnumerator();
	}

	#endregion

	#region DelayedOperations

	#region Nested type: ClearDelayedOperation

	// Queued stand-in for Clear(), executed against the real set at flush time.
	protected sealed class ClearDelayedOperation : IDelayedOperation
	{
		private readonly PersistentGenericSet<T> enclosingInstance;

		public ClearDelayedOperation(PersistentGenericSet<T> enclosingInstance)
		{
			this.enclosingInstance = enclosingInstance;
		}

		#region IDelayedOperation Members

		public object AddedInstance
		{
			get { return null; }
		}

		public object Orphan
		{
			get { throw new NotSupportedException("queued clear cannot be used with orphan delete"); }
		}

		public void Operate()
		{
			enclosingInstance.set.Clear();
		}

		#endregion
	}

	#endregion

	#region Nested type: SimpleAddDelayedOperation

	// Queued stand-in for Add(value), executed against the real set at flush time.
	protected sealed class SimpleAddDelayedOperation : IDelayedOperation
	{
		private readonly PersistentGenericSet<T> enclosingInstance;
		private readonly T value;

		public SimpleAddDelayedOperation(PersistentGenericSet<T> enclosingInstance, T value)
		{
			this.enclosingInstance = enclosingInstance;
			this.value = value;
		}

		#region IDelayedOperation Members

		public object AddedInstance
		{
			get { return value; }
		}

		public object Orphan
		{
			get { return null; }
		}

		public void Operate()
		{
			enclosingInstance.set.Add(value);
		}

		#endregion
	}

	#endregion

	#region Nested type: SimpleRemoveDelayedOperation

	// Queued stand-in for Remove(value); the removed value is reported as the orphan.
	protected sealed class SimpleRemoveDelayedOperation : IDelayedOperation
	{
		private readonly PersistentGenericSet<T> enclosingInstance;
		private readonly T value;

		public SimpleRemoveDelayedOperation(PersistentGenericSet<T> enclosingInstance, T value)
		{
			this.enclosingInstance = enclosingInstance;
			this.value = value;
		}

		#region IDelayedOperation Members

		public object AddedInstance
		{
			get { return null; }
		}

		public object Orphan
		{
			get { return value; }
		}

		public void Operate()
		{
			enclosingInstance.set.Remove(value);
		}

		#endregion
	}

	#endregion

	#endregion

	// Deep-copies every element so the snapshot stays isolated from later entity changes.
	public override ICollection GetSnapshot(ICollectionPersister persister)
	{
		var entityMode = Session.EntityMode;
		var clonedSet = new SetSnapShot<T>(set.Count);
		var enumerable = from object current in set
						 select persister.ElementType.DeepCopy(current, entityMode, persister.Factory);
		foreach (var copied in enumerable)
		{
			clonedSet.Add((T)copied);
		}
		return clonedSet;
	}

	// Orphans are snapshot elements that are no longer present in the set.
	public override ICollection GetOrphans(object snapshot, string entityName)
	{
		var sn = new SetSnapShot<object>((IEnumerable<object>) snapshot);
		// Short-circuit: nothing can be an orphan if either side is empty.
		if (set.Count == 0) return sn;
		if (((ICollection) sn).Count == 0) return sn;
		return GetOrphans(sn, set.ToArray(), entityName, Session);
	}

	// The set equals its snapshot when sizes match and no element is new or dirty.
	public override bool EqualsSnapshot(ICollectionPersister persister)
	{
		var elementType = persister.ElementType;
		var snapshot = (ISetSnapshot<T>) GetSnapshot();
		if (((ICollection) snapshot).Count != set.Count)
		{
			return false;
		}

		return !(from object obj in set
				 let oldValue = snapshot[(T)obj]
				 where oldValue == null || elementType.IsDirty(oldValue, obj, Session)
				 select obj).Any();
	}

	public override bool IsSnapshotEmpty(object snapshot)
	{
		return ((ICollection) snapshot).Count == 0;
	}

	// Lets the collection type create the underlying set sized for the expected rows.
	public override void BeforeInitialize(ICollectionPersister persister, int anticipatedSize)
	{
		set = (ISet<T>) persister.CollectionType.Instantiate(anticipatedSize);
	}

	/// <summary>
	/// Initializes this PersistentSet from the cached values.
	/// </summary>
	/// <param name="persister">The CollectionPersister to use to reassemble the PersistentSet.</param>
	/// <param name="disassembled">The disassembled PersistentSet.</param>
	/// <param name="owner">The owner object.</param>
	public override void InitializeFromCache(ICollectionPersister persister, object disassembled, object owner)
	{
		var array = (object[]) disassembled;
		int size = array.Length;
		BeforeInitialize(persister, size);
		for (int i = 0; i < size; i++)
		{
			var element = (T) persister.ElementType.Assemble(array[i], Session, owner);
			if (element != null)
			{
				set.Add(element);
			}
		}
		SetInitialized();
	}

	public override string ToString()
	{
		// Initializes the collection so the string shows the real contents.
		Read();
		return StringHelper.CollectionToString(set.ToArray());
	}

	// Reads one element from the current result-set row into the temporary list
	// (see tempList remarks: avoids Equals/GetHashCode calls during load).
	public override object ReadFrom(IDataReader rs, ICollectionPersister role, ICollectionAliases descriptor, object owner)
	{
		var element = (T) role.ReadElement(rs, owner, descriptor.SuffixedElementAliases, Session);
		if (element != null)
		{
			tempList.Add(element);
		}
		return element;
	}

	/// <summary>
	/// Set up the temporary List that will be used in the EndRead()
	/// to fully create the set.
	/// </summary>
	public override void BeginRead()
	{
		base.BeginRead();
		tempList = new List<T>();
	}

	/// <summary>
	/// Takes the contents stored in the temporary list created during <c>BeginRead()</c>
	/// that was populated during <c>ReadFrom()</c> and write it to the underlying
	/// PersistentSet.
	/// </summary>
	public override bool EndRead(ICollectionPersister persister)
	{
		foreach (T item in tempList)
		{
			set.Add(item);
		}
		tempList = null;
		SetInitialized();
		return true;
	}

	public override IEnumerable Entries(ICollectionPersister persister)
	{
		return set;
	}

	// Converts each element to its cacheable (disassembled) representation.
	public override object Disassemble(ICollectionPersister persister)
	{
		var result = new object[set.Count];
		int i = 0;

		foreach (object obj in set)
		{
			result[i++] = persister.ElementType.Disassemble(obj, Session, null);
		}
		return result;
	}

	// Deletes are snapshot elements missing from the set, plus the old values of
	// elements that became dirty (those rows are deleted and re-inserted).
	public override IEnumerable GetDeletes(ICollectionPersister persister, bool indexIsFormula)
	{
		IType elementType = persister.ElementType;
		var sn = (ISetSnapshot<T>) GetSnapshot();
		var deletes = new List<T>(((ICollection<T>) sn).Count);

		deletes.AddRange(sn.Where(obj => !set.Contains(obj)));

		deletes.AddRange(from obj in set
						 let oldValue = sn[obj]
						 where oldValue != null && elementType.IsDirty(obj, oldValue, Session)
						 select oldValue);

		return deletes;
	}

	// An entry needs inserting when it is absent from the snapshot or dirty.
	public override bool NeedsInserting(object entry, int i, IType elemType)
	{
		var sn = (ISetSnapshot<T>) GetSnapshot();
		object oldKey = sn[(T)entry];
		// note that it might be better to iterate the snapshot but this is safe,
		// assuming the user implements equals() properly, as required by the PersistentSet
		// contract!
		return oldKey == null || elemType.IsDirty(oldKey, entry, Session);
	}

	// Sets never update rows in place (see RowUpdatePossible).
	public override bool NeedsUpdating(object entry, int i, IType elemType)
	{
		return false;
	}

	public override object GetIndex(object entry, int i, ICollectionPersister persister)
	{
		throw new NotSupportedException("Sets don't have indexes");
	}

	public override object GetElement(object entry)
	{
		return entry;
	}

	public override object GetSnapshotElement(object entry, int i)
	{
		throw new NotSupportedException("Sets don't support updating by element");
	}

	// Re-exposes the protected lazy-initialization trigger publicly.
	public new void Read()
	{
		base.Read();
	}

	public override bool Equals(object other)
	{
		var that = other as ISet<T>;
		if (that == null)
		{
			return false;
		}
		Read();
		// NOTE(review): SequenceEqual is order-sensitive, so two sets containing the
		// same elements in a different iteration order compare unequal - confirm
		// whether order-independent set equality was intended here.
		return set.SequenceEqual(that);
	}

	public override int GetHashCode()
	{
		// NOTE(review): forces initialization and returns the underlying set's
		// hash code, which is not guaranteed consistent with Equals above - verify.
		Read();
		return set.GetHashCode();
	}

	public override bool EntryExists(object entry, int i)
	{
		return true;
	}

	// Reference comparison: is the given collection the one we wrap?
	public override bool IsWrapper(object collection)
	{
		return set == collection;
	}

	public void CopyTo(Array array, int index)
	{
		// NH : we really need to initialize the set ?
		Read();
		Array.Copy(set.ToArray(), 0, array, index, Count);
	}

	#region Nested type: ISetSnapshot

	// A snapshot that can look up the stored (old) instance equal to a given element.
	private interface ISetSnapshot<TCollection> : ICollection<TCollection>, ICollection
	{
		TCollection this[TCollection element] { get; }
	}

	#endregion

	#region Nested type: SetSnapShot

	[Serializable]
	private class SetSnapShot<TSnapshot> : ISetSnapshot<TSnapshot>
	{
		private readonly List<TSnapshot> elements;

		private SetSnapShot()
		{
			elements = new List<TSnapshot>();
		}

		public SetSnapShot(int capacity)
		{
			elements = new List<TSnapshot>(capacity);
		}

		public SetSnapShot(IEnumerable<TSnapshot> collection)
		{
			elements = new List<TSnapshot>(collection);
		}

		#region ISetSnapshot<T> Members

		public IEnumerator<TSnapshot> GetEnumerator()
		{
			return elements.GetEnumerator();
		}

		IEnumerator IEnumerable.GetEnumerator()
		{
			return GetEnumerator();
		}

		public void Add(TSnapshot item)
		{
			elements.Add(item);
		}

		// The snapshot is append-only; Clear/Remove are deliberately unsupported.
		public void Clear()
		{
			throw new InvalidOperationException();
		}

		public bool Contains(TSnapshot item)
		{
			return elements.Contains(item);
		}

		public void CopyTo(TSnapshot[] array, int arrayIndex)
		{
			elements.CopyTo(array, arrayIndex);
		}

		public bool Remove(TSnapshot item)
		{
			throw new InvalidOperationException();
		}

		public void CopyTo(Array array, int index)
		{
			((ICollection) elements).CopyTo(array, index);
		}

		int ICollection.Count
		{
			get { return elements.Count; }
		}

		public object SyncRoot
		{
			get { return ((ICollection) elements).SyncRoot; }
		}

		public bool IsSynchronized
		{
			get { return ((ICollection) elements).IsSynchronized; }
		}

		int ICollection<TSnapshot>.Count
		{
			get { return elements.Count; }
		}

		public bool IsReadOnly
		{
			get { return ((ICollection<TSnapshot>) elements).IsReadOnly; }
		}

		// Returns the stored instance that compares equal to the given element,
		// or default(TSnapshot) when no such element exists.
		public TSnapshot this[TSnapshot element]
		{
			get
			{
				int idx = elements.IndexOf(element);
				if (idx >= 0)
				{
					return elements[idx];
				}
				return default(TSnapshot);
			}
		}

		#endregion
	}

	#endregion
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using Microsoft.CSharp.RuntimeBinder.Errors;
using Microsoft.CSharp.RuntimeBinder.Syntax;

namespace Microsoft.CSharp.RuntimeBinder.Semantics
{
    internal sealed partial class ExpressionBinder
    {
        // ----------------------------------------------------------------------------
        // BindImplicitConversion
        // ----------------------------------------------------------------------------

        // Bundles the source expression/type, destination type and flags for one
        // implicit-conversion binding attempt; Bind() performs the conversion.
        private sealed class ImplicitConversion
        {
            public ImplicitConversion(ExpressionBinder binder, Expr exprSrc, CType typeSrc, ExprClass typeDest, bool needsExprDest, CONVERTTYPE flags)
            {
                _binder = binder;
                _exprSrc = exprSrc;
                _typeSrc = typeSrc;
                _typeDest = typeDest.Type;
                _exprTypeDest = typeDest;
                _needsExprDest = needsExprDest;
                _flags = flags;
                _exprDest = null;
            }

            // The converted expression produced by Bind(); null until bound, or when
            // the caller only asked whether the conversion is possible.
            public Expr ExprDest { get { return _exprDest; } }
            private Expr _exprDest;
            private readonly ExpressionBinder _binder;
            private readonly Expr _exprSrc;
            private readonly CType _typeSrc;
            private readonly CType _typeDest;
            private readonly ExprClass _exprTypeDest;
            private readonly bool _needsExprDest;
            private CONVERTTYPE _flags;

            /*
             * BindImplicitConversion
             *
             * This is a complex routine with complex parameters. Generally, this should
             * be called through one of the helper methods that insulates you
             * from the complexity of the interface. This routine handles all the logic
             * associated with implicit conversions.
             *
             * exprSrc - the expression being converted. Can be null if only type conversion
             * info is being supplied.
             * typeSrc - type of the source
             * typeDest - type of the destination
             * exprDest - returns an expression of the src converted to the dest. If null, we
             * only care about whether the conversion can be attempted, not the
             * expression tree.
             * flags - flags possibly customizing the conversions allowed. E.g., can suppress
             * user-defined conversions.
             *
             * returns true if the conversion can be made, false if not.
             */
            public bool Bind()
            {
                // 13.1 Implicit conversions
                //
                // The following conversions are classified as implicit conversions:
                //
                // * Identity conversions
                // * Implicit numeric conversions
                // * Implicit enumeration conversions
                // * Implicit reference conversions
                // * Boxing conversions
                // * Implicit type parameter conversions
                // * Implicit constant expression conversions
                // * User-defined implicit conversions
                // * Implicit conversions from an anonymous method expression to a compatible delegate type
                // * Implicit conversion from a method group to a compatible delegate type
                // * Conversions from the null type (11.2.7) to any nullable type
                // * Implicit nullable conversions
                // * Lifted user-defined implicit conversions
                //
                // Implicit conversions can occur in a variety of situations, including function member invocations
                // (14.4.3), cast expressions (14.6.6), and assignments (14.14).

                // Can't convert to or from the error type.
                if (_typeSrc == null || _typeDest == null || _typeDest.IsNeverSameType())
                {
                    return false;
                }

                Debug.Assert(_typeSrc != null && _typeDest != null);            // types must be supplied.
                Debug.Assert(_exprSrc == null || _typeSrc == _exprSrc.Type);    // type of source should be correct if source supplied
                Debug.Assert(!_needsExprDest || _exprSrc != null);              // need source expr to create dest expr

                // Dispatch first on the destination's type kind; most kinds are
                // settled immediately, the rest fall through to the source switch.
                switch (_typeDest.GetTypeKind())
                {
                    case TypeKind.TK_ErrorType:
                        Debug.Assert(((ErrorType)_typeDest).HasParent());
                        if (_typeSrc != _typeDest)
                        {
                            return false;
                        }
                        if (_needsExprDest)
                        {
                            _exprDest = _exprSrc;
                        }
                        return true;
                    case TypeKind.TK_NullType:
                        // Can only convert to the null type if src is null.
                        if (!(_typeSrc is NullType))
                        {
                            return false;
                        }
                        if (_needsExprDest)
                        {
                            _exprDest = _exprSrc;
                        }
                        return true;
                    case TypeKind.TK_MethodGroupType:
                        VSFAIL("Something is wrong with Type.IsNeverSameType()");
                        return false;
                    case TypeKind.TK_ArgumentListType:
                        return _typeSrc == _typeDest;
                    case TypeKind.TK_VoidType:
                        return false;
                    default:
                        break;
                }

                if (_typeSrc is ErrorType)
                {
                    Debug.Assert(!(_typeDest is ErrorType));
                    return false;
                }

                // 13.1.1 Identity conversion
                //
                // An identity conversion converts from any type to the same type. This conversion exists only
                // such that an entity that already has a required type can be said to be convertible to that type.

                if (_typeSrc == _typeDest &&
                    ((_flags & CONVERTTYPE.ISEXPLICIT) == 0 || (!_typeSrc.isPredefType(PredefinedType.PT_FLOAT) && !_typeSrc.isPredefType(PredefinedType.PT_DOUBLE))))
                {
                    if (_needsExprDest)
                    {
                        _exprDest = _exprSrc;
                    }
                    return true;
                }

                if (_typeDest is NullableType nubDest)
                {
                    return BindNubConversion(nubDest);
                }

                if (_typeSrc is NullableType nubSrc)
                {
                    return bindImplicitConversionFromNullable(nubSrc);
                }

                if ((_flags & CONVERTTYPE.ISEXPLICIT) != 0)
                {
                    _flags |= CONVERTTYPE.NOUDC;
                }

                // Get the fundamental types of destination.
                FUNDTYPE ftDest = _typeDest.fundType();
                Debug.Assert(ftDest != FUNDTYPE.FT_NONE || _typeDest is ParameterModifierType);

                switch (_typeSrc.GetTypeKind())
                {
                    default:
                        VSFAIL("Bad type symbol kind");
                        break;
                    case TypeKind.TK_MethodGroupType:
                        if (_exprSrc is ExprMemberGroup memGrp)
                        {
                            ExprCall outExpr;
                            bool retVal = _binder.BindGrpConversion(memGrp, _typeDest, _needsExprDest, out outExpr, false);
                            _exprDest = outExpr;
                            return retVal;
                        }
                        return false;
                    case TypeKind.TK_VoidType:
                    case TypeKind.TK_ErrorType:
                    case TypeKind.TK_ParameterModifierType:
                    case TypeKind.TK_ArgumentListType:
                        return false;
                    case TypeKind.TK_NullType:
                        if (bindImplicitConversionFromNull())
                        {
                            return true;
                        }
                        // If not, try user defined implicit conversions.
                        break;
                    case TypeKind.TK_ArrayType:
                        if (bindImplicitConversionFromArray())
                        {
                            return true;
                        }
                        // If not, try user defined implicit conversions.
                        break;
                    case TypeKind.TK_PointerType:
                        if (bindImplicitConversionFromPointer())
                        {
                            return true;
                        }
                        // If not, try user defined implicit conversions.
                        break;
                    case TypeKind.TK_TypeParameterType:
                        if (bindImplicitConversionFromTypeVar(_typeSrc as TypeParameterType))
                        {
                            return true;
                        }
                        // If not, try user defined implicit conversions.
                        break;
                    case TypeKind.TK_AggregateType:
                        if (bindImplicitConversionFromAgg(_typeSrc as AggregateType))
                        {
                            return true;
                        }
                        // If not, try user defined implicit conversions.
                        break;
                }

                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                // RUNTIME BINDER ONLY CHANGE
                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                //
                // Every incoming dynamic operand should be implicitly convertible
                // to any type that it is an instance of.
                object srcRuntimeObject = _exprSrc?.RuntimeObject;
                if (srcRuntimeObject != null
                    && _typeDest.AssociatedSystemType.IsInstanceOfType(srcRuntimeObject)
                    && _binder.GetSemanticChecker().CheckTypeAccess(_typeDest, _binder.Context.ContextForMemberLookup))
                {
                    if (_needsExprDest)
                    {
                        _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, _exprSrc.Flags & EXPRFLAG.EXF_CANTBENULL);
                    }
                    return true;
                }

                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                // END RUNTIME BINDER ONLY CHANGE
                // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

                // 13.1.8 User-defined implicit conversions
                //
                // A user-defined implicit conversion consists of an optional standard implicit conversion,
                // followed by execution of a user-defined implicit conversion operator, followed by another
                // optional standard implicit conversion. The exact rules for evaluating user-defined
                // conversions are described in 13.4.3.
                if (0 == (_flags & CONVERTTYPE.NOUDC))
                {
                    return _binder.bindUserDefinedConversion(_exprSrc, _typeSrc, _typeDest, _needsExprDest, out _exprDest, true);
                }

                // No conversion was found.
                return false;
            }

            /***************************************************************************************************
                Called by BindImplicitConversion when the destination type is Nullable<T>. The following
                conversions are handled by this method:

                * For S in { object, ValueType, interfaces implemented by underlying type} there is an explicit
                  unboxing conversion S => T?
                * System.Enum => T? there is an unboxing conversion if T is an enum type
                * null => T? implemented as default(T?)

                * Implicit T?* => T?+ implemented by either wrapping or calling GetValueOrDefault the
                  appropriate number of times.
                * If imp/exp S => T then imp/exp S => T?+ implemented by converting to T then wrapping the
                  appropriate number of times.
                * If imp/exp S => T then imp/exp S?+ => T?+ implemented by calling GetValueOrDefault (m-1) times
                  then calling HasValue, producing a null if it returns false, otherwise calling Value,
                  converting to T then wrapping the appropriate number of times.

                The 3 rules above can be summarized with the following recursive rules:

                * If imp/exp S => T? then imp/exp S? => T? implemented as
                  qs.HasValue ? (T?)(qs.Value) : default(T?)
                * If imp/exp S => T then imp/exp S => T? implemented as new T?((T)s)

                This method also handles calling bindUserDefinedConverion. This method does NOT handle
                the following conversions:

                * Implicit boxing conversion from S? to { object, ValueType, Enum, ifaces implemented by S }. (Handled by BindImplicitConversion.)
                * If imp/exp S => T then explicit S?+ => T implemented by calling Value the appropriate number
                  of times. (Handled by BindExplicitConversion.)

                The recursive equivalent is:

                * If imp/exp S => T and T is not nullable then explicit S? => T implemented as qs.Value

                Some nullable conversion are NOT standard conversions. In particular, if S => T is implicit
                then S?
                => T is not standard. Similarly if S => T is not implicit then S => T? is not standard.
            ***************************************************************************************************/
            private bool BindNubConversion(NullableType nubDst)
            {
                // This code assumes that STANDARD and ISEXPLICIT are never both set.
                // bindUserDefinedConversion should ensure this!
                Debug.Assert(0 != (~_flags & (CONVERTTYPE.STANDARD | CONVERTTYPE.ISEXPLICIT)));
                Debug.Assert(_exprSrc == null || _exprSrc.Type == _typeSrc);
                Debug.Assert(!_needsExprDest || _exprSrc != null);
                Debug.Assert(_typeSrc != nubDst); // BindImplicitConversion should have taken care of this already.
                AggregateType atsDst = nubDst.GetAts();

                // Check for the unboxing conversion. This takes precedence over the wrapping conversions.
                if (GetSymbolLoader().HasBaseConversion(nubDst.GetUnderlyingType(), _typeSrc) && !CConversions.FWrappingConv(_typeSrc, nubDst))
                {
                    // These should be different! Fix the caller if typeSrc is an AggregateType of Nullable.
                    Debug.Assert(atsDst != _typeSrc);

                    // typeSrc is a base type of the destination nullable type so there is an explicit
                    // unboxing conversion.
                    if (0 == (_flags & CONVERTTYPE.ISEXPLICIT))
                    {
                        return false;
                    }

                    if (_needsExprDest)
                    {
                        _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, EXPRFLAG.EXF_UNBOX);
                    }
                    return true;
                }

                bool dstWasNullable;
                bool srcWasNullable;
                CType typeDstBase = nubDst.StripNubs(out dstWasNullable);
                ExprClass exprTypeDstBase = GetExprFactory().CreateClass(typeDstBase);
                CType typeSrcBase = _typeSrc.StripNubs(out srcWasNullable);

                // Explicit conversions bind with BindExplicitConversion, implicit with
                // BindImplicitConversion; the rest of the algorithm is shared.
                ConversionFunc pfn = (_flags & CONVERTTYPE.ISEXPLICIT) != 0 ?
                    (ConversionFunc)_binder.BindExplicitConversion :
                    (ConversionFunc)_binder.BindImplicitConversion;

                if (!srcWasNullable)
                {
                    Debug.Assert(_typeSrc == typeSrcBase);

                    // The null type can be implicitly converted to T? as the default value.
                    if (_typeSrc is NullType)
                    {
                        // If we have the constant null, generate it as a default value of T?. If we have
                        // some crazy expression which has been determined to be always null, like (null??null)
                        // keep it in its expression form and transform it in the nullable rewrite pass.
                        if (_needsExprDest)
                        {
                            if (_exprSrc.isCONSTANT_OK())
                            {
                                _exprDest = GetExprFactory().CreateZeroInit(nubDst);
                            }
                            else
                            {
                                _exprDest = GetExprFactory().CreateCast(_typeDest, _exprSrc);
                            }
                        }
                        return true;
                    }

                    Expr exprTmp = _exprSrc;

                    // If there is an implicit/explicit S => T then there is an implicit/explicit S => T?
                    if (_typeSrc == typeDstBase || pfn(_exprSrc, _typeSrc, exprTypeDstBase, nubDst, _needsExprDest, out exprTmp, _flags | CONVERTTYPE.NOUDC))
                    {
                        if (_needsExprDest)
                        {
                            ExprUserDefinedConversion exprUDC = exprTmp as ExprUserDefinedConversion;
                            if (exprUDC != null)
                            {
                                exprTmp = exprUDC.UserDefinedCall;
                            }

                            if (dstWasNullable)
                            {
                                ExprCall call = _binder.BindNubNew(exprTmp);
                                exprTmp = call;
                                call.NullableCallLiftKind = NullableCallLiftKind.NullableConversionConstructor;
                            }

                            if (exprUDC != null)
                            {
                                exprUDC.UserDefinedCall = exprTmp;
                                exprTmp = exprUDC;
                            }

                            Debug.Assert(exprTmp.Type == nubDst);
                            _exprDest = exprTmp;
                        }
                        return true;
                    }

                    // No builtin conversion. Maybe there is a user defined conversion....
                    return 0 == (_flags & CONVERTTYPE.NOUDC) && _binder.bindUserDefinedConversion(_exprSrc, _typeSrc, nubDst, _needsExprDest, out _exprDest, 0 == (_flags & CONVERTTYPE.ISEXPLICIT));
                }

                // Both are Nullable so there is only a conversion if there is a conversion between the base types.
                // That is, if there is an implicit/explicit S => T then there is an implicit/explicit S?+ => T?+.
                if (typeSrcBase != typeDstBase && !pfn(null, typeSrcBase, exprTypeDstBase, nubDst, false, out _exprDest, _flags | CONVERTTYPE.NOUDC))
                {
                    // No builtin conversion. Maybe there is a user defined conversion....
                    return 0 == (_flags & CONVERTTYPE.NOUDC) && _binder.bindUserDefinedConversion(_exprSrc, _typeSrc, nubDst, _needsExprDest, out _exprDest, 0 == (_flags & CONVERTTYPE.ISEXPLICIT));
                }

                if (_needsExprDest)
                {
                    MethWithInst mwi = new MethWithInst(null, null);
                    ExprMemberGroup pMemGroup = GetExprFactory().CreateMemGroup(null, mwi);
                    ExprCall exprDst = GetExprFactory().CreateCall(0, nubDst, _exprSrc, pMemGroup, null);

                    // Here we want to first check whether or not the conversions work on the base types.
                    Expr arg1 = _binder.mustCast(_exprSrc, typeSrcBase);
                    ExprClass arg2 = GetExprFactory().CreateClass(typeDstBase);

                    bool convertible;
                    if (0 != (_flags & CONVERTTYPE.ISEXPLICIT))
                    {
                        convertible = _binder.BindExplicitConversion(arg1, arg1.Type, arg2, typeDstBase, out arg1, _flags | CONVERTTYPE.NOUDC);
                    }
                    else
                    {
                        convertible = _binder.BindImplicitConversion(arg1, arg1.Type, arg2, typeDstBase, out arg1, _flags | CONVERTTYPE.NOUDC);
                    }
                    if (!convertible)
                    {
                        VSFAIL("bind(Im|Ex)plicitConversion failed unexpectedly");
                        return false;
                    }

                    exprDst.CastOfNonLiftedResultToLiftedType = _binder.mustCast(arg1, nubDst, 0);
                    exprDst.NullableCallLiftKind = NullableCallLiftKind.NullableConversion;
                    exprDst.PConversions = exprDst.CastOfNonLiftedResultToLiftedType;
                    _exprDest = exprDst;
                }

                return true;
            }

            // Handles the null literal => reference / pointer / type-variable / Nullable<T>.
            private bool bindImplicitConversionFromNull()
            {
                // null type can be implicitly converted to any reference type or pointer type or type
                // variable with reference-type constraint.

                FUNDTYPE ftDest = _typeDest.fundType();
                if (ftDest != FUNDTYPE.FT_REF && ftDest != FUNDTYPE.FT_PTR &&
                    (ftDest != FUNDTYPE.FT_VAR || !((TypeParameterType)_typeDest).IsReferenceType()) &&
                    // null is convertible to System.Nullable<T>.
                    !_typeDest.isPredefType(PredefinedType.PT_G_OPTIONAL))
                {
                    return false;
                }
                if (_needsExprDest)
                {
                    // If the conversion argument is a constant null then return a ZEROINIT.
                    // Otherwise, bind this as a cast to the destination type. In a later
                    // rewrite pass we will rewrite the cast as SEQ(side effects, ZEROINIT).
                    if (_exprSrc.isCONSTANT_OK())
                    {
                        _exprDest = GetExprFactory().CreateZeroInit(_typeDest);
                    }
                    else
                    {
                        _exprDest = GetExprFactory().CreateCast(_typeDest, _exprSrc);
                    }
                }
                return true;
            }

            // Handles T? => same Nullable type, boxing conversions out of T?, and
            // finally user-defined conversions.
            private bool bindImplicitConversionFromNullable(NullableType nubSrc)
            {
                // We can convert T? using a boxing conversion, we can convert it to ValueType, and
                // we can convert it to any interface implemented by T.
                //
                // 13.1.5 Boxing Conversions
                //
                // A nullable-type has a boxing conversion to the same set of types to which the nullable-type's
                // underlying type has boxing conversions. A boxing conversion applied to a value of a nullable-type
                // proceeds as follows:
                //
                // * If the HasValue property of the nullable value evaluates to false, then the result of the
                //   boxing conversion is the null reference of the appropriate type.
                //
                // Otherwise, the result is obtained by boxing the result of evaluating the Value property on
                // the nullable value.

                AggregateType atsNub = nubSrc.GetAts();
                if (atsNub == _typeDest)
                {
                    if (_needsExprDest)
                    {
                        _exprDest = _exprSrc;
                    }
                    return true;
                }

                if (GetSymbolLoader().HasBaseConversion(nubSrc.GetUnderlyingType(), _typeDest) && !CConversions.FUnwrappingConv(nubSrc, _typeDest))
                {
                    if (_needsExprDest)
                    {
                        _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, EXPRFLAG.EXF_BOX);
                        if (!_typeDest.isPredefType(PredefinedType.PT_OBJECT))
                        {
                            // The base type of a nullable is always a non-nullable value type,
                            // therefore so is typeDest unless typeDest is PT_OBJECT. In this case the conversion
                            // needs to be unboxed. We only need this if we actually will use the result.
                            _binder.bindSimpleCast(_exprDest, _exprTypeDest, out _exprDest, EXPRFLAG.EXF_FORCE_UNBOX);
                        }
                    }
                    return true;
                }

                return 0 == (_flags & CONVERTTYPE.NOUDC) && _binder.bindUserDefinedConversion(_exprSrc, nubSrc, _typeDest, _needsExprDest, out _exprDest, true);
            }

            // Handles array covariance and array => System.Array / generic interfaces.
            private bool bindImplicitConversionFromArray()
            {
                // 13.1.4
                //
                // The implicit reference conversions are:
                //
                // * From an array-type S with an element type SE to an array-type T with an element
                //   type TE, provided all of the following are true:
                //   * S and T differ only in element type. In other words, S and T have the same number of dimensions.
                //   * An implicit reference conversion exists from SE to TE.
                // * From a one-dimensional array-type S[] to System.Collections.Generic.IList<S>,
                //   System.Collections.Generic.IReadOnlyList<S> and their base interfaces
                // * From a one-dimensional array-type S[] to System.Collections.Generic.IList<T>, System.Collections.Generic.IReadOnlyList<T>
                //   and their base interfaces, provided there is an implicit reference conversion from S to T.
                // * From any array-type to System.Array.
                // * From any array-type to any interface implemented by System.Array.

                if (!GetSymbolLoader().HasBaseConversion(_typeSrc, _typeDest))
                {
                    return false;
                }

                EXPRFLAG grfex = 0;
                // The above if checks for dest==Array, object or an interface the array implements,
                // including IList<T>, ICollection<T>, IEnumerable<T>, IReadOnlyList<T>, IReadOnlyCollection<T>
                // and the non-generic versions.
                if ((_typeDest is ArrayType ||
                     (_typeDest is AggregateType aggDest && aggDest.isInterfaceType() &&
                      aggDest.GetTypeArgsAll().Count == 1 &&
                      ((aggDest.GetTypeArgsAll()[0] != ((ArrayType)_typeSrc).GetElementType()) ||
                       0 != (_flags & CONVERTTYPE.FORCECAST))))
                    &&
                    (0 != (_flags & CONVERTTYPE.FORCECAST) ||
                     TypeManager.TypeContainsTyVars(_typeSrc, null) ||
                     TypeManager.TypeContainsTyVars(_typeDest, null)))
                {
                    grfex = EXPRFLAG.EXF_REFCHECK;
                }
                if (_needsExprDest)
                {
                    _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, grfex);
                }
                return true;
            }

            // Handles pointer => void* (the only implicit pointer conversion).
            private bool bindImplicitConversionFromPointer()
            {
                // 27.4 Pointer conversions
                //
                // In an unsafe context, the set of available implicit conversions (13.1) is extended to include
                // the following implicit pointer conversions:
                //
                // * From any pointer-type to the type void*.

                if (_typeDest is PointerType ptDest && ptDest.GetReferentType() == _binder.getVoidType())
                {
                    if (_needsExprDest)
                        _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest);
                    return true;
                }
                return false;
            }

            // Dispatches aggregate sources: enum source/destination, simple-type
            // numeric conversions, then the derived-to-base fallback.
            private bool bindImplicitConversionFromAgg(AggregateType aggTypeSrc)
            {
                // GENERICS: The case for constructed types is very similar to types with
                // no parameters. The parameters are irrelevant for most of the conversions
                // below. They could be relevant if we had user-defined conversions on
                // generic types.

                AggregateSymbol aggSrc = aggTypeSrc.getAggregate();
                if (aggSrc.IsEnum())
                {
                    return bindImplicitConversionFromEnum(aggTypeSrc);
                }

                if (_typeDest.isEnumType())
                {
                    if (bindImplicitConversionToEnum(aggTypeSrc))
                    {
                        return true;
                    }
                    // Even though enum is sealed, a class can derive from enum in LAF scenarios --
                    // continue testing for derived to base conversions below.
                }
                else if (aggSrc.getThisType().isSimpleType() && _typeDest.isSimpleType())
                {
                    if (bindImplicitConversionBetweenSimpleTypes(aggTypeSrc))
                    {
                        return true;
                    }
                    // No simple conversion -- continue testing for derived to base conversions below.
                }

                return bindImplicitConversionToBase(aggTypeSrc);
            }

            // Handles implicit reference conversions to a base class / interface,
            // boxing struct sources when the destination is a reference type.
            private bool bindImplicitConversionToBase(AggregateType pSource)
            {
                // 13.1.4 Implicit reference conversions
                //
                // * From any reference-type to object.
                // * From any class-type S to any class-type T, provided S is derived from T.
                // * From any class-type S to any interface-type T, provided S implements T.
                // * From any interface-type S to any interface-type T, provided S is derived from T.
                // * From any delegate-type to System.Delegate.
                // * From any delegate-type to System.ICloneable.

                if (!(_typeDest is AggregateType) || !GetSymbolLoader().HasBaseConversion(pSource, _typeDest))
                {
                    return false;
                }
                EXPRFLAG flags = 0x00;
                if (pSource.getAggregate().IsStruct() && _typeDest.fundType() == FUNDTYPE.FT_REF)
                {
                    flags = EXPRFLAG.EXF_BOX | EXPRFLAG.EXF_CANTBENULL;
                }
                else if (_exprSrc != null)
                {
                    flags = _exprSrc.Flags & EXPRFLAG.EXF_CANTBENULL;
                }
                if (_needsExprDest)
                    _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, flags);
                return true;
            }

            // Handles boxing an enum source to System.Enum / ValueType / object / interfaces.
            private bool bindImplicitConversionFromEnum(AggregateType aggTypeSrc)
            {
                // 13.1.5 Boxing conversions
                //
                // A boxing conversion permits any non-nullable-value-type to be implicitly converted to the type
                // object or System.ValueType or to any interface-type implemented by the value-type, and any enum
                // type to be implicitly converted to System.Enum as well. Boxing a value of a
                // non-nullable-value-type consists of allocating an object instance and copying the value-type
                // value into that instance. An enum can be boxed to the type System.Enum, since that is the direct
                // base class for all enums (21.4). A struct or enum can be boxed to the type System.ValueType,
                // since that is the direct base class for all structs (18.3.2) and a base class for all enums.

                if (_typeDest is AggregateType aggDest && GetSymbolLoader().HasBaseConversion(aggTypeSrc, aggDest))
                {
                    if (_needsExprDest)
                        _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, EXPRFLAG.EXF_BOX | EXPRFLAG.EXF_CANTBENULL);
                    return true;
                }
                return false;
            }

            // Handles the constant-zero => enum conversion (deliberately wider than
            // the spec's literal-0 rule; see the compatibility note below).
            private bool bindImplicitConversionToEnum(AggregateType aggTypeSrc)
            {
                // The spec states:
                // *****************
                // 13.1.3 Implicit enumeration conversions
                //
                // An implicit enumeration conversion permits the decimal-integer-literal 0 to be converted to any
                // enum-type.
                // *****************
                // However, we actually allow any constant zero, not just the integer literal zero, to be converted
                // to enum.  The reason for this is for backwards compatibility with a premature optimization
                // that used to be in the binding layer.  We would optimize away expressions such as 0 | blah to be
                // just 0, but not erase the "is literal" bit.  This meant that expression such as 0 | 0 | E.X
                // would succeed rather than correctly producing an error pointing out that 0 | 0 is not a literal
                // zero and therefore does not convert to any enum.
                //
                // We have removed the premature optimization but want old code to continue to compile. Rather than
                // try to emulate the somewhat complex behaviour of the previous optimizer, it is easier to simply
                // say that any compile time constant zero is convertible to any enum.  This means unfortunately
                // expressions such as (7-7) * 12 are convertible to enum, but frankly, that's better than having
                // some terribly complex rule about what constitutes a legal zero and what doesn't.

                // Note: Don't use GetConst here since the conversion only applies to bona-fide compile time constants.
                if (
                    aggTypeSrc.getAggregate().GetPredefType() != PredefinedType.PT_BOOL &&
                    _exprSrc != null &&
                    _exprSrc.IsZero() &&
                    _exprSrc.Type.isNumericType() &&
                    /*(exprSrc.flags & EXF_LITERALCONST) &&*/
                    0 == (_flags & CONVERTTYPE.STANDARD))
                {
                    // NOTE: This allows conversions from uint, long, ulong, float, double, and hexadecimal int
                    // NOTE: This is for backwards compatibility with Everett

                    // This is another place where we lose Expr fidelity. We shouldn't fold this
                    // into a constant here - we should move this to a later pass.
                    if (_needsExprDest)
                    {
                        _exprDest = GetExprFactory().CreateConstant(_typeDest, ConstVal.GetDefaultValue(_typeDest.constValKind()));
                    }
                    return true;
                }
                return false;
            }

            // Handles implicit numeric conversions between the predefined simple types
            // (continues past this view).
            private bool bindImplicitConversionBetweenSimpleTypes(AggregateType aggTypeSrc)
            {
                AggregateSymbol aggSrc = aggTypeSrc.getAggregate();
                Debug.Assert(aggSrc.getThisType().isSimpleType());
                Debug.Assert(_typeDest.isSimpleType());

                Debug.Assert(aggSrc.IsPredefined() && _typeDest.isPredefined());
                PredefinedType ptSrc = aggSrc.GetPredefType();
                PredefinedType ptDest = _typeDest.getPredefType();
                ConvKind convertKind;
                bool fConstShrinkCast = false;

                Debug.Assert((int)ptSrc < NUM_SIMPLE_TYPES && (int)ptDest < NUM_SIMPLE_TYPES);

                // 13.1.7 Implicit constant expression conversions
                //
                // An implicit constant expression conversion permits the following conversions:
                // * A constant-expression (14.16) of type int can be converted to type sbyte, byte, short,
                //   ushort, uint, or ulong, provided the value of the constant-expression is within the range
                //   of the destination type.
                // * A constant-expression of type long can be converted to type ulong, provided the value of
                //   the constant-expression is not negative.
                // Note: Don't use GetConst here since the conversion only applies to bona-fide compile time constants.
if (_exprSrc is ExprConstant constant && _exprSrc.IsOK && ((ptSrc == PredefinedType.PT_INT && ptDest != PredefinedType.PT_BOOL && ptDest != PredefinedType.PT_CHAR) || (ptSrc == PredefinedType.PT_LONG && ptDest == PredefinedType.PT_ULONG)) && isConstantInRange(constant, _typeDest)) { // Special case (CLR 6.1.6): if integral constant is in range, the conversion is a legal implicit conversion. convertKind = ConvKind.Implicit; fConstShrinkCast = _needsExprDest && (GetConvKind(ptSrc, ptDest) != ConvKind.Implicit); } else if (ptSrc == ptDest) { // Special case: precision limiting casts to float or double Debug.Assert(ptSrc == PredefinedType.PT_FLOAT || ptSrc == PredefinedType.PT_DOUBLE); Debug.Assert(0 != (_flags & CONVERTTYPE.ISEXPLICIT)); convertKind = ConvKind.Implicit; } else { convertKind = GetConvKind(ptSrc, ptDest); Debug.Assert(convertKind != ConvKind.Identity); // identity conversion should have been handled at first. } if (convertKind != ConvKind.Implicit) { return false; } // An implicit conversion exists. Do the conversion. if (_exprSrc.GetConst() != null) { // Fold the constant cast if possible. ConstCastResult result = _binder.bindConstantCast(_exprSrc, _exprTypeDest, _needsExprDest, out _exprDest, false); if (result == ConstCastResult.Success) { return true; // else, don't fold and use a regular cast, below. } } if (isUserDefinedConversion(ptSrc, ptDest)) { if (!_needsExprDest) { return true; } // According the language, this is a standard conversion, but it is implemented // through a user-defined conversion. Because it's a standard conversion, we don't // test the NOUDC flag here. 
return _binder.bindUserDefinedConversion(_exprSrc, aggTypeSrc, _typeDest, _needsExprDest, out _exprDest, true); } if (_needsExprDest) _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest); return true; } private bool bindImplicitConversionFromTypeVar(TypeParameterType tyVarSrc) { // 13.1.4 // // For a type-parameter T that is known to be a reference type (25.7), the following implicit // reference conversions exist: // // * From T to its effective base class C, from T to any base class of C, and from T to any // interface implemented by C. // * From T to an interface-type I in T's effective interface set and from T to any base // interface of I. // * From T to a type parameter U provided that T depends on U (25.7). [Note: Since T is known // to be a reference type, within the scope of T, the run-time type of U will always be a // reference type, even if U is not known to be a reference type at compile-time.] // * From the null type (11.2.7) to T. // // 13.1.5 // // For a type-parameter T that is not known to be a reference type (25.7), the following conversions // involving T are considered to be boxing conversions at compile-time. At run-time, if T is a value // type, the conversion is executed as a boxing conversion. At run-time, if T is a reference type, // the conversion is executed as an implicit reference conversion or identity conversion. // // * From T to its effective base class C, from T to any base class of C, and from T to any // interface implemented by C. [Note: C will be one of the types System.Object, System.ValueType, // or System.Enum (otherwise T would be known to be a reference type and 13.1.4 would apply // instead of this clause).] // * From T to an interface-type I in T's effective interface set and from T to any base // interface of I. 
// // 13.1.6 Implicit type parameter conversions // // This clause details implicit conversions involving type parameters that are not classified as // implicit reference conversions or implicit boxing conversions. // // For a type-parameter T that is not known to be a reference type, there is an implicit conversion // from T to a type parameter U provided T depends on U. At run-time, if T is a value type and U is // a reference type, the conversion is executed as a boxing conversion. At run-time, if both T and U // are value types, then T and U are necessarily the same type and no conversion is performed. At // run-time, if T is a reference type, then U is necessarily also a reference type and the conversion // is executed as an implicit reference conversion or identity conversion (25.7). CType typeTmp = tyVarSrc.GetEffectiveBaseClass(); TypeArray bnds = tyVarSrc.GetBounds(); int itype = -1; for (; ;) { if (_binder.canConvert(typeTmp, _typeDest, _flags | CONVERTTYPE.NOUDC)) { if (!_needsExprDest) { return true; } if (_typeDest is TypeParameterType) { // For a type var destination we need to cast to object then to the other type var. Expr exprT; ExprClass exprObj = GetExprFactory().CreateClass(_binder.GetPredefindType(PredefinedType.PT_OBJECT)); _binder.bindSimpleCast(_exprSrc, exprObj, out exprT, EXPRFLAG.EXF_FORCE_BOX); _binder.bindSimpleCast(exprT, _exprTypeDest, out _exprDest, EXPRFLAG.EXF_FORCE_UNBOX); } else { _binder.bindSimpleCast(_exprSrc, _exprTypeDest, out _exprDest, EXPRFLAG.EXF_FORCE_BOX); } return true; } do { if (++itype >= bnds.Count) { return false; } typeTmp = bnds[itype]; } while (!typeTmp.isInterfaceType() && !(typeTmp is TypeParameterType)); } } private SymbolLoader GetSymbolLoader() { return _binder.GetSymbolLoader(); } private ExprFactory GetExprFactory() { return _binder.GetExprFactory(); } } } }
// Copyright (c) Alexandre Mutel. All rights reserved.
// Licensed under the BSD license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Policy;
using System.Text;
using System.Threading.Tasks;
using LibGit2Sharp;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;

namespace GitRocketFilter
{
    /// <summary>
    /// Main class for git rocket filter: rewrites a commit range by applying
    /// commit filters and/or tree (path) filters, then writes the result to a branch.
    /// </summary>
    public class RocketFilterApp : IDisposable
    {
        // Name of the generated method that hosts the user's commit-filter script.
        private const string MethodCommitFilterName = "CommitFilterMethod";

        // Signatures the compiled user scripts are bound to via Delegate.CreateDelegate.
        private delegate void CommitFilteringCallbackDelegate(Repository repo, SimpleCommit commit);
        private delegate void PathPatternCallbackDelegate(Repository repo, string pattern, SimpleCommit commit, ref SimpleEntry entry);

        // Cache of SimpleCommit wrappers, keyed by the underlying libgit2 commit (guarded by lock(simpleCommits)).
        private readonly Dictionary<Commit, SimpleCommit> simpleCommits = new Dictionary<Commit, SimpleCommit>();
        // Maps original commit SHA -> rewritten commit (or surviving ancestor for discarded commits).
        private readonly Dictionary<string, Commit> commitMap;
        private Repository repo;
        // Last commit written; becomes the tip of the output branch.
        private Commit lastCommit;

        // Entries surviving the tree filter for the commit being processed.
        // Reference-equality comparer: distinct TreeEntry objects for the same path must not collide.
        private readonly Dictionary<TreeEntry, SimpleEntry.EntryValue> entriesToKeep = new Dictionary<TreeEntry, SimpleEntry.EntryValue>(ObjectReferenceEqualityComparer<TreeEntry>.Default);

        // Tasks spawned while walking a tree; drained by ProcessPendingTasks (guarded by lock(pendingTasks)).
        private readonly List<Task> pendingTasks = new List<Task>();

        private PathPatterns keepPathPatterns;
        private PathPatterns removePathPatterns;

        // Path of a scratch bare repository used only to evaluate ignore-style patterns.
        private readonly string tempRocketPath;
        private bool hasTreeFiltering;
        private bool hasTreeFilteringWithScripts;
        private bool hasCommitFiltering;
        private CommitFilteringCallbackDelegate commitFilteringCallback;
        private RevSpec revisionSpec;
        private string branchRef;
        private Stopwatch clock;

        // SHAs of commits dropped by filtering; used to re-parent their descendants.
        private readonly HashSet<string> commitsDiscarded = new HashSet<string>();

        /// <summary>
        /// Initializes a new instance of the <see cref="RocketFilterApp"/> class.
        /// Creates the temporary bare repository used for pattern matching.
        /// </summary>
        public RocketFilterApp()
        {
            commitMap = new Dictionary<string, Commit>();
            tempRocketPath = Path.Combine(Path.GetTempPath(), ".gitRocketFilter");
            Repository.Init(tempRocketPath, true);
        }

        /// <summary>
        /// Gets or sets the repository path.
        /// </summary>
        /// <value>The repository path.</value>
        public string RepositoryPath { get; set; }

        /// <summary>
        /// Gets or sets the keep patterns.
        /// </summary>
        /// <value>The keep patterns.</value>
        public string KeepPatterns { get; set; }

        /// <summary>
        /// Gets or sets the remove patterns.
        /// </summary>
        /// <value>The remove patterns.</value>
        public string RemovePatterns { get; set; }

        /// <summary>
        /// Gets or sets the name of the branch.
        /// </summary>
        /// <value>The name of the branch.</value>
        public string BranchName { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether if the branch already exist, it can be overridden.
        /// </summary>
        /// <value><c>true</c> if the branch already exist, it can be overridden; otherwise, <c>false</c>.</value>
        public bool BranchOverwrite { get; set; }

        /// <summary>
        /// Gets or sets the commit filter code.
        /// </summary>
        /// <value>The commit filter code.</value>
        public string CommitFilter { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether this <see cref="RocketFilterApp"/> is outputting verbose log.
        /// </summary>
        /// <value><c>true</c> if is outputting verbose log; otherwise, <c>false</c>.</value>
        public bool Verbose { get; set; }

        /// <summary>
        /// Gets or sets the revision range to work on. (Default is from first commit to HEAD).
        /// </summary>
        /// <value>The revision range.</value>
        public string RevisionRange { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether to detach first commits from their parents.
        /// </summary>
        /// <value><c>true</c> if to detach first commits from their parents; otherwise, <c>false</c>.</value>
        public bool DetachFirstCommits { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether to include submodule git link in tree-filtering.
        /// </summary>
        /// <value><c>true</c> to include submodule git link in tree-filtering; otherwise, <c>false</c>.</value>
        public bool IncludeLinks { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether to disable multi-threaded tasks.
        /// </summary>
        /// <value><c>true</c> to disable multi-threaded tasks, <c>false</c>.</value>
        public bool DisableTasks { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether to preserve empty merge commits.
        /// </summary>
        /// <value><c>true</c> to preserve empty merge commits; otherwise, <c>false</c>.</value>
        public bool PreserveMergeCommits { get; set; }

        /// <summary>
        /// Gets or sets the output writer.
        /// </summary>
        /// <value>The output writer.</value>
        public TextWriter OutputWriter { get; set; }

        /// <summary>
        /// Runs the filtering: validate, prepare patterns, compile scripts,
        /// rewrite commits, and finally write the output branch ref.
        /// </summary>
        public void Run()
        {
            clock = Stopwatch.StartNew();

            if (OutputWriter == null)
            {
                OutputWriter = Console.Out;
            }

            // Validate parameters
            ValidateParameters();

            // Prepare filtering
            PrepareFiltering();

            // Compile any scripts (from commit or tree filtering)
            CompileScripts();

            // Process all commits
            ProcessCommits();

            // Output the branch refs
            WriteBranchRefs();
        }

        /// <summary>
        /// This method Validates the parameters.
        /// </summary>
        /// <exception cref="GitRocketFilter.RocketException">
        /// No valid git repository path found at [{0}]
        /// or
        /// Branch name is required and cannot be null
        /// or
        /// The branch [{0}] already exist. Cannot overwrite without force option
        /// or
        /// Invalid revspec [{0}]. Reason: {1}
        /// </exception>
        private void ValidateParameters()
        {
            if (!Repository.IsValid(RepositoryPath))
            {
                throw new RocketException("No valid git repository path found at [{0}]", RepositoryPath);
            }
            repo = new Repository(RepositoryPath);

            if (string.IsNullOrWhiteSpace(BranchName))
            {
                throw new RocketException("Branch name is required and cannot be null");
            }

            branchRef = "refs/heads/" + BranchName;
            if (repo.Refs[branchRef] != null && !BranchOverwrite)
            {
                throw new RocketException("The branch [{0}] already exist. Cannot overwrite without force option", BranchName);
            }

            // Validate the revision range
            if (!string.IsNullOrWhiteSpace(RevisionRange))
            {
                string errorMessage = null;
                try
                {
                    revisionSpec = RevSpec.Parse(repo, RevisionRange);
                    if (revisionSpec.Type == RevSpecType.MergeBase)
                    {
                        errorMessage = "Merge base revspec are not supported";
                    }
                }
                catch (LibGit2SharpException libGitException)
                {
                    errorMessage = libGitException.Message;
                }

                if (errorMessage != null)
                {
                    throw new RocketException("Invalid revspec [{0}]. Reason: {1}", RevisionRange, errorMessage);
                }
            }
        }

        /// <summary>
        /// Prepares the filtering by processing keep and remove entries.
        /// </summary>
        /// <exception cref="GitRocketFilter.RocketException">Expecting at least a commit or tree filtering option</exception>
        private void PrepareFiltering()
        {
            // Prepare tree filtering
            keepPathPatterns = ParseTreeFilteringPathPatterns(KeepPatterns, "--keep");
            removePathPatterns = ParseTreeFilteringPathPatterns(RemovePatterns, "--remove");
            hasTreeFiltering = keepPathPatterns.Count > 0 ||
                               removePathPatterns.Count > 0;

            hasTreeFilteringWithScripts = keepPathPatterns.Any(pattern => !string.IsNullOrWhiteSpace(pattern.ScriptText));
            hasTreeFilteringWithScripts = hasTreeFilteringWithScripts ||
                                          removePathPatterns.Any(pattern => !string.IsNullOrWhiteSpace(pattern.ScriptText));

            hasCommitFiltering = !string.IsNullOrWhiteSpace(CommitFilter);

            // If nothing to do, we are missing a parameter (either commit or tree filtering)
            if (!hasCommitFiltering && !hasTreeFiltering)
            {
                throw new RocketException("Expecting at least a commit or tree filtering option");
            }
        }

        /// <summary>
        /// Processes all commits in topological-reverse order (parents before children).
        /// </summary>
        private void ProcessCommits()
        {
            // We are working only in a topological-reverse order (from parent commits to child)
            var commitFilter = new CommitFilter()
            {
                FirstParentOnly = false,
                SortBy = CommitSortStrategies.Topological | CommitSortStrategies.Reverse
            };

            // If a revision range is specified, try to use it
            // NOTE(review): the range is re-parsed here instead of reusing the
            // revisionSpec validated in ValidateParameters — presumably equivalent; confirm.
            if (RevisionRange != null)
            {
                var revSpec = RevSpec.Parse(repo, RevisionRange);
                if (revSpec.Type == RevSpecType.Single)
                {
                    commitFilter.IncludeReachableFrom = revSpec.From.Id;
                }
                else if (revSpec.Type == RevSpecType.Range)
                {
                    commitFilter.Range = RevisionRange;
                }
            }

            // Gets all commits in topological reverse order
            var commits = repo.Commits.QueryBy(commitFilter).ToList();

            // Process commits
            for (int i = 0; i < commits.Count; i++)
            {
                var commit = GetSimpleCommit(commits[i]);
                // "\r" keeps rewriting the same console line until the last commit.
                OutputWriter.Write("Rewrite {0} ({1}/{2}){3}", commit.Id, i + 1, commits.Count, (i+1) == commits.Count ? string.Empty : "\r");
                ProcessCommit(commit);
            }
            if (commits.Count == 0)
            {
                OutputWriter.WriteLine("Nothing to rewrite.");
            }
            else
            {
                OutputWriter.WriteLine(" in {0:#.###}s", clock.Elapsed.TotalSeconds);
            }
        }

        /// <summary>
        /// Writes the final branch refs.
        /// </summary>
        private void WriteBranchRefs()
        {
            var originalRef = repo.Refs[branchRef];
            if ((originalRef == null || BranchOverwrite) && lastCommit != null)
            {
                if (BranchOverwrite)
                {
                    repo.Refs.Remove(branchRef);
                }

                repo.Refs.Add(branchRef, lastCommit.Id);

                OutputWriter.WriteLine("Ref '{0}' was {1}", branchRef, originalRef != null && BranchOverwrite ? "overwritten" : "created");
            }
        }

        /// <summary>
        /// Processes a commit: applies commit/tree filtering, remaps parents,
        /// prunes commits whose tree equals a parent's, and records the mapping.
        /// </summary>
        /// <param name="commit">The commit.</param>
        private void ProcessCommit(SimpleCommit commit)
        {
            // ------------------------------------
            // commit-filtering
            // ------------------------------------
            if (PerformCommitFiltering(commit)) return;

            // Map parents of previous commit to new parents
            // Check if at least a parent has the same tree, if yes, we don't need to create a new commit
            Commit newCommit = null;
            Tree newTree;

            // ------------------------------------
            // tree-filtering
            // ------------------------------------
            if (PerformTreeFiltering(commit, out newTree)) return;

            // Process parents
            var newParents = new List<Commit>();
            bool hasOriginalParents = false;
            Commit pruneCommitParentCandidate = null;
            foreach (var parent in commit.Parents)
            {
                // Find a non discarded parent
                var remapParent = FindRewrittenParent(parent);

                // If remap parent is null, we can skip it
                if (remapParent == null)
                {
                    continue;
                }

                // If parent is same, then it is an original parent that can be detached by DetachFirstCommits
                hasOriginalParents = parent.GitCommit == remapParent;

                newParents.Add(remapParent);

                // If parent tree is equal, we might be able to prune this commit
                if (pruneCommitParentCandidate == null && remapParent.Tree.Id == newTree.Id)
                {
                    pruneCommitParentCandidate = remapParent;
                }
            }

            // Prune: reuse the parent instead of creating an identical-tree commit,
            // unless we must preserve a (two-parent) merge commit.
            if (pruneCommitParentCandidate != null && !(PreserveMergeCommits && newParents.Count == 2))
            {
                newCommit = pruneCommitParentCandidate;
                commitsDiscarded.Add(commit.Sha);
            }

            // If we detach first commits from their parents
            if (DetachFirstCommits && hasOriginalParents)
            {
                // Remove original parents
                foreach (var parent in commit.Parents)
                {
                    newParents.Remove(parent.GitCommit);
                }
            }

            // If we need to create a new commit (new tree)
            if (newCommit == null)
            {
                var author = new Signature(commit.AuthorName, commit.AuthorEmail, commit.AuthorDate);
                var committer = new Signature(commit.CommitterName, commit.CommitterEmail, commit.CommitterDate);
                newCommit = repo.ObjectDatabase.CreateCommit(author, committer, commit.Message, newTree, newParents, false);
            }

            // Store the remapping between the old commit and the new commit
            commitMap.Add(commit.Sha, newCommit);

            // Store the last commit
            lastCommit = newCommit;
        }

        /// <summary>
        /// Applies keep/remove tree filtering to <paramref name="commit"/> and builds the
        /// filtered tree. Returns <c>true</c> when the commit is discarded (empty result).
        /// </summary>
        private bool PerformTreeFiltering(SimpleCommit commit, out Tree newTree)
        {
            newTree = null;
            if (hasTreeFiltering)
            {
                // clear the cache of entries to keep and the tasks to run
                entriesToKeep.Clear();

                // Process white list
                if (keepPathPatterns.Count == 0)
                {
                    KeepAllEntries(commit.Tree);
                }
                else
                {
                    KeepEntries(commit, commit.Tree);
                }
                ProcessPendingTasks();

                // Process black list
                if (removePathPatterns.Count > 0)
                {
                    RemoveEntries(commit);
                    ProcessPendingTasks();
                }

                // If the commit was discarded by a tree-filtering, we need to skip it also here
                if (commit.Discard || entriesToKeep.Count == 0)
                {
                    commit.Discard = true;

                    // Store that this commit was discarded (used for re-parenting commits)
                    commitsDiscarded.Add(commit.Sha);
                    return true;
                }

                // Rebuild a new tree based on the list of entries to keep
                var treeDef = new TreeDefinition();
                foreach (var entryIt in entriesToKeep)
                {
                    var entry = entryIt.Key;
                    var entryValue = entryIt.Value;
                    if (entryValue.Blob != null)
                    {
                        treeDef.Add(entry.Path, entryValue.Blob, entryValue.Mode);
                    }
                    else
                    {
                        treeDef.Add(entry.Path, entry);
                    }
                }
                newTree = repo.ObjectDatabase.CreateTree(treeDef);
            }
            else
            {
                // If we don't have any tree filtering, just use the original tree
                newTree = commit.Tree;
            }
            return false;
        }

        /// <summary>
        /// Runs the compiled commit-filter script, if any.
        /// Returns <c>true</c> when the script discarded the commit.
        /// </summary>
        private bool PerformCommitFiltering(SimpleCommit commit)
        {
            if (commitFilteringCallback != null)
            {
                // Filter this commit
                commitFilteringCallback(repo, commit);

                if (commit.Discard)
                {
                    // Store that this commit was discarded (used for reparenting commits)
                    commitsDiscarded.Add(commit.Sha);
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// Resolves the rewritten commit for <paramref name="commit"/>. For a discarded
        /// commit, recursively walks its parents to find the first surviving ancestor.
        /// The result (possibly null) is memoized in commitMap.
        /// </summary>
        private Commit FindRewrittenParent(Commit commit)
        {
            Commit newCommit;
            if (!commitMap.TryGetValue(commit.Sha, out newCommit))
            {
                if (commitsDiscarded.Contains(commit.Sha))
                {
                    foreach (var parent in commit.Parents)
                    {
                        var newParent = FindRewrittenParent(parent);
                        if (newParent != null)
                        {
                            newCommit = newParent;
                            break;
                        }
                    }
                }
                else
                {
                    // Commit outside the rewritten range: keep it as-is.
                    newCommit = commit;
                }
                commitMap.Add(commit.Sha, newCommit);
            }

            return newCommit;
        }

        /// <summary>
        /// Recursively walks <paramref name="tree"/> and schedules keep-pattern matching
        /// for every blob (and git-link when <see cref="IncludeLinks"/>). Work runs on a
        /// task per tree unless <see cref="DisableTasks"/>.
        /// </summary>
        private void KeepEntries(SimpleCommit commit, Tree tree)
        {
            // Early exit if the commit was discarded by a tree-filtering
            if (commit.Discard)
            {
                return;
            }

            var task = Task.Factory.StartNew(() =>
            {
                foreach (var entryIt in tree)
                {
                    var entry = entryIt;
                    if (entry.TargetType == TreeEntryTargetType.Tree)
                    {
                        var subTree = (Tree)entry.Target;
                        KeepEntries(commit, subTree);
                    }
                    else if (entry.TargetType == TreeEntryTargetType.Blob || IncludeLinks)
                    {
                        KeepEntry(commit, entry, keepPathPatterns, true);
                    }
                }
            });
            if (DisableTasks)
            {
                task.RunSynchronously();
            }
            else
            {
                lock (pendingTasks)
                {
                    pendingTasks.Add(task);
                }
            }
        }

        /// <summary>
        /// Recursively marks every blob (and git-link when <see cref="IncludeLinks"/>)
        /// of <paramref name="tree"/> as kept, without pattern matching.
        /// </summary>
        private void KeepAllEntries(Tree tree)
        {
            var task = Task.Factory.StartNew(() =>
            {
                foreach (var entryIt in tree)
                {
                    var entry = entryIt;
                    if (entry.TargetType == TreeEntryTargetType.Tree)
                    {
                        var subTree = (Tree) entry.Target;
                        KeepAllEntries(subTree);
                    }
                    else if (entry.TargetType == TreeEntryTargetType.Blob || IncludeLinks)
                    {
                        // We can update entries to keep
                        lock (entriesToKeep)
                        {
                            entriesToKeep.Add(entry, new SimpleEntry.EntryValue());
                        }
                    }
                }
            });
            if (DisableTasks)
            {
                task.RunSynchronously();
            }
            else
            {
                lock (pendingTasks)
                {
                    pendingTasks.Add(task);
                }
            }
        }

        /// <summary>
        /// Matches one entry against <paramref name="globalPattern"/> (cache first, then
        /// a scheduled full match) and keeps or removes it depending on
        /// <paramref name="keepOnIgnore"/>.
        /// </summary>
        private void KeepEntry(SimpleCommit commit, TreeEntry entry, PathPatterns globalPattern, bool keepOnIgnore)
        {
            // Early exit if the commit was discarded by a tree-filtering
            if (commit.Discard)
            {
                return;
            }

            PathMatch match;
            var path = entry.Path;
            if (TryMatch(path, globalPattern, out match))
            {
                // If path is ignored we can update the entries to keep
                if (match.IsIgnored)
                {
                    DirectMatch(commit, entry, keepOnIgnore, ref match);
                }
            }
            else
            {
                var checkTask = Task.Factory.StartNew(() =>
                {
                    Match(path, globalPattern, ref match);

                    // If path is ignored we can update the entries to keep
                    if (match.IsIgnored)
                    {
                        DirectMatch(commit, entry, keepOnIgnore, ref match);
                    }
                });

                if (DisableTasks)
                {
                    checkTask.RunSynchronously();
                }
                else
                {
                    lock (pendingTasks)
                    {
                        pendingTasks.Add(checkTask);
                    }
                }
            }
        }

        /// <summary>
        /// Applies a confirmed pattern match: invokes the pattern's script callback
        /// (if any) and then adds or removes the entry from entriesToKeep.
        /// </summary>
        private void DirectMatch(SimpleCommit commit, TreeEntry entry, bool keepOnIgnore, ref PathMatch match)
        {
            // If callback return false, then we don't update entries to keep or delete
            SimpleEntry simpleEntry;
            var pattern = match.Pattern;
            var callback = pattern.Callback;
            if (callback != null)
            {
                simpleEntry = new SimpleEntry(repo, entry);
                simpleEntry.Discard = !keepOnIgnore;

                // Calls the script
                callback(repo, pattern.Path, commit, ref simpleEntry);

                // Skip if this commit is discarded by the tree filtering
                // Skip if this entry was discarded
                if (commit.Discard || (simpleEntry.Discard == keepOnIgnore))
                {
                    return;
                }
            }
            else
            {
                simpleEntry = default(SimpleEntry);
            }

            // We can update entries to keep
            lock (entriesToKeep)
            {
                if (keepOnIgnore)
                {
                    entriesToKeep.Add(entry, simpleEntry.NewEntryValue);
                }
                else
                {
                    entriesToKeep.Remove(entry);
                }
            }
        }

        /// <summary>
        /// Tries to get a previously computed match for <paramref name="path"/> from the
        /// pattern set's cache.
        /// </summary>
        private static bool TryMatch(string path, PathPatterns pathPatterns, out PathMatch match)
        {
            var ignoreCache = pathPatterns.IgnoreCache;

            // Try first to get a previously match from the cache
            lock (ignoreCache)
            {
                return ignoreCache.TryGetValue(path, out match);
            }
        }

        /// <summary>
        /// Matches <paramref name="path"/> against the patterns (first match wins) and
        /// stores the result in the cache.
        /// </summary>
        private static void Match(string path, PathPatterns pathPatterns, ref PathMatch match)
        {
            var ignoreCache = pathPatterns.IgnoreCache;
            foreach (var pathPattern in pathPatterns)
            {
                if (pathPattern.Ignore.IsPathIgnored(path))
                {
                    match = new PathMatch(true, pathPattern);
                    break;
                }
            }
            lock (ignoreCache)
            {
                ignoreCache.Add(path, match);
            }
        }

        /// <summary>
        /// Runs the remove patterns against the currently kept entries
        /// (a snapshot is taken since matching mutates entriesToKeep).
        /// </summary>
        private void RemoveEntries(SimpleCommit commit)
        {
            var entries = entriesToKeep.ToList();
            foreach (var entry in entries)
            {
                KeepEntry(commit, entry.Key, removePathPatterns, false);
            }
        }

        /// <summary>
        /// Drains pendingTasks, repeating because running tasks may schedule more tasks.
        /// </summary>
        private void ProcessPendingTasks()
        {
            while (true)
            {
                Task[] taskToWait;
                lock (pendingTasks)
                {
                    if (pendingTasks.Count == 0)
                    {
                        break;
                    }
                    taskToWait = pendingTasks.ToArray();
                    pendingTasks.Clear();
                }
                Task.WaitAll(taskToWait);
            }
        }

        /// <summary>
        /// Parses the keep/remove pattern text: plain gitignore-style lines, single-line
        /// scripted patterns (<c>pattern => script</c>), and multi-line scripted patterns
        /// (<c>pattern {% … %}</c>).
        /// </summary>
        /// <param name="pathPatternsAsText">Raw pattern text (one pattern per line).</param>
        /// <param name="context">Option name used for verbose logging (e.g. "--keep").</param>
        private PathPatterns ParseTreeFilteringPathPatterns(string pathPatternsAsText, string context)
        {
            var pathPatterns = new PathPatterns();
            if (string.IsNullOrWhiteSpace(pathPatternsAsText))
            {
                return pathPatterns;
            }

            var reader = new StringReader(pathPatternsAsText);

            // non scripted patterns
            var pathPatternsNoScript = new List<string>();

            bool isInMultiLineScript = false;
            var multiLineScript = new StringBuilder();
            string currentMultilinePath = null;

            string line;
            while ((line = reader.ReadLine()) != null)
            {
                // Skip blank lines (outside scripts) and '#' comment lines.
                if ((!isInMultiLineScript && string.IsNullOrWhiteSpace(line)) || line.TrimStart().StartsWith("#"))
                {
                    continue;
                }

                if (isInMultiLineScript)
                {
                    var endOfScriptIndex = line.IndexOf("%}", StringComparison.InvariantCultureIgnoreCase);
                    if (endOfScriptIndex >= 0)
                    {
                        isInMultiLineScript = false;
                        multiLineScript.AppendLine(line.Substring(0, endOfScriptIndex));

                        pathPatterns.Add(new PathPattern(tempRocketPath, currentMultilinePath, multiLineScript.ToString()));
                        multiLineScript.Length = 0;
                    }
                    else
                    {
                        multiLineScript.AppendLine(line);
                    }
                }
                else
                {
                    var scriptIndex = line.IndexOf("=>", StringComparison.InvariantCultureIgnoreCase);
                    if (scriptIndex >= 0)
                    {
                        // Single-line script: "pattern => expression"
                        var pathPatternText = line.Substring(0, scriptIndex).TrimEnd();
                        var textScript = line.Substring(scriptIndex + 2).TrimEnd();
                        var pathPattern = new PathPattern(tempRocketPath, pathPatternText, textScript);
                        pathPatterns.Add(pathPattern);
                    }
                    else
                    {
                        scriptIndex = line.IndexOf("{%", StringComparison.InvariantCultureIgnoreCase);
                        if (scriptIndex >= 0)
                        {
                            // Start of a multi-line script: "pattern {% ..."
                            isInMultiLineScript = true;
                            multiLineScript.Length = 0;
                            currentMultilinePath = line.Substring(0, scriptIndex).TrimEnd();
                            var textScript = line.Substring(scriptIndex + 2).TrimEnd();
                            multiLineScript.AppendLine(textScript);
                        }
                        else
                        {
                            // If this is a normal path pattern line
                            pathPatternsNoScript.Add(line.TrimEnd());
                        }
                    }
                }
            }

            if (isInMultiLineScript)
            {
                throw new RocketException("Expecting the end %}} of multiline script: {0}", multiLineScript);
            }

            if (pathPatternsNoScript.Count > 0)
            {
                // All plain patterns share a single ignore-rule evaluator backed by the temp repo.
                var repoFilter = new Repository(tempRocketPath);
                repoFilter.Ignore.ResetAllTemporaryRules();
                if (Verbose)
                {
                    foreach (var pattern in pathPatternsNoScript)
                    {
                        OutputWriter.WriteLine("Found {0} pattern [{1}]", context, pattern);
                    }
                }
                repoFilter.Ignore.AddTemporaryRules(pathPatternsNoScript);

                // Add the white list repo at the end to let the scripted rules to run first
                pathPatterns.Add(new PathPattern(repoFilter));
            }

            return pathPatterns;
        }

        /// <summary>
        /// Generates, compiles (via Roslyn, in-memory) and binds the user scripts for
        /// commit filtering and scripted tree patterns.
        /// </summary>
        /// <exception cref="RocketException">When the generated script does not compile.</exception>
        private void CompileScripts()
        {
            // Anything to compile?
            if (!hasTreeFilteringWithScripts && !hasCommitFiltering)
            {
                return;
            }

            var classText = new StringBuilder();
            // NOTE(review): the newlines inside the verbatim strings below were lost in
            // extraction and have been reconstructed; verify against the original file.
            classText.Append(@"// This file is automatically generated
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LibGit2Sharp;
");
            classText.AppendFormat(@"
namespace {0}", typeof(RocketFilterApp).Namespace).Append(@"
{
    public class RocketScript : RocketScriptBase
    {
        public RocketScript(RocketFilterApp app) : base(app)
        {
        }
");

            // Append commit filtering method
            AppendCommitFilterMethod(CommitFilter, classText);

            // Append any tree filtering methods
            var treeFilterMethods = new Dictionary<string, PathPattern>();
            var allPathPatterns = keepPathPatterns.Concat(removePathPatterns).ToList();
            AppendTreeFilterMethods(allPathPatterns, classText, treeFilterMethods);

            classText.Append(@"
    }
}
");
            var code = classText.ToString();

            // Dumps pretty code
            if (Verbose)
            {
                var prettyCode = DumpPrettyCode(code);
                OutputWriter.WriteLine("Patterns with scripting:");
                OutputWriter.WriteLine(prettyCode);
            }

            var syntaxTree = CSharpSyntaxTree.ParseText(code);

            string assemblyName = Path.GetRandomFileName();
            // Reference every loaded, file-backed assembly so scripts see the app's types.
            var references = new List<MetadataReference>();
            foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
            {
                if (!assembly.IsDynamic && !string.IsNullOrEmpty(assembly.Location))
                {
                    if (Verbose)
                    {
                        OutputWriter.WriteLine("Used assembly for scripting: " + assembly.Location);
                    }
                    references.Add(MetadataReference.CreateFromFile(assembly.Location));
                }
            }

            var compilation = CSharpCompilation.Create(
                assemblyName,
                syntaxTrees: new[] { syntaxTree },
                references: references,
                options: new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary));

            using (var stream = new MemoryStream())
            {
                var result = compilation.Emit(stream);

                if (result.Success)
                {
                    stream.Position = 0;
                    var assembly = Assembly.Load(stream.ToArray());

                    var type = assembly.GetType(typeof(RocketFilterApp).Namespace + ".RocketScript");
                    var instance = Activator.CreateInstance(type, this);

                    // Rebind the generated methods to their pattern/commit-filter delegates.
                    foreach (var method in type.GetMethods())
                    {
                        PathPattern pathPattern;
                        if (treeFilterMethods.TryGetValue(method.Name, out pathPattern))
                        {
                            pathPattern.Callback = (PathPatternCallbackDelegate)Delegate.CreateDelegate(typeof(PathPatternCallbackDelegate), instance, method);
                        }
                        else if (method.Name == MethodCommitFilterName)
                        {
                            commitFilteringCallback = (CommitFilteringCallbackDelegate)Delegate.CreateDelegate(typeof(CommitFilteringCallbackDelegate), instance, method);
                        }
                    }
                }
                else
                {
                    var failures = result.Diagnostics.Where(diagnostic =>
                        diagnostic.IsWarningAsError ||
                        diagnostic.Severity == DiagnosticSeverity.Error);

                    throw new RocketException(BuildCodeErrors(failures)) { AdditionalText = DumpPrettyCode(code) };
                }
            }
        }

        /// <summary>
        /// Returns <paramref name="code"/> with 1-per-line numbered prefixes for error reporting.
        /// </summary>
        private string DumpPrettyCode(string code)
        {
            var prettyCode = new StringBuilder(code.Length + 1024);
            var codeReader = new StringReader(code);
            string line;
            for (int i = 0; (line = codeReader.ReadLine()) != null; i++)
            {
                prettyCode.AppendFormat(CultureInfo.InvariantCulture, "{0,4}: {1}\n", i, line);
            }
            return prettyCode.ToString();
        }

        /// <summary>
        /// Formats Roslyn diagnostics into a readable error message.
        /// </summary>
        private string BuildCodeErrors(IEnumerable<Diagnostic> failures)
        {
            var errorMessage = new StringBuilder();
            errorMessage.AppendLine();
            errorMessage.AppendLine("Error while compiling the script:");
            errorMessage.AppendLine();
            foreach (var failure in failures)
            {
                var lineSpan = failure.Location.GetLineSpan();
                errorMessage.AppendFormat("    ({0}): {1} {2}: {3}\n", lineSpan.StartLinePosition, failure.Severity, failure.Id, failure.GetMessage());
            }
            return errorMessage.ToString();
        }

        /// <summary>
        /// Emits the commit-filter method (body = user script) into the generated class.
        /// </summary>
        private static void AppendCommitFilterMethod(string commitFilter, StringBuilder classText)
        {
            if (commitFilter == null)
            {
                return;
            }
            classText.AppendFormat(@"
        // commit-filtering
        public void {0}", MethodCommitFilterName).Append(@"(Repository repo, SimpleCommit commit)
        {
");
            classText.Append(commitFilter);
            classText.Append(@"
        }
");
        }

        /// <summary>
        /// Emits one method per scripted tree pattern into the generated class and records
        /// the method-name -> pattern mapping for later delegate binding.
        /// </summary>
        private void AppendTreeFilterMethods(IEnumerable<PathPattern> pathPatterns, StringBuilder classText, Dictionary<string, PathPattern> methodNames)
        {
            if (!hasTreeFilteringWithScripts)
            {
                return;
            }

            const string methodTreeFilterPrefix = "TreeFilterMethod";
            foreach (var pathPattern in pathPatterns)
            {
                // Skip non script text
                if (pathPattern.ScriptText == null)
                {
                    continue;
                }
                var methodName = string.Format(CultureInfo.InvariantCulture, "{0}{1}", methodTreeFilterPrefix, methodNames.Count);
                methodNames.Add(methodName, pathPattern);

                classText.AppendFormat(@"
        // tree-filtering: {0}", pathPattern.Path).AppendFormat(@"
        public void {0}", methodName).Append(@"(Repository repo, string pattern, SimpleCommit commit, ref SimpleEntry entry)
        {
");
                classText.Append(pathPattern.ScriptText);
                classText.Append(@"
        }
");
            }
        }

        /// <summary>
        /// Returns the rewritten commit for <paramref name="commit"/>, or null if it has
        /// not been rewritten (yet).
        /// </summary>
        internal SimpleCommit GetMapCommit(SimpleCommit commit)
        {
            Commit rewritten;
            if (commitMap.TryGetValue(commit.Id.Sha, out rewritten))
            {
                return GetSimpleCommit(rewritten);
            }
            return null;
        }

        /// <summary>
        /// Gets (or creates and caches) the <see cref="SimpleCommit"/> wrapper for a commit.
        /// Thread-safe via lock(simpleCommits).
        /// </summary>
        internal SimpleCommit GetSimpleCommit(Commit commit)
        {
            SimpleCommit simpleCommit;
            lock (simpleCommits)
            {
                if (!simpleCommits.TryGetValue(commit, out simpleCommit))
                {
                    simpleCommit = new SimpleCommit(this, commit);
                    simpleCommits.Add(commit, simpleCommit);
                }
            }
            return simpleCommit;
        }

        // Ordered pattern list plus a shared path->match cache.
        private class PathPatterns : List<PathPattern>
        {
            public PathPatterns()
            {
                IgnoreCache = new Dictionary<string, PathMatch>();
            }

            public readonly Dictionary<string, PathMatch> IgnoreCache;
        }

        // One keep/remove pattern: either a plain gitignore-backed pattern (first ctor)
        // or a single scripted pattern with its own ignore repository (second ctor).
        private class PathPattern
        {
            public PathPattern(Repository repoIgnore)
            {
                if (repoIgnore == null) throw new ArgumentNullException("repoIgnore");
                Repository = repoIgnore;
                Ignore = Repository.Ignore;
            }

            public PathPattern(string repoIgnorePath, string path, string scriptText)
            {
                if (repoIgnorePath == null) throw new ArgumentNullException("repoIgnorePath");
                if (path == null) throw new ArgumentNullException("path");
                Path = path;
                ScriptText = scriptText;
                Repository = new Repository(repoIgnorePath);
                Repository.Ignore.AddTemporaryRules(new[] { path });
                Ignore = Repository.Ignore;
            }

            public readonly string Path;

            public readonly string ScriptText;

            // Bound after script compilation; null for plain patterns.
            public PathPatternCallbackDelegate Callback;

            // NOTE(review): this Repository is never disposed; confirm whether cleanup is needed.
            private readonly Repository Repository;

            public readonly Ignore Ignore;
        }

        // Result of matching a path against a pattern set.
        private struct PathMatch
        {
            public PathMatch(bool isIgnored, PathPattern pattern)
            {
                IsIgnored = isIgnored;
                Pattern = pattern;
            }

            public readonly bool IsIgnored;

            public readonly PathPattern Pattern;
        }

        public void Dispose()
        {
            if (repo != null)
            {
                repo.Dispose();
                repo = null;
            }
        }

        /// <summary>
        /// A generic object comparerer that would only use object's reference,
        /// ignoring any <see cref="IEquatable{T}"/> or <see cref="object.Equals(object)"/> overrides.
        /// http://stackoverflow.com/a/1890230/1356325
        /// </summary>
        private class ObjectReferenceEqualityComparer<T> : EqualityComparer<T>
            where T : class
        {
            private static IEqualityComparer<T> _defaultComparer;

            public new static IEqualityComparer<T> Default
            {
                get { return _defaultComparer ?? (_defaultComparer = new ObjectReferenceEqualityComparer<T>()); }
            }

            #region IEqualityComparer<T> Members

            public override bool Equals(T x, T y)
            {
                return ReferenceEquals(x, y);
            }

            public override int GetHashCode(T obj)
            {
                return RuntimeHelpers.GetHashCode(obj);
            }

            #endregion
        }
    }
}
// (c) Copyright 2012 Hewlett-Packard Development Company, L.P.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using HpToolsLauncher.Properties;
using HpToolsLauncher.TestRunners;

namespace HpToolsLauncher
{
    /// <summary>
    /// Discovers file-system based tests (test directories, LoadRunner scenarios,
    /// .mtb / .mtbx batch files) from a list of sources and runs them with the
    /// appropriate per-type runner (GUI / API / LoadRunner).
    /// </summary>
    public class FileSystemTestsRunner : RunnerBase, IDisposable
    {
        #region Members

        Dictionary<string, string> _jenkinsEnvVariables;
        private List<TestInfo> _tests;
        private static string _uftViewerPath;
        private int _errors, _fail;
        private bool _useUFTLicense;
        private TimeSpan _timeout = TimeSpan.MaxValue;
        private Stopwatch _stopwatch = null;

        // Presence of this file signals an external abort request (checked in RunHPToolsTest).
        private string _abortFilename = Path.Combine(
            Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location),
            "stop" + Launcher.UniqueTimeStamp + ".txt");

        //LoadRunner Arguments
        private int _pollingInterval;
        private TimeSpan _perScenarioTimeOut;
        private List<string> _ignoreErrorStrings;

        //saves runners for cleaning up at the end.
        private Dictionary<TestType, IFileSysTestRunner> _colRunnersForCleanup = new Dictionary<TestType, IFileSysTestRunner>();

        public const string UftJUnitRportName = "uftRunnerRoot";

        #endregion

        /// <summary>
        /// Creates an instance of the runner given a list of test sources.
        /// Exits the process (Failed) if no testing tool is installed or no valid
        /// tests could be discovered from the sources.
        /// </summary>
        /// <param name="sources">test directories, scenario files, .mtb or .mtbx files</param>
        /// <param name="timeout">overall run timeout</param>
        /// <param name="ControllerPollingInterval">LoadRunner controller polling interval</param>
        /// <param name="perScenarioTimeOut">per-scenario timeout for LoadRunner tests</param>
        /// <param name="ignoreErrorStrings">error strings to ignore in LoadRunner output</param>
        /// <param name="jenkinsEnvVariables">environment variables forwarded to .mtbx parsing</param>
        /// <param name="fsAppParamName">optional test parameter name used to inject the app identifier</param>
        /// <param name="appIdentifier">optional application identifier value for <paramref name="fsAppParamName"/></param>
        /// <param name="useUFTLicense">whether GUI tests should run under the UFT license</param>
        public FileSystemTestsRunner(List<string> sources,
                                     TimeSpan timeout,
                                     int ControllerPollingInterval,
                                     TimeSpan perScenarioTimeOut,
                                     List<string> ignoreErrorStrings,
                                     Dictionary<string, string> jenkinsEnvVariables,
                                     string fsAppParamName,
                                     string appIdentifier,
                                     bool useUFTLicense = false)
        {
            _jenkinsEnvVariables = jenkinsEnvVariables;

            //search if we have any testing tools installed
            if (!Helper.IsTestingToolsInstalled(TestStorageType.FileSystem))
            {
                ConsoleWriter.WriteErrLine(string.Format(Resources.FileSystemTestsRunner_No_HP_testing_tool_is_installed_on, System.Environment.MachineName));
                Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
            }

            _timeout = timeout;
            _stopwatch = Stopwatch.StartNew();

            _pollingInterval = ControllerPollingInterval;
            _perScenarioTimeOut = perScenarioTimeOut;
            _ignoreErrorStrings = ignoreErrorStrings;

            _useUFTLicense = useUFTLicense;
            _tests = new List<TestInfo>();

            //go over all sources, and create a list of all tests
            foreach (string source in sources)
            {
                List<TestInfo> testGroup = new List<TestInfo>();
                try
                {
                    //--handle directories which contain test subdirectories (recursively)
                    if (Helper.IsDirectory(source))
                    {
                        var testsLocations = Helper.GetTestsLocations(source);
                        foreach (var loc in testsLocations)
                        {
                            var test = new TestInfo(loc, loc, source);
                            testGroup.Add(test);
                        }
                    }
                    //--handle files: LoadRunner scenarios and mtb/mtbx files (which contain
                    //  links to tests); any other file type is dropped.
                    else
                    {
                        testGroup = new List<TestInfo>();
                        FileInfo fi = new FileInfo(source);

                        // BUGFIX: extension comparisons were case-sensitive, so e.g. ".MTB"
                        // sources on a case-insensitive file system were silently dropped.
                        if (string.Equals(fi.Extension, Helper.LoadRunnerFileExtention, StringComparison.OrdinalIgnoreCase))
                        {
                            testGroup.Add(new TestInfo(source, source, source));
                        }
                        else if (string.Equals(fi.Extension, ".mtb", StringComparison.OrdinalIgnoreCase))
                        {
                            MtbManager manager = new MtbManager();
                            var paths = manager.Parse(source);
                            foreach (var p in paths)
                            {
                                testGroup.Add(new TestInfo(p, p, source));
                            }
                        }
                        else if (string.Equals(fi.Extension, ".mtbx", StringComparison.OrdinalIgnoreCase))
                        {
                            testGroup = MtbxManager.Parse(source, _jenkinsEnvVariables, source);

                            // Optionally inject the application identifier as a parameter on every test.
                            if (!string.IsNullOrEmpty(fsAppParamName) && !string.IsNullOrEmpty(appIdentifier))
                            {
                                var testParam = new TestParameterInfo() { Name = fsAppParamName, Type = "string", Value = appIdentifier };
                                foreach (TestInfo testInfo in testGroup)
                                {
                                    testInfo.ParameterList.Add(testParam);
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // Best effort: an unreadable/invalid source contributes no tests
                    // rather than aborting discovery of the remaining sources.
                    testGroup = new List<TestInfo>();
                }

                //--handle single test dir, add it with no group
                if (testGroup.Count == 1)
                {
                    testGroup[0].TestGroup = "<None>";
                }

                _tests.AddRange(testGroup);
            }

            if (_tests == null || _tests.Count == 0)
            {
                ConsoleWriter.WriteLine(Resources.FsRunnerNoValidTests);
                Environment.Exit((int)Launcher.ExitCodeEnum.Failed);
            }

            ConsoleWriter.WriteLine(string.Format(Resources.FsRunnerTestsFound, _tests.Count));
            _tests.ForEach(t => ConsoleWriter.WriteLine("" + t.TestName));
            ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator);
        }

        /// <summary>
        /// Runs all tests given to this runner and returns a suite of run results.
        /// Counters and cleanup of the per-type runners happen even if the loop throws.
        /// </summary>
        /// <returns>The test run results for each test</returns>
        public override TestSuiteRunResults Run()
        {
            //create a new Run Results object
            TestSuiteRunResults activeRunDesc = new TestSuiteRunResults();
            double totalTime = 0;
            try
            {
                var start = DateTime.Now;
                foreach (var test in _tests)
                {
                    if (RunCancelled()) break;

                    string errorReason = string.Empty;
                    TestRunResults runResult = null;
                    try
                    {
                        runResult = RunHPToolsTest(test, ref errorReason);
                    }
                    catch (Exception ex)
                    {
                        runResult = new TestRunResults();
                        runResult.TestState = TestState.Error;
                        runResult.ErrorDesc = ex.Message;
                        runResult.TestName = test.TestName;
                    }

                    //get the original source for this test, for grouping tests under test classes
                    runResult.TestGroup = test.TestGroup;

                    activeRunDesc.TestRuns.Add(runResult);

                    //if fail was terminated before this step, continue
                    if (runResult.TestState != TestState.Failed)
                    {
                        if (runResult.TestState != TestState.Error)
                        {
                            Helper.GetTestStateFromReport(runResult);
                        }
                        else
                        {
                            if (string.IsNullOrEmpty(runResult.ErrorDesc))
                            {
                                if (RunCancelled())
                                {
                                    runResult.ErrorDesc = HpToolsLauncher.Properties.Resources.ExceptionUserCancelled;
                                }
                                else
                                {
                                    runResult.ErrorDesc = HpToolsLauncher.Properties.Resources.ExceptionExternalProcess;
                                }
                            }
                            runResult.ReportLocation = null;
                            runResult.TestState = TestState.Error;
                        }
                    }

                    // Passed tests with warnings are downgraded to Warning.
                    if (runResult.TestState == TestState.Passed && runResult.HasWarnings)
                    {
                        runResult.TestState = TestState.Warning;
                        ConsoleWriter.WriteLine(Resources.FsRunnerTestDoneWarnings);
                    }
                    else
                    {
                        ConsoleWriter.WriteLine(string.Format(Resources.FsRunnerTestDone, runResult.TestState));
                    }

                    ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Test complete: " + runResult.TestPath + "\n-------------------------------------------------------------------------------------------------------");

                    UpdateCounters(runResult.TestState);
                    // NOTE: the original also computed a per-test elapsed time here but never used it.
                }
                totalTime = (DateTime.Now - start).TotalSeconds;
            }
            finally
            {
                activeRunDesc.NumTests = _tests.Count;
                activeRunDesc.NumErrors = _errors;
                activeRunDesc.TotalRunTime = TimeSpan.FromSeconds(totalTime);
                activeRunDesc.NumFailures = _fail;

                //make sure all runners created during the run get a chance to clean up
                foreach (IFileSysTestRunner cleanupRunner in _colRunnersForCleanup.Values)
                {
                    cleanupRunner.CleanUp();
                }
            }

            return activeRunDesc;
        }

        /// <summary>
        /// Checks if the overall timeout has NOT yet expired.
        /// </summary>
        /// <returns>true while time remains, false once the timeout has elapsed</returns>
        // NOTE(review): currently unused — RunCancelled() performs the equivalent check; kept for API stability.
        private bool CheckTimeout()
        {
            TimeSpan timeleft = _timeout - _stopwatch.Elapsed;
            return (timeleft > TimeSpan.Zero);
        }

        /// <summary>
        /// Creates the correct type of runner for the test's type and runs the single test.
        /// Runners are cached per type so they can be cleaned up at the end of the suite.
        /// </summary>
        /// <param name="testinf">the test to run</param>
        /// <param name="errorReason">receives a failure description, if any</param>
        /// <returns>the run result; an Error result if the test type is unknown</returns>
        private TestRunResults RunHPToolsTest(TestInfo testinf, ref string errorReason)
        {
            var testPath = testinf.TestPath;
            var type = Helper.GetTestType(testPath);
            IFileSysTestRunner runner = null;
            switch (type)
            {
                case TestType.ST:
                    runner = new ApiTestRunner(this, _timeout - _stopwatch.Elapsed);
                    break;
                case TestType.QTP:
                    runner = new GuiTestRunner(this, _useUFTLicense, _timeout - _stopwatch.Elapsed);
                    break;
                case TestType.LoadRunner:
                    // LoadRunner assemblies are resolved dynamically only for the duration of the run.
                    AppDomain.CurrentDomain.AssemblyResolve += Helper.HPToolsAssemblyResolver;
                    runner = new PerformanceTestRunner(this, _timeout, _pollingInterval, _perScenarioTimeOut, _ignoreErrorStrings);
                    break;
            }

            if (runner != null)
            {
                if (!_colRunnersForCleanup.ContainsKey(type))
                    _colRunnersForCleanup.Add(type, runner);

                Stopwatch s = Stopwatch.StartNew();

                TestRunResults results = null;
                results = runner.RunTest(testinf, ref errorReason, RunCancelled);

                results.Runtime = s.Elapsed;
                if (type == TestType.LoadRunner)
                    AppDomain.CurrentDomain.AssemblyResolve -= Helper.HPToolsAssemblyResolver;

                return results;
            }

            //check for abortion
            if (System.IO.File.Exists(_abortFilename))
            {
                ConsoleWriter.WriteLine(Resources.GeneralStopAborted);
                //stop working
                Environment.Exit((int)Launcher.ExitCodeEnum.Aborted);
            }

            return new TestRunResults { ErrorDesc = "Unknown TestType", TestState = TestState.Error };
        }

        /// <summary>
        /// Checks if the run was cancelled/aborted (i.e. the overall timeout elapsed).
        /// Logs and sets the Aborted exit code exactly once.
        /// </summary>
        /// <returns>true once the run has been cancelled</returns>
        public bool RunCancelled()
        {
            //if timeout has passed
            if (_stopwatch.Elapsed > _timeout)
            {
                if (!_blnRunCancelled)
                {
                    ConsoleWriter.WriteLine(Resources.GeneralTimedOut);
                    Launcher.ExitCode = Launcher.ExitCodeEnum.Aborted;
                    _blnRunCancelled = true;
                }
            }
            // The external abort file is handled in RunHPToolsTest.
            return _blnRunCancelled;
        }

        /// <summary>
        /// Sums errors and failed tests.
        /// </summary>
        /// <param name="testState"></param>
        private void UpdateCounters(TestState testState)
        {
            switch (testState)
            {
                case TestState.Error:
                    _errors += 1;
                    break;
                case TestState.Failed:
                    _fail += 1;
                    break;
            }
        }

        /// <summary>
        /// Opens the report viewer for the given report directory.
        /// </summary>
        /// <param name="reportDirectory"></param>
        public static void OpenReport(string reportDirectory)
        {
            Helper.OpenReport(reportDirectory, ref _uftViewerPath);
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Net;
using System.Reflection;
using System.Threading;
using OpenMetaverse;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Capabilities;
using OpenSim.Framework.Client;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.PhysicsModules.SharedBase;
using OpenSim.Services.Interfaces;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;

namespace OpenSim.Region.CoreModules.Framework.EntityTransfer
{
    /// <summary>
    /// The possible states that an agent can be in when its being transferred between regions.
    /// </summary>
    /// <remarks>
    /// This is a state machine.
    ///
    /// [Entry] => Preparing
    /// Preparing => { Transferring || Cancelling || CleaningUp || Aborting || [Exit] }
    /// Transferring => { ReceivedAtDestination || Cancelling || CleaningUp || Aborting }
    /// Cancelling => CleaningUp || Aborting
    /// ReceivedAtDestination => CleaningUp || Aborting
    /// CleaningUp => [Exit]
    /// Aborting => [Exit]
    ///
    /// In other words, agents normally travel throwing Preparing => Transferring => ReceivedAtDestination => CleaningUp
    /// However, any state can transition to CleaningUp if the teleport has failed.
    /// </remarks>
    enum AgentTransferState
    {
        Preparing,              // The agent is being prepared for transfer
        Transferring,           // The agent is in the process of being transferred to a destination
        ReceivedAtDestination,  // The destination has notified us that the agent has been successfully received
        CleaningUp,             // The agent is being changed to child/removed after a transfer
        Cancelling,             // The user has cancelled the teleport but we have yet to act upon this.
        Aborting                // The transfer is aborting.  Unlike Cancelling, no compensating actions should be performed
    }

    /// <summary>
    /// Records the state of entities when they are in transfer within or between regions (cross or teleport).
    /// </summary>
    public class EntityTransferStateMachine
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
        private static readonly string LogHeader = "[ENTITY TRANSFER STATE MACHINE]";

        /// <summary>
        /// If true then on a teleport, the source region waits for a callback from the destination region.  If
        /// a callback fails to arrive within a set time then the user is pulled back into the source region.
        /// </summary>
        public bool EnableWaitForAgentArrivedAtDestination { get; set; }

        // Module that owns this state machine; used for scene/region info in log and error messages.
        private EntityTransferModule m_mod;

        // Per-agent transfer state; every access is guarded by lock (m_agentsInTransit).
        private Dictionary<UUID, AgentTransferState> m_agentsInTransit = new Dictionary<UUID, AgentTransferState>();

        public EntityTransferStateMachine(EntityTransferModule module)
        {
            m_mod = module;
        }

        /// <summary>
        /// Set that an agent is in transit.
        /// </summary>
        /// <param name='id'>The ID of the agent being teleported</param>
        /// <returns>true if the agent was not already in transit, false if it was</returns>
        internal bool SetInTransit(UUID id)
        {
            m_log.DebugFormat("{0} SetInTransit. agent={1}, newState=Preparing", LogHeader, id);
            lock (m_agentsInTransit)
            {
                if (!m_agentsInTransit.ContainsKey(id))
                {
                    // New transfers always enter the machine in the Preparing state.
                    m_agentsInTransit[id] = AgentTransferState.Preparing;
                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Updates the state of an agent that is already in transit.
        /// </summary>
        /// <param name='id'></param>
        /// <param name='newState'></param>
        /// <returns>true if the transition was performed, false if it was silently ignored
        /// (Cancelling/Aborting for an unknown agent, or a redundant/late transition)</returns>
        /// <exception cref='Exception'>Illegal transitions will throw an Exception</exception>
        internal bool UpdateInTransit(UUID id, AgentTransferState newState)
        {
            m_log.DebugFormat("{0} UpdateInTransit. agent={1}, newState={2}", LogHeader, id, newState);

            bool transitionOkay = false;

            // We don't want to throw an exception on cancel since this can come it at any time.
            bool failIfNotOkay = true;

            // Should be a failure message if failure is not okay.
            string failureMessage = null;

            AgentTransferState? oldState = null;

            lock (m_agentsInTransit)
            {
                // Illegal to try and update an agent that's not actually in transit.
                if (!m_agentsInTransit.ContainsKey(id))
                {
                    if (newState != AgentTransferState.Cancelling && newState != AgentTransferState.Aborting)
                        failureMessage = string.Format(
                                "Agent with ID {0} is not registered as in transit in {1}",
                                id, m_mod.Scene.RegionInfo.RegionName);
                    else
                        failIfNotOkay = false;
                }
                else
                {
                    oldState = m_agentsInTransit[id];

                    // Transition table: Aborting is allowed from any state; CleaningUp from any state
                    // except itself; the remaining legal moves follow the diagram on AgentTransferState.
                    if (newState == AgentTransferState.Aborting)
                    {
                        transitionOkay = true;
                    }
                    else if (newState == AgentTransferState.CleaningUp && oldState != AgentTransferState.CleaningUp)
                    {
                        transitionOkay = true;
                    }
                    else if (newState == AgentTransferState.Transferring && oldState == AgentTransferState.Preparing)
                    {
                        transitionOkay = true;
                    }
                    else if (newState == AgentTransferState.ReceivedAtDestination && oldState == AgentTransferState.Transferring)
                    {
                        transitionOkay = true;
                    }
                    else
                    {
                        if (newState == AgentTransferState.Cancelling
                            && (oldState == AgentTransferState.Preparing || oldState == AgentTransferState.Transferring))
                        {
                            transitionOkay = true;
                        }
                        else
                        {
                            // A cancel/abort that arrives too late is ignored rather than treated as an error.
                            failIfNotOkay = false;
                        }
                    }

                    if (!transitionOkay)
                        failureMessage = string.Format(
                                "Agent with ID {0} is not allowed to move from old transit state {1} to new state {2} in {3}",
                                id, oldState, newState, m_mod.Scene.RegionInfo.RegionName);
                }

                if (transitionOkay)
                {
                    m_agentsInTransit[id] = newState;

                    // m_log.DebugFormat(
                    //     "[ENTITY TRANSFER STATE MACHINE]: Changed agent with id {0} from state {1} to {2} in {3}",
                    //     id, oldState, newState, m_mod.Scene.Name);
                }
                else if (failIfNotOkay)
                {
                    m_log.DebugFormat("{0} UpdateInTransit. Throwing transition failure = {1}", LogHeader, failureMessage);
                    throw new Exception(failureMessage);
                }
                // else
                // {
                //     if (oldState != null)
                //         m_log.DebugFormat(
                //             "[ENTITY TRANSFER STATE MACHINE]: Ignored change of agent with id {0} from state {1} to {2} in {3}",
                //             id, oldState, newState, m_mod.Scene.Name);
                //     else
                //         m_log.DebugFormat(
                //             "[ENTITY TRANSFER STATE MACHINE]: Ignored change of agent with id {0} to state {1} in {2} since agent not in transit",
                //             id, newState, m_mod.Scene.Name);
                // }
            }

            return transitionOkay;
        }

        /// <summary>
        /// Gets the current agent transfer state.
        /// </summary>
        /// <returns>Null if the agent is not in transit</returns>
        /// <param name='id'>
        /// Identifier.
        /// </param>
        internal AgentTransferState? GetAgentTransferState(UUID id)
        {
            lock (m_agentsInTransit)
            {
                if (!m_agentsInTransit.ContainsKey(id))
                    return null;
                else
                    return m_agentsInTransit[id];
            }
        }

        /// <summary>
        /// Removes an agent from the transit state machine.
        /// </summary>
        /// <param name='id'></param>
        /// <returns>true if the agent was flagged as being teleported when this method was called, false otherwise</returns>
        internal bool ResetFromTransit(UUID id)
        {
            lock (m_agentsInTransit)
            {
                if (m_agentsInTransit.ContainsKey(id))
                {
                    AgentTransferState state = m_agentsInTransit[id];

                    if (state == AgentTransferState.Transferring || state == AgentTransferState.ReceivedAtDestination)
                    {
                        // FIXME: For now, we allow exit from any state since a thrown exception in teleport is now guranteed
                        // to be handled properly - ResetFromTransit() could be invoked at any step along the process
                        m_log.WarnFormat(
                            "[ENTITY TRANSFER STATE MACHINE]: Agent with ID {0} should not exit directly from state {1}, should go to {2} state first in {3}",
                            id, state, AgentTransferState.CleaningUp, m_mod.Scene.RegionInfo.RegionName);

                        // throw new Exception(
                        //     "Agent with ID {0} cannot exit directly from state {1}, it must go to {2} state first",
                        //     state, AgentTransferState.CleaningUp);
                    }

                    m_agentsInTransit.Remove(id);

                    m_log.DebugFormat(
                        "[ENTITY TRANSFER STATE MACHINE]: Agent {0} cleared from transit in {1}",
                        id, m_mod.Scene.RegionInfo.RegionName);

                    return true;
                }
            }

            m_log.WarnFormat(
                "[ENTITY TRANSFER STATE MACHINE]: Agent {0} requested to clear from transit in {1} but was already cleared",
                id, m_mod.Scene.RegionInfo.RegionName);

            return false;
        }

        /// <summary>
        /// Blocks until the destination region confirms arrival of the agent, polling every 100ms
        /// for up to ~20 seconds (200 iterations).  Returns immediately if waiting is disabled
        /// on the owning module.
        /// </summary>
        /// <param name='id'>The ID of the agent in transit</param>
        /// <returns>true if the destination callback arrived in time, false on timeout</returns>
        /// <exception cref='Exception'>Thrown if the agent is not in transit or is in a state
        /// other than Transferring/ReceivedAtDestination when the wait starts</exception>
        internal bool WaitForAgentArrivedAtDestination(UUID id)
        {
            if (!m_mod.WaitForAgentArrivedAtDestination)
                return true;

            lock (m_agentsInTransit)
            {
                AgentTransferState? currentState = GetAgentTransferState(id);

                if (currentState == null)
                    throw new Exception(
                        string.Format(
                            "Asked to wait for destination callback for agent with ID {0} in {1} but agent is not in transit",
                            id, m_mod.Scene.RegionInfo.RegionName));

                if (currentState != AgentTransferState.Transferring && currentState != AgentTransferState.ReceivedAtDestination)
                    throw new Exception(
                        string.Format(
                            "Asked to wait for destination callback for agent with ID {0} in {1} but agent is in state {2}",
                            id, m_mod.Scene.RegionInfo.RegionName, currentState));
            }

            int count = 200;

            // There should be no race condition here since no other code should be removing the agent transfer or
            // changing the state to another other than Transferring => ReceivedAtDestination.
            // NOTE(review): if that assumption is wrong and another thread removes the entry
            // (e.g. via ResetFromTransit) while we poll, the indexer below would throw
            // KeyNotFoundException — confirm the assumption against the callers.
            while (count-- > 0)
            {
                lock (m_agentsInTransit)
                {
                    if (m_agentsInTransit[id] == AgentTransferState.ReceivedAtDestination)
                        break;
                }

                // m_log.Debug(" >>> Waiting... " + count);
                Thread.Sleep(100);
            }

            return count > 0;
        }

        /// <summary>
        /// Marks the agent as having been received at the destination region
        /// (Transferring => ReceivedAtDestination).  Late, duplicate or out-of-state
        /// notifications are logged and ignored rather than thrown.
        /// </summary>
        /// <param name='id'>The ID of the agent in transit</param>
        internal void SetAgentArrivedAtDestination(UUID id)
        {
            lock (m_agentsInTransit)
            {
                if (!m_agentsInTransit.ContainsKey(id))
                {
                    m_log.WarnFormat(
                        "[ENTITY TRANSFER STATE MACHINE]: Region {0} received notification of arrival in destination of agent {1} but no teleport request is active",
                        m_mod.Scene.RegionInfo.RegionName, id);

                    return;
                }

                AgentTransferState currentState = m_agentsInTransit[id];

                if (currentState == AgentTransferState.ReceivedAtDestination)
                {
                    // An anomoly but don't make this an outright failure - destination region could be overzealous in sending notification.
                    m_log.WarnFormat(
                        "[ENTITY TRANSFER STATE MACHINE]: Region {0} received notification of arrival in destination of agent {1} but notification has already previously been received",
                        m_mod.Scene.RegionInfo.RegionName, id);
                }
                else if (currentState != AgentTransferState.Transferring)
                {
                    m_log.ErrorFormat(
                        "[ENTITY TRANSFER STATE MACHINE]: Region {0} received notification of arrival in destination of agent {1} but agent is in state {2}",
                        m_mod.Scene.RegionInfo.RegionName, id, currentState);

                    return;
                }

                m_agentsInTransit[id] = AgentTransferState.ReceivedAtDestination;
            }
        }
    }
}
namespace android.media { [global::MonoJavaBridge.JavaClass()] public partial class JetPlayer : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static JetPlayer() { InitJNI(); } protected JetPlayer(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } [global::MonoJavaBridge.JavaInterface(typeof(global::android.media.JetPlayer.OnJetEventListener_))] public interface OnJetEventListener : global::MonoJavaBridge.IJavaObject { void onJetEvent(android.media.JetPlayer arg0, short arg1, byte arg2, byte arg3, byte arg4, byte arg5); void onJetUserIdUpdate(android.media.JetPlayer arg0, int arg1, int arg2); void onJetNumQueuedSegmentUpdate(android.media.JetPlayer arg0, int arg1); void onJetPauseUpdate(android.media.JetPlayer arg0, int arg1); } [global::MonoJavaBridge.JavaProxy(typeof(global::android.media.JetPlayer.OnJetEventListener))] public sealed partial class OnJetEventListener_ : java.lang.Object, OnJetEventListener { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; static OnJetEventListener_() { InitJNI(); } internal OnJetEventListener_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } internal static global::MonoJavaBridge.MethodId _onJetEvent4958; void android.media.JetPlayer.OnJetEventListener.onJetEvent(android.media.JetPlayer arg0, short arg1, byte arg2, byte arg3, byte arg4, byte arg5) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer.OnJetEventListener_._onJetEvent4958, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, 
global::android.media.JetPlayer.OnJetEventListener_.staticClass, global::android.media.JetPlayer.OnJetEventListener_._onJetEvent4958, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5)); } internal static global::MonoJavaBridge.MethodId _onJetUserIdUpdate4959; void android.media.JetPlayer.OnJetEventListener.onJetUserIdUpdate(android.media.JetPlayer arg0, int arg1, int arg2) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer.OnJetEventListener_._onJetUserIdUpdate4959, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.JetPlayer.OnJetEventListener_.staticClass, global::android.media.JetPlayer.OnJetEventListener_._onJetUserIdUpdate4959, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); } internal static global::MonoJavaBridge.MethodId _onJetNumQueuedSegmentUpdate4960; void android.media.JetPlayer.OnJetEventListener.onJetNumQueuedSegmentUpdate(android.media.JetPlayer arg0, int arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer.OnJetEventListener_._onJetNumQueuedSegmentUpdate4960, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, 
global::android.media.JetPlayer.OnJetEventListener_.staticClass, global::android.media.JetPlayer.OnJetEventListener_._onJetNumQueuedSegmentUpdate4960, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _onJetPauseUpdate4961; void android.media.JetPlayer.OnJetEventListener.onJetPauseUpdate(android.media.JetPlayer arg0, int arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer.OnJetEventListener_._onJetPauseUpdate4961, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.JetPlayer.OnJetEventListener_.staticClass, global::android.media.JetPlayer.OnJetEventListener_._onJetPauseUpdate4961, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.JetPlayer.OnJetEventListener_.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/JetPlayer$OnJetEventListener")); global::android.media.JetPlayer.OnJetEventListener_._onJetEvent4958 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.OnJetEventListener_.staticClass, "onJetEvent", "(Landroid/media/JetPlayer;SBBBB)V"); global::android.media.JetPlayer.OnJetEventListener_._onJetUserIdUpdate4959 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.OnJetEventListener_.staticClass, "onJetUserIdUpdate", "(Landroid/media/JetPlayer;II)V"); global::android.media.JetPlayer.OnJetEventListener_._onJetNumQueuedSegmentUpdate4960 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.OnJetEventListener_.staticClass, "onJetNumQueuedSegmentUpdate", 
"(Landroid/media/JetPlayer;I)V"); global::android.media.JetPlayer.OnJetEventListener_._onJetPauseUpdate4961 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.OnJetEventListener_.staticClass, "onJetPauseUpdate", "(Landroid/media/JetPlayer;I)V"); } } internal static global::MonoJavaBridge.MethodId _finalize4962; protected override void finalize() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer._finalize4962); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._finalize4962); } internal static global::MonoJavaBridge.MethodId _clone4963; public virtual new global::java.lang.Object clone() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::android.media.JetPlayer._clone4963)) as java.lang.Object; else return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._clone4963)) as java.lang.Object; } internal static global::MonoJavaBridge.MethodId _release4964; public virtual void release() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer._release4964); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._release4964); } internal static global::MonoJavaBridge.MethodId _play4965; public virtual bool play() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._play4965); else 
return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._play4965); } internal static global::MonoJavaBridge.MethodId _pause4966; public virtual bool pause() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._pause4966); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._pause4966); } internal static global::MonoJavaBridge.MethodId _getJetPlayer4967; public static global::android.media.JetPlayer getJetPlayer() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(android.media.JetPlayer.staticClass, global::android.media.JetPlayer._getJetPlayer4967)) as android.media.JetPlayer; } internal static global::MonoJavaBridge.MethodId _getMaxTracks4968; public static int getMaxTracks() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; return @__env.CallStaticIntMethod(android.media.JetPlayer.staticClass, global::android.media.JetPlayer._getMaxTracks4968); } internal static global::MonoJavaBridge.MethodId _loadJetFile4969; public virtual bool loadJetFile(android.content.res.AssetFileDescriptor arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._loadJetFile4969, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._loadJetFile4969, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _loadJetFile4970; public 
virtual bool loadJetFile(java.lang.String arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._loadJetFile4970, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._loadJetFile4970, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _closeJetFile4971; public virtual bool closeJetFile() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._closeJetFile4971); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._closeJetFile4971); } internal static global::MonoJavaBridge.MethodId _queueJetSegment4972; public virtual bool queueJetSegment(int arg0, int arg1, int arg2, int arg3, int arg4, byte arg5) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._queueJetSegment4972, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._queueJetSegment4972, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), 
global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5)); } internal static global::MonoJavaBridge.MethodId _queueJetSegmentMuteArray4973; public virtual bool queueJetSegmentMuteArray(int arg0, int arg1, int arg2, int arg3, bool[] arg4, byte arg5) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._queueJetSegmentMuteArray4973, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._queueJetSegmentMuteArray4973, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5)); } internal static global::MonoJavaBridge.MethodId _setMuteFlags4974; public virtual bool setMuteFlags(int arg0, bool arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._setMuteFlags4974, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, 
global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._setMuteFlags4974, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _setMuteArray4975; public virtual bool setMuteArray(bool[] arg0, bool arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._setMuteArray4975, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._setMuteArray4975, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } internal static global::MonoJavaBridge.MethodId _setMuteFlag4976; public virtual bool setMuteFlag(int arg0, bool arg1, bool arg2) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._setMuteFlag4976, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._setMuteFlag4976, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); } internal static global::MonoJavaBridge.MethodId _triggerClip4977; public virtual bool triggerClip(int arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return 
@__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._triggerClip4977, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._triggerClip4977, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _clearQueue4978; public virtual bool clearQueue() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) return @__env.CallBooleanMethod(this.JvmHandle, global::android.media.JetPlayer._clearQueue4978); else return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._clearQueue4978); } internal static global::MonoJavaBridge.MethodId _setEventListener4979; public virtual void setEventListener(android.media.JetPlayer.OnJetEventListener arg0) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer._setEventListener4979, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._setEventListener4979, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } internal static global::MonoJavaBridge.MethodId _setEventListener4980; public virtual void setEventListener(android.media.JetPlayer.OnJetEventListener arg0, android.os.Handler arg1) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (!IsClrObject) @__env.CallVoidMethod(this.JvmHandle, global::android.media.JetPlayer._setEventListener4980, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); else @__env.CallNonVirtualVoidMethod(this.JvmHandle, 
global::android.media.JetPlayer.staticClass, global::android.media.JetPlayer._setEventListener4980, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static void InitJNI() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.media.JetPlayer.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/media/JetPlayer")); global::android.media.JetPlayer._finalize4962 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "finalize", "()V"); global::android.media.JetPlayer._clone4963 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "clone", "()Ljava/lang/Object;"); global::android.media.JetPlayer._release4964 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "release", "()V"); global::android.media.JetPlayer._play4965 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "play", "()Z"); global::android.media.JetPlayer._pause4966 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "pause", "()Z"); global::android.media.JetPlayer._getJetPlayer4967 = @__env.GetStaticMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "getJetPlayer", "()Landroid/media/JetPlayer;"); global::android.media.JetPlayer._getMaxTracks4968 = @__env.GetStaticMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "getMaxTracks", "()I"); global::android.media.JetPlayer._loadJetFile4969 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "loadJetFile", "(Landroid/content/res/AssetFileDescriptor;)Z"); global::android.media.JetPlayer._loadJetFile4970 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "loadJetFile", "(Ljava/lang/String;)Z"); global::android.media.JetPlayer._closeJetFile4971 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "closeJetFile", "()Z"); 
global::android.media.JetPlayer._queueJetSegment4972 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "queueJetSegment", "(IIIIIB)Z"); global::android.media.JetPlayer._queueJetSegmentMuteArray4973 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "queueJetSegmentMuteArray", "(IIII[ZB)Z"); global::android.media.JetPlayer._setMuteFlags4974 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "setMuteFlags", "(IZ)Z"); global::android.media.JetPlayer._setMuteArray4975 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "setMuteArray", "([ZZ)Z"); global::android.media.JetPlayer._setMuteFlag4976 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "setMuteFlag", "(IZZ)Z"); global::android.media.JetPlayer._triggerClip4977 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "triggerClip", "(I)Z"); global::android.media.JetPlayer._clearQueue4978 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "clearQueue", "()Z"); global::android.media.JetPlayer._setEventListener4979 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "setEventListener", "(Landroid/media/JetPlayer$OnJetEventListener;)V"); global::android.media.JetPlayer._setEventListener4980 = @__env.GetMethodIDNoThrow(global::android.media.JetPlayer.staticClass, "setEventListener", "(Landroid/media/JetPlayer$OnJetEventListener;Landroid/os/Handler;)V"); } } }
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//

namespace DiscUtils.Vfs
{
    using System;
    using System.IO;

    /// <summary>
    /// Base class for the public facade on a file system.
    /// </summary>
    /// <remarks>
    /// The derived class can extend the functionality available from a file system
    /// beyond that defined by DiscFileSystem.  Every member simply delegates to the
    /// wrapped file system instance, except <see cref="Root"/>, <see cref="GetFileInfo"/>,
    /// <see cref="GetDirectoryInfo"/> and <see cref="GetFileSystemInfo"/>, which bind
    /// their result to this facade instance rather than the wrapped one.
    /// </remarks>
    public abstract class VfsFileSystemFacade : DiscFileSystem
    {
        // Assigned exactly once, in the constructor, so declared readonly.
        private readonly DiscFileSystem _wrapped;

        /// <summary>
        /// Initializes a new instance of the VfsFileSystemFacade class.
        /// </summary>
        /// <param name="toWrap">The actual file system instance.</param>
        /// <exception cref="ArgumentNullException"><paramref name="toWrap"/> is <c>null</c>.</exception>
        protected VfsFileSystemFacade(DiscFileSystem toWrap)
        {
            // Fail fast instead of deferring a NullReferenceException to first use.
            if (toWrap == null)
            {
                throw new ArgumentNullException(nameof(toWrap));
            }

            _wrapped = toWrap;
        }

        /// <summary>
        /// Gets the file system options, which can be modified.
        /// </summary>
        public override DiscFileSystemOptions Options
        {
            get { return _wrapped.Options; }
        }

        /// <summary>
        /// Gets a friendly name for the file system.
        /// </summary>
        public override string FriendlyName
        {
            get { return _wrapped.FriendlyName; }
        }

        /// <summary>
        /// Indicates whether the file system is read-only or read-write.
        /// </summary>
        /// <returns>true if the file system is read-write.</returns>
        public override bool CanWrite
        {
            get { return _wrapped.CanWrite; }
        }

        /// <summary>
        /// Gets the root directory of the file system.
        /// </summary>
        /// <remarks>Bound to this facade (not the wrapped instance), so that
        /// operations on the returned object flow back through the facade.</remarks>
        public override DiscDirectoryInfo Root
        {
            get { return new DiscDirectoryInfo(this, string.Empty); }
        }

        /// <summary>
        /// Gets the volume label.
        /// </summary>
        public override string VolumeLabel
        {
            get { return _wrapped.VolumeLabel; }
        }

        /// <summary>
        /// Gets a value indicating whether the file system is thread-safe.
        /// </summary>
        public override bool IsThreadSafe
        {
            get { return _wrapped.IsThreadSafe; }
        }

        /// <summary>
        /// Copies an existing file to a new file.
        /// </summary>
        /// <param name="sourceFile">The source file.</param>
        /// <param name="destinationFile">The destination file.</param>
        public override void CopyFile(string sourceFile, string destinationFile)
        {
            _wrapped.CopyFile(sourceFile, destinationFile);
        }

        /// <summary>
        /// Copies an existing file to a new file.
        /// </summary>
        /// <param name="sourceFile">The source file.</param>
        /// <param name="destinationFile">The destination file.</param>
        /// <param name="overwrite">Overwrite any existing file.</param>
        public override void CopyFile(string sourceFile, string destinationFile, bool overwrite)
        {
            _wrapped.CopyFile(sourceFile, destinationFile, overwrite);
        }

        /// <summary>
        /// Creates a directory.
        /// </summary>
        /// <param name="path">The path of the new directory.</param>
        public override void CreateDirectory(string path)
        {
            _wrapped.CreateDirectory(path);
        }

        /// <summary>
        /// Deletes a directory.
        /// </summary>
        /// <param name="path">The path of the directory to delete.</param>
        public override void DeleteDirectory(string path)
        {
            _wrapped.DeleteDirectory(path);
        }

        /// <summary>
        /// Deletes a directory, optionally with all descendants.
        /// </summary>
        /// <param name="path">The path of the directory to delete.</param>
        /// <param name="recursive">Determines if the all descendants should be deleted.</param>
        public override void DeleteDirectory(string path, bool recursive)
        {
            _wrapped.DeleteDirectory(path, recursive);
        }

        /// <summary>
        /// Deletes a file.
        /// </summary>
        /// <param name="path">The path of the file to delete.</param>
        public override void DeleteFile(string path)
        {
            _wrapped.DeleteFile(path);
        }

        /// <summary>
        /// Indicates if a directory exists.
        /// </summary>
        /// <param name="path">The path to test.</param>
        /// <returns>true if the directory exists.</returns>
        public override bool DirectoryExists(string path)
        {
            return _wrapped.DirectoryExists(path);
        }

        /// <summary>
        /// Indicates if a file exists.
        /// </summary>
        /// <param name="path">The path to test.</param>
        /// <returns>true if the file exists.</returns>
        public override bool FileExists(string path)
        {
            return _wrapped.FileExists(path);
        }

        /// <summary>
        /// Indicates if a file or directory exists.
        /// </summary>
        /// <param name="path">The path to test.</param>
        /// <returns>true if the file or directory exists.</returns>
        public override bool Exists(string path)
        {
            return _wrapped.Exists(path);
        }

        /// <summary>
        /// Gets the names of subdirectories in a specified directory.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <returns>Array of directories.</returns>
        public override string[] GetDirectories(string path)
        {
            return _wrapped.GetDirectories(path);
        }

        /// <summary>
        /// Gets the names of subdirectories in a specified directory matching a specified
        /// search pattern.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <param name="searchPattern">The search string to match against.</param>
        /// <returns>Array of directories matching the search pattern.</returns>
        public override string[] GetDirectories(string path, string searchPattern)
        {
            return _wrapped.GetDirectories(path, searchPattern);
        }

        /// <summary>
        /// Gets the names of subdirectories in a specified directory matching a specified
        /// search pattern, using a value to determine whether to search subdirectories.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <param name="searchPattern">The search string to match against.</param>
        /// <param name="searchOption">Indicates whether to search subdirectories.</param>
        /// <returns>Array of directories matching the search pattern.</returns>
        public override string[] GetDirectories(string path, string searchPattern, SearchOption searchOption)
        {
            return _wrapped.GetDirectories(path, searchPattern, searchOption);
        }

        /// <summary>
        /// Gets the names of files in a specified directory.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <returns>Array of files.</returns>
        public override string[] GetFiles(string path)
        {
            return _wrapped.GetFiles(path);
        }

        /// <summary>
        /// Gets the names of files in a specified directory.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <param name="searchPattern">The search string to match against.</param>
        /// <returns>Array of files matching the search pattern.</returns>
        public override string[] GetFiles(string path, string searchPattern)
        {
            return _wrapped.GetFiles(path, searchPattern);
        }

        /// <summary>
        /// Gets the names of files in a specified directory matching a specified
        /// search pattern, using a value to determine whether to search subdirectories.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <param name="searchPattern">The search string to match against.</param>
        /// <param name="searchOption">Indicates whether to search subdirectories.</param>
        /// <returns>Array of files matching the search pattern.</returns>
        public override string[] GetFiles(string path, string searchPattern, SearchOption searchOption)
        {
            return _wrapped.GetFiles(path, searchPattern, searchOption);
        }

        /// <summary>
        /// Gets the names of all files and subdirectories in a specified directory.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <returns>Array of files and subdirectories matching the search pattern.</returns>
        public override string[] GetFileSystemEntries(string path)
        {
            return _wrapped.GetFileSystemEntries(path);
        }

        /// <summary>
        /// Gets the names of files and subdirectories in a specified directory matching a specified
        /// search pattern.
        /// </summary>
        /// <param name="path">The path to search.</param>
        /// <param name="searchPattern">The search string to match against.</param>
        /// <returns>Array of files and subdirectories matching the search pattern.</returns>
        public override string[] GetFileSystemEntries(string path, string searchPattern)
        {
            return _wrapped.GetFileSystemEntries(path, searchPattern);
        }

        /// <summary>
        /// Moves a directory.
        /// </summary>
        /// <param name="sourceDirectoryName">The directory to move.</param>
        /// <param name="destinationDirectoryName">The target directory name.</param>
        public override void MoveDirectory(string sourceDirectoryName, string destinationDirectoryName)
        {
            _wrapped.MoveDirectory(sourceDirectoryName, destinationDirectoryName);
        }

        /// <summary>
        /// Moves a file.
        /// </summary>
        /// <param name="sourceName">The file to move.</param>
        /// <param name="destinationName">The target file name.</param>
        public override void MoveFile(string sourceName, string destinationName)
        {
            _wrapped.MoveFile(sourceName, destinationName);
        }

        /// <summary>
        /// Moves a file, allowing an existing file to be overwritten.
        /// </summary>
        /// <param name="sourceName">The file to move.</param>
        /// <param name="destinationName">The target file name.</param>
        /// <param name="overwrite">Whether to permit a destination file to be overwritten.</param>
        public override void MoveFile(string sourceName, string destinationName, bool overwrite)
        {
            _wrapped.MoveFile(sourceName, destinationName, overwrite);
        }

        /// <summary>
        /// Opens the specified file.
        /// </summary>
        /// <param name="path">The full path of the file to open.</param>
        /// <param name="mode">The file mode for the created stream.</param>
        /// <returns>The new stream.</returns>
        public override SparseStream OpenFile(string path, FileMode mode)
        {
            return _wrapped.OpenFile(path, mode);
        }

        /// <summary>
        /// Opens the specified file.
        /// </summary>
        /// <param name="path">The full path of the file to open.</param>
        /// <param name="mode">The file mode for the created stream.</param>
        /// <param name="access">The access permissions for the created stream.</param>
        /// <returns>The new stream.</returns>
        public override SparseStream OpenFile(string path, FileMode mode, FileAccess access)
        {
            return _wrapped.OpenFile(path, mode, access);
        }

        /// <summary>
        /// Gets the attributes of a file or directory.
        /// </summary>
        /// <param name="path">The file or directory to inspect.</param>
        /// <returns>The attributes of the file or directory.</returns>
        public override FileAttributes GetAttributes(string path)
        {
            return _wrapped.GetAttributes(path);
        }

        /// <summary>
        /// Sets the attributes of a file or directory.
        /// </summary>
        /// <param name="path">The file or directory to change.</param>
        /// <param name="newValue">The new attributes of the file or directory.</param>
        public override void SetAttributes(string path, FileAttributes newValue)
        {
            _wrapped.SetAttributes(path, newValue);
        }

        /// <summary>
        /// Gets the creation time (in local time) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <returns>The creation time.</returns>
        public override DateTime GetCreationTime(string path)
        {
            return _wrapped.GetCreationTime(path);
        }

        /// <summary>
        /// Sets the creation time (in local time) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <param name="newTime">The new time to set.</param>
        public override void SetCreationTime(string path, DateTime newTime)
        {
            _wrapped.SetCreationTime(path, newTime);
        }

        /// <summary>
        /// Gets the creation time (in UTC) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <returns>The creation time.</returns>
        public override DateTime GetCreationTimeUtc(string path)
        {
            return _wrapped.GetCreationTimeUtc(path);
        }

        /// <summary>
        /// Sets the creation time (in UTC) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <param name="newTime">The new time to set.</param>
        public override void SetCreationTimeUtc(string path, DateTime newTime)
        {
            _wrapped.SetCreationTimeUtc(path, newTime);
        }

        /// <summary>
        /// Gets the last access time (in local time) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <returns>The last access time.</returns>
        public override DateTime GetLastAccessTime(string path)
        {
            return _wrapped.GetLastAccessTime(path);
        }

        /// <summary>
        /// Sets the last access time (in local time) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <param name="newTime">The new time to set.</param>
        public override void SetLastAccessTime(string path, DateTime newTime)
        {
            _wrapped.SetLastAccessTime(path, newTime);
        }

        /// <summary>
        /// Gets the last access time (in UTC) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <returns>The last access time.</returns>
        public override DateTime GetLastAccessTimeUtc(string path)
        {
            return _wrapped.GetLastAccessTimeUtc(path);
        }

        /// <summary>
        /// Sets the last access time (in UTC) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <param name="newTime">The new time to set.</param>
        public override void SetLastAccessTimeUtc(string path, DateTime newTime)
        {
            _wrapped.SetLastAccessTimeUtc(path, newTime);
        }

        /// <summary>
        /// Gets the last modification time (in local time) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <returns>The last write time.</returns>
        public override DateTime GetLastWriteTime(string path)
        {
            return _wrapped.GetLastWriteTime(path);
        }

        /// <summary>
        /// Sets the last modification time (in local time) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <param name="newTime">The new time to set.</param>
        public override void SetLastWriteTime(string path, DateTime newTime)
        {
            _wrapped.SetLastWriteTime(path, newTime);
        }

        /// <summary>
        /// Gets the last modification time (in UTC) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <returns>The last write time.</returns>
        public override DateTime GetLastWriteTimeUtc(string path)
        {
            return _wrapped.GetLastWriteTimeUtc(path);
        }

        /// <summary>
        /// Sets the last modification time (in UTC) of a file or directory.
        /// </summary>
        /// <param name="path">The path of the file or directory.</param>
        /// <param name="newTime">The new time to set.</param>
        public override void SetLastWriteTimeUtc(string path, DateTime newTime)
        {
            _wrapped.SetLastWriteTimeUtc(path, newTime);
        }

        /// <summary>
        /// Gets the length of a file.
        /// </summary>
        /// <param name="path">The path to the file.</param>
        /// <returns>The length in bytes.</returns>
        public override long GetFileLength(string path)
        {
            return _wrapped.GetFileLength(path);
        }

        /// <summary>
        /// Gets an object representing a possible file.
        /// </summary>
        /// <param name="path">The file path.</param>
        /// <returns>The representing object.</returns>
        /// <remarks>The file does not need to exist.</remarks>
        public override DiscFileInfo GetFileInfo(string path)
        {
            return new DiscFileInfo(this, path);
        }

        /// <summary>
        /// Gets an object representing a possible directory.
        /// </summary>
        /// <param name="path">The directory path.</param>
        /// <returns>The representing object.</returns>
        /// <remarks>The directory does not need to exist.</remarks>
        public override DiscDirectoryInfo GetDirectoryInfo(string path)
        {
            return new DiscDirectoryInfo(this, path);
        }

        /// <summary>
        /// Gets an object representing a possible file system object (file or directory).
        /// </summary>
        /// <param name="path">The file system path.</param>
        /// <returns>The representing object.</returns>
        /// <remarks>The file system object does not need to exist.</remarks>
        public override DiscFileSystemInfo GetFileSystemInfo(string path)
        {
            return new DiscFileSystemInfo(this, path);
        }

        /// <summary>
        /// Provides access to the actual file system implementation.
        /// </summary>
        /// <typeparam name="TDirEntry">The concrete type representing directory entries.</typeparam>
        /// <typeparam name="TFile">The concrete type representing files.</typeparam>
        /// <typeparam name="TDirectory">The concrete type representing directories.</typeparam>
        /// <typeparam name="TContext">The concrete type holding global state.</typeparam>
        /// <returns>The actual file system instance.</returns>
        protected VfsFileSystem<TDirEntry, TFile, TDirectory, TContext> GetRealFileSystem<TDirEntry, TFile, TDirectory, TContext>()
            where TDirEntry : VfsDirEntry
            where TFile : IVfsFile
            where TDirectory : class, IVfsDirectory<TDirEntry, TFile>, TFile
            where TContext : VfsContext
        {
            return (VfsFileSystem<TDirEntry, TFile, TDirectory, TContext>)_wrapped;
        }

        /// <summary>
        /// Provides access to the actual file system implementation.
        /// </summary>
        /// <typeparam name="T">The concrete type of the actual file system.</typeparam>
        /// <returns>The actual file system instance.</returns>
        protected T GetRealFileSystem<T>()
            where T : DiscFileSystem
        {
            return (T)_wrapped;
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
using System.Xml;
using Zeta.VoyagerLibrary.Common;
using Zeta.VoyagerLibrary.Logging;

namespace Web.Code;

using Zeta.VoyagerLibrary.Tools.Text;

/// <summary>
/// Grabs a web page, makes all links absolute to the base URI and splits
/// the grabbed HTML at the placeholder.
/// </summary>
public class WebPageGrabber :
	IDisposable
{
	#region Public routines.
	// ------------------------------------------------------------------

	/// <summary>
	/// The main function for fetching.
	/// If successful, the properties <see cref="HtmlBefore"/> and <see cref="HtmlAfter"/>
	/// are filled with values.
	/// </summary>
	/// <param name="placeholder">The placeholder to search for. E.g. "##guestbook##".</param>
	/// <param name="fetchFromUrl">The fetch URL to download from.</param>
	/// <param name="baseUrl">The base URL to prefix all relative links with.</param>
	/// <param name="header">Optional. An additional header to add.</param>
	/// <param name="cacheDuration">How long a grabbed page is valid.</param>
	public void FetchContent(
		string placeholder,
		string fetchFromUrl,
		string baseUrl,
		string header,
		TimeSpan cacheDuration )
	{
		// Reset the results of any previous call.
		_internalHtmlBefore = null;
		_internalHtmlAfter = null;

		// --

		var cache = new Cache( fetchFromUrl, cacheDuration );

		string html;
		if ( cache.IsUpToDateCachedVersionAvailable )
		{
			html = cache.CachedContent;
		}
		else
		{
			try
			{
				html = readRemoteHtmlDocument( fetchFromUrl );
			}
			catch ( Exception x )
			{
				if ( cache.IsCachedVersionAvailable )
				{
					// A stale cached copy exists: report the error but
					// continue with the stale content rather than failing.
					LogCentral.Current.LogError($@"Error during document retrieval from URL '{fetchFromUrl}'.", x );
					html = cache.CachedContent;
				}
				else
				{
					// No fallback available; propagate.
					throw;
				}
			}

			cache.CachedContent = html;
		}

		// Rewrite links to be absolute, insert the extra header and split
		// the page at the placeholder.
		var xml = getDocReader( html, baseUrl );
		var links = findAllLinks( xml, baseUrl );
		var newHtml = replaceAllLinks( html, links, baseUrl );

		newHtml = insertHeader( newHtml, header );
		splitHtml( newHtml, placeholder );

		// --

		_fetchContentCalled = true;
	}

	// ------------------------------------------------------------------
	#endregion

	#region Public properties.
	// ------------------------------------------------------------------

	/// <summary>
	/// The placeholder parameters (if any) from the last call.
	/// Returns NULL if no parameters.
	/// </summary>
	public string PlaceholderParameters => _internalPlaceholderParameters;

	/// <summary>
	/// Read the HTML string that appears before the placeholder.
	/// </summary>
	public string HtmlBefore
	{
		get
		{
			if ( !_fetchContentCalled )
			{
				throw new ApplicationException( @"Please call the FetchContent() function before accessing the HtmlBefore property." );
			}
			else
			{
				return _internalHtmlBefore;
			}
		}
	}

	/// <summary>
	/// Read the HTML string that appears after the placeholder.
	/// </summary>
	public string HtmlAfter
	{
		get
		{
			if ( !_fetchContentCalled )
			{
				// Fixed copy-paste bug: this message previously named the
				// HtmlBefore property.
				throw new ApplicationException( @"Please call the FetchContent() function before accessing the HtmlAfter property." );
			}
			else
			{
				return _internalHtmlAfter;
			}
		}
	}

	/// <summary>
	/// Access AFTER you called <see cref="FetchContent"/>.
	/// </summary>
	public string SourcePageEncodingName
	{
		get
		{
			if ( !_fetchContentCalled )
			{
				throw new ApplicationException( @"Please call the FetchContent() function before accessing the SourcePageEncodingName property." );
			}
			else
			{
				return _sourcePageEncodingName;
			}
		}
	}

	/// <summary>
	/// Access AFTER you called <see cref="FetchContent"/>.
/// </summary> public Encoding SourcePageEncoding { get { if ( !_fetchContentCalled ) { throw new ApplicationException( @"Please call the FetchContent() function before accessing the SourcePageEncoding property." ); } else { return _sourcePageEncoding; } } } // ------------------------------------------------------------------ #endregion #region IDisposable member. // ------------------------------------------------------------------ public void Dispose() { _internalHtmlBefore = null; _internalHtmlAfter = null; } // ------------------------------------------------------------------ #endregion #region Private routines. // ------------------------------------------------------------------ /// <summary> /// &lt;meta http-equiv="Content-Type" content="text/html; charset=utf-8"&gt;. /// </summary> private const string Htmlcontentencodingpattern = @"<meta\s+http-equiv\s*=\s*[""'\s]?Content-Type\b.*?charset\s*=\s*([^""'\s>]*)"; private static string detectEncodingName( byte[] content ) { if ( content is not { Length: > 0 } ) { return null; } else { // Decode with default encoding to detect the . var html = Encoding.Default.GetString( content ); // Find. var match = Regex.Match( html, Htmlcontentencodingpattern, RegexOptions.Singleline | RegexOptions.IgnoreCase ); return !match.Success || match.Groups.Count < 2 ? null : match.Groups[1].Value; } } /// <summary> /// Load the content from the given HTML. 
/// </summary> private string readRemoteHtmlDocument( string url ) { LogCentral.Current.LogInfo( $@"Reading remote HTML document from URL '{url}'."); var req = (HttpWebRequest)WebRequest.Create( url ); using var resp = (HttpWebResponse)req.GetResponse(); using var stream = resp.GetResponseStream(); byte[] content; using ( var mem = new MemoryStream() ) { const int blockSize = 16384; var blockBuffer = new byte[blockSize]; int read; while ( stream != null && (read = stream.Read( blockBuffer, 0, blockSize )) > 0 ) { mem.Write( blockBuffer, 0, read ); } // -- mem.Seek( 0, SeekOrigin.Begin ); var temporaryContent = mem.GetBuffer(); content = resp.ContentLength > 0 ? new byte[Math.Min( temporaryContent.Length, resp.ContentLength )] : new byte[temporaryContent.Length]; Array.Copy( temporaryContent, content, content.Length ); } _sourcePageEncodingName = detectEncodingName( content ); LogCentral.Current.LogInfo( $@"Detected encoding '{_sourcePageEncodingName}' for remote HTML document from URL '{url}'."); _sourcePageEncoding = getEncodingByName( _sourcePageEncodingName ); var html = _sourcePageEncoding.GetString( content ); return html; } /// <summary> /// /// </summary> private static XmlReader getDocReader( string html, string baseUrl ) { var r = new Sgml.SgmlReader(); if ( baseUrl.Length > 0 ) { r.SetBaseUri( baseUrl ); } r.DocType = @"HTML"; r.InputStream = new StringReader( html ); return r; } /// <summary> /// Find all links. /// </summary> private IEnumerable<string> findAllLinks( XmlReader xml, string baseUrl ) { var links = new List<string>(); while ( xml.Read() ) { switch ( xml.NodeType ) { // Added 2006-03-27: Inside comments, too. case XmlNodeType.Comment: var childXml = getDocReader( xml.Value, baseUrl ); var childLinks = findAllLinks( childXml, baseUrl ); links.AddRange( childLinks ); break; // A node element. case XmlNodeType.Element: // If this is a link element, store the URLs to modify. 
if ( isLinkElement( xml.Name, out var linkAttributeNames ) ) { while ( xml.MoveToNextAttribute() ) { checkAddStyleAttributeLinks( xml.Name, xml.Value, links ); // ReSharper disable LoopCanBeConvertedToQuery foreach ( var a in linkAttributeNames ) // ReSharper restore LoopCanBeConvertedToQuery { if ( string.Equals(a, xml.Name, StringComparison.CurrentCultureIgnoreCase) ) { var linkUrl = xml.Value; if ( !isAbsoluteUrl( linkUrl ) ) { links.Add( linkUrl ); } } } } } else { // Also, look for style attributes. while ( xml.MoveToNextAttribute() ) { checkAddStyleAttributeLinks( xml.Name, xml.Value, links ); } } break; } } return links.ToArray(); } private static void checkAddStyleAttributeLinks( string attributeName, string attributeValue, IList links ) { if ( attributeName.ToLower() == @"style" ) { var linkUrls = extractStyleUrls( attributeValue ); if ( linkUrls is { Length: > 0 } ) { foreach ( var linkUrl in linkUrls ) { if ( !isAbsoluteUrl( linkUrl ) ) { links.Add( linkUrl ); } } } } } /// <summary> /// Detects URLs in styles. /// </summary> /// <param name="styleValue">The style value.</param> /// <returns></returns> private static string[] extractStyleUrls( string styleValue ) { if ( styleValue == null || styleValue.Trim().Length <= 0 ) { return null; } else { var matchs = Regex.Matches( styleValue, @"url\s*\(\s*([^\)\s]+)\s*\)", RegexOptions.Singleline | RegexOptions.IgnoreCase ); if ( matchs.Count > 0 ) { var result = new List<string>(); // ReSharper disable LoopCanBeConvertedToQuery foreach ( Match match in matchs ) // ReSharper restore LoopCanBeConvertedToQuery { if ( match is { Success: true } ) { result.Add( match.Groups[1].Value ); } } return result.Count <= 0 ? null : result.ToArray(); } else { return null; } } } /// <summary> /// Checks whether a given URL is absolute or relative. 
/// </summary> private static bool isAbsoluteUrl( string url ) { var dotPos = url.IndexOf( @":", StringComparison.Ordinal); return dotPos > 0 && Uri.CheckSchemeName( url.Substring( 0, dotPos ) ); } /// <summary> /// Replace relative links with absolute links. /// </summary> /// <returns>Returns the replaces string.</returns> private static string replaceAllLinks( string html, IEnumerable<string> links, string baseUrl ) { var baseWS = baseUrl.TrimEnd( '/' ) + '/'; var baseOS = baseUrl.TrimEnd( '/' ); foreach ( var link in links ) { if ( link.Length > 0 ) { if ( link[0] == '/' ) { // Base without slash. html = Regex.Replace( html, @"""" + StringHelper.EscapeRXPattern( link ) + @"""", @"""" + baseOS + link + @"""", RegexOptions.IgnoreCase | RegexOptions.Multiline ); html = Regex.Replace( html, @"'" + StringHelper.EscapeRXPattern(link) + @"'", @"'" + baseOS + link + @"'", RegexOptions.IgnoreCase | RegexOptions.Multiline ); // For style-"url(...)"-links. html = Regex.Replace( html, @"\(\s*" + StringHelper.EscapeRXPattern(link) + @"\s*\)", @"(" + baseOS + link + @")", RegexOptions.IgnoreCase | RegexOptions.Multiline ); } else { //link = StringHelper.EscapeRXCharacters( link ); // Base with slash. html = Regex.Replace( html, @"""" + StringHelper.EscapeRXPattern(link) + @"""", @"""" + baseWS + link + @"""", RegexOptions.IgnoreCase | RegexOptions.Multiline ); html = Regex.Replace( html, @"'" + StringHelper.EscapeRXPattern(link) + @"'", @"'" + baseWS + link + @"'", RegexOptions.IgnoreCase | RegexOptions.Multiline ); // For style-"url(...)"-links. html = Regex.Replace( html, @"\(\s*" + StringHelper.EscapeRXPattern(link) + @"\s*\)", @"(" + baseWS + link + @")", RegexOptions.IgnoreCase | RegexOptions.Multiline ); } } } return html; } /// <summary> /// Breaks the HTML at the placeholder, stores in the member variables. 
/// </summary> private void splitHtml( string html, string placeholder ) { placeholder = placeholder.Trim().Trim( '#' ).Trim(); var pattern = $@"##{placeholder}(\([^\)]*\))?##"; var match = Regex.Match( html, pattern, RegexOptions.None ); var pos = -1; var length = 0; if ( match.Success ) { pos = match.Index; length = match.Length; if ( match.Groups.Count > 1 ) { _internalPlaceholderParameters = match.Groups[1].Value.Trim().Trim( '(', ')' ).Trim(); if ( _internalPlaceholderParameters.Length <= 0 ) { _internalPlaceholderParameters = null; } } else { _internalPlaceholderParameters = null; } } // -- if ( pos == -1 ) { _internalHtmlBefore = html; _internalHtmlAfter = string.Empty; } else { _internalHtmlBefore = html.Substring( 0, pos ); _internalHtmlAfter = html.Substring( pos + length ); } } /// <summary> /// Insert an additional string into the HTML header. /// </summary> /// <returns></returns> private static string insertHeader( string html, string header ) { if ( string.IsNullOrEmpty( header ) ) { return html; } else { html = Regex.Replace( html, @"(<head[^>]*>)", $"$1\n{header}\n", RegexOptions.IgnoreCase | RegexOptions.Multiline ); return html; } } /// <summary> /// Checks whether the given name is a HTML element (=tag) with /// a contained link. If true, <paramref name="linkAttributeNames"/> contains a list /// of all attributes that are links. /// </summary> /// <returns>Returns true, if it is a link element, false otherwise.</returns> private bool isLinkElement( string name, out string[] linkAttributeNames ) { name = name.ToLower(); foreach ( var e in _linkElements ) { if ( name == e.Name ) { linkAttributeNames = e.Attributes; return true; } } linkAttributeNames = null; return false; } /// <summary> /// Helper function for safely converting a response stream encoding /// to a supported Encoding class. 
/// </summary> private static Encoding getEncodingByName( string encodingName ) { var encoding = Encoding.Default; if ( !string.IsNullOrEmpty( encodingName ) ) { try { encoding = Encoding.GetEncoding( encodingName ); } catch ( NotSupportedException x ) { encoding = Encoding.Default; LogCentral.Current.LogError( $@"Unsupported encoding: '{encodingName}'. Returning default encoding '{encoding}'.", x ); encoding = Encoding.Default; } } return encoding; } // ------------------------------------------------------------------ #endregion #region Private member. // ------------------------------------------------------------------ /// <summary> /// Remembers whether the <see cref="FetchContent"/> function was called. /// </summary> private bool _fetchContentCalled; /// <summary> /// The HTML string that appears before the placeholder. /// </summary> private string _internalHtmlBefore; /// <summary> /// The HTML string that appears after the placeholder. /// </summary> private string _internalHtmlAfter; /// <summary> /// Stores placeholder parameters (if any) from the last call. /// </summary> private string _internalPlaceholderParameters; /// <summary> /// Encoding. /// </summary> private string _sourcePageEncodingName; private Encoding _sourcePageEncoding = Encoding.Default; // ------------------------------------------------------------------ #endregion #region LinkElement class. // ------------------------------------------------------------------ /// <summary> /// Used by <see cref="WebPageGrabber.isLinkElement"/>. /// </summary> private class LinkElement { public LinkElement( string name, params string[] attributes ) { Name = name; Attributes = attributes; } public readonly string Name; public readonly string[] Attributes; } /// <summary> /// This list was taken from the Perl module 'HTML-Tagset-3.03\blib\lib\HTML\Tagset.pm', /// '%linkElements' hash. 
/// </summary> private readonly LinkElement[] _linkElements = { new( @"a", @"href" ), new( @"applet", @"archive", @"codebase", @"code" ), new( @"area", @"href" ), new( @"base", @"href" ), new( @"bgsound", @"src" ), new( @"blockquote", @"cite" ), new( @"body", @"background" ), new( @"del", @"cite" ), new( @"embed", @"pluginspage", @"src" ), new( @"form", @"action" ), new( @"frame", @"src", @"longdesc" ), new( @"iframe", @"src", @"longdesc" ), new( @"ilayer", @"background" ), new( @"img", @"src", @"lowsrc", @"longdesc", @"usemap" ), new( @"input", @"src", @"usemap" ), new( @"ins", @"cite" ), new( @"isindex", @"action" ), new( @"head", @"profile" ), new( @"layer", @"background", @"src" ), new( @"link", @"href" ), new( @"object", @"classid", @"codebase", @"data", @"archive", @"usemap" ), new( @"q", @"cite" ), new( @"script", @"src", @"for" ), new( @"table", @"background" ), new( @"td", @"background" ), new( @"th", @"background" ), new( @"tr", @"background" ), new( @"xmp", @"href" ) }; // ------------------------------------------------------------------ #endregion. #region Cache management class. // ------------------------------------------------------------------ /// <summary> /// Caches the fetched URL. /// </summary> private class Cache { /// <summary> /// Constructor. /// </summary> /// <param name="key">This is usually the URL to fetch from.</param> /// <param name="cacheDuration">How long an entry in the cache is valid.</param> public Cache( string key, TimeSpan cacheDuration ) { _key = key; _cacheDuration = cacheDuration; } /// <summary> /// If a cached version is available, check, whether the version /// is up to date. /// </summary> public bool IsUpToDateCachedVersionAvailable { get { lock ( this ) { if ( IsCachedVersionAvailable ) { var span = DateTime.Now - cachedDate; return span < _cacheDuration; } else { return false; } } } } /// <summary> /// Check whether a cached version is available. /// Does ignore the date of the version. 
/// </summary> public bool IsCachedVersionAvailable { get { lock ( this ) { return CachedContent != null; } } } /// <summary> /// Read and write the cache. /// </summary> public string CachedContent { get { lock ( this ) { var o = HttpContext.Current.Session[cacheContentKeyName]; return ConvertHelper.ToString( o ); } } set { lock ( this ) { HttpContext.Current.Session[cacheContentKeyName] = value; cachedDate = DateTime.Now; } } } /// <summary> /// Read and write the cache date. /// </summary> private DateTime cachedDate { get { var o = HttpContext.Current.Session[cacheDateKeyName]; return ConvertHelper.ToDateTime( o ); } set => HttpContext.Current.Session[cacheDateKeyName] = value; } /// <summary> /// The key for the cache item. /// </summary> private readonly string _key; /// <summary> /// How long an entry is valid. /// </summary> private readonly TimeSpan _cacheDuration; /// <summary> /// Calculate the unique key name. /// </summary> private string cacheContentKeyName => $@"{GetType().FullName}.{_key}.Content"; /// <summary> /// Calculate the unique key name. /// </summary> private string cacheDateKeyName => $@"{GetType().FullName}.{_key}.FetchDate"; } // ------------------------------------------------------------------ #endregion }
/* * Copyright (c) Citrix Systems, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1) Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Generic; using CookComputing.XmlRpc; namespace XenAPI { /// <summary> /// VM appliance /// First published in XenServer 6.0. 
/// </summary> public partial class VM_appliance : XenObject<VM_appliance> { public VM_appliance() { } public VM_appliance(string uuid, string name_label, string name_description, List<vm_appliance_operation> allowed_operations, Dictionary<string, vm_appliance_operation> current_operations, List<XenRef<VM>> VMs) { this.uuid = uuid; this.name_label = name_label; this.name_description = name_description; this.allowed_operations = allowed_operations; this.current_operations = current_operations; this.VMs = VMs; } /// <summary> /// Creates a new VM_appliance from a Proxy_VM_appliance. /// </summary> /// <param name="proxy"></param> public VM_appliance(Proxy_VM_appliance proxy) { this.UpdateFromProxy(proxy); } public override void UpdateFrom(VM_appliance update) { uuid = update.uuid; name_label = update.name_label; name_description = update.name_description; allowed_operations = update.allowed_operations; current_operations = update.current_operations; VMs = update.VMs; } internal void UpdateFromProxy(Proxy_VM_appliance proxy) { uuid = proxy.uuid == null ? null : (string)proxy.uuid; name_label = proxy.name_label == null ? null : (string)proxy.name_label; name_description = proxy.name_description == null ? null : (string)proxy.name_description; allowed_operations = proxy.allowed_operations == null ? null : Helper.StringArrayToEnumList<vm_appliance_operation>(proxy.allowed_operations); current_operations = proxy.current_operations == null ? null : Maps.convert_from_proxy_string_vm_appliance_operation(proxy.current_operations); VMs = proxy.VMs == null ? null : XenRef<VM>.Create(proxy.VMs); } public Proxy_VM_appliance ToProxy() { Proxy_VM_appliance result_ = new Proxy_VM_appliance(); result_.uuid = (uuid != null) ? uuid : ""; result_.name_label = (name_label != null) ? name_label : ""; result_.name_description = (name_description != null) ? name_description : ""; result_.allowed_operations = (allowed_operations != null) ? 
Helper.ObjectListToStringArray(allowed_operations) : new string[] {}; result_.current_operations = Maps.convert_to_proxy_string_vm_appliance_operation(current_operations); result_.VMs = (VMs != null) ? Helper.RefListToStringArray(VMs) : new string[] {}; return result_; } /// <summary> /// Creates a new VM_appliance from a Hashtable. /// </summary> /// <param name="table"></param> public VM_appliance(Hashtable table) { uuid = Marshalling.ParseString(table, "uuid"); name_label = Marshalling.ParseString(table, "name_label"); name_description = Marshalling.ParseString(table, "name_description"); allowed_operations = Helper.StringArrayToEnumList<vm_appliance_operation>(Marshalling.ParseStringArray(table, "allowed_operations")); current_operations = Maps.convert_from_proxy_string_vm_appliance_operation(Marshalling.ParseHashTable(table, "current_operations")); VMs = Marshalling.ParseSetRef<VM>(table, "VMs"); } public bool DeepEquals(VM_appliance other, bool ignoreCurrentOperations) { if (ReferenceEquals(null, other)) return false; if (ReferenceEquals(this, other)) return true; if (!ignoreCurrentOperations && !Helper.AreEqual2(this.current_operations, other.current_operations)) return false; return Helper.AreEqual2(this._uuid, other._uuid) && Helper.AreEqual2(this._name_label, other._name_label) && Helper.AreEqual2(this._name_description, other._name_description) && Helper.AreEqual2(this._allowed_operations, other._allowed_operations) && Helper.AreEqual2(this._VMs, other._VMs); } public override string SaveChanges(Session session, string opaqueRef, VM_appliance server) { if (opaqueRef == null) { Proxy_VM_appliance p = this.ToProxy(); return session.proxy.vm_appliance_create(session.uuid, p).parse(); } else { if (!Helper.AreEqual2(_name_label, server._name_label)) { VM_appliance.set_name_label(session, opaqueRef, _name_label); } if (!Helper.AreEqual2(_name_description, server._name_description)) { VM_appliance.set_name_description(session, opaqueRef, _name_description); } 
return null; } } /// <summary> /// Get a record containing the current state of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static VM_appliance get_record(Session session, string _vm_appliance) { return new VM_appliance((Proxy_VM_appliance)session.proxy.vm_appliance_get_record(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Get a reference to the VM_appliance instance with the specified UUID. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_uuid">UUID of object to return</param> public static XenRef<VM_appliance> get_by_uuid(Session session, string _uuid) { return XenRef<VM_appliance>.Create(session.proxy.vm_appliance_get_by_uuid(session.uuid, (_uuid != null) ? _uuid : "").parse()); } /// <summary> /// Create a new VM_appliance instance, and return its handle. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_record">All constructor arguments</param> public static XenRef<VM_appliance> create(Session session, VM_appliance _record) { return XenRef<VM_appliance>.Create(session.proxy.vm_appliance_create(session.uuid, _record.ToProxy()).parse()); } /// <summary> /// Create a new VM_appliance instance, and return its handle. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_record">All constructor arguments</param> public static XenRef<Task> async_create(Session session, VM_appliance _record) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_create(session.uuid, _record.ToProxy()).parse()); } /// <summary> /// Destroy the specified VM_appliance instance. /// First published in XenServer 6.0. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static void destroy(Session session, string _vm_appliance) { session.proxy.vm_appliance_destroy(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse(); } /// <summary> /// Destroy the specified VM_appliance instance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static XenRef<Task> async_destroy(Session session, string _vm_appliance) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_destroy(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Get all the VM_appliance instances with the given label. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_label">label of object to return</param> public static List<XenRef<VM_appliance>> get_by_name_label(Session session, string _label) { return XenRef<VM_appliance>.Create(session.proxy.vm_appliance_get_by_name_label(session.uuid, (_label != null) ? _label : "").parse()); } /// <summary> /// Get the uuid field of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static string get_uuid(Session session, string _vm_appliance) { return (string)session.proxy.vm_appliance_get_uuid(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse(); } /// <summary> /// Get the name/label field of the given VM_appliance. /// First published in XenServer 6.0. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static string get_name_label(Session session, string _vm_appliance) { return (string)session.proxy.vm_appliance_get_name_label(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse(); } /// <summary> /// Get the name/description field of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static string get_name_description(Session session, string _vm_appliance) { return (string)session.proxy.vm_appliance_get_name_description(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse(); } /// <summary> /// Get the allowed_operations field of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static List<vm_appliance_operation> get_allowed_operations(Session session, string _vm_appliance) { return Helper.StringArrayToEnumList<vm_appliance_operation>(session.proxy.vm_appliance_get_allowed_operations(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Get the current_operations field of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static Dictionary<string, vm_appliance_operation> get_current_operations(Session session, string _vm_appliance) { return Maps.convert_from_proxy_string_vm_appliance_operation(session.proxy.vm_appliance_get_current_operations(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Get the VMs field of the given VM_appliance. 
/// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static List<XenRef<VM>> get_VMs(Session session, string _vm_appliance) { return XenRef<VM>.Create(session.proxy.vm_appliance_get_vms(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Set the name/label field of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_label">New value to set</param> public static void set_name_label(Session session, string _vm_appliance, string _label) { session.proxy.vm_appliance_set_name_label(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_label != null) ? _label : "").parse(); } /// <summary> /// Set the name/description field of the given VM_appliance. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_description">New value to set</param> public static void set_name_description(Session session, string _vm_appliance, string _description) { session.proxy.vm_appliance_set_name_description(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_description != null) ? _description : "").parse(); } /// <summary> /// Start all VMs in the appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_paused">Instantiate all VMs belonging to this appliance in paused state if set to true.</param> public static void start(Session session, string _vm_appliance, bool _paused) { session.proxy.vm_appliance_start(session.uuid, (_vm_appliance != null) ? 
_vm_appliance : "", _paused).parse(); } /// <summary> /// Start all VMs in the appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_paused">Instantiate all VMs belonging to this appliance in paused state if set to true.</param> public static XenRef<Task> async_start(Session session, string _vm_appliance, bool _paused) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_start(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", _paused).parse()); } /// <summary> /// Perform a clean shutdown of all the VMs in the appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static void clean_shutdown(Session session, string _vm_appliance) { session.proxy.vm_appliance_clean_shutdown(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse(); } /// <summary> /// Perform a clean shutdown of all the VMs in the appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static XenRef<Task> async_clean_shutdown(Session session, string _vm_appliance) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_clean_shutdown(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Perform a hard shutdown of all the VMs in the appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static void hard_shutdown(Session session, string _vm_appliance) { session.proxy.vm_appliance_hard_shutdown(session.uuid, (_vm_appliance != null) ? 
_vm_appliance : "").parse(); } /// <summary> /// Perform a hard shutdown of all the VMs in the appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static XenRef<Task> async_hard_shutdown(Session session, string _vm_appliance) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_hard_shutdown(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// For each VM in the appliance, try to shut it down cleanly. If this fails, perform a hard shutdown of the VM. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static void shutdown(Session session, string _vm_appliance) { session.proxy.vm_appliance_shutdown(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse(); } /// <summary> /// For each VM in the appliance, try to shut it down cleanly. If this fails, perform a hard shutdown of the VM. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> public static XenRef<Task> async_shutdown(Session session, string _vm_appliance) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_shutdown(session.uuid, (_vm_appliance != null) ? _vm_appliance : "").parse()); } /// <summary> /// Assert whether all SRs required to recover this VM appliance are available. /// First published in XenServer 6.0. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_session_to">The session to which the VM appliance is to be recovered.</param> public static void assert_can_be_recovered(Session session, string _vm_appliance, string _session_to) { session.proxy.vm_appliance_assert_can_be_recovered(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_session_to != null) ? _session_to : "").parse(); } /// <summary> /// Assert whether all SRs required to recover this VM appliance are available. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_session_to">The session to which the VM appliance is to be recovered.</param> public static XenRef<Task> async_assert_can_be_recovered(Session session, string _vm_appliance, string _session_to) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_assert_can_be_recovered(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_session_to != null) ? _session_to : "").parse()); } /// <summary> /// Get the list of SRs required by the VM appliance to recover. /// First published in XenServer 6.2. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given VM_appliance</param> /// <param name="_session_to">The session to which the list of SRs have to be recovered .</param> public static List<XenRef<SR>> get_SRs_required_for_recovery(Session session, string _vm_appliance, string _session_to) { return XenRef<SR>.Create(session.proxy.vm_appliance_get_srs_required_for_recovery(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_session_to != null) ? _session_to : "").parse()); } /// <summary> /// Get the list of SRs required by the VM appliance to recover. /// First published in XenServer 6.2. 
/// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given VM_appliance</param> /// <param name="_session_to">The session to which the list of SRs have to be recovered .</param> public static XenRef<Task> async_get_SRs_required_for_recovery(Session session, string _vm_appliance, string _session_to) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_get_srs_required_for_recovery(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_session_to != null) ? _session_to : "").parse()); } /// <summary> /// Recover the VM appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_session_to">The session to which the VM appliance is to be recovered.</param> /// <param name="_force">Whether the VMs should replace newer versions of themselves.</param> public static void recover(Session session, string _vm_appliance, string _session_to, bool _force) { session.proxy.vm_appliance_recover(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_session_to != null) ? _session_to : "", _force).parse(); } /// <summary> /// Recover the VM appliance /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_vm_appliance">The opaque_ref of the given vm_appliance</param> /// <param name="_session_to">The session to which the VM appliance is to be recovered.</param> /// <param name="_force">Whether the VMs should replace newer versions of themselves.</param> public static XenRef<Task> async_recover(Session session, string _vm_appliance, string _session_to, bool _force) { return XenRef<Task>.Create(session.proxy.async_vm_appliance_recover(session.uuid, (_vm_appliance != null) ? _vm_appliance : "", (_session_to != null) ? 
_session_to : "", _force).parse()); } /// <summary> /// Return a list of all the VM_appliances known to the system. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> public static List<XenRef<VM_appliance>> get_all(Session session) { return XenRef<VM_appliance>.Create(session.proxy.vm_appliance_get_all(session.uuid).parse()); } /// <summary> /// Get all the VM_appliance Records at once, in a single XML RPC call /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> public static Dictionary<XenRef<VM_appliance>, VM_appliance> get_all_records(Session session) { return XenRef<VM_appliance>.Create<Proxy_VM_appliance>(session.proxy.vm_appliance_get_all_records(session.uuid).parse()); } /// <summary> /// Unique identifier/object reference /// </summary> public virtual string uuid { get { return _uuid; } set { if (!Helper.AreEqual(value, _uuid)) { _uuid = value; Changed = true; NotifyPropertyChanged("uuid"); } } } private string _uuid; /// <summary> /// a human-readable name /// </summary> public virtual string name_label { get { return _name_label; } set { if (!Helper.AreEqual(value, _name_label)) { _name_label = value; Changed = true; NotifyPropertyChanged("name_label"); } } } private string _name_label; /// <summary> /// a notes field containing human-readable description /// </summary> public virtual string name_description { get { return _name_description; } set { if (!Helper.AreEqual(value, _name_description)) { _name_description = value; Changed = true; NotifyPropertyChanged("name_description"); } } } private string _name_description; /// <summary> /// list of the operations allowed in this state. This list is advisory only and the server state may have changed by the time this field is read by a client. 
/// </summary> public virtual List<vm_appliance_operation> allowed_operations { get { return _allowed_operations; } set { if (!Helper.AreEqual(value, _allowed_operations)) { _allowed_operations = value; Changed = true; NotifyPropertyChanged("allowed_operations"); } } } private List<vm_appliance_operation> _allowed_operations; /// <summary> /// links each of the running tasks using this object (by reference) to a current_operation enum which describes the nature of the task. /// </summary> public virtual Dictionary<string, vm_appliance_operation> current_operations { get { return _current_operations; } set { if (!Helper.AreEqual(value, _current_operations)) { _current_operations = value; Changed = true; NotifyPropertyChanged("current_operations"); } } } private Dictionary<string, vm_appliance_operation> _current_operations; /// <summary> /// all VMs in this appliance /// </summary> public virtual List<XenRef<VM>> VMs { get { return _VMs; } set { if (!Helper.AreEqual(value, _VMs)) { _VMs = value; Changed = true; NotifyPropertyChanged("VMs"); } } } private List<XenRef<VM>> _VMs; } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.Pipeline;
using Azure.Data.Tables.Models;
using Azure.Data.Tables.Sas;

namespace Azure.Data.Tables
{
    /// <summary>
    /// Account-level client for the Tables service: enumerates, creates and deletes
    /// tables, and reads/writes service-wide properties and replication statistics.
    /// </summary>
    public class TableServiceClient
    {
        private readonly ClientDiagnostics _diagnostics;
        private readonly TableRestClient _tableOperations;
        private readonly ServiceRestClient _serviceOperations;
        // Separate client targeting the secondary endpoint; used by GetStatistics,
        // which is only served from the secondary (RA-GRS) location.
        private readonly ServiceRestClient _secondaryServiceOperations;
        private readonly OdataMetadataFormat _format = OdataMetadataFormat.ApplicationJsonOdataMinimalmetadata;
        private readonly string _version;
        internal readonly bool _isPremiumEndpoint;

        /// <summary>
        /// Initializes a new instance of the <see cref="TableServiceClient"/>.
        /// </summary>
        /// <param name="endpoint">
        /// A <see cref="Uri"/> referencing the table service account.
        /// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
        /// </param>
        public TableServiceClient(Uri endpoint)
            : this(endpoint, options: null)
        { }

        /// <summary>
        /// Initializes a new instance of the <see cref="TableServiceClient"/>.
        /// </summary>
        /// <param name="connectionString">
        /// A connection string includes the authentication information
        /// required for your application to access data in an Azure Storage
        /// account at runtime.
        ///
        /// For more information,
        /// <see href="https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string">
        /// Configure Azure Storage connection strings</see>.
        /// </param>
        public TableServiceClient(string connectionString)
            : this(connectionString, options: null)
        { }

        /// <summary>
        /// Initializes a new instance of the <see cref="TableServiceClient"/>.
        /// </summary>
        /// <param name="endpoint">
        /// A <see cref="Uri"/> referencing the table service account.
        /// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
        /// </param>
        /// <param name="options">
        /// Optional client options that define the transport pipeline policies for authentication, retries, etc., that are applied to every request.
        /// </param>
        public TableServiceClient(Uri endpoint, TableClientOptions options = null)
            : this(endpoint, default(TableSharedKeyPipelinePolicy), options)
        {
            // NOTE(review): this overload takes no credential, yet the error message
            // mentions TokenCredential — looks copy-pasted from another overload;
            // confirm the intended wording before shipping.
            if (endpoint.Scheme != "https")
            {
                throw new ArgumentException("Cannot use TokenCredential without HTTPS.", nameof(endpoint));
            }
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="TableServiceClient"/>.
        /// </summary>
        /// <param name="endpoint">
        /// A <see cref="Uri"/> referencing the table service account.
        /// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
        /// </param>
        /// <param name="credential">The shared key credential used to sign requests.</param>
        public TableServiceClient(Uri endpoint, TableSharedKeyCredential credential)
            : this(endpoint, new TableSharedKeyPipelinePolicy(credential), null)
        {
            // NOTE(review): the chained constructor call above already dereferences
            // 'credential' (new TableSharedKeyPipelinePolicy(credential)) before this
            // check runs, so a null credential would likely fail there first — confirm.
            Argument.AssertNotNull(credential, nameof(credential));
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="TableServiceClient"/>.
        /// </summary>
        /// <param name="endpoint">
        /// A <see cref="Uri"/> referencing the table service account.
        /// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
        /// </param>
        /// <param name="credential">The shared key credential used to sign requests.</param>
        /// <param name="options">
        /// Optional client options that define the transport pipeline policies for authentication, retries, etc., that are applied to every request.
/// </param> public TableServiceClient(Uri endpoint, TableSharedKeyCredential credential, TableClientOptions options = null) : this(endpoint, new TableSharedKeyPipelinePolicy(credential), options) { Argument.AssertNotNull(credential, nameof(credential)); } /// <summary> /// Initializes a new instance of the <see cref="TableServiceClient"/>. /// </summary> /// <param name="connectionString"> /// A connection string includes the authentication information /// required for your application to access data in an Azure Storage /// account at runtime. /// /// For more information, /// <see href="https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string"> /// Configure Azure Storage connection strings</see>. /// </param> /// <param name="options"> /// Optional client options that define the transport pipeline policies for authentication, retries, etc., that are applied to every request. /// </param> public TableServiceClient(string connectionString, TableClientOptions options = null) { Argument.AssertNotNull(connectionString, nameof(connectionString)); TableConnectionString connString = TableConnectionString.Parse(connectionString); options ??= new TableClientOptions(); var endpointString = connString.TableStorageUri.PrimaryUri.ToString(); var secondaryEndpoint = connString.TableStorageUri.PrimaryUri?.ToString() ?? 
endpointString.Insert(endpointString.IndexOf('.'), "-secondary"); TableSharedKeyPipelinePolicy policy = connString.Credentials switch { TableSharedKeyCredential credential => new TableSharedKeyPipelinePolicy(credential), _ => default }; HttpPipeline pipeline = HttpPipelineBuilder.Build(options, policy); _diagnostics = new ClientDiagnostics(options); _tableOperations = new TableRestClient(_diagnostics, pipeline, endpointString); _serviceOperations = new ServiceRestClient(_diagnostics, pipeline, endpointString); _secondaryServiceOperations = new ServiceRestClient(_diagnostics, pipeline, secondaryEndpoint); _version = options.VersionString; _isPremiumEndpoint = IsPremiumEndpoint(connString.TableStorageUri.PrimaryUri); } internal TableServiceClient(Uri endpoint, TableSharedKeyPipelinePolicy policy, TableClientOptions options) { Argument.AssertNotNull(endpoint, nameof(endpoint)); options ??= new TableClientOptions(); var endpointString = endpoint.ToString(); var secondaryEndpoint = endpointString.Insert(endpointString.IndexOf('.'), "-secondary"); HttpPipeline pipeline = HttpPipelineBuilder.Build(options, policy); _diagnostics = new ClientDiagnostics(options); _tableOperations = new TableRestClient(_diagnostics, pipeline, endpointString); _serviceOperations = new ServiceRestClient(_diagnostics, pipeline, endpointString); _secondaryServiceOperations = new ServiceRestClient(_diagnostics, pipeline, secondaryEndpoint); _version = options.VersionString; _isPremiumEndpoint = IsPremiumEndpoint(endpoint); } /// <summary> /// Initializes a new instance of the <see cref="TableServiceClient"/> /// class for mocking. /// </summary> internal TableServiceClient(TableRestClient internalClient) { _tableOperations = internalClient; } /// <summary> /// Initializes a new instance of the <see cref="TableServiceClient"/> /// class for mocking. /// </summary> protected TableServiceClient() { } /// <summary> /// Gets a <see cref="TableSasBuilder"/> instance scoped to the current account. 
/// </summary> /// <param name="permissions"><see cref="TableAccountSasPermissions"/> containing the allowed permissions.</param> /// <param name="resourceTypes"><see cref="TableAccountSasResourceTypes"/> containing the accessible resource types.</param> /// <param name="expiresOn">The time at which the shared access signature becomes invalid.</param> /// <returns>An instance of <see cref="TableAccountSasBuilder"/>.</returns> public virtual TableAccountSasBuilder GetSasBuilder(TableAccountSasPermissions permissions, TableAccountSasResourceTypes resourceTypes, DateTimeOffset expiresOn) { return new TableAccountSasBuilder(permissions, resourceTypes, expiresOn) { Version = _version }; } /// <summary> /// Gets a <see cref="TableAccountSasBuilder"/> instance scoped to the current table. /// </summary> /// <param name="rawPermissions">The permissions associated with the shared access signature. This string should contain one or more of the following permission characters in this order: "racwdl".</param> /// <param name="resourceTypes"><see cref="TableAccountSasResourceTypes"/> containing the accessible resource types.</param> /// <param name="expiresOn">The time at which the shared access signature becomes invalid.</param> /// <returns>An instance of <see cref="TableAccountSasBuilder"/>.</returns> public virtual TableAccountSasBuilder GetSasBuilder(string rawPermissions, TableAccountSasResourceTypes resourceTypes, DateTimeOffset expiresOn) { return new TableAccountSasBuilder(rawPermissions, resourceTypes, expiresOn) { Version = _version }; } public virtual TableClient GetTableClient(string tableName) { Argument.AssertNotNull(tableName, nameof(tableName)); return new TableClient(tableName, _tableOperations, _version, _diagnostics, _isPremiumEndpoint); } /// <summary> /// Gets a list of tables from the storage account. 
        /// </summary>
        /// <param name="filter">Returns only tables that satisfy the specified filter.</param>
        /// <param name="maxPerPage">The maximum number of tables that will be returned per page.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>An async pageable collection of <see cref="TableItem"/> instances.</returns>
        public virtual AsyncPageable<TableItem> GetTablesAsync(string filter = null, int? maxPerPage = null, CancellationToken cancellationToken = default)
        {
            // NOTE(review): the scope wraps only the construction of the pageable —
            // because enumeration is deferred, the actual service calls happen after
            // this method returns and outside the scope; confirm intended telemetry.
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetTables)}");
            scope.Start();
            try
            {
                return PageableHelpers.CreateAsyncEnumerable(async _ =>
                {
                    // First page: no continuation token.
                    var response = await _tableOperations.QueryAsync(
                        null,
                        null,
                        new QueryOptions() { Filter = filter, Select = null, Top = maxPerPage, Format = _format },
                        cancellationToken).ConfigureAwait(false);
                    return Page.FromValues(response.Value.Value, response.Headers.XMsContinuationNextTableName, response.GetRawResponse());
                }, async (nextLink, _) =>
                {
                    // Subsequent pages: the continuation token is the next table name.
                    var response = await _tableOperations.QueryAsync(
                        null,
                        nextTableName: nextLink,
                        new QueryOptions() { Filter = filter, Select = null, Top = maxPerPage, Format = _format },
                        cancellationToken).ConfigureAwait(false);
                    return Page.FromValues(response.Value.Value, response.Headers.XMsContinuationNextTableName, response.GetRawResponse());
                });
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Gets a list of tables from the storage account.
        /// </summary>
        /// <param name="filter">Returns only tables or entities that satisfy the specified filter.</param>
        /// <param name="maxPerPage">The maximum number of tables that will be returned per page.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>A pageable collection of <see cref="TableItem"/> instances.</returns>
        public virtual Pageable<TableItem> GetTables(string filter = null, int? maxPerPage = null, CancellationToken cancellationToken = default)
        {
            // Unlike the async variant, each page fetch here opens its own scope,
            // so the service calls themselves are covered by diagnostics.
            return PageableHelpers.CreateEnumerable(_ =>
            {
                using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetTables)}");
                scope.Start();
                try
                {
                    var response = _tableOperations.Query(
                        null,
                        null,
                        new QueryOptions() { Filter = filter, Select = null, Top = maxPerPage, Format = _format },
                        cancellationToken);
                    return Page.FromValues(response.Value.Value, response.Headers.XMsContinuationNextTableName, response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    scope.Failed(ex);
                    throw;
                }
            }, (nextLink, _) =>
            {
                using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetTables)}");
                scope.Start();
                try
                {
                    var response = _tableOperations.Query(
                        null,
                        nextTableName: nextLink,
                        new QueryOptions() { Filter = filter, Select = null, Top = maxPerPage, Format = _format },
                        cancellationToken);
                    return Page.FromValues(response.Value.Value, response.Headers.XMsContinuationNextTableName, response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    scope.Failed(ex);
                    throw;
                }
            });
        }

        /// <summary>
        /// Creates a table in the storage account.
        /// </summary>
        /// <param name="tableName">The table name to create.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>A <see cref="Response{TableItem}"/> containing properties of the table.</returns>
        public virtual Response<TableItem> CreateTable(string tableName, CancellationToken cancellationToken = default)
        {
            Argument.AssertNotNull(tableName, nameof(tableName));
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTable)}");
            scope.Start();
            try
            {
                var response = _tableOperations.Create(new TableProperties() { TableName = tableName }, null, queryOptions: new QueryOptions { Format = _format }, cancellationToken: cancellationToken);
                return Response.FromValue(response.Value as TableItem, response.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Creates a table in the storage account.
        /// </summary>
        /// <param name="tableName">The table name to create.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>A <see cref="Response{TableItem}"/> containing properties of the table.</returns>
        public virtual async Task<Response<TableItem>> CreateTableAsync(string tableName, CancellationToken cancellationToken = default)
        {
            Argument.AssertNotNull(tableName, nameof(tableName));
            // NOTE(review): the scope is deliberately named after the sync method
            // (CreateTable, not CreateTableAsync) — confirm this matches the
            // diagnostics naming convention used elsewhere in the SDK.
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTable)}");
            scope.Start();
            try
            {
                var response = await _tableOperations.CreateAsync(new TableProperties() { TableName = tableName }, null, queryOptions: new QueryOptions { Format = _format }, cancellationToken: cancellationToken).ConfigureAwait(false);
                return Response.FromValue(response.Value as TableItem, response.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Creates a table in the storage account.
        /// </summary>
        /// <param name="tableName">The table name to create.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>If the table does not already exist, a <see cref="Response{TableItem}"/>. If the table already exists, <c>null</c>.</returns>
        public virtual Response<TableItem> CreateTableIfNotExists(string tableName, CancellationToken cancellationToken = default)
        {
            Argument.AssertNotNull(tableName, nameof(tableName));
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTableIfNotExists)}");
            scope.Start();
            try
            {
                var response = _tableOperations.Create(new TableProperties() { TableName = tableName }, null, queryOptions: new QueryOptions { Format = _format }, cancellationToken: cancellationToken);
                return Response.FromValue(response.Value as TableItem, response.GetRawResponse());
            }
            // A 409 Conflict means the table already exists; swallow it and return null.
            catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.Conflict)
            {
                return default;
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Creates a table in the storage account.
        /// </summary>
        /// <param name="tableName">The table name to create.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>If the table does not already exist, a <see cref="Response{TableItem}"/>. If the table already exists, <c>null</c>.</returns>
        public virtual async Task<Response<TableItem>> CreateTableIfNotExistsAsync(string tableName, CancellationToken cancellationToken = default)
        {
            Argument.AssertNotNull(tableName, nameof(tableName));
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTableIfNotExists)}");
            scope.Start();
            try
            {
                var response = await _tableOperations.CreateAsync(new TableProperties() { TableName = tableName }, null, queryOptions: new QueryOptions { Format = _format }, cancellationToken: cancellationToken).ConfigureAwait(false);
                return Response.FromValue(response.Value as TableItem, response.GetRawResponse());
            }
            // A 409 Conflict means the table already exists; swallow it and return null.
            catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.Conflict)
            {
                return default;
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Deletes a table in the storage account.
        /// </summary>
        /// <param name="tableName">The table name to delete.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>The raw <see cref="Response"/> from the delete operation.</returns>
        public virtual Response DeleteTable(string tableName, CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(DeleteTable)}");
            scope.Start();
            try
            {
                return _tableOperations.Delete(tableName, null, cancellationToken: cancellationToken);
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Deletes a table in the storage account.
        /// </summary>
        /// <param name="tableName">The table name to delete.</param>
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
        /// <returns>The raw <see cref="Response"/> from the delete operation.</returns>
        public virtual async Task<Response> DeleteTableAsync(string tableName, CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(DeleteTable)}");
            scope.Start();
            try
            {
                return await _tableOperations.DeleteAsync(tableName, null, cancellationToken: cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary> Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
        /// <param name="properties"> The Table Service properties. </param>
        /// <param name="cancellationToken"> The cancellation token to use. </param>
        public virtual Response SetProperties(TableServiceProperties properties, CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(SetProperties)}");
            scope.Start();
            try
            {
                return _serviceOperations.SetProperties(properties, cancellationToken: cancellationToken);
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary> Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
        /// <param name="properties"> The Table Service properties. </param>
        /// <param name="cancellationToken"> The cancellation token to use. </param>
        public virtual async Task<Response> SetPropertiesAsync(TableServiceProperties properties, CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(SetProperties)}");
            scope.Start();
            try
            {
                return await _serviceOperations.SetPropertiesAsync(properties, cancellationToken: cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary> Gets the properties of an account's Table service, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
        /// <param name="cancellationToken"> The cancellation token to use. </param>
        public virtual Response<TableServiceProperties> GetProperties(CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetProperties)}");
            scope.Start();
            try
            {
                var response = _serviceOperations.GetProperties(cancellationToken: cancellationToken);
                return Response.FromValue(response.Value, response.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary> Gets the properties of an account's Table service, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
        /// <param name="cancellationToken"> The cancellation token to use. </param>
        public virtual async Task<Response<TableServiceProperties>> GetPropertiesAsync(CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetProperties)}");
            scope.Start();
            try
            {
                var response = await _serviceOperations.GetPropertiesAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
                return Response.FromValue(response.Value, response.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary> Retrieves statistics related to replication for the Table service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the account. </summary>
        /// <param name="cancellationToken"> The cancellation token to use. </param>
        public virtual async Task<Response<TableServiceStatistics>> GetStatisticsAsync(CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetStatistics)}");
            scope.Start();
            try
            {
                // Statistics must be requested from the secondary endpoint client.
                var response = await _secondaryServiceOperations.GetStatisticsAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
                return Response.FromValue(response.Value, response.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary> Retrieves statistics related to replication for the Table service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the account. </summary>
        /// <param name="cancellationToken"> The cancellation token to use. </param>
        public virtual Response<TableServiceStatistics> GetStatistics(CancellationToken cancellationToken = default)
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetStatistics)}");
            scope.Start();
            try
            {
                var response = _secondaryServiceOperations.GetStatistics(cancellationToken: cancellationToken);
                return Response.FromValue(response.Value, response.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        }

        /// <summary>
        /// Heuristically determines whether <paramref name="endpoint"/> targets a
        /// premium (Cosmos) table endpoint rather than standard Azure Storage.
        /// </summary>
        /// <param name="endpoint">The account endpoint to inspect.</param>
        /// <returns><c>true</c> for Cosmos table domains, or localhost on a port other than 10002 (the storage emulator's table port); otherwise <c>false</c>.</returns>
        internal static bool IsPremiumEndpoint(Uri endpoint)
        {
            string absoluteUri = endpoint.OriginalString.ToLowerInvariant();
            return (endpoint.Host.Equals("localhost", StringComparison.OrdinalIgnoreCase) && endpoint.Port != 10002) ||
                absoluteUri.Contains(TableConstants.CosmosTableDomain) || absoluteUri.Contains(TableConstants.LegacyCosmosTableDomain);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Net;
using Orleans.Runtime.Configuration;
using Orleans.Serialization;

namespace Orleans.Runtime
{
    /// <summary>
    /// Data class encapsulating the details of silo addresses.
    /// </summary>
    [Serializable]
    [DebuggerDisplay("SiloAddress {ToString()}")]
    public class SiloAddress : IEquatable<SiloAddress>, IComparable<SiloAddress>, IComparable
    {
        internal static readonly int SizeBytes = 24; // 16 for the address, 4 for the port, 4 for the generation

        /// <summary> Special constant value to indicate an empty SiloAddress. </summary>
        public static SiloAddress Zero { get; private set; }

        private const int INTERN_CACHE_INITIAL_SIZE = InternerConstants.SIZE_MEDIUM;
        // NOTE(review): TimeSpan.Zero presumably disables periodic cache cleanup —
        // confirm against the Interner implementation.
        private static readonly TimeSpan internCacheCleanupInterval = TimeSpan.Zero;

        // Lazily-computed consistent hash; benign data race — recomputation is idempotent.
        private int hashCode = 0;
        private bool hashCodeSet = false;

        // Cached result of GetUniformHashCodes; excluded from serialization.
        [NonSerialized]
        private List<uint> uniformHashCache;

        public IPEndPoint Endpoint { get; private set; }
        public int Generation { get; private set; }

        // Separator between the endpoint and the generation in the parsable form.
        private const char SEPARATOR = '@';

        // Reference point for AllocateNewGeneration's elapsed-seconds counter.
        private static readonly DateTime epoch = new DateTime(2010, 1, 1, 0, 0, 0, DateTimeKind.Utc);

        private static readonly Interner<SiloAddress, SiloAddress> siloAddressInterningCache;
        private static readonly IPEndPoint localEndpoint = new IPEndPoint(ClusterConfiguration.GetLocalIPAddress(), 0); // non loopback local ip.

        static SiloAddress()
        {
            siloAddressInterningCache = new Interner<SiloAddress, SiloAddress>(INTERN_CACHE_INITIAL_SIZE, internCacheCleanupInterval);
            // 0.0.0.0:0 with generation 0 serves as the canonical "empty" address.
            var sa = new SiloAddress(new IPEndPoint(0, 0), 0);
            Zero = siloAddressInterningCache.Intern(sa, sa);
        }

        /// <summary>
        /// Factory for creating new SiloAddresses for silo on this machine with specified generation number.
        /// </summary>
        /// <param name="gen">Generation number of the silo.</param>
        /// <returns>SiloAddress object initialized with the non-loopback local IP address and the specified silo generation.</returns>
        public static SiloAddress NewLocalAddress(int gen)
        {
            return New(localEndpoint, gen);
        }

        /// <summary>
        /// Factory for creating new SiloAddresses with specified IP endpoint address and silo generation number.
        /// </summary>
        /// <param name="ep">IP endpoint address of the silo.</param>
        /// <param name="gen">Generation number of the silo.</param>
        /// <returns>SiloAddress object initialized with specified address and silo generation.</returns>
        public static SiloAddress New(IPEndPoint ep, int gen)
        {
            // Intern so equal addresses share a single canonical instance.
            var sa = new SiloAddress(ep, gen);
            return siloAddressInterningCache.Intern(sa, sa);
        }

        // Private: instances are only created through the interning factories above.
        private SiloAddress(IPEndPoint endpoint, int gen)
        {
            Endpoint = endpoint;
            Generation = gen;
        }

        // A negative generation appears to denote a client address — TODO confirm
        // against the code that assigns client generations.
        public bool IsClient { get { return Generation < 0; } }

        /// <summary> Allocate a new silo generation number. </summary>
        /// <returns>A new silo generation number.</returns>
        public static int AllocateNewGeneration()
        {
            // Seconds since the fixed epoch, truncated to 32 bits.
            long elapsed = (DateTime.UtcNow.Ticks - epoch.Ticks) / TimeSpan.TicksPerSecond;
            return unchecked((int)elapsed); // Unchecked to truncate any bits beyond the lower 32
        }

        /// <summary>
        /// Return this SiloAddress in a standard string form, suitable for later use with the <c>FromParsableString</c> method.
        /// </summary>
        /// <returns>SiloAddress in a standard string format.</returns>
        public string ToParsableString()
        {
            // This must be the "inverse" of FromParsableString, and must be the same across all silos in a deployment.
            // Basically, this should never change unless the data content of SiloAddress changes
            return String.Format("{0}:{1}@{2}", Endpoint.Address, Endpoint.Port, Generation);
        }

        /// <summary>
        /// Create a new SiloAddress object by parsing string in a standard form returned from <c>ToParsableString</c> method.
/// </summary> /// <param name="addr">String containing the SiloAddress info to be parsed.</param> /// <returns>New SiloAddress object created from the input data.</returns> public static SiloAddress FromParsableString(string addr) { // This must be the "inverse" of ToParsableString, and must be the same across all silos in a deployment. // Basically, this should never change unless the data content of SiloAddress changes // First is the IPEndpoint; then '@'; then the generation int atSign = addr.LastIndexOf(SEPARATOR); if (atSign < 0) { throw new FormatException("Invalid string SiloAddress: " + addr); } var epString = addr.Substring(0, atSign); var genString = addr.Substring(atSign + 1); // IPEndpoint is the host, then ':', then the port int lastColon = epString.LastIndexOf(':'); if (lastColon < 0) throw new FormatException("Invalid string SiloAddress: " + addr); var hostString = epString.Substring(0, lastColon); var portString = epString.Substring(lastColon + 1); var host = IPAddress.Parse(hostString); int port = Int32.Parse(portString); return New(new IPEndPoint(host, port), Int32.Parse(genString)); } /// <summary> Object.ToString method override. </summary> public override string ToString() { return String.Format("{0}{1}:{2}", (IsClient ? "C" : "S"), Endpoint, Generation); } /// <summary> /// Return a long string representation of this SiloAddress. /// </summary> /// <remarks> /// Note: This string value is not comparable with the <c>FromParsableString</c> method -- use the <c>ToParsableString</c> method for that purpose. /// </remarks> /// <returns>String representaiton of this SiloAddress.</returns> public string ToLongString() { return ToString(); } /// <summary> /// Return a long string representation of this SiloAddress, including it's consistent hash value. /// </summary> /// <remarks> /// Note: This string value is not comparable with the <c>FromParsableString</c> method -- use the <c>ToParsableString</c> method for that purpose. 
/// </remarks> /// <returns>String representaiton of this SiloAddress.</returns> public string ToStringWithHashCode() { return String.Format("{0}/x{1, 8:X8}", ToString(), GetConsistentHashCode()); } /// <summary> Object.Equals method override. </summary> public override bool Equals(object obj) { return Equals(obj as SiloAddress); } /// <summary> Object.GetHashCode method override. </summary> public override int GetHashCode() { // Note that Port cannot be used because Port==0 matches any non-zero Port value for .Equals return Endpoint.GetHashCode() ^ Generation.GetHashCode(); } /// <summary>Get a consistent hash value for this silo address.</summary> /// <returns>Consistent hash value for this silo address.</returns> public int GetConsistentHashCode() { if (hashCodeSet) return hashCode; // Note that Port cannot be used because Port==0 matches any non-zero Port value for .Equals string siloAddressInfoToHash = Endpoint + Generation.ToString(CultureInfo.InvariantCulture); hashCode = Utils.CalculateIdHash(siloAddressInfoToHash); hashCodeSet = true; return hashCode; } public List<uint> GetUniformHashCodes(int numHashes) { if (uniformHashCache != null) return uniformHashCache; var jenkinsHash = JenkinsHash.Factory.GetHashGenerator(); var hashes = new List<uint>(); for (int i = 0; i < numHashes; i++) { uint hash = GetUniformHashCode(jenkinsHash, i); hashes.Add(hash); } uniformHashCache = hashes; return uniformHashCache; } private uint GetUniformHashCode(JenkinsHash jenkinsHash, int extraBit) { var writer = new BinaryTokenStreamWriter(); writer.Write(this); writer.Write(extraBit); byte[] bytes = writer.ToByteArray(); return jenkinsHash.ComputeHash(bytes); } /// <summary> /// Two silo addresses match if they are equal or if one generation or the other is 0 /// </summary> /// <param name="other"> The other SiloAddress to compare this one with. 
</param> /// <returns> Returns <c>true</c> if the two SiloAddresses are considered to match -- if they are equal or if one generation or the other is 0. </returns> internal bool Matches(SiloAddress other) { return other != null && Endpoint.Address.Equals(other.Endpoint.Address) && (Endpoint.Port == other.Endpoint.Port) && ((Generation == other.Generation) || (Generation == 0) || (other.Generation == 0)); } #region IEquatable<SiloAddress> Members /// <summary> IEquatable.Equals method override. </summary> public bool Equals(SiloAddress other) { return other != null && Endpoint.Address.Equals(other.Endpoint.Address) && (Endpoint.Port == other.Endpoint.Port) && ((Generation == other.Generation)); } #endregion // non-generic version of CompareTo is needed by some contexts. Just calls generic version. public int CompareTo(object obj) { return CompareTo((SiloAddress)obj); } public int CompareTo(SiloAddress other) { if (other == null) return 1; // Compare Generation first. It gives a cheap and fast way to compare, avoiding allocations // and is also semantically meaningfull - older silos (with smaller Generation) will appear first in the comparison order. // Only if Generations are the same, go on to compare Ports and IPAddress (which is more expansive to compare). // Alternatively, we could compare ConsistentHashCode or UniformHashCode. int comp = Generation.CompareTo(other.Generation); if (comp != 0) return comp; comp = Endpoint.Port.CompareTo(other.Endpoint.Port); if (comp != 0) return comp; return CompareIpAddresses(Endpoint.Address, other.Endpoint.Address); } // The comparions code is taken from: http://www.codeproject.com/Articles/26550/Extending-the-IPAddress-object-to-allow-relative-c // Also note that this comparison does not handle semantic equivalence of IPv4 and IPv6 addresses. // In particular, 127.0.0.1 and::1 are semanticaly the same, but not syntacticaly. 
// For more information refer to: http://stackoverflow.com/questions/16618810/compare-ipv4-addresses-in-ipv6-notation // and http://stackoverflow.com/questions/22187690/ip-address-class-getaddressbytes-method-putting-octets-in-odd-indices-of-the-byt // and dual stack sockets, described at https://msdn.microsoft.com/en-us/library/system.net.ipaddress.maptoipv6(v=vs.110).aspx private static int CompareIpAddresses(IPAddress one, IPAddress two) { int returnVal = 0; if (one.AddressFamily == two.AddressFamily) { byte[] b1 = one.GetAddressBytes(); byte[] b2 = two.GetAddressBytes(); for (int i = 0; i < b1.Length; i++) { if (b1[i] < b2[i]) { returnVal = -1; break; } else if (b1[i] > b2[i]) { returnVal = 1; break; } } } else { returnVal = one.AddressFamily.CompareTo(two.AddressFamily); } return returnVal; } } }
// Uses the obsolete Application.isLoadingLevel API throughout; warnings suppressed deliberately.
#pragma warning disable 0618
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityObject = UnityEngine.Object;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace Zios{
    using Event;
    using Containers;
    // Cached lookup service for scene objects, components, assets and asset importers.
    // Caches are invalidated via SetDirty(), which is wired to editor events below.
    // NOTE(review): [InitializeOnLoad] is an editor-only attribute used outside the
    // UNITY_EDITOR guard -- presumably this file only compiles in the editor; confirm.
    [InitializeOnLoad]
    public static class Locate{
        private static bool setup;                                                  // true once SetDirty() has run at least once
        private static bool cleanGameObjects = false;                               // true while the GameObject caches below are valid
        private static List<Type> cleanSceneComponents = new List<Type>();          // component types whose scene caches are valid
        private static List<GameObject> cleanSiblings = new List<GameObject>();     // objects whose sibling caches are valid
        private static Dictionary<string,GameObject> searchCache = new Dictionary<string,GameObject>();
        private static Dictionary<Type,UnityObject[]> assets = new Dictionary<Type,UnityObject[]>();
        private static Dictionary<GameObject,GameObject[]> siblings = new Dictionary<GameObject,GameObject[]>();
        private static Dictionary<GameObject,GameObject[]> enabledSiblings = new Dictionary<GameObject,GameObject[]>();
        private static Dictionary<GameObject,GameObject[]> disabledSiblings = new Dictionary<GameObject,GameObject[]>();
        private static GameObject[] rootObjects = new GameObject[0];
        private static GameObject[] sceneObjects = new GameObject[0];
        private static GameObject[] enabledObjects = new GameObject[0];
        private static GameObject[] disabledObjects = new GameObject[0];
        private static Component[] allComponents = new Component[0];
        private static Dictionary<Type,Component[]> sceneComponents = new Dictionary<Type,Component[]>();
        private static Dictionary<Type,Component[]> enabledComponents = new Dictionary<Type,Component[]>();
        private static Dictionary<Type,Component[]> disabledComponents = new Dictionary<Type,Component[]>();
        private static Hierarchy<GameObject,Type,Component[]> objectComponents = new Hierarchy<GameObject,Type,Component[]>();
        #if UNITY_EDITOR
        private static Dictionary<string,AssetImporter> importers = new Dictionary<string,AssetImporter>();
        #endif
        // Static constructor: registers cache-invalidation handlers on editor events
        // (only outside play mode) and performs the initial SetDirty().
        static Locate(){
            if(!Application.isPlaying){
                //Event.Add("On Application Quit",Locate.SetDirty);
                Events.Add("On Level Was Loaded",Locate.SetDirty).SetPermanent();
                Events.Add("On Hierarchy Changed",Locate.SetDirty).SetPermanent();
                Events.Add("On Asset Changed",()=>Locate.assets.Clear()).SetPermanent();
            }
            Events.Register("On Components Changed");
            if(!Locate.setup){Locate.SetDirty();}
        }
        // Fires "On Components Changed" when the set of loaded components differs from the last snapshot.
        public static void CheckChanges(){
            var components = Resources.FindObjectsOfTypeAll<Component>();
            if(components.Length != Locate.allComponents.Count() && !Locate.allComponents.SequenceEqual(components)){
                if(Locate.setup){Events.Call("On Components Changed");}
                Locate.allComponents = components;
            }
        }
        // Invalidates every cache; the next lookup rebuilds lazily via Build<Type>().
        public static void SetDirty(){
            Locate.CheckChanges();
            Locate.cleanGameObjects = false;
            Locate.cleanSceneComponents.Clear();
            Locate.cleanSiblings.Clear();
            Locate.objectComponents.Clear();
            Locate.searchCache.Clear();
            Locate.setup = true;
        }
        // Invalidates the scene-wide cache for one component type.
        public static void SetComponentsDirty<Type>() where Type : Component{Locate.cleanSceneComponents.Remove(typeof(Type));}
        // Invalidates the per-object cache for one component type on one GameObject.
        public static void SetComponentsDirty<Type>(GameObject target) where Type : Component{Locate.objectComponents[target].Remove(typeof(Type));}
        // Rebuilds the scene caches for component type Type (and, when Type is Transform,
        // the GameObject caches too). Skips destroyed objects and prefab assets.
        // NOTE(review): the generic parameter is named "Type", shadowing System.Type -- confusing but intentional here.
        public static void Build<Type>() where Type : Component{
            List<GameObject> rootObjects = new List<GameObject>();
            List<Type> enabled = new List<Type>();
            List<Type> disabled = new List<Type>();
            Type[] all = (Type[])Resources.FindObjectsOfTypeAll(typeof(Type));
            foreach(Type current in all){
                if(current.IsNull()){continue;}
                if(current.InPrefabFile()){continue;}
                if(current.gameObject.IsNull()){continue;}
                if(current.gameObject.transform.parent == null){rootObjects.Add(current.gameObject);}
                if(current.gameObject.activeInHierarchy){enabled.Add(current);}
                else{disabled.Add(current);}
            }
            Locate.sceneComponents[typeof(Type)] = enabled.Extend(disabled).ToArray();
            Locate.enabledComponents[typeof(Type)] = enabled.ToArray();
            Locate.disabledComponents[typeof(Type)] = disabled.ToArray();
            Locate.cleanSceneComponents.Add(typeof(Type));
            if(typeof(Type) == typeof(Transform)){
                List<GameObject> enabledObjects = enabled.Select(x=>x.gameObject).ToList();
                List<GameObject> disabledObjects = disabled.Select(x=>x.gameObject).ToList();
                Locate.sceneObjects = enabledObjects.Extend(disabledObjects).ToArray();
                Locate.enabledObjects = enabledObjects.ToArray();
                Locate.disabledObjects = disabledObjects.ToArray();
                Locate.rootObjects = rootObjects.ToArray();
                Locate.cleanGameObjects = true;
            }
        }
        //=====================
        // Gameobject
        //=====================
        // Returns true when another sibling (self excluded) has the same name as target.
        public static bool HasDuplicate(GameObject target){
            if(Application.isLoadingLevel){return false;}
            GameObject[] siblings = target.GetSiblings(true,true,false);
            foreach(GameObject current in siblings){
                if(current.IsNull()){continue;}
                if(current.name == target.name){return true;}
            }
            return false;
        }
        // Returns the siblings of "current" (children of the same parent, or root objects
        // for parentless targets), filtered by active state and optionally excluding self.
        // NOTE(review): the root-object path removes self from the cached list while the
        // parented path does not; includeSelf only affects the final filter -- confirm intent.
        public static GameObject[] GetSiblings(this GameObject current,bool includeEnabled=true,bool includeDisabled=true,bool includeSelf=true){
            if(Application.isLoadingLevel){return new GameObject[0];}
            if(!Locate.cleanSiblings.Contains(current)){
                GameObject parent = current.GetParent();
                List<GameObject> siblings;
                if(parent.IsNull()){
                    Locate.GetSceneObjects(includeEnabled,includeDisabled);
                    siblings = Locate.rootObjects.Remove(current).ToList();
                }
                else{
                    siblings = parent.GetComponentsInChildren<Transform>(true).Select(x=>x.gameObject).ToList();
                    siblings.RemoveAll(x=>x.GetParent()!=parent);
                }
                Locate.siblings[current] = siblings.ToArray();
                Locate.enabledSiblings[current] = Locate.siblings[current].Where(x=>!x.IsNull()&&x.gameObject.activeInHierarchy).Select(x=>x.gameObject).ToArray();
                Locate.disabledSiblings[current] = Locate.siblings[current].Where(x=>!x.IsNull()&&!x.gameObject.activeInHierarchy).Select(x=>x.gameObject).ToArray();
                Locate.cleanSiblings.Add(current);
            }
            GameObject[] results = Locate.enabledSiblings[current];
            if(includeEnabled && includeDisabled){results = Locate.siblings[current];}
            if(!includeEnabled){results = Locate.disabledSiblings[current];}
            if(!includeSelf){results = results.Remove(current);}
            return results;
        }
        // Resolves a "/"-separated scene path, optionally creating missing GameObjects
        // along the way (new objects are parented to the previous path segment).
        public static GameObject GetScenePath(string name,bool autocreate=true){
            string[] parts = name.Split('/');
            string path = "";
            GameObject current = null;
            Transform parent = null;
            foreach(string part in parts){
                path = path.IsEmpty() ? part : path + "/" + part;
                current = GameObject.Find(path);
                if(current.IsNull()){
                    if(!autocreate){
                        return null;
                    }
                    current = new GameObject(part);
                    current.transform.parent = parent;
                    Locate.SetDirty();
                }
                parent = current.transform;
            }
            return current;
        }
        // Returns all *enabled* scene objects whose name matches exactly.
        public static GameObject[] GetByName(string name){
            if(Application.isLoadingLevel){return new GameObject[0];}
            if(!Locate.cleanGameObjects){Locate.Build<Transform>();}
            List<GameObject> matches = new List<GameObject>();
            foreach(GameObject current in Locate.enabledObjects){
                if(current.IsNull()){continue;}
                if(current.name == name){
                    matches.Add(current);
                }
            }
            return matches.ToArray();
        }
        // Returns the cached scene objects filtered by active state (rebuilding if dirty).
        public static GameObject[] GetSceneObjects(bool includeEnabled=true,bool includeDisabled=true){
            if(Application.isLoadingLevel){return new GameObject[0];}
            if(!Locate.cleanGameObjects){Locate.Build<Transform>();}
            if(includeEnabled && includeDisabled){return Locate.sceneObjects;}
            if(!includeEnabled){return Locate.disabledObjects;}
            return Locate.enabledObjects;
        }
        // Finds a scene object by its full hierarchy path; positive hits are memoized
        // in searchCache (cleared on SetDirty).
        public static GameObject Find(string name,bool includeHidden=true){
            if(Application.isLoadingLevel){return null;}
            if(!Locate.cleanGameObjects){Locate.Build<Transform>();}
            name = name.Trim("/");
            if(Locate.searchCache.ContainsKey(name)){return Locate.searchCache[name];}
            GameObject[] all = includeHidden ? Locate.sceneObjects : Locate.enabledObjects;
            foreach(GameObject current in all){
                if(current.IsNull()){continue;}
                string path = current.GetPath().Trim("/");
                if(path == name){
                    Locate.searchCache[name] = current;
                    return current;
                }
            }
            return null;
        }
        //=====================
        // Components
        //=====================
        // Returns all cached scene components of Type, filtered by active state.
        public static Type[] GetSceneComponents<Type>(bool includeEnabled=true,bool includeDisabled=true) where Type : Component{
            if(Application.isLoadingLevel){return new Type[0];}
            if(!Locate.cleanSceneComponents.Contains(typeof(Type))){Locate.Build<Type>();}
            if(includeEnabled && includeDisabled){return (Type[])Locate.sceneComponents[typeof(Type)];}
            if(!includeEnabled){return (Type[])Locate.disabledComponents[typeof(Type)];}
            return (Type[])Locate.enabledComponents[typeof(Type)];
        }
        // Returns (and caches) the components of Type attached to one GameObject.
        public static Type[] GetObjectComponents<Type>(GameObject target) where Type : Component{
            if(Application.isLoadingLevel){return new Type[0];}
            if(!Locate.objectComponents.ContainsKey(target) || !Locate.objectComponents[target].ContainsKey(typeof(Type))){
                Locate.objectComponents.AddNew(target);
                Locate.objectComponents[target][typeof(Type)] = target.GetComponents<Type>(true);
            }
            return (Type[])Locate.objectComponents[target][typeof(Type)];
        }
        //=====================
        // Assets
        //=====================
        // Returns (and caches) all loaded assets of the given runtime type.
        public static object[] GetAssets(Type type){
            if(Application.isLoadingLevel){return new Type[0];}
            if(!Locate.assets.ContainsKey(type)){Locate.assets[type] = Resources.FindObjectsOfTypeAll(type);}
            return Locate.assets[type];
        }
        // Generic variant of GetAssets; cache is cleared by the "On Asset Changed" event.
        public static Type[] GetAssets<Type>() where Type : UnityObject{
            if(Application.isLoadingLevel){return new Type[0];}
            if(!Locate.assets.ContainsKey(typeof(Type))){Locate.assets[typeof(Type)] = Resources.FindObjectsOfTypeAll(typeof(Type));}
            return (Type[])Locate.assets[typeof(Type)];
        }
        //=====================
        // Importers
        //=====================
        #if UNITY_EDITOR
        // Returns (and caches) the AssetImporter for an asset path, cast to Type.
        public static Type GetImporter<Type>(string path) where Type : AssetImporter{
            if(Application.isLoadingLevel){return default(Type);}
            if(!Locate.importers.ContainsKey(path)){Locate.importers[path] = AssetImporter.GetAtPath(path);}
            return Locate.importers[path].As<Type>();
        }
        #endif
    }
}
// 
// Copyright (c) Microsoft and contributors.  All rights reserved.
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// 
// See the License for the specific language governing permissions and
// limitations under the License.
// 

// Warning: This code was generated by a tool.
// 
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Hyak.Common.Internals;
using Microsoft.Azure.Graph.RBAC;
using Microsoft.Azure.Graph.RBAC.Models;
using Newtonsoft.Json.Linq;

namespace Microsoft.Azure.Graph.RBAC
{
    /// <summary>
    /// Operations for working with Objects in Azure AD Graph API. (see
    /// http://msdn.microsoft.com/en-us/library/azure/hh974476.aspx for more
    /// information)
    /// </summary>
    /// <remarks>
    /// Generated code: do not hand-edit. Both operations build the request URL from
    /// the client's TenantID, hand-roll the JSON (de)serialization with JToken, and
    /// pin the Graph API version to "1.6-internal".
    /// </remarks>
    internal partial class ObjectOperations : IServiceOperations<GraphRbacManagementClient>, IObjectOperations
    {
        /// <summary>
        /// Initializes a new instance of the ObjectOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        internal ObjectOperations(GraphRbacManagementClient client)
        {
            this._client = client;
        }

        private GraphRbacManagementClient _client;

        /// <summary>
        /// Gets a reference to the
        /// Microsoft.Azure.Graph.RBAC.GraphRbacManagementClient.
        /// </summary>
        public GraphRbacManagementClient Client
        {
            get { return this._client; }
        }

        /// <summary>
        /// Gets the details for current logged in user
        /// </summary>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// Server response for Active Directory objects inquiry API calls
        /// </returns>
        /// <exception cref="CloudException">Thrown for any non-200 response from the service.</exception>
        public async Task<GetCurrentUserResult> GetCurrentUserAsync(CancellationToken cancellationToken)
        {
            // Validate

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                TracingAdapter.Enter(invocationId, this, "GetCurrentUserAsync", tracingParameters);
            }

            // Construct URL: GET /{tenantId}/me?api-version=1.6-internal
            string url = "";
            url = url + "/";
            url = url + Uri.EscapeDataString(this.Client.TenantID);
            url = url + "/me";
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=1.6-internal");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Only 200 OK is treated as success; anything else becomes a CloudException.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    GetCurrentUserResult result = null;
                    // Deserialize Response: map each known JSON property onto the AADObject model,
                    // ignoring absent or JSON-null values.
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new GetCurrentUserResult();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }

                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            AADObject aADObjectInstance = new AADObject();
                            result.AADObject = aADObjectInstance;

                            JToken objectIdValue = responseDoc["objectId"];
                            if (objectIdValue != null && objectIdValue.Type != JTokenType.Null)
                            {
                                string objectIdInstance = ((string)objectIdValue);
                                aADObjectInstance.ObjectId = objectIdInstance;
                            }

                            JToken objectTypeValue = responseDoc["objectType"];
                            if (objectTypeValue != null && objectTypeValue.Type != JTokenType.Null)
                            {
                                string objectTypeInstance = ((string)objectTypeValue);
                                aADObjectInstance.ObjectType = objectTypeInstance;
                            }

                            JToken displayNameValue = responseDoc["displayName"];
                            if (displayNameValue != null && displayNameValue.Type != JTokenType.Null)
                            {
                                string displayNameInstance = ((string)displayNameValue);
                                aADObjectInstance.DisplayName = displayNameInstance;
                            }

                            JToken userPrincipalNameValue = responseDoc["userPrincipalName"];
                            if (userPrincipalNameValue != null && userPrincipalNameValue.Type != JTokenType.Null)
                            {
                                string userPrincipalNameInstance = ((string)userPrincipalNameValue);
                                aADObjectInstance.UserPrincipalName = userPrincipalNameInstance;
                            }

                            JToken mailValue = responseDoc["mail"];
                            if (mailValue != null && mailValue.Type != JTokenType.Null)
                            {
                                string mailInstance = ((string)mailValue);
                                aADObjectInstance.Mail = mailInstance;
                            }

                            JToken mailEnabledValue = responseDoc["mailEnabled"];
                            if (mailEnabledValue != null && mailEnabledValue.Type != JTokenType.Null)
                            {
                                bool mailEnabledInstance = ((bool)mailEnabledValue);
                                aADObjectInstance.MailEnabled = mailEnabledInstance;
                            }

                            JToken securityEnabledValue = responseDoc["securityEnabled"];
                            if (securityEnabledValue != null && securityEnabledValue.Type != JTokenType.Null)
                            {
                                bool securityEnabledInstance = ((bool)securityEnabledValue);
                                aADObjectInstance.SecurityEnabled = securityEnabledInstance;
                            }

                            JToken signInNameValue = responseDoc["signInName"];
                            if (signInNameValue != null && signInNameValue.Type != JTokenType.Null)
                            {
                                string signInNameInstance = ((string)signInNameValue);
                                aADObjectInstance.SignInName = signInNameInstance;
                            }

                            JToken servicePrincipalNamesArray = responseDoc["servicePrincipalNames"];
                            if (servicePrincipalNamesArray != null && servicePrincipalNamesArray.Type != JTokenType.Null)
                            {
                                foreach (JToken servicePrincipalNamesValue in ((JArray)servicePrincipalNamesArray))
                                {
                                    aADObjectInstance.ServicePrincipalNames.Add(((string)servicePrincipalNamesValue));
                                }
                            }

                            JToken userTypeValue = responseDoc["userType"];
                            if (userTypeValue != null && userTypeValue.Type != JTokenType.Null)
                            {
                                string userTypeInstance = ((string)userTypeValue);
                                aADObjectInstance.UserType = userTypeInstance;
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }

        /// <summary>
        /// Gets AD group membership by provided AD object Ids
        /// </summary>
        /// <param name='parameters'>
        /// Required. Objects filtering parameters.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// Server response for Active Directory objects inquiry API calls
        /// </returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="parameters"/> is null.</exception>
        /// <exception cref="CloudException">Thrown for any non-200 response from the service.</exception>
        public async Task<GetObjectsResult> GetObjectsByObjectIdsAsync(GetObjectsParameters parameters, CancellationToken cancellationToken)
        {
            // Validate
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("parameters", parameters);
                TracingAdapter.Enter(invocationId, this, "GetObjectsByObjectIdsAsync", tracingParameters);
            }

            // Construct URL: POST /{tenantId}/getObjectsByObjectIds?api-version=1.6-internal
            string url = "";
            url = url + "/";
            url = url + Uri.EscapeDataString(this.Client.TenantID);
            url = url + "/getObjectsByObjectIds";
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=1.6-internal");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Post;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Serialize Request: lazy collections are only written once initialized.
                string requestContent = null;
                JToken requestDoc = null;

                JObject getObjectsParametersValue = new JObject();
                requestDoc = getObjectsParametersValue;

                if (parameters.Ids != null)
                {
                    if (parameters.Ids is ILazyCollection == false || ((ILazyCollection)parameters.Ids).IsInitialized)
                    {
                        JArray objectIdsArray = new JArray();
                        foreach (string objectIdsItem in parameters.Ids)
                        {
                            objectIdsArray.Add(objectIdsItem);
                        }
                        getObjectsParametersValue["objectIds"] = objectIdsArray;
                    }
                }

                if (parameters.Types != null)
                {
                    if (parameters.Types is ILazyCollection == false || ((ILazyCollection)parameters.Types).IsInitialized)
                    {
                        JArray typesArray = new JArray();
                        foreach (string typesItem in parameters.Types)
                        {
                            typesArray.Add(typesItem);
                        }
                        getObjectsParametersValue["types"] = typesArray;
                    }
                }

                getObjectsParametersValue["includeDirectoryObjectReferences"] = parameters.IncludeDirectoryObjectReferences;

                requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
                httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
                httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Only 200 OK is treated as success; anything else becomes a CloudException.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    GetObjectsResult result = null;
                    // Deserialize Response: one AADObject per element of the "value" array,
                    // plus the paging continuation from "odata.nextLink" when present.
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new GetObjectsResult();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }

                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            JToken valueArray = responseDoc["value"];
                            if (valueArray != null && valueArray.Type != JTokenType.Null)
                            {
                                foreach (JToken valueValue in ((JArray)valueArray))
                                {
                                    AADObject aADObjectInstance = new AADObject();
                                    result.AADObject.Add(aADObjectInstance);

                                    JToken objectIdValue = valueValue["objectId"];
                                    if (objectIdValue != null && objectIdValue.Type != JTokenType.Null)
                                    {
                                        string objectIdInstance = ((string)objectIdValue);
                                        aADObjectInstance.ObjectId = objectIdInstance;
                                    }

                                    JToken objectTypeValue = valueValue["objectType"];
                                    if (objectTypeValue != null && objectTypeValue.Type != JTokenType.Null)
                                    {
                                        string objectTypeInstance = ((string)objectTypeValue);
                                        aADObjectInstance.ObjectType = objectTypeInstance;
                                    }

                                    JToken displayNameValue = valueValue["displayName"];
                                    if (displayNameValue != null && displayNameValue.Type != JTokenType.Null)
                                    {
                                        string displayNameInstance = ((string)displayNameValue);
                                        aADObjectInstance.DisplayName = displayNameInstance;
                                    }

                                    JToken userPrincipalNameValue = valueValue["userPrincipalName"];
                                    if (userPrincipalNameValue != null && userPrincipalNameValue.Type != JTokenType.Null)
                                    {
                                        string userPrincipalNameInstance = ((string)userPrincipalNameValue);
                                        aADObjectInstance.UserPrincipalName = userPrincipalNameInstance;
                                    }

                                    JToken mailValue = valueValue["mail"];
                                    if (mailValue != null && mailValue.Type != JTokenType.Null)
                                    {
                                        string mailInstance = ((string)mailValue);
                                        aADObjectInstance.Mail = mailInstance;
                                    }

                                    JToken mailEnabledValue = valueValue["mailEnabled"];
                                    if (mailEnabledValue != null && mailEnabledValue.Type != JTokenType.Null)
                                    {
                                        bool mailEnabledInstance = ((bool)mailEnabledValue);
                                        aADObjectInstance.MailEnabled = mailEnabledInstance;
                                    }

                                    JToken securityEnabledValue = valueValue["securityEnabled"];
                                    if (securityEnabledValue != null && securityEnabledValue.Type != JTokenType.Null)
                                    {
                                        bool securityEnabledInstance = ((bool)securityEnabledValue);
                                        aADObjectInstance.SecurityEnabled = securityEnabledInstance;
                                    }

                                    JToken signInNameValue = valueValue["signInName"];
                                    if (signInNameValue != null && signInNameValue.Type != JTokenType.Null)
                                    {
                                        string signInNameInstance = ((string)signInNameValue);
                                        aADObjectInstance.SignInName = signInNameInstance;
                                    }

                                    JToken servicePrincipalNamesArray = valueValue["servicePrincipalNames"];
                                    if (servicePrincipalNamesArray != null && servicePrincipalNamesArray.Type != JTokenType.Null)
                                    {
                                        foreach (JToken servicePrincipalNamesValue in ((JArray)servicePrincipalNamesArray))
                                        {
                                            aADObjectInstance.ServicePrincipalNames.Add(((string)servicePrincipalNamesValue));
                                        }
                                    }

                                    JToken userTypeValue = valueValue["userType"];
                                    if (userTypeValue != null && userTypeValue.Type != JTokenType.Null)
                                    {
                                        string userTypeInstance = ((string)userTypeValue);
                                        aADObjectInstance.UserType = userTypeInstance;
                                    }
                                }
                            }

                            JToken odatanextLinkValue = responseDoc["odata.nextLink"];
                            if (odatanextLinkValue != null && odatanextLinkValue.Type != JTokenType.Null)
                            {
                                string odatanextLinkInstance = ((string)odatanextLinkValue);
                                result.NextLink = odatanextLinkInstance;
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
    }
}
/* The MIT License (MIT) Copyright (c) 2007 - 2020 Microting A/S Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microting.eForm.Infrastructure.Constants;
using Microting.eForm.Infrastructure.Data.Entities;
using NUnit.Framework;

namespace eFormSDK.Tests
{
    /// <summary>
    /// Unit tests for the <see cref="FieldValue"/> entity: verifies that
    /// Create/Update/Delete persist the row itself and write the expected
    /// audit rows (version number + workflow state) to FieldValueVersions.
    /// </summary>
    [TestFixture]
    public class FieldValuesUTest : DbTestFixture
    {
        [Test]
        public async Task FieldValues_Create_DoesCreate()
        {
            // Arrange - build the FieldValue and its whole dependency graph.
            Random rnd = new Random();
            FieldValue fieldValue = await BuildFieldValueWithDependenciesAsync(rnd).ConfigureAwait(false);

            // Act
            await fieldValue.Create(DbContext).ConfigureAwait(false);

            List<FieldValue> fieldValues = DbContext.FieldValues.AsNoTracking().ToList();
            List<FieldValueVersion> fieldValueVersions = DbContext.FieldValueVersions.AsNoTracking().ToList();

            // Assert - exactly one row and one version, both carrying the created state.
            Assert.NotNull(fieldValues);
            Assert.NotNull(fieldValueVersions);

            Assert.AreEqual(1, fieldValues.Count);
            Assert.AreEqual(1, fieldValueVersions.Count);

            AssertFieldValueMatches(fieldValues[0], fieldValue, Constants.WorkflowStates.Created);
            AssertVersionMatches(fieldValueVersions[0], Snapshot(fieldValue), 1, Constants.WorkflowStates.Created);
        }

        [Test]
        public async Task FieldValues_Update_DoesUpdate()
        {
            // Arrange
            Random rnd = new Random();
            FieldValue fieldValue = await BuildFieldValueWithDependenciesAsync(rnd).ConfigureAwait(false);
            await fieldValue.Create(DbContext).ConfigureAwait(false);

            // Capture the version-1 state before mutating anything.
            FieldValue before = Snapshot(fieldValue);

            // Act - change every mutable payload property, then persist.
            fieldValue.Accuracy = Guid.NewGuid().ToString();
            fieldValue.Altitude = Guid.NewGuid().ToString();
            fieldValue.Date = DateTime.UtcNow;
            fieldValue.Heading = Guid.NewGuid().ToString();
            fieldValue.Latitude = Guid.NewGuid().ToString();
            fieldValue.Longitude = Guid.NewGuid().ToString();
            fieldValue.Value = Guid.NewGuid().ToString();
            fieldValue.DoneAt = DateTime.UtcNow;
            await fieldValue.Update(DbContext).ConfigureAwait(false);

            List<FieldValue> fieldValues = DbContext.FieldValues.AsNoTracking().ToList();
            List<FieldValueVersion> fieldValueVersions = DbContext.FieldValueVersions.AsNoTracking().ToList();

            // Assert - still one row, now two versions: the old state and the new one.
            Assert.NotNull(fieldValues);
            Assert.NotNull(fieldValueVersions);

            Assert.AreEqual(1, fieldValues.Count);
            Assert.AreEqual(2, fieldValueVersions.Count);

            AssertFieldValueMatches(fieldValues[0], fieldValue, Constants.WorkflowStates.Created);
            AssertVersionMatches(fieldValueVersions[0], before, 1, Constants.WorkflowStates.Created);
            AssertVersionMatches(fieldValueVersions[1], Snapshot(fieldValue), 2, Constants.WorkflowStates.Created);
        }

        [Test]
        public async Task FieldValues_Delete_DoesSetWorkflowStateToRemoved()
        {
            // Arrange
            Random rnd = new Random();
            FieldValue fieldValue = await BuildFieldValueWithDependenciesAsync(rnd).ConfigureAwait(false);
            await fieldValue.Create(DbContext).ConfigureAwait(false);
            FieldValue before = Snapshot(fieldValue);

            // Act - Delete is a soft delete: the row stays, workflow state flips.
            await fieldValue.Delete(DbContext);

            List<FieldValue> fieldValues = DbContext.FieldValues.AsNoTracking().ToList();
            List<FieldValueVersion> fieldValueVersions = DbContext.FieldValueVersions.AsNoTracking().ToList();

            // Assert
            Assert.NotNull(fieldValues);
            Assert.NotNull(fieldValueVersions);

            Assert.AreEqual(1, fieldValues.Count);
            Assert.AreEqual(2, fieldValueVersions.Count);

            AssertFieldValueMatches(fieldValues[0], fieldValue, Constants.WorkflowStates.Removed);
            AssertVersionMatches(fieldValueVersions[0], before, 1, Constants.WorkflowStates.Created);
            AssertVersionMatches(fieldValueVersions[1], Snapshot(fieldValue), 2, Constants.WorkflowStates.Removed);
        }

        // Seeds every row a FieldValue references (site, unit, check list,
        // entity group, field type, field, worker, case, uploaded data) and
        // returns an UNPERSISTED FieldValue pointing at all of them, so each
        // test can perform Create() itself as the act (or part of arrange).
        private async Task<FieldValue> BuildFieldValueWithDependenciesAsync(Random rnd)
        {
            short shortMinValue = Int16.MinValue;
            short shortMaxValue = Int16.MaxValue;
            bool randomBool = rnd.Next(0, 2) > 0;

            Site site = new Site
            {
                Name = Guid.NewGuid().ToString(),
                MicrotingUid = rnd.Next(1, 255)
            };
            await site.Create(DbContext).ConfigureAwait(false);

            Unit unit = new Unit
            {
                CustomerNo = rnd.Next(1, 255),
                MicrotingUid = rnd.Next(1, 255),
                OtpCode = rnd.Next(1, 255),
                SiteId = site.Id
            };
            await unit.Create(DbContext).ConfigureAwait(false);

            CheckList checklist = new CheckList
            {
                Color = Guid.NewGuid().ToString(),
                Custom = Guid.NewGuid().ToString(),
                Description = Guid.NewGuid().ToString(),
                Field1 = rnd.Next(1, 255),
                Field2 = rnd.Next(1, 255),
                // Field3 is intentionally not set, matching the entity's shape
                // as exercised by the original tests.
                Field4 = rnd.Next(1, 255),
                Field5 = rnd.Next(1, 255),
                Field6 = rnd.Next(1, 255),
                Field7 = rnd.Next(1, 255),
                Field8 = rnd.Next(1, 255),
                Field9 = rnd.Next(1, 255),
                Field10 = rnd.Next(1, 255),
                Label = Guid.NewGuid().ToString(),
                Repeated = rnd.Next(1, 255),
                ApprovalEnabled = (short) rnd.Next(shortMinValue, shortMaxValue),
                CaseType = Guid.NewGuid().ToString(),
                DisplayIndex = rnd.Next(1, 255),
                DownloadEntities = (short) rnd.Next(shortMinValue, shortMaxValue),
                FastNavigation = (short) rnd.Next(shortMinValue, shortMaxValue),
                FolderName = Guid.NewGuid().ToString(),
                ManualSync = (short) rnd.Next(shortMinValue, shortMaxValue),
                MultiApproval = (short) rnd.Next(shortMinValue, shortMaxValue),
                OriginalId = Guid.NewGuid().ToString(),
                ReviewEnabled = (short) rnd.Next(shortMinValue, shortMaxValue),
                DocxExportEnabled = randomBool,
                DoneButtonEnabled = (short) rnd.Next(shortMinValue, shortMaxValue),
                ExtraFieldsEnabled = (short) rnd.Next(shortMinValue, shortMaxValue),
                JasperExportEnabled = randomBool,
                QuickSyncEnabled = (short) rnd.Next(shortMinValue, shortMaxValue)
            };
            await checklist.Create(DbContext).ConfigureAwait(false);

            EntityGroup entityGroup = new EntityGroup
            {
                Name = Guid.NewGuid().ToString(),
                Type = Guid.NewGuid().ToString(),
                MicrotingUid = Guid.NewGuid().ToString()
            };
            await entityGroup.Create(DbContext).ConfigureAwait(false);

            FieldType fieldType = new FieldType
            {
                Description = Guid.NewGuid().ToString(),
                Type = Guid.NewGuid().ToString()
            };
            await fieldType.Create(DbContext).ConfigureAwait(false);

            Field field = new Field
            {
                Color = Guid.NewGuid().ToString(),
                Custom = Guid.NewGuid().ToString(),
                Description = Guid.NewGuid().ToString(),
                Dummy = (short) rnd.Next(shortMinValue, shortMaxValue),
                Label = Guid.NewGuid().ToString(),
                Mandatory = (short) rnd.Next(shortMinValue, shortMaxValue),
                Multi = rnd.Next(1, 255),
                Optional = (short) rnd.Next(shortMinValue, shortMaxValue),
                Selected = (short) rnd.Next(shortMinValue, shortMaxValue),
                BarcodeEnabled = (short) rnd.Next(shortMinValue, shortMaxValue),
                BarcodeType = Guid.NewGuid().ToString(),
                DecimalCount = rnd.Next(1, 255),
                DefaultValue = Guid.NewGuid().ToString(),
                DisplayIndex = rnd.Next(1, 255),
                GeolocationEnabled = (short) rnd.Next(shortMinValue, shortMaxValue),
                GeolocationForced = (short) rnd.Next(shortMinValue, shortMaxValue),
                GeolocationHidden = (short) rnd.Next(shortMinValue, shortMaxValue),
                IsNum = (short) rnd.Next(shortMinValue, shortMaxValue),
                MaxLength = rnd.Next(1, 255),
                MaxValue = Guid.NewGuid().ToString(),
                MinValue = Guid.NewGuid().ToString(),
                OriginalId = Guid.NewGuid().ToString(),
                QueryType = Guid.NewGuid().ToString(),
                ReadOnly = (short) rnd.Next(shortMinValue, shortMaxValue),
                Split = (short) rnd.Next(shortMinValue, shortMaxValue),
                UnitName = Guid.NewGuid().ToString(),
                StopOnSave = (short) rnd.Next(shortMinValue, shortMaxValue),
                KeyValuePairList = Guid.NewGuid().ToString(),
                CheckListId = checklist.Id,
                EntityGroupId = entityGroup.Id,
                FieldTypeId = fieldType.Id
            };
            await field.Create(DbContext).ConfigureAwait(false);

            Worker worker = new Worker
            {
                Email = Guid.NewGuid().ToString(),
                FirstName = Guid.NewGuid().ToString(),
                LastName = Guid.NewGuid().ToString(),
                MicrotingUid = rnd.Next(1, 255)
            };
            await worker.Create(DbContext).ConfigureAwait(false);

            Case theCase = new Case
            {
                Custom = Guid.NewGuid().ToString(),
                Status = rnd.Next(1, 255),
                Type = Guid.NewGuid().ToString(),
                CaseUid = Guid.NewGuid().ToString(),
                DoneAt = DateTime.UtcNow,
                FieldValue1 = Guid.NewGuid().ToString(),
                FieldValue2 = Guid.NewGuid().ToString(),
                FieldValue3 = Guid.NewGuid().ToString(),
                FieldValue4 = Guid.NewGuid().ToString(),
                FieldValue5 = Guid.NewGuid().ToString(),
                FieldValue6 = Guid.NewGuid().ToString(),
                FieldValue7 = Guid.NewGuid().ToString(),
                FieldValue8 = Guid.NewGuid().ToString(),
                FieldValue9 = Guid.NewGuid().ToString(),
                FieldValue10 = Guid.NewGuid().ToString(),
                MicrotingUid = rnd.Next(shortMinValue, shortMaxValue),
                SiteId = site.Id,
                UnitId = unit.Id,
                WorkerId = worker.Id,
                CheckListId = checklist.Id,
                MicrotingCheckUid = rnd.Next(shortMinValue, shortMaxValue)
            };
            await theCase.Create(DbContext).ConfigureAwait(false);

            UploadedData uploadedData = new UploadedData
            {
                Checksum = Guid.NewGuid().ToString(),
                Extension = Guid.NewGuid().ToString(),
                Local = (short) rnd.Next(shortMinValue, shortMaxValue),
                CurrentFile = Guid.NewGuid().ToString(),
                ExpirationDate = DateTime.UtcNow,
                FileLocation = Guid.NewGuid().ToString(),
                FileName = Guid.NewGuid().ToString(),
                TranscriptionId = rnd.Next(1, 255),
                UploaderId = rnd.Next(1, 255),
                UploaderType = Guid.NewGuid().ToString()
            };
            await uploadedData.Create(DbContext).ConfigureAwait(false);

            return new FieldValue
            {
                Accuracy = Guid.NewGuid().ToString(),
                Altitude = Guid.NewGuid().ToString(),
                Date = DateTime.UtcNow,
                Heading = Guid.NewGuid().ToString(),
                Latitude = Guid.NewGuid().ToString(),
                Longitude = Guid.NewGuid().ToString(),
                Value = Guid.NewGuid().ToString(),
                CaseId = theCase.Id,
                DoneAt = DateTime.UtcNow,
                FieldId = field.Id,
                WorkerId = worker.Id,
                CheckListId = checklist.Id,
                UploadedDataId = uploadedData.Id
            };
        }

        // Detached copy of the persisted state of a FieldValue, used to pin
        // down what a version row taken at that moment should contain.
        private static FieldValue Snapshot(FieldValue source)
        {
            return new FieldValue
            {
                Id = source.Id,
                CreatedAt = source.CreatedAt,
                Accuracy = source.Accuracy,
                Altitude = source.Altitude,
                Date = source.Date,
                Heading = source.Heading,
                Latitude = source.Latitude,
                Longitude = source.Longitude,
                Value = source.Value,
                DoneAt = source.DoneAt,
                CaseId = source.CaseId,
                FieldId = source.FieldId,
                WorkerId = source.WorkerId,
                CheckListId = source.CheckListId,
                UploadedDataId = source.UploadedDataId
            };
        }

        // Asserts the re-loaded FieldValue row matches the in-memory entity.
        // Note: the UpdatedAt comparisons from the original tests stay
        // deliberately omitted (they were commented out there as flaky).
        private static void AssertFieldValueMatches(FieldValue actual, FieldValue expected,
            string expectedWorkflowState)
        {
            Assert.AreEqual(expected.CreatedAt.ToString(), actual.CreatedAt.ToString());
            Assert.AreEqual(expected.Version, actual.Version);
            Assert.AreEqual(expectedWorkflowState, actual.WorkflowState);
            Assert.AreEqual(expected.Id, actual.Id);
            Assert.AreEqual(expected.Accuracy, actual.Accuracy);
            Assert.AreEqual(expected.Altitude, actual.Altitude); // was never asserted before
            Assert.AreEqual(expected.Date.ToString(), actual.Date.ToString());
            Assert.AreEqual(expected.Heading, actual.Heading);
            Assert.AreEqual(expected.Latitude, actual.Latitude);
            Assert.AreEqual(expected.Longitude, actual.Longitude);
            Assert.AreEqual(expected.Value, actual.Value);
            Assert.AreEqual(expected.CaseId, actual.CaseId);
            Assert.AreEqual(expected.DoneAt.ToString(), actual.DoneAt.ToString());
            Assert.AreEqual(expected.FieldId, actual.FieldId);
            Assert.AreEqual(expected.WorkerId, actual.WorkerId);
            Assert.AreEqual(expected.CheckListId, actual.CheckListId);
            Assert.AreEqual(expected.UploadedDataId, actual.UploadedDataId);
        }

        // Asserts a FieldValueVersion row captures the given snapshot at the
        // given version number / workflow state.
        private static void AssertVersionMatches(FieldValueVersion actual, FieldValue expected,
            int expectedVersion, string expectedWorkflowState)
        {
            Assert.AreEqual(expected.CreatedAt.ToString(), actual.CreatedAt.ToString());
            Assert.AreEqual(expectedVersion, actual.Version);
            Assert.AreEqual(expectedWorkflowState, actual.WorkflowState);
            // FIX: the original compared fieldValue.Id against FieldId, which
            // contradicted the field.Id/FieldId assertion below and only held
            // because both ids are 1 in a fresh database. The version's
            // back-reference to its source row is FieldValueId.
            Assert.AreEqual(expected.Id, actual.FieldValueId);
            Assert.AreEqual(expected.Accuracy, actual.Accuracy);
            Assert.AreEqual(expected.Altitude, actual.Altitude); // was never asserted before
            Assert.AreEqual(expected.Date.ToString(), actual.Date.ToString());
            Assert.AreEqual(expected.Heading, actual.Heading);
            Assert.AreEqual(expected.Latitude, actual.Latitude);
            Assert.AreEqual(expected.Longitude, actual.Longitude);
            Assert.AreEqual(expected.Value, actual.Value);
            Assert.AreEqual(expected.CaseId, actual.CaseId);
            Assert.AreEqual(expected.DoneAt.ToString(), actual.DoneAt.ToString());
            Assert.AreEqual(expected.FieldId, actual.FieldId);
            Assert.AreEqual(expected.WorkerId, actual.WorkerId);
            Assert.AreEqual(expected.CheckListId, actual.CheckListId);
            Assert.AreEqual(expected.UploadedDataId, actual.UploadedDataId);
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;

namespace Google.Cloud.Dialogflow.V2.Tests
{
    /// <summary>Generated unit tests.</summary>
    public sealed class GeneratedEntityTypesClientTest
    {
        // Shared fixture: a strict gRPC mock whose operations client is stubbed out,
        // so only explicitly configured calls are allowed.
        private static moq::Mock<EntityTypes.EntityTypesClient> NewMockGrpcClient()
        {
            moq::Mock<EntityTypes.EntityTypesClient> mock = new moq::Mock<EntityTypes.EntityTypesClient>(moq::MockBehavior.Strict);
            mock.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            return mock;
        }

        // Shared fixture: the canonical EntityType every test expects back from the RPC.
        // A fresh instance per call, matching the original inline initializers.
        private static EntityType NewExpectedEntityType() => new EntityType
        {
            EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            DisplayName = "display_name137f65c2",
            Kind = EntityType.Types.Kind.Regexp,
            AutoExpansionMode = EntityType.Types.AutoExpansionMode.Default,
            Entities = { new EntityType.Types.Entity(), },
            EnableFuzzyExtraction = false,
        };

        // Wraps a response in a completed AsyncUnaryCall, as the async stubs require.
        private static grpccore::AsyncUnaryCall<T> AsyncCall<T>(T response) =>
            new grpccore::AsyncUnaryCall<T>(stt::Task.FromResult(response), null, null, null, null);

        [xunit::FactAttribute]
        public void GetEntityTypeRequestObject()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.GetEntityType(request);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetEntityTypeRequestObjectAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            // Exercise both async overloads: CallSettings and raw CancellationToken.
            EntityType fromCallSettings = await client.GetEntityTypeAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.GetEntityTypeAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetEntityType1()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.GetEntityType(request.Name);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetEntityType1Async()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.GetEntityTypeAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.GetEntityTypeAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetEntityType1ResourceNames()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.GetEntityType(request.EntityTypeName);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetEntityType1ResourceNamesAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.GetEntityTypeAsync(request.EntityTypeName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.GetEntityTypeAsync(request.EntityTypeName, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetEntityType2()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.GetEntityType(request.Name, request.LanguageCode);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetEntityType2Async()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.GetEntityTypeAsync(request.Name, request.LanguageCode, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.GetEntityTypeAsync(request.Name, request.LanguageCode, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void GetEntityType2ResourceNames()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.GetEntityType(request.EntityTypeName, request.LanguageCode);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task GetEntityType2ResourceNamesAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            GetEntityTypeRequest request = new GetEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.GetEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.GetEntityTypeAsync(request.EntityTypeName, request.LanguageCode, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.GetEntityTypeAsync(request.EntityTypeName, request.LanguageCode, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void CreateEntityTypeRequestObject()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.CreateEntityType(request);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateEntityTypeRequestObjectAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.CreateEntityTypeAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.CreateEntityTypeAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void CreateEntityType1()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.CreateEntityType(request.Parent, request.EntityType);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateEntityType1Async()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.CreateEntityTypeAsync(request.Parent, request.EntityType, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.CreateEntityTypeAsync(request.Parent, request.EntityType, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void CreateEntityType1ResourceNames()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.CreateEntityType(request.ParentAsAgentName, request.EntityType);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateEntityType1ResourceNamesAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.CreateEntityTypeAsync(request.ParentAsAgentName, request.EntityType, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.CreateEntityTypeAsync(request.ParentAsAgentName, request.EntityType, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void CreateEntityType2()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.CreateEntityType(request.Parent, request.EntityType, request.LanguageCode);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateEntityType2Async()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.CreateEntityTypeAsync(request.Parent, request.EntityType, request.LanguageCode, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.CreateEntityTypeAsync(request.Parent, request.EntityType, request.LanguageCode, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void CreateEntityType2ResourceNames()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.CreateEntityType(request.ParentAsAgentName, request.EntityType, request.LanguageCode);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task CreateEntityType2ResourceNamesAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            CreateEntityTypeRequest request = new CreateEntityTypeRequest
            {
                ParentAsAgentName = AgentName.FromProject("[PROJECT]"),
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.CreateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.CreateEntityTypeAsync(request.ParentAsAgentName, request.EntityType, request.LanguageCode, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.CreateEntityTypeAsync(request.ParentAsAgentName, request.EntityType, request.LanguageCode, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void UpdateEntityTypeRequestObject()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            UpdateEntityTypeRequest request = new UpdateEntityTypeRequest
            {
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
                UpdateMask = new wkt::FieldMask(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.UpdateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.UpdateEntityType(request);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task UpdateEntityTypeRequestObjectAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            UpdateEntityTypeRequest request = new UpdateEntityTypeRequest
            {
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
                UpdateMask = new wkt::FieldMask(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.UpdateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.UpdateEntityTypeAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.UpdateEntityTypeAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void UpdateEntityType1()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            UpdateEntityTypeRequest request = new UpdateEntityTypeRequest
            {
                EntityType = new EntityType(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.UpdateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.UpdateEntityType(request.EntityType);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task UpdateEntityType1Async()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            UpdateEntityTypeRequest request = new UpdateEntityTypeRequest
            {
                EntityType = new EntityType(),
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.UpdateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.UpdateEntityTypeAsync(request.EntityType, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.UpdateEntityTypeAsync(request.EntityType, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void UpdateEntityType2()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            UpdateEntityTypeRequest request = new UpdateEntityTypeRequest
            {
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.UpdateEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType actual = client.UpdateEntityType(request.EntityType, request.LanguageCode);
            xunit::Assert.Same(expected, actual);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task UpdateEntityType2Async()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            UpdateEntityTypeRequest request = new UpdateEntityTypeRequest
            {
                EntityType = new EntityType(),
                LanguageCode = "language_code2f6c7160",
            };
            EntityType expected = NewExpectedEntityType();
            grpcClientMock.Setup(x => x.UpdateEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            EntityType fromCallSettings = await client.UpdateEntityTypeAsync(request.EntityType, request.LanguageCode, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expected, fromCallSettings);
            EntityType fromToken = await client.UpdateEntityTypeAsync(request.EntityType, request.LanguageCode, st::CancellationToken.None);
            xunit::Assert.Same(expected, fromToken);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void DeleteEntityTypeRequestObject()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            DeleteEntityTypeRequest request = new DeleteEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            wkt::Empty expected = new wkt::Empty { };
            grpcClientMock.Setup(x => x.DeleteEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            // Delete returns no payload; the strict mock's VerifyAll is the assertion.
            client.DeleteEntityType(request);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public async stt::Task DeleteEntityTypeRequestObjectAsync()
        {
            moq::Mock<EntityTypes.EntityTypesClient> grpcClientMock = NewMockGrpcClient();
            DeleteEntityTypeRequest request = new DeleteEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            wkt::Empty expected = new wkt::Empty { };
            grpcClientMock.Setup(x => x.DeleteEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(AsyncCall(expected));
            EntityTypesClient client = new EntityTypesClientImpl(grpcClientMock.Object, null);
            await client.DeleteEntityTypeAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            await client.DeleteEntityTypeAsync(request, st::CancellationToken.None);
            grpcClientMock.VerifyAll();
        }

        [xunit::FactAttribute]
        public void DeleteEntityType()
        {
            moq::Mock<EntityTypes.EntityTypesClient> mockGrpcClient = new moq::Mock<EntityTypes.EntityTypesClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            DeleteEntityTypeRequest request = new DeleteEntityTypeRequest
            {
                EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"),
            };
            wkt::Empty expectedResponse = new wkt::Empty { };
            mockGrpcClient.Setup(x =>
x.DeleteEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); EntityTypesClient client = new EntityTypesClientImpl(mockGrpcClient.Object, null); client.DeleteEntityType(request.Name); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task DeleteEntityTypeAsync() { moq::Mock<EntityTypes.EntityTypesClient> mockGrpcClient = new moq::Mock<EntityTypes.EntityTypesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteEntityTypeRequest request = new DeleteEntityTypeRequest { EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); EntityTypesClient client = new EntityTypesClientImpl(mockGrpcClient.Object, null); await client.DeleteEntityTypeAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteEntityTypeAsync(request.Name, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void DeleteEntityTypeResourceNames() { moq::Mock<EntityTypes.EntityTypesClient> mockGrpcClient = new moq::Mock<EntityTypes.EntityTypesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteEntityTypeRequest request = new DeleteEntityTypeRequest { EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteEntityType(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); EntityTypesClient client = new 
EntityTypesClientImpl(mockGrpcClient.Object, null); client.DeleteEntityType(request.EntityTypeName); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task DeleteEntityTypeResourceNamesAsync() { moq::Mock<EntityTypes.EntityTypesClient> mockGrpcClient = new moq::Mock<EntityTypes.EntityTypesClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteEntityTypeRequest request = new DeleteEntityTypeRequest { EntityTypeName = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteEntityTypeAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); EntityTypesClient client = new EntityTypesClientImpl(mockGrpcClient.Object, null); await client.DeleteEntityTypeAsync(request.EntityTypeName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteEntityTypeAsync(request.EntityTypeName, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Buffers;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using EditorBrowsableAttribute = System.ComponentModel.EditorBrowsableAttribute;
using EditorBrowsableState = System.ComponentModel.EditorBrowsableState;
using Internal.Runtime.CompilerServices;

#if BIT64
using nint = System.Int64;
using nuint = System.UInt64;
#else // BIT64
using nint = System.Int32;
using nuint = System.UInt32;
#endif // BIT64

namespace System
{
    /// <summary>
    /// Memory represents a contiguous region of arbitrary memory similar to <see cref="Span{T}"/>.
    /// Unlike <see cref="Span{T}"/>, it is not a byref-like type.
    /// </summary>
    [DebuggerTypeProxy(typeof(MemoryDebugView<>))]
    [DebuggerDisplay("{ToString(),raw}")]
    public readonly struct Memory<T> : IEquatable<Memory<T>>
    {
        // NOTE: With the current implementation, Memory<T> and ReadOnlyMemory<T> must have the same layout,
        // as code uses Unsafe.As to cast between them.

        // The highest order bit of _index is used to discern whether _object is a pre-pinned array.
        // (_index < 0) => _object is a pre-pinned array, so Pin() will not allocate a new GCHandle
        // (else) => Pin() needs to allocate a new GCHandle to pin the object.
        //
        // _object may be null, a T[] (or a blittable-equivalent U[]), a string (for Memory<char>
        // created via unsafe cast from ReadOnlyMemory<char>), or a MemoryManager<T>.
        private readonly object? _object;
        private readonly int _index;
        private readonly int _length;

        /// <summary>
        /// Creates a new memory over the entirety of the target array.
        /// </summary>
        /// <param name="array">The target array.</param>
        /// <remarks>Returns default when <paramref name="array"/> is null.</remarks>
        /// <exception cref="System.ArrayTypeMismatchException">Thrown when <paramref name="array"/> is covariant and array's type is not exactly T[].</exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public Memory(T[]? array)
        {
            if (array == null)
            {
                this = default;
                return; // returns default
            }
            // Covariance guard: only reference types can have a runtime array type other than T[].
            if (default(T)! == null && array.GetType() != typeof(T[])) // TODO-NULLABLE: default(T) == null warning (https://github.com/dotnet/roslyn/issues/34757)
                ThrowHelper.ThrowArrayTypeMismatchException();

            _object = array;
            _index = 0;
            _length = array.Length;
        }

        /// <summary>
        /// Creates a new memory over the portion of the target array starting at 'start'
        /// and covering the remainder of the array.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal Memory(T[]? array, int start)
        {
            if (array == null)
            {
                if (start != 0)
                    ThrowHelper.ThrowArgumentOutOfRangeException();
                this = default;
                return; // returns default
            }
            if (default(T)! == null && array.GetType() != typeof(T[])) // TODO-NULLABLE: default(T) == null warning (https://github.com/dotnet/roslyn/issues/34757)
                ThrowHelper.ThrowArrayTypeMismatchException();
            if ((uint)start > (uint)array.Length)
                ThrowHelper.ThrowArgumentOutOfRangeException();

            _object = array;
            _index = start;
            _length = array.Length - start;
        }

        /// <summary>
        /// Creates a new memory over the portion of the target array beginning
        /// at 'start' index and ending at 'end' index (exclusive).
        /// </summary>
        /// <param name="array">The target array.</param>
        /// <param name="start">The index at which to begin the memory.</param>
        /// <param name="length">The number of items in the memory.</param>
        /// <remarks>Returns default when <paramref name="array"/> is null.</remarks>
        /// <exception cref="System.ArrayTypeMismatchException">Thrown when <paramref name="array"/> is covariant and array's type is not exactly T[].</exception>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified <paramref name="start"/> or end index is not in the range (&lt;0 or &gt;Length).
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public Memory(T[]? array, int start, int length)
        {
            if (array == null)
            {
                if (start != 0 || length != 0)
                    ThrowHelper.ThrowArgumentOutOfRangeException();
                this = default;
                return; // returns default
            }
            if (default(T)! == null && array.GetType() != typeof(T[])) // TODO-NULLABLE: default(T) == null warning (https://github.com/dotnet/roslyn/issues/34757)
                ThrowHelper.ThrowArrayTypeMismatchException();
#if BIT64
            // See comment in Span<T>.Slice for how this works.
            if ((ulong)(uint)start + (ulong)(uint)length > (ulong)(uint)array.Length)
                ThrowHelper.ThrowArgumentOutOfRangeException();
#else
            if ((uint)start > (uint)array.Length || (uint)length > (uint)(array.Length - start))
                ThrowHelper.ThrowArgumentOutOfRangeException();
#endif

            _object = array;
            _index = start;
            _length = length;
        }

        /// <summary>
        /// Creates a new memory from a memory manager that provides specific method implementations beginning
        /// at 0 index and ending at 'end' index (exclusive).
        /// </summary>
        /// <param name="manager">The memory manager.</param>
        /// <param name="length">The number of items in the memory.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified <paramref name="length"/> is negative.
        /// </exception>
        /// <remarks>For internal infrastructure only</remarks>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal Memory(MemoryManager<T> manager, int length)
        {
            Debug.Assert(manager != null);

            if (length < 0)
                ThrowHelper.ThrowArgumentOutOfRangeException();

            _object = manager;
            _index = 0;
            _length = length;
        }

        /// <summary>
        /// Creates a new memory from a memory manager that provides specific method implementations beginning
        /// at 'start' index and ending at 'end' index (exclusive).
        /// </summary>
        /// <param name="manager">The memory manager.</param>
        /// <param name="start">The index at which to begin the memory.</param>
        /// <param name="length">The number of items in the memory.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified <paramref name="start"/> or <paramref name="length"/> is negative.
        /// </exception>
        /// <remarks>For internal infrastructure only</remarks>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal Memory(MemoryManager<T> manager, int start, int length)
        {
            Debug.Assert(manager != null);

            if (length < 0 || start < 0)
                ThrowHelper.ThrowArgumentOutOfRangeException();

            _object = manager;
            _index = start;
            _length = length;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal Memory(object? obj, int start, int length)
        {
            // No validation performed in release builds; caller must provide any necessary validation.

            // 'obj is T[]' below also handles things like int[] <-> uint[] being convertible
            Debug.Assert((obj == null)
                || (typeof(T) == typeof(char) && obj is string)
#if FEATURE_UTF8STRING
                || ((typeof(T) == typeof(byte) || typeof(T) == typeof(Char8)) && obj is Utf8String)
#endif // FEATURE_UTF8STRING
                || (obj is T[])
                || (obj is MemoryManager<T>));

            _object = obj;
            _index = start;
            _length = length;
        }

        /// <summary>
        /// Defines an implicit conversion of an array to a <see cref="Memory{T}"/>
        /// </summary>
        public static implicit operator Memory<T>(T[]? array) => new Memory<T>(array);

        /// <summary>
        /// Defines an implicit conversion of a <see cref="ArraySegment{T}"/> to a <see cref="Memory{T}"/>
        /// </summary>
        public static implicit operator Memory<T>(ArraySegment<T> segment) => new Memory<T>(segment.Array, segment.Offset, segment.Count);

        /// <summary>
        /// Defines an implicit conversion of a <see cref="Memory{T}"/> to a <see cref="ReadOnlyMemory{T}"/>
        /// </summary>
        public static implicit operator ReadOnlyMemory<T>(Memory<T> memory) =>
            Unsafe.As<Memory<T>, ReadOnlyMemory<T>>(ref memory);

        /// <summary>
        /// Returns an empty <see cref="Memory{T}"/>
        /// </summary>
        public static Memory<T> Empty => default;

        /// <summary>
        /// The number of items in the memory.
        /// </summary>
        public int Length => _length;

        /// <summary>
        /// Returns true if Length is 0.
        /// </summary>
        public bool IsEmpty => _length == 0;

        /// <summary>
        /// For <see cref="Memory{Char}"/>, returns a new instance of string that represents the characters pointed to by the memory.
        /// Otherwise, returns a <see cref="string"/> with the name of the type and the number of elements.
        /// </summary>
        public override string ToString()
        {
            if (typeof(T) == typeof(char))
            {
                return (_object is string str) ? str.Substring(_index, _length) : Span.ToString();
            }
#if FEATURE_UTF8STRING
            else if (typeof(T) == typeof(Char8))
            {
                // TODO_UTF8STRING: Call into optimized transcoding routine when it's available.
                Span<T> span = Span;
                return Encoding.UTF8.GetString(new ReadOnlySpan<byte>(ref Unsafe.As<T, byte>(ref MemoryMarshal.GetReference(span)), span.Length));
            }
#endif // FEATURE_UTF8STRING
            return string.Format("System.Memory<{0}>[{1}]", typeof(T).Name, _length);
        }

        /// <summary>
        /// Forms a slice out of the given memory, beginning at 'start'.
        /// </summary>
        /// <param name="start">The index at which to begin this slice.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified <paramref name="start"/> index is not in range (&lt;0 or &gt;Length).
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public Memory<T> Slice(int start)
        {
            if ((uint)start > (uint)_length)
            {
                ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.start);
            }

            // It is expected for _index + start to be negative if the memory is already pre-pinned.
            return new Memory<T>(_object, _index + start, _length - start);
        }

        /// <summary>
        /// Forms a slice out of the given memory, beginning at 'start', of given length
        /// </summary>
        /// <param name="start">The index at which to begin this slice.</param>
        /// <param name="length">The desired length for the slice (exclusive).</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified <paramref name="start"/> or end index is not in range (&lt;0 or &gt;Length).
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public Memory<T> Slice(int start, int length)
        {
#if BIT64
            // See comment in Span<T>.Slice for how this works.
            if ((ulong)(uint)start + (ulong)(uint)length > (ulong)(uint)_length)
                ThrowHelper.ThrowArgumentOutOfRangeException();
#else
            if ((uint)start > (uint)_length || (uint)length > (uint)(_length - start))
                ThrowHelper.ThrowArgumentOutOfRangeException();
#endif

            // It is expected for _index + start to be negative if the memory is already pre-pinned.
            return new Memory<T>(_object, _index + start, length);
        }

        /// <summary>
        /// Returns a span from the memory.
        /// </summary>
        public unsafe Span<T> Span
        {
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            get
            {
                // This property getter has special support for returning a mutable Span<char> that wraps
                // an immutable String instance. This is obviously a dangerous feature and breaks type safety.
                // However, we need to handle the case where a ReadOnlyMemory<char> was created from a string
                // and then cast to a Memory<T>. Such a cast can only be done with unsafe or marshaling code,
                // in which case that's the dangerous operation performed by the dev, and we're just following
                // suit here to make it work as best as possible.

                ref T refToReturn = ref Unsafe.AsRef<T>(null);
                int lengthOfUnderlyingSpan = 0;

                // Copy this field into a local so that it can't change out from under us mid-operation.

                object? tmpObject = _object;
                if (tmpObject != null)
                {
                    if (typeof(T) == typeof(char) && tmpObject.GetType() == typeof(string))
                    {
                        // Special-case string since it's the most common for ROM<char>.

                        refToReturn = ref Unsafe.As<char, T>(ref Unsafe.As<string>(tmpObject).GetRawStringData());
                        lengthOfUnderlyingSpan = Unsafe.As<string>(tmpObject).Length;
                    }
#if FEATURE_UTF8STRING
                    else if ((typeof(T) == typeof(byte) || typeof(T) == typeof(Char8)) && tmpObject.GetType() == typeof(Utf8String))
                    {
                        refToReturn = ref Unsafe.As<byte, T>(ref Unsafe.As<Utf8String>(tmpObject).DangerousGetMutableReference());
                        lengthOfUnderlyingSpan = Unsafe.As<Utf8String>(tmpObject).Length;
                    }
#endif // FEATURE_UTF8STRING
                    else if (RuntimeHelpers.ObjectHasComponentSize(tmpObject))
                    {
                        // We know the object is not null, it's not a string, and it is variable-length. The only
                        // remaining option is for it to be a T[] (or a U[] which is blittable to T[], like int[]
                        // and uint[]). Otherwise somebody used private reflection to set this field, and we're not
                        // too worried about type safety violations at this point.

                        // 'tmpObject is T[]' below also handles things like int[] <-> uint[] being convertible
                        Debug.Assert(tmpObject is T[]);

                        refToReturn = ref Unsafe.As<byte, T>(ref Unsafe.As<T[]>(tmpObject).GetRawSzArrayData());
                        lengthOfUnderlyingSpan = Unsafe.As<T[]>(tmpObject).Length;
                    }
                    else
                    {
                        // We know the object is not null, and it's not variable-length, so it must be a MemoryManager<T>.
                        // Otherwise somebody used private reflection to set this field, and we're not too worried about
                        // type safety violations at that point. Note that it can't be a MemoryManager<U>, even if U and
                        // T are blittable (e.g., MemoryManager<int> to MemoryManager<uint>), since there exists no
                        // constructor or other public API which would allow such a conversion.

                        Debug.Assert(tmpObject is MemoryManager<T>);
                        Span<T> memoryManagerSpan = Unsafe.As<MemoryManager<T>>(tmpObject).GetSpan();
                        refToReturn = ref MemoryMarshal.GetReference(memoryManagerSpan);
                        lengthOfUnderlyingSpan = memoryManagerSpan.Length;
                    }

                    // If the Memory<T> or ReadOnlyMemory<T> instance is torn, this property getter has undefined behavior.
                    // We try to detect this condition and throw an exception, but it's possible that a torn struct might
                    // appear to us to be valid, and we'll return an undesired span. Such a span is always guaranteed at
                    // least to be in-bounds when compared with the original Memory<T> instance, so using the span won't
                    // AV the process.

                    // Masking off the high bit strips the "pre-pinned array" flag from _index.
                    nuint desiredStartIndex = (uint)_index & (uint)ReadOnlyMemory<T>.RemoveFlagsBitMask;
                    int desiredLength = _length;

#if BIT64
                    // See comment in Span<T>.Slice for how this works.
                    if ((ulong)desiredStartIndex + (ulong)(uint)desiredLength > (ulong)(uint)lengthOfUnderlyingSpan)
                    {
                        ThrowHelper.ThrowArgumentOutOfRangeException();
                    }
#else
                    if ((uint)desiredStartIndex > (uint)lengthOfUnderlyingSpan || (uint)desiredLength > (uint)(lengthOfUnderlyingSpan - desiredStartIndex))
                    {
                        ThrowHelper.ThrowArgumentOutOfRangeException();
                    }
#endif

                    refToReturn = ref Unsafe.Add(ref refToReturn, (IntPtr)(void*)desiredStartIndex);
                    lengthOfUnderlyingSpan = desiredLength;
                }

                return new Span<T>(ref refToReturn, lengthOfUnderlyingSpan);
            }
        }

        /// <summary>
        /// Copies the contents of the memory into the destination. If the source
        /// and destination overlap, this method behaves as if the original values are in
        /// a temporary location before the destination is overwritten.
        ///
        /// <param name="destination">The Memory to copy items into.</param>
        /// <exception cref="System.ArgumentException">
        /// Thrown when the destination is shorter than the source.
        /// </exception>
        /// </summary>
        public void CopyTo(Memory<T> destination) => Span.CopyTo(destination.Span);

        /// <summary>
        /// Copies the contents of the memory into the destination. If the source
        /// and destination overlap, this method behaves as if the original values are in
        /// a temporary location before the destination is overwritten.
        ///
        /// <returns>If the destination is shorter than the source, this method
        /// return false and no data is written to the destination.</returns>
        /// </summary>
        /// <param name="destination">The span to copy items into.</param>
        public bool TryCopyTo(Memory<T> destination) => Span.TryCopyTo(destination.Span);

        /// <summary>
        /// Creates a handle for the memory.
        /// The GC will not move the memory until the returned <see cref="MemoryHandle"/>
        /// is disposed, enabling taking and using the memory's address.
        /// <exception cref="System.ArgumentException">
        /// An instance with nonprimitive (non-blittable) members cannot be pinned.
        /// </exception>
        /// </summary>
        public unsafe MemoryHandle Pin()
        {
            // Just like the Span property getter, we have special support for a mutable Memory<char>
            // that wraps an immutable String instance. This might happen if a caller creates an
            // immutable ROM<char> wrapping a String, then uses Unsafe.As to create a mutable M<char>.
            // This needs to work, however, so that code that uses a single Memory<char> field to store either
            // a readable ReadOnlyMemory<char> or a writable Memory<char> can still be pinned and
            // used for interop purposes.

            // It's possible that the below logic could result in an AV if the struct
            // is torn. This is ok since the caller is expecting to use raw pointers,
            // and we're not required to keep this as safe as the other Span-based APIs.

            object? tmpObject = _object;
            if (tmpObject != null)
            {
                if (typeof(T) == typeof(char) && tmpObject is string s)
                {
                    GCHandle handle = GCHandle.Alloc(tmpObject, GCHandleType.Pinned);
                    ref char stringData = ref Unsafe.Add(ref s.GetRawStringData(), _index);
                    return new MemoryHandle(Unsafe.AsPointer(ref stringData), handle);
                }
#if FEATURE_UTF8STRING
                else if ((typeof(T) == typeof(byte) || typeof(T) == typeof(Char8)) && tmpObject is Utf8String utf8String)
                {
                    GCHandle handle = GCHandle.Alloc(tmpObject, GCHandleType.Pinned);
                    ref byte stringData = ref utf8String.DangerousGetMutableReference(_index);
                    return new MemoryHandle(Unsafe.AsPointer(ref stringData), handle);
                }
#endif // FEATURE_UTF8STRING
                else if (RuntimeHelpers.ObjectHasComponentSize(tmpObject))
                {
                    // 'tmpObject is T[]' below also handles things like int[] <-> uint[] being convertible
                    Debug.Assert(tmpObject is T[]);

                    // Array is already pre-pinned
                    if (_index < 0)
                    {
                        void* pointer = Unsafe.Add<T>(Unsafe.AsPointer(ref Unsafe.As<T[]>(tmpObject).GetRawSzArrayData()), _index & ReadOnlyMemory<T>.RemoveFlagsBitMask);
                        return new MemoryHandle(pointer);
                    }
                    else
                    {
                        GCHandle handle = GCHandle.Alloc(tmpObject, GCHandleType.Pinned);
                        void* pointer = Unsafe.Add<T>(Unsafe.AsPointer(ref Unsafe.As<T[]>(tmpObject).GetRawSzArrayData()), _index);
                        return new MemoryHandle(pointer, handle);
                    }
                }
                else
                {
                    // Non-array, non-string backing store must be a MemoryManager<T>; delegate pinning to it.
                    Debug.Assert(tmpObject is MemoryManager<T>);
                    return Unsafe.As<MemoryManager<T>>(tmpObject).Pin(_index);
                }
            }

            return default;
        }

        /// <summary>
        /// Copies the contents from the memory into a new array. This heap
        /// allocates, so should generally be avoided, however it is sometimes
        /// necessary to bridge the gap with APIs written in terms of arrays.
        /// </summary>
        public T[] ToArray() => Span.ToArray();

        /// <summary>
        /// Determines whether the specified object is equal to the current object.
        /// Returns true if the object is Memory or ReadOnlyMemory and if both objects point to the same array and have the same length.
        /// </summary>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override bool Equals(object? obj)
        {
            if (obj is ReadOnlyMemory<T>)
            {
                return ((ReadOnlyMemory<T>)obj).Equals(this);
            }
            else if (obj is Memory<T> memory)
            {
                return Equals(memory);
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// Returns true if the memory points to the same array and has the same length.  Note that
        /// this does *not* check to see if the *contents* are equal.
        /// </summary>
        public bool Equals(Memory<T> other)
        {
            return
                _object == other._object &&
                _index == other._index &&
                _length == other._length;
        }

        /// <summary>
        /// Serves as the default hash function.
        /// </summary>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override int GetHashCode()
        {
            // We use RuntimeHelpers.GetHashCode instead of Object.GetHashCode because the hash
            // code is based on object identity and referential equality, not deep equality (as common with string).
            return (_object != null) ? HashCode.Combine(RuntimeHelpers.GetHashCode(_object), _index, _length) : 0;
        }
    }
}
using System; using System.Runtime.InteropServices; using System.Text; using DokanNet.Logging; using DokanNet.Native; namespace DokanNet { /// <summary> /// Helper and extension methods to %Dokan. /// </summary> public static class Dokan { #region Dokan Driver Options /// <summary> /// The %Dokan version that DokanNet is compatible with. Currently it is version 1.0.0. /// </summary> /// <see cref="DOKAN_OPTIONS.Version"/> private const ushort DOKAN_VERSION = 200; #endregion Dokan Driver Options /// <summary> /// Initialize all required Dokan internal resources. /// /// This needs to be called only once before trying to use <see cref="Mount"/> or <see cref="CreateFileSystem"/> for the first time. /// Otherwise both will fail and raise an exception. /// </summary> public static void Init() { NativeMethods.DokanInit(); } /// <summary> /// Release all allocated resources by <see cref="Init"/> when they are no longer needed. /// /// This should be called when the application no longer expects to create a new FileSystem with /// <see cref="Mount"/> or <see cref="CreateFileSystem"/> and after all devices are unmount. /// </summary> public static void Shutdown() { NativeMethods.DokanShutdown(); } /// <summary> /// Mount a new %Dokan Volume. /// This function block until the device is unmount. /// It is mandatory to have called <see cref="DokanInit"/> previously to use this API. /// </summary> /// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param> /// <param name="mountPoint">Mount point. 
Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param> /// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug informations</param> /// <exception cref="DokanException">If the mount fails.</exception> public static void Mount(this IDokanOperations operations, string mountPoint, ILogger logger = null) { Mount(operations, mountPoint, DokanOptions.FixedDrive, logger); } /// <summary> /// Mount a new %Dokan Volume. /// This function block until the device is unmount. /// It is mandatory to have called <see cref="DokanInit"/> previously to use this API. /// </summary> /// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param> /// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param> /// <param name="mountOptions"><see cref="DokanOptions"/> features enable for the mount.</param> /// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug informations.</param> /// <exception cref="DokanException">If the mount fails.</exception> public static void Mount(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions, ILogger logger = null) { Mount(operations, mountPoint, mountOptions, false, logger); } /// <summary> /// Mount a new %Dokan Volume. /// This function block until the device is unmount. /// It is mandatory to have called <see cref="DokanInit"/> previously to use this API. /// </summary> /// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param> /// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param> /// <param name="mountOptions"><see cref="DokanOptions"/> features enable for the mount.</param> /// <param name="singleThread">Only use a single thread to process events. 
This is highly not recommended as can easily create a bottleneck.</param> /// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug informations.</param> /// <exception cref="DokanException">If the mount fails.</exception> public static void Mount(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions, bool singleThread, ILogger logger = null) { Mount(operations, mountPoint, mountOptions, singleThread, DOKAN_VERSION, logger); } /// <summary> /// Mount a new %Dokan Volume. /// This function block until the device is unmount. /// It is mandatory to have called <see cref="DokanInit"/> previously to use this API. /// </summary> /// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param> /// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param> /// <param name="mountOptions"><see cref="DokanOptions"/> features enable for the mount.</param> /// <param name="singleThread">Only use a single thread to process events. This is highly not recommended as can easily create a bottleneck.</param> /// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param> /// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug informations.</param> /// <exception cref="DokanException">If the mount fails.</exception> public static void Mount(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions, bool singleThread, int version, ILogger logger = null) { Mount(operations, mountPoint, mountOptions, singleThread, version, TimeSpan.FromSeconds(20), string.Empty, 512, 512, logger); } /// <summary> /// Mount a new %Dokan Volume. /// This function block until the device is unmount. /// It is mandatory to have called <see cref="DokanInit"/> previously to use this API. 
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="timeout">Max timeout of each request before dokan gives up.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
public static void Mount(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, TimeSpan timeout, ILogger logger = null)
{
    Mount(operations, mountPoint, mountOptions, singleThread, version, timeout, string.Empty, 512, 512, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// This function blocks until the device is unmounted.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="timeout">Max timeout of each request before dokan gives up.</param>
/// <param name="uncName">UNC name used for network volume.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
public static void Mount(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, TimeSpan timeout, string uncName, ILogger logger = null)
{
    Mount(operations, mountPoint, mountOptions, singleThread, version, timeout, uncName, 512, 512, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// This function blocks until the device is unmounted.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="timeout">Max timeout of each request before dokan gives up.</param>
/// <param name="uncName">UNC name used for network volume.</param>
/// <param name="allocationUnitSize">Allocation Unit Size of the volume. This will behave on the file size.</param>
/// <param name="sectorSize">Sector Size of the volume. This will behave on the file size.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
public static void Mount(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, TimeSpan timeout, string uncName = null, int allocationUnitSize = 512,
    int sectorSize = 512, ILogger logger = null)
{
    // Fall back to a default logger when the caller did not supply one.
    if (logger == null)
    {
#if TRACE
        logger = new ConsoleLogger("[DokanNet] ");
#else
        logger = new NullLogger();
#endif
    }

    // Proxy that adapts the managed IDokanOperations implementation to the
    // native callback table consumed by the Dokan driver.
    var dokanOperationProxy = new DokanOperationProxy(operations, logger);

    var dokanOptions = new DOKAN_OPTIONS
    {
        Version = (ushort)version,
        MountPoint = mountPoint,
        // The native layer expects null (not "") when no UNC name is used.
        UNCName = string.IsNullOrEmpty(uncName) ? null : uncName,
        SingleThread = singleThread,
        Options = (uint)mountOptions,
        Timeout = (uint)timeout.TotalMilliseconds,
        AllocationUnitSize = (uint)allocationUnitSize,
        SectorSize = (uint)sectorSize,
        VolumeSecurityDescriptorLength = 0
    };

    var dokanOperations = new DOKAN_OPERATIONS
    {
        ZwCreateFile = dokanOperationProxy.ZwCreateFileProxy,
        Cleanup = dokanOperationProxy.CleanupProxy,
        CloseFile = dokanOperationProxy.CloseFileProxy,
        ReadFile = dokanOperationProxy.ReadFileProxy,
        WriteFile = dokanOperationProxy.WriteFileProxy,
        FlushFileBuffers = dokanOperationProxy.FlushFileBuffersProxy,
        GetFileInformation = dokanOperationProxy.GetFileInformationProxy,
        FindFiles = dokanOperationProxy.FindFilesProxy,
        FindFilesWithPattern = dokanOperationProxy.FindFilesWithPatternProxy,
        SetFileAttributes = dokanOperationProxy.SetFileAttributesProxy,
        SetFileTime = dokanOperationProxy.SetFileTimeProxy,
        DeleteFile = dokanOperationProxy.DeleteFileProxy,
        DeleteDirectory = dokanOperationProxy.DeleteDirectoryProxy,
        MoveFile = dokanOperationProxy.MoveFileProxy,
        SetEndOfFile = dokanOperationProxy.SetEndOfFileProxy,
        SetAllocationSize = dokanOperationProxy.SetAllocationSizeProxy,
        LockFile = dokanOperationProxy.LockFileProxy,
        UnlockFile = dokanOperationProxy.UnlockFileProxy,
        GetDiskFreeSpace = dokanOperationProxy.GetDiskFreeSpaceProxy,
        GetVolumeInformation = dokanOperationProxy.GetVolumeInformationProxy,
        Mounted = dokanOperationProxy.MountedProxy,
        Unmounted = dokanOperationProxy.UnmountedProxy,
        GetFileSecurity = dokanOperationProxy.GetFileSecurityProxy,
        SetFileSecurity = dokanOperationProxy.SetFileSecurityProxy,
        FindStreams = dokanOperationProxy.FindStreamsProxy
    };

    // Blocking call: returns only when the volume is unmounted or mounting fails.
    DokanStatus status = NativeMethods.DokanMain(dokanOptions, dokanOperations);
    if (status != DokanStatus.Success)
    {
        throw new DokanException(status);
    }

    // Keep both objects reachable until DokanMain has returned so the GC
    // cannot collect them (or the delegates referenced from dokanOperations)
    // while the native call is still in progress.
    GC.KeepAlive(dokanOptions);
    GC.KeepAlive(dokanOperations);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, ILogger logger = null)
{
    return CreateFileSystem(operations, mountPoint, DokanOptions.FixedDrive, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    ILogger logger = null)
{
    return CreateFileSystem(operations, mountPoint, mountOptions, false, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, ILogger logger = null)
{
    return CreateFileSystem(operations, mountPoint, mountOptions, singleThread, DOKAN_VERSION, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, ILogger logger = null)
{
    return CreateFileSystem(operations, mountPoint, mountOptions, singleThread, version, TimeSpan.FromSeconds(20), string.Empty, 512, 512, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="timeout">Max timeout of each request before dokan gives up.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, TimeSpan timeout, ILogger logger = null)
{
    return CreateFileSystem(operations, mountPoint, mountOptions, singleThread, version, timeout, string.Empty, 512, 512, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="timeout">Max timeout of each request before dokan gives up.</param>
/// <param name="uncName">UNC name used for network volume.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, TimeSpan timeout, string uncName, ILogger logger = null)
{
    return CreateFileSystem(operations, mountPoint, mountOptions, singleThread, version, timeout, uncName, 512, 512, logger);
}

/// <summary>
/// Mount a new %Dokan Volume.
/// It is mandatory to have called <see cref="DokanInit"/> previously to use this API.
/// This function returns directly on device mount or on failure.
/// <see cref="WaitForFileSystemClosed"/> can be used to wait until the device is unmounted.
/// </summary>
/// <param name="operations">Instance of <see cref="IDokanOperations"/> that will be called for each request made by the kernel.</param>
/// <param name="mountPoint">Mount point. Can be <c>M:\\</c> (drive letter) or <c>C:\\mount\\dokan</c> (path in NTFS).</param>
/// <param name="mountOptions"><see cref="DokanOptions"/> features enabled for the mount.</param>
/// <param name="singleThread">Only use a single thread to process events.
/// This is highly not recommended as it can easily create a bottleneck.</param>
/// <param name="version">Version of the dokan features requested (Version "123" is equal to %Dokan version 1.2.3).</param>
/// <param name="timeout">Max timeout of each request before dokan gives up.</param>
/// <param name="uncName">UNC name used for network volume.</param>
/// <param name="allocationUnitSize">Allocation Unit Size of the volume. This will behave on the file size.</param>
/// <param name="sectorSize">Sector Size of the volume. This will behave on the file size.</param>
/// <param name="logger"><see cref="ILogger"/> that will log all DokanNet debug information.</param>
/// <exception cref="DokanException">If the mount fails.</exception>
/// <returns>Dokan mount instance context that can be used for related instance calls like <see cref="IsFileSystemRunning"/></returns>
public static DokanInstance CreateFileSystem(this IDokanOperations operations, string mountPoint, DokanOptions mountOptions,
    bool singleThread, int version, TimeSpan timeout, string uncName = null, int allocationUnitSize = 512,
    int sectorSize = 512, ILogger logger = null)
{
    // Fall back to a default logger when the caller did not supply one.
    if (logger == null)
    {
#if TRACE
        logger = new ConsoleLogger("[DokanNet] ");
#else
        logger = new NullLogger();
#endif
    }

    DokanInstance instance = new DokanInstance();

    // Proxy that adapts the managed IDokanOperations implementation to the
    // native callback table consumed by the Dokan driver.
    var dokanOperationProxy = new DokanOperationProxy(operations, logger);

    var dokanOptions = new DOKAN_OPTIONS
    {
        Version = (ushort)version,
        MountPoint = mountPoint,
        // The native layer expects null (not "") when no UNC name is used.
        UNCName = string.IsNullOrEmpty(uncName) ? null : uncName,
        SingleThread = singleThread,
        Options = (uint)mountOptions,
        Timeout = (uint)timeout.TotalMilliseconds,
        AllocationUnitSize = (uint)allocationUnitSize,
        SectorSize = (uint)sectorSize,
        VolumeSecurityDescriptorLength = 0
    };

    // Stored on the instance wrapped in NativeStructWrapper — NOTE(review):
    // presumably keeps the native struct memory valid for the lifetime of the
    // mount; confirm NativeStructWrapper semantics.
    instance.DokanOptions = new NativeStructWrapper<DOKAN_OPTIONS>(dokanOptions);

    var dokanOperations = new DOKAN_OPERATIONS
    {
        ZwCreateFile = dokanOperationProxy.ZwCreateFileProxy,
        Cleanup = dokanOperationProxy.CleanupProxy,
        CloseFile = dokanOperationProxy.CloseFileProxy,
        ReadFile = dokanOperationProxy.ReadFileProxy,
        WriteFile = dokanOperationProxy.WriteFileProxy,
        FlushFileBuffers = dokanOperationProxy.FlushFileBuffersProxy,
        GetFileInformation = dokanOperationProxy.GetFileInformationProxy,
        FindFiles = dokanOperationProxy.FindFilesProxy,
        FindFilesWithPattern = dokanOperationProxy.FindFilesWithPatternProxy,
        SetFileAttributes = dokanOperationProxy.SetFileAttributesProxy,
        SetFileTime = dokanOperationProxy.SetFileTimeProxy,
        DeleteFile = dokanOperationProxy.DeleteFileProxy,
        DeleteDirectory = dokanOperationProxy.DeleteDirectoryProxy,
        MoveFile = dokanOperationProxy.MoveFileProxy,
        SetEndOfFile = dokanOperationProxy.SetEndOfFileProxy,
        SetAllocationSize = dokanOperationProxy.SetAllocationSizeProxy,
        LockFile = dokanOperationProxy.LockFileProxy,
        UnlockFile = dokanOperationProxy.UnlockFileProxy,
        GetDiskFreeSpace = dokanOperationProxy.GetDiskFreeSpaceProxy,
        GetVolumeInformation = dokanOperationProxy.GetVolumeInformationProxy,
        Mounted = dokanOperationProxy.MountedProxy,
        Unmounted = dokanOperationProxy.UnmountedProxy,
        GetFileSecurity = dokanOperationProxy.GetFileSecurityProxy,
        SetFileSecurity = dokanOperationProxy.SetFileSecurityProxy,
        FindStreams = dokanOperationProxy.FindStreamsProxy
    };
    instance.DokanOperations = new NativeStructWrapper<DOKAN_OPERATIONS>(dokanOperations);

    // Non-blocking: returns as soon as the device is mounted (or mounting failed).
    DokanStatus status = NativeMethods.DokanCreateFileSystem(instance.DokanOptions, instance.DokanOperations, out instance.DokanHandle);
    if (status != DokanStatus.Success)
    {
        throw new DokanException(status);
    }
    return instance;
}

/// <summary>
/// Check if the FileSystem is still running or not.
/// </summary>
/// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
/// <returns>Whether the FileSystem is still running or not.</returns>
public static bool IsFileSystemRunning(this DokanInstance dokanInstance)
{
    return NativeMethods.DokanIsFileSystemRunning(dokanInstance.DokanHandle);
}

/// <summary>
/// Wait until the FileSystem is unmounted.
/// </summary>
/// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
/// <param name="milliSeconds">The time-out interval, in milliseconds. If a nonzero value is specified, the function waits until the object is signaled or the interval elapses.
/// If <paramref name="milliSeconds"/> is zero, the function does not enter a wait state if the object is not signaled; it always returns immediately.
/// If <paramref name="milliSeconds"/> is INFINITE, the function will return only when the object is signaled.</param>
/// <returns>See <a href="https://docs.microsoft.com/en-us/windows/win32/api/synchapi/nf-synchapi-waitforsingleobject">WaitForSingleObject</a> for a description of return values.</returns>
public static uint WaitForFileSystemClosed(this DokanInstance dokanInstance, uint milliSeconds)
{
    return NativeMethods.DokanWaitForFileSystemClosed(dokanInstance.DokanHandle, milliSeconds);
}

/// <summary>
/// Unmount a dokan device from a driver letter.
/// </summary>
/// <param name="driveLetter">Driver letter to unmount.</param>
/// <returns><c>true</c> if device was unmounted
/// -or- <c>false</c> in case of failure or device not found.</returns>
public static bool Unmount(char driveLetter)
{
    return NativeMethods.DokanUnmount(driveLetter);
}

/// <summary>
/// Unmount a dokan device from a mount point.
/// </summary>
/// <param name="mountPoint">Mount point to unmount (<c>Z</c>, <c>Z:</c>, <c>Z:\\</c>, <c>Z:\\MyMountPoint</c>).</param>
/// <returns><c>true</c> if the device was unmounted
/// -or- <c>false</c> in case of failure or device not found.</returns>
public static bool RemoveMountPoint(string mountPoint)
    => NativeMethods.DokanRemoveMountPoint(mountPoint);

/// <summary>
/// Retrieve native dokan dll version supported.
/// </summary>
/// <returns>Return native dokan dll version supported.</returns>
public static int Version => (int)NativeMethods.DokanVersion();

/// <summary>
/// Retrieve native dokan driver version supported.
/// </summary>
/// <returns>Return native dokan driver version supported.</returns>
public static int DriverVersion => (int)NativeMethods.DokanDriverVersion();

/// <summary>
/// Dokan User FS file-change notifications.
/// </summary>
/// <remarks>
/// If <see cref="DokanOptions.EnableNotificationAPI"/> is passed to <see cref="Dokan.Mount"/>,
/// the application implementing the user file system can notify
/// the Dokan kernel driver of external file- and directory-changes.
///
/// For example, the mirror application can notify the driver about
/// changes made in the mirrored directory so that those changes will
/// be automatically reflected in the implemented mirror file system.
///
/// This requires the filePath passed to the respective methods
/// to include the absolute path of the changed file including the drive-letter
/// and the path to the mount point, e.g. "C:\Dokan\ChangedFile.txt".
///
/// These functions SHOULD NOT be called from within the implemented
/// file system and thus be independent of any Dokan file system operation.
/// </remarks>
public class Notify
{
    /// <summary>
    /// Notify Dokan that a file or directory has been created.
    /// </summary>
    /// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
    /// <param name="filePath">Absolute path to the file or directory, including the mount-point of the file system.</param>
    /// <param name="isDirectory">Indicates if the path is a directory.</param>
    /// <returns>true if the notification succeeded.</returns>
    public static bool Create(DokanInstance dokanInstance, string filePath, bool isDirectory)
        => NativeMethods.DokanNotifyCreate(dokanInstance.DokanHandle, filePath, isDirectory);

    /// <summary>
    /// Notify Dokan that a file or directory has been deleted.
    /// </summary>
    /// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
    /// <param name="filePath">Absolute path to the file or directory, including the mount-point of the file system.</param>
    /// <param name="isDirectory">Indicates if the path is a directory.</param>
    /// <returns>true if the notification succeeded.</returns>
    /// <remarks><see cref="DokanOptions.EnableNotificationAPI"/> must be set in the mount options for this to succeed.</remarks>
    public static bool Delete(DokanInstance dokanInstance, string filePath, bool isDirectory)
        => NativeMethods.DokanNotifyDelete(dokanInstance.DokanHandle, filePath, isDirectory);

    /// <summary>
    /// Notify Dokan that file or directory attributes have changed.
    /// </summary>
    /// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
    /// <param name="filePath">Absolute path to the file or directory, including the mount-point of the file system.</param>
    /// <returns>true if the notification succeeded.</returns>
    /// <remarks><see cref="DokanOptions.EnableNotificationAPI"/> must be set in the mount options for this to succeed.</remarks>
    public static bool Update(DokanInstance dokanInstance, string filePath)
        => NativeMethods.DokanNotifyUpdate(dokanInstance.DokanHandle, filePath);

    /// <summary>
    /// Notify Dokan that file or directory extended attributes have changed.
    /// </summary>
    /// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
    /// <param name="filePath">Absolute path to the file or directory, including the mount-point of the file system.</param>
    /// <returns>true if the notification succeeded.</returns>
    /// <remarks><see cref="DokanOptions.EnableNotificationAPI"/> must be set in the mount options for this to succeed.</remarks>
    public static bool XAttrUpdate(DokanInstance dokanInstance, string filePath)
        => NativeMethods.DokanNotifyXAttrUpdate(dokanInstance.DokanHandle, filePath);

    /// <summary>
    /// Notify Dokan that a file or directory has been renamed.
    /// This method supports in-place rename for file/directory within the same parent.
    /// </summary>
    /// <param name="dokanInstance">The dokan mount context created by <see cref="CreateFileSystem"/>.</param>
    /// <param name="oldPath">Old, absolute path to the file or directory, including the mount-point of the file system.</param>
    /// <param name="newPath">New, absolute path to the file or directory, including the mount-point of the file system.</param>
    /// <param name="isDirectory">Indicates if the path is a directory.</param>
    /// <param name="isInSameDirectory">Indicates if the file or directory have the same parent directory.</param>
    /// <returns>true if the notification succeeded.</returns>
    /// <remarks><see cref="DokanOptions.EnableNotificationAPI"/> must be set in the mount options for this to succeed.</remarks>
    public static bool Rename(DokanInstance dokanInstance, string oldPath, string newPath, bool isDirectory, bool isInSameDirectory)
        => NativeMethods.DokanNotifyRename(dokanInstance.DokanHandle, oldPath, newPath, isDirectory, isInSameDirectory);
}
}
}
// Copyright (c) 2015, Outercurve Foundation. // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // - Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // - Neither the name of the Outercurve Foundation nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:2.0.50727.312 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. 
// </auto-generated> //------------------------------------------------------------------------------ // // This source code was auto-generated by wsdl, Version=2.0.50727.42. // namespace WebsitePanel.Providers.Database { using System.Diagnostics; using System.Web.Services; using System.ComponentModel; using System.Web.Services.Protocols; using System; using System.Xml.Serialization; using System.Data; /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] [System.Web.Services.WebServiceBindingAttribute(Name = "DatabaseServerSoap", Namespace = "http://smbsaas/websitepanel/server/")] [System.Xml.Serialization.XmlIncludeAttribute(typeof(ServiceProviderItem))] public partial class DatabaseServer : Microsoft.Web.Services3.WebServicesClientProtocol { public ServiceProviderSettingsSoapHeader ServiceProviderSettingsSoapHeaderValue; private System.Threading.SendOrPostCallback CheckConnectivityOperationCompleted; private System.Threading.SendOrPostCallback ExecuteSqlQueryOperationCompleted; private System.Threading.SendOrPostCallback ExecuteSqlNonQueryOperationCompleted; private System.Threading.SendOrPostCallback ExecuteSqlQuerySafeOperationCompleted; private System.Threading.SendOrPostCallback ExecuteSqlNonQuerySafeOperationCompleted; private System.Threading.SendOrPostCallback DatabaseExistsOperationCompleted; private System.Threading.SendOrPostCallback GetDatabasesOperationCompleted; private System.Threading.SendOrPostCallback GetDatabaseOperationCompleted; private System.Threading.SendOrPostCallback CreateDatabaseOperationCompleted; private System.Threading.SendOrPostCallback UpdateDatabaseOperationCompleted; private System.Threading.SendOrPostCallback DeleteDatabaseOperationCompleted; private System.Threading.SendOrPostCallback TruncateDatabaseOperationCompleted; private System.Threading.SendOrPostCallback 
GetTempFileBinaryChunkOperationCompleted; private System.Threading.SendOrPostCallback AppendTempFileBinaryChunkOperationCompleted; private System.Threading.SendOrPostCallback BackupDatabaseOperationCompleted; private System.Threading.SendOrPostCallback RestoreDatabaseOperationCompleted; private System.Threading.SendOrPostCallback UserExistsOperationCompleted; private System.Threading.SendOrPostCallback GetUsersOperationCompleted; private System.Threading.SendOrPostCallback GetUserOperationCompleted; private System.Threading.SendOrPostCallback CreateUserOperationCompleted; private System.Threading.SendOrPostCallback UpdateUserOperationCompleted; private System.Threading.SendOrPostCallback DeleteUserOperationCompleted; private System.Threading.SendOrPostCallback ChangeUserPasswordOperationCompleted; /// <remarks/> public DatabaseServer() { this.Url = "http://localhost/WebsitePanelServer/DatabaseServer.asmx"; } /// <remarks/> public event CheckConnectivityCompletedEventHandler CheckConnectivityCompleted; /// <remarks/> public event ExecuteSqlQueryCompletedEventHandler ExecuteSqlQueryCompleted; /// <remarks/> public event ExecuteSqlNonQueryCompletedEventHandler ExecuteSqlNonQueryCompleted; /// <remarks/> public event ExecuteSqlQuerySafeCompletedEventHandler ExecuteSqlQuerySafeCompleted; /// <remarks/> public event ExecuteSqlNonQuerySafeCompletedEventHandler ExecuteSqlNonQuerySafeCompleted; /// <remarks/> public event DatabaseExistsCompletedEventHandler DatabaseExistsCompleted; /// <remarks/> public event GetDatabasesCompletedEventHandler GetDatabasesCompleted; /// <remarks/> public event GetDatabaseCompletedEventHandler GetDatabaseCompleted; /// <remarks/> public event CreateDatabaseCompletedEventHandler CreateDatabaseCompleted; /// <remarks/> public event UpdateDatabaseCompletedEventHandler UpdateDatabaseCompleted; /// <remarks/> public event DeleteDatabaseCompletedEventHandler DeleteDatabaseCompleted; /// <remarks/> public event TruncateDatabaseCompletedEventHandler 
TruncateDatabaseCompleted; /// <remarks/> public event GetTempFileBinaryChunkCompletedEventHandler GetTempFileBinaryChunkCompleted; /// <remarks/> public event AppendTempFileBinaryChunkCompletedEventHandler AppendTempFileBinaryChunkCompleted; /// <remarks/> public event BackupDatabaseCompletedEventHandler BackupDatabaseCompleted; /// <remarks/> public event RestoreDatabaseCompletedEventHandler RestoreDatabaseCompleted; /// <remarks/> public event UserExistsCompletedEventHandler UserExistsCompleted; /// <remarks/> public event GetUsersCompletedEventHandler GetUsersCompleted; /// <remarks/> public event GetUserCompletedEventHandler GetUserCompleted; /// <remarks/> public event CreateUserCompletedEventHandler CreateUserCompleted; /// <remarks/> public event UpdateUserCompletedEventHandler UpdateUserCompleted; /// <remarks/> public event DeleteUserCompletedEventHandler DeleteUserCompleted; /// <remarks/> public event ChangeUserPasswordCompletedEventHandler ChangeUserPasswordCompleted; /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/CheckConnectivity", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public bool CheckConnectivity(string databaseName, string username, string password) { object[] results = this.Invoke("CheckConnectivity", new object[] { databaseName, username, password}); return ((bool)(results[0])); } /// <remarks/> public System.IAsyncResult BeginCheckConnectivity(string databaseName, string username, string password, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("CheckConnectivity", new object[] { databaseName, username, password}, callback, asyncState); } /// 
<remarks/> public bool EndCheckConnectivity(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((bool)(results[0])); } /// <remarks/> public void CheckConnectivityAsync(string databaseName, string username, string password) { this.CheckConnectivityAsync(databaseName, username, password, null); } /// <remarks/> public void CheckConnectivityAsync(string databaseName, string username, string password, object userState) { if ((this.CheckConnectivityOperationCompleted == null)) { this.CheckConnectivityOperationCompleted = new System.Threading.SendOrPostCallback(this.OnCheckConnectivityOperationCompleted); } this.InvokeAsync("CheckConnectivity", new object[] { databaseName, username, password}, this.CheckConnectivityOperationCompleted, userState); } private void OnCheckConnectivityOperationCompleted(object arg) { if ((this.CheckConnectivityCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.CheckConnectivityCompleted(this, new CheckConnectivityCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/ExecuteSqlQuery", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public System.Data.DataSet ExecuteSqlQuery(string databaseName, string commandText) { object[] results = this.Invoke("ExecuteSqlQuery", new object[] { databaseName, commandText}); return ((System.Data.DataSet)(results[0])); } /// <remarks/> public System.IAsyncResult BeginExecuteSqlQuery(string databaseName, string 
commandText, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("ExecuteSqlQuery", new object[] { databaseName, commandText}, callback, asyncState); } /// <remarks/> public System.Data.DataSet EndExecuteSqlQuery(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((System.Data.DataSet)(results[0])); } /// <remarks/> public void ExecuteSqlQueryAsync(string databaseName, string commandText) { this.ExecuteSqlQueryAsync(databaseName, commandText, null); } /// <remarks/> public void ExecuteSqlQueryAsync(string databaseName, string commandText, object userState) { if ((this.ExecuteSqlQueryOperationCompleted == null)) { this.ExecuteSqlQueryOperationCompleted = new System.Threading.SendOrPostCallback(this.OnExecuteSqlQueryOperationCompleted); } this.InvokeAsync("ExecuteSqlQuery", new object[] { databaseName, commandText}, this.ExecuteSqlQueryOperationCompleted, userState); } private void OnExecuteSqlQueryOperationCompleted(object arg) { if ((this.ExecuteSqlQueryCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.ExecuteSqlQueryCompleted(this, new ExecuteSqlQueryCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/ExecuteSqlNonQuery", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void ExecuteSqlNonQuery(string databaseName, string commandText) { this.Invoke("ExecuteSqlNonQuery", new object[] { databaseName, commandText}); } /// <remarks/> 
public System.IAsyncResult BeginExecuteSqlNonQuery(string databaseName, string commandText, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("ExecuteSqlNonQuery", new object[] { databaseName, commandText}, callback, asyncState); } /// <remarks/> public void EndExecuteSqlNonQuery(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void ExecuteSqlNonQueryAsync(string databaseName, string commandText) { this.ExecuteSqlNonQueryAsync(databaseName, commandText, null); } /// <remarks/> public void ExecuteSqlNonQueryAsync(string databaseName, string commandText, object userState) { if ((this.ExecuteSqlNonQueryOperationCompleted == null)) { this.ExecuteSqlNonQueryOperationCompleted = new System.Threading.SendOrPostCallback(this.OnExecuteSqlNonQueryOperationCompleted); } this.InvokeAsync("ExecuteSqlNonQuery", new object[] { databaseName, commandText}, this.ExecuteSqlNonQueryOperationCompleted, userState); } private void OnExecuteSqlNonQueryOperationCompleted(object arg) { if ((this.ExecuteSqlNonQueryCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.ExecuteSqlNonQueryCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/ExecuteSqlQuerySafe", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public System.Data.DataSet ExecuteSqlQuerySafe(string databaseName, string username, string password, string commandText) { object[] 
results = this.Invoke("ExecuteSqlQuerySafe", new object[] { databaseName, username, password, commandText}); return ((System.Data.DataSet)(results[0])); } /// <remarks/> public System.IAsyncResult BeginExecuteSqlQuerySafe(string databaseName, string username, string password, string commandText, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("ExecuteSqlQuerySafe", new object[] { databaseName, username, password, commandText}, callback, asyncState); } /// <remarks/> public System.Data.DataSet EndExecuteSqlQuerySafe(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((System.Data.DataSet)(results[0])); } /// <remarks/> public void ExecuteSqlQuerySafeAsync(string databaseName, string username, string password, string commandText) { this.ExecuteSqlQuerySafeAsync(databaseName, username, password, commandText, null); } /// <remarks/> public void ExecuteSqlQuerySafeAsync(string databaseName, string username, string password, string commandText, object userState) { if ((this.ExecuteSqlQuerySafeOperationCompleted == null)) { this.ExecuteSqlQuerySafeOperationCompleted = new System.Threading.SendOrPostCallback(this.OnExecuteSqlQuerySafeOperationCompleted); } this.InvokeAsync("ExecuteSqlQuerySafe", new object[] { databaseName, username, password, commandText}, this.ExecuteSqlQuerySafeOperationCompleted, userState); } private void OnExecuteSqlQuerySafeOperationCompleted(object arg) { if ((this.ExecuteSqlQuerySafeCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.ExecuteSqlQuerySafeCompleted(this, new ExecuteSqlQuerySafeCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] 
[System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/ExecuteSqlNonQuerySafe", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void ExecuteSqlNonQuerySafe(string databaseName, string username, string password, string commandText) { this.Invoke("ExecuteSqlNonQuerySafe", new object[] { databaseName, username, password, commandText}); } /// <remarks/> public System.IAsyncResult BeginExecuteSqlNonQuerySafe(string databaseName, string username, string password, string commandText, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("ExecuteSqlNonQuerySafe", new object[] { databaseName, username, password, commandText}, callback, asyncState); } /// <remarks/> public void EndExecuteSqlNonQuerySafe(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void ExecuteSqlNonQuerySafeAsync(string databaseName, string username, string password, string commandText) { this.ExecuteSqlNonQuerySafeAsync(databaseName, username, password, commandText, null); } /// <remarks/> public void ExecuteSqlNonQuerySafeAsync(string databaseName, string username, string password, string commandText, object userState) { if ((this.ExecuteSqlNonQuerySafeOperationCompleted == null)) { this.ExecuteSqlNonQuerySafeOperationCompleted = new System.Threading.SendOrPostCallback(this.OnExecuteSqlNonQuerySafeOperationCompleted); } this.InvokeAsync("ExecuteSqlNonQuerySafe", new object[] { databaseName, username, password, commandText}, this.ExecuteSqlNonQuerySafeOperationCompleted, userState); } private void OnExecuteSqlNonQuerySafeOperationCompleted(object arg) { if ((this.ExecuteSqlNonQuerySafeCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = 
((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.ExecuteSqlNonQuerySafeCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/DatabaseExists", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public bool DatabaseExists(string databaseName) { object[] results = this.Invoke("DatabaseExists", new object[] { databaseName}); return ((bool)(results[0])); } /// <remarks/> public System.IAsyncResult BeginDatabaseExists(string databaseName, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("DatabaseExists", new object[] { databaseName}, callback, asyncState); } /// <remarks/> public bool EndDatabaseExists(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((bool)(results[0])); } /// <remarks/> public void DatabaseExistsAsync(string databaseName) { this.DatabaseExistsAsync(databaseName, null); } /// <remarks/> public void DatabaseExistsAsync(string databaseName, object userState) { if ((this.DatabaseExistsOperationCompleted == null)) { this.DatabaseExistsOperationCompleted = new System.Threading.SendOrPostCallback(this.OnDatabaseExistsOperationCompleted); } this.InvokeAsync("DatabaseExists", new object[] { databaseName}, this.DatabaseExistsOperationCompleted, userState); } private void OnDatabaseExistsOperationCompleted(object arg) { if ((this.DatabaseExistsCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); 
this.DatabaseExistsCompleted(this, new DatabaseExistsCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/GetDatabases", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public string[] GetDatabases() { object[] results = this.Invoke("GetDatabases", new object[0]); return ((string[])(results[0])); } /// <remarks/> public System.IAsyncResult BeginGetDatabases(System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("GetDatabases", new object[0], callback, asyncState); } /// <remarks/> public string[] EndGetDatabases(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((string[])(results[0])); } /// <remarks/> public void GetDatabasesAsync() { this.GetDatabasesAsync(null); } /// <remarks/> public void GetDatabasesAsync(object userState) { if ((this.GetDatabasesOperationCompleted == null)) { this.GetDatabasesOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetDatabasesOperationCompleted); } this.InvokeAsync("GetDatabases", new object[0], this.GetDatabasesOperationCompleted, userState); } private void OnGetDatabasesOperationCompleted(object arg) { if ((this.GetDatabasesCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.GetDatabasesCompleted(this, new GetDatabasesCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> 
[System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/GetDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public SqlDatabase GetDatabase(string databaseName) { object[] results = this.Invoke("GetDatabase", new object[] { databaseName}); return ((SqlDatabase)(results[0])); } /// <remarks/> public System.IAsyncResult BeginGetDatabase(string databaseName, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("GetDatabase", new object[] { databaseName}, callback, asyncState); } /// <remarks/> public SqlDatabase EndGetDatabase(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((SqlDatabase)(results[0])); } /// <remarks/> public void GetDatabaseAsync(string databaseName) { this.GetDatabaseAsync(databaseName, null); } /// <remarks/> public void GetDatabaseAsync(string databaseName, object userState) { if ((this.GetDatabaseOperationCompleted == null)) { this.GetDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetDatabaseOperationCompleted); } this.InvokeAsync("GetDatabase", new object[] { databaseName}, this.GetDatabaseOperationCompleted, userState); } private void OnGetDatabaseOperationCompleted(object arg) { if ((this.GetDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.GetDatabaseCompleted(this, new GetDatabaseCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] 
[System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/CreateDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void CreateDatabase(SqlDatabase database) { this.Invoke("CreateDatabase", new object[] { database}); } /// <remarks/> public System.IAsyncResult BeginCreateDatabase(SqlDatabase database, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("CreateDatabase", new object[] { database}, callback, asyncState); } /// <remarks/> public void EndCreateDatabase(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void CreateDatabaseAsync(SqlDatabase database) { this.CreateDatabaseAsync(database, null); } /// <remarks/> public void CreateDatabaseAsync(SqlDatabase database, object userState) { if ((this.CreateDatabaseOperationCompleted == null)) { this.CreateDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnCreateDatabaseOperationCompleted); } this.InvokeAsync("CreateDatabase", new object[] { database}, this.CreateDatabaseOperationCompleted, userState); } private void OnCreateDatabaseOperationCompleted(object arg) { if ((this.CreateDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.CreateDatabaseCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/UpdateDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", 
ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void UpdateDatabase(SqlDatabase database) { this.Invoke("UpdateDatabase", new object[] { database}); } /// <remarks/> public System.IAsyncResult BeginUpdateDatabase(SqlDatabase database, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("UpdateDatabase", new object[] { database}, callback, asyncState); } /// <remarks/> public void EndUpdateDatabase(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void UpdateDatabaseAsync(SqlDatabase database) { this.UpdateDatabaseAsync(database, null); } /// <remarks/> public void UpdateDatabaseAsync(SqlDatabase database, object userState) { if ((this.UpdateDatabaseOperationCompleted == null)) { this.UpdateDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnUpdateDatabaseOperationCompleted); } this.InvokeAsync("UpdateDatabase", new object[] { database}, this.UpdateDatabaseOperationCompleted, userState); } private void OnUpdateDatabaseOperationCompleted(object arg) { if ((this.UpdateDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.UpdateDatabaseCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/DeleteDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = 
System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void DeleteDatabase(string databaseName) { this.Invoke("DeleteDatabase", new object[] { databaseName}); } /// <remarks/> public System.IAsyncResult BeginDeleteDatabase(string databaseName, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("DeleteDatabase", new object[] { databaseName}, callback, asyncState); } /// <remarks/> public void EndDeleteDatabase(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void DeleteDatabaseAsync(string databaseName) { this.DeleteDatabaseAsync(databaseName, null); } /// <remarks/> public void DeleteDatabaseAsync(string databaseName, object userState) { if ((this.DeleteDatabaseOperationCompleted == null)) { this.DeleteDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnDeleteDatabaseOperationCompleted); } this.InvokeAsync("DeleteDatabase", new object[] { databaseName}, this.DeleteDatabaseOperationCompleted, userState); } private void OnDeleteDatabaseOperationCompleted(object arg) { if ((this.DeleteDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.DeleteDatabaseCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/TruncateDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void TruncateDatabase(string databaseName) { this.Invoke("TruncateDatabase", new object[] { 
databaseName}); } /// <remarks/> public System.IAsyncResult BeginTruncateDatabase(string databaseName, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("TruncateDatabase", new object[] { databaseName}, callback, asyncState); } /// <remarks/> public void EndTruncateDatabase(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void TruncateDatabaseAsync(string databaseName) { this.TruncateDatabaseAsync(databaseName, null); } /// <remarks/> public void TruncateDatabaseAsync(string databaseName, object userState) { if ((this.TruncateDatabaseOperationCompleted == null)) { this.TruncateDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnTruncateDatabaseOperationCompleted); } this.InvokeAsync("TruncateDatabase", new object[] { databaseName}, this.TruncateDatabaseOperationCompleted, userState); } private void OnTruncateDatabaseOperationCompleted(object arg) { if ((this.TruncateDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.TruncateDatabaseCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/GetTempFileBinaryChunk", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] [return: System.Xml.Serialization.XmlElementAttribute(DataType = "base64Binary")] public byte[] GetTempFileBinaryChunk(string path, int offset, int length) { object[] results = this.Invoke("GetTempFileBinaryChunk", new object[] { 
path, offset, length}); return ((byte[])(results[0])); } /// <remarks/> public System.IAsyncResult BeginGetTempFileBinaryChunk(string path, int offset, int length, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("GetTempFileBinaryChunk", new object[] { path, offset, length}, callback, asyncState); } /// <remarks/> public byte[] EndGetTempFileBinaryChunk(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((byte[])(results[0])); } /// <remarks/> public void GetTempFileBinaryChunkAsync(string path, int offset, int length) { this.GetTempFileBinaryChunkAsync(path, offset, length, null); } /// <remarks/> public void GetTempFileBinaryChunkAsync(string path, int offset, int length, object userState) { if ((this.GetTempFileBinaryChunkOperationCompleted == null)) { this.GetTempFileBinaryChunkOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetTempFileBinaryChunkOperationCompleted); } this.InvokeAsync("GetTempFileBinaryChunk", new object[] { path, offset, length}, this.GetTempFileBinaryChunkOperationCompleted, userState); } private void OnGetTempFileBinaryChunkOperationCompleted(object arg) { if ((this.GetTempFileBinaryChunkCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.GetTempFileBinaryChunkCompleted(this, new GetTempFileBinaryChunkCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/AppendTempFileBinaryChunk", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = 
System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public string AppendTempFileBinaryChunk(string fileName, string path, [System.Xml.Serialization.XmlElementAttribute(DataType = "base64Binary")] byte[] chunk) { object[] results = this.Invoke("AppendTempFileBinaryChunk", new object[] { fileName, path, chunk}); return ((string)(results[0])); } /// <remarks/> public System.IAsyncResult BeginAppendTempFileBinaryChunk(string fileName, string path, byte[] chunk, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("AppendTempFileBinaryChunk", new object[] { fileName, path, chunk}, callback, asyncState); } /// <remarks/> public string EndAppendTempFileBinaryChunk(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((string)(results[0])); } /// <remarks/> public void AppendTempFileBinaryChunkAsync(string fileName, string path, byte[] chunk) { this.AppendTempFileBinaryChunkAsync(fileName, path, chunk, null); } /// <remarks/> public void AppendTempFileBinaryChunkAsync(string fileName, string path, byte[] chunk, object userState) { if ((this.AppendTempFileBinaryChunkOperationCompleted == null)) { this.AppendTempFileBinaryChunkOperationCompleted = new System.Threading.SendOrPostCallback(this.OnAppendTempFileBinaryChunkOperationCompleted); } this.InvokeAsync("AppendTempFileBinaryChunk", new object[] { fileName, path, chunk}, this.AppendTempFileBinaryChunkOperationCompleted, userState); } private void OnAppendTempFileBinaryChunkOperationCompleted(object arg) { if ((this.AppendTempFileBinaryChunkCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.AppendTempFileBinaryChunkCompleted(this, new AppendTempFileBinaryChunkCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> 
// NOTE(review): wsdl.exe-generated SOAP proxy members for BackupDatabase and RestoreDatabase
// (sync/Begin/End/Async wrappers). Do not hand-edit; regenerate from the service WSDL instead.
[System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/BackupDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public string BackupDatabase(string databaseName, string backupName, bool zipBackup) { object[] results = this.Invoke("BackupDatabase", new object[] { databaseName, backupName, zipBackup}); return ((string)(results[0])); } /// <remarks/> public System.IAsyncResult BeginBackupDatabase(string databaseName, string backupName, bool zipBackup, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("BackupDatabase", new object[] { databaseName, backupName, zipBackup}, callback, asyncState); } /// <remarks/> public string EndBackupDatabase(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((string)(results[0])); } /// <remarks/> public void BackupDatabaseAsync(string databaseName, string backupName, bool zipBackup) { this.BackupDatabaseAsync(databaseName, backupName, zipBackup, null); } /// <remarks/> public void BackupDatabaseAsync(string databaseName, string backupName, bool zipBackup, object userState) { if ((this.BackupDatabaseOperationCompleted == null)) { this.BackupDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnBackupDatabaseOperationCompleted); } this.InvokeAsync("BackupDatabase", new object[] { databaseName, backupName, zipBackup}, this.BackupDatabaseOperationCompleted, userState); } private void OnBackupDatabaseOperationCompleted(object arg) { if ((this.BackupDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); 
this.BackupDatabaseCompleted(this, new BackupDatabaseCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/RestoreDatabase", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void RestoreDatabase(string databaseName, string[] fileNames) { this.Invoke("RestoreDatabase", new object[] { databaseName, fileNames}); } /// <remarks/> public System.IAsyncResult BeginRestoreDatabase(string databaseName, string[] fileNames, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("RestoreDatabase", new object[] { databaseName, fileNames}, callback, asyncState); } /// <remarks/> public void EndRestoreDatabase(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void RestoreDatabaseAsync(string databaseName, string[] fileNames) { this.RestoreDatabaseAsync(databaseName, fileNames, null); } /// <remarks/> public void RestoreDatabaseAsync(string databaseName, string[] fileNames, object userState) { if ((this.RestoreDatabaseOperationCompleted == null)) { this.RestoreDatabaseOperationCompleted = new System.Threading.SendOrPostCallback(this.OnRestoreDatabaseOperationCompleted); } this.InvokeAsync("RestoreDatabase", new object[] { databaseName, fileNames}, this.RestoreDatabaseOperationCompleted, userState); } private void OnRestoreDatabaseOperationCompleted(object arg) { if ((this.RestoreDatabaseCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); 
// NOTE(review): wsdl.exe-generated SOAP proxy members — completion callback tail for
// RestoreDatabase, then the UserExists and GetUsers wrapper families. Do not hand-edit;
// regenerate from the service WSDL instead.
this.RestoreDatabaseCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/UserExists", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public bool UserExists(string userName) { object[] results = this.Invoke("UserExists", new object[] { userName}); return ((bool)(results[0])); } /// <remarks/> public System.IAsyncResult BeginUserExists(string userName, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("UserExists", new object[] { userName}, callback, asyncState); } /// <remarks/> public bool EndUserExists(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((bool)(results[0])); } /// <remarks/> public void UserExistsAsync(string userName) { this.UserExistsAsync(userName, null); } /// <remarks/> public void UserExistsAsync(string userName, object userState) { if ((this.UserExistsOperationCompleted == null)) { this.UserExistsOperationCompleted = new System.Threading.SendOrPostCallback(this.OnUserExistsOperationCompleted); } this.InvokeAsync("UserExists", new object[] { userName}, this.UserExistsOperationCompleted, userState); } private void OnUserExistsOperationCompleted(object arg) { if ((this.UserExistsCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.UserExistsCompleted(this, new UserExistsCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> 
[System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/GetUsers", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public string[] GetUsers() { object[] results = this.Invoke("GetUsers", new object[0]); return ((string[])(results[0])); } /// <remarks/> public System.IAsyncResult BeginGetUsers(System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("GetUsers", new object[0], callback, asyncState); } /// <remarks/> public string[] EndGetUsers(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((string[])(results[0])); } /// <remarks/> public void GetUsersAsync() { this.GetUsersAsync(null); } /// <remarks/> public void GetUsersAsync(object userState) { if ((this.GetUsersOperationCompleted == null)) { this.GetUsersOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetUsersOperationCompleted); } this.InvokeAsync("GetUsers", new object[0], this.GetUsersOperationCompleted, userState); } private void OnGetUsersOperationCompleted(object arg) { if ((this.GetUsersCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.GetUsersCompleted(this, new GetUsersCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/GetUser", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = 
// NOTE(review): wsdl.exe-generated SOAP proxy members for GetUser and CreateUser
// (sync/Begin/End/Async wrappers). Do not hand-edit; regenerate from the service WSDL instead.
"http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public SqlUser GetUser(string username, string[] databases) { object[] results = this.Invoke("GetUser", new object[] { username, databases}); return ((SqlUser)(results[0])); } /// <remarks/> public System.IAsyncResult BeginGetUser(string username, string[] databases, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("GetUser", new object[] { username, databases}, callback, asyncState); } /// <remarks/> public SqlUser EndGetUser(System.IAsyncResult asyncResult) { object[] results = this.EndInvoke(asyncResult); return ((SqlUser)(results[0])); } /// <remarks/> public void GetUserAsync(string username, string[] databases) { this.GetUserAsync(username, databases, null); } /// <remarks/> public void GetUserAsync(string username, string[] databases, object userState) { if ((this.GetUserOperationCompleted == null)) { this.GetUserOperationCompleted = new System.Threading.SendOrPostCallback(this.OnGetUserOperationCompleted); } this.InvokeAsync("GetUser", new object[] { username, databases}, this.GetUserOperationCompleted, userState); } private void OnGetUserOperationCompleted(object arg) { if ((this.GetUserCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.GetUserCompleted(this, new GetUserCompletedEventArgs(invokeArgs.Results, invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/CreateUser", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = 
System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void CreateUser(SqlUser user, string password) { this.Invoke("CreateUser", new object[] { user, password}); } /// <remarks/> public System.IAsyncResult BeginCreateUser(SqlUser user, string password, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("CreateUser", new object[] { user, password}, callback, asyncState); } /// <remarks/> public void EndCreateUser(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void CreateUserAsync(SqlUser user, string password) { this.CreateUserAsync(user, password, null); } /// <remarks/> public void CreateUserAsync(SqlUser user, string password, object userState) { if ((this.CreateUserOperationCompleted == null)) { this.CreateUserOperationCompleted = new System.Threading.SendOrPostCallback(this.OnCreateUserOperationCompleted); } this.InvokeAsync("CreateUser", new object[] { user, password}, this.CreateUserOperationCompleted, userState); } private void OnCreateUserOperationCompleted(object arg) { if ((this.CreateUserCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.CreateUserCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/UpdateUser", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void UpdateUser(SqlUser user, string[] databases) { 
// NOTE(review): wsdl.exe-generated SOAP proxy members for UpdateUser, DeleteUser and the
// start of ChangeUserPassword. Do not hand-edit; regenerate from the service WSDL instead.
this.Invoke("UpdateUser", new object[] { user, databases}); } /// <remarks/> public System.IAsyncResult BeginUpdateUser(SqlUser user, string[] databases, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("UpdateUser", new object[] { user, databases}, callback, asyncState); } /// <remarks/> public void EndUpdateUser(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void UpdateUserAsync(SqlUser user, string[] databases) { this.UpdateUserAsync(user, databases, null); } /// <remarks/> public void UpdateUserAsync(SqlUser user, string[] databases, object userState) { if ((this.UpdateUserOperationCompleted == null)) { this.UpdateUserOperationCompleted = new System.Threading.SendOrPostCallback(this.OnUpdateUserOperationCompleted); } this.InvokeAsync("UpdateUser", new object[] { user, databases}, this.UpdateUserOperationCompleted, userState); } private void OnUpdateUserOperationCompleted(object arg) { if ((this.UpdateUserCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.UpdateUserCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/DeleteUser", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void DeleteUser(string username, string[] databases) { this.Invoke("DeleteUser", new object[] { username, databases}); } /// <remarks/> public System.IAsyncResult BeginDeleteUser(string username, string[] databases, 
System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("DeleteUser", new object[] { username, databases}, callback, asyncState); } /// <remarks/> public void EndDeleteUser(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void DeleteUserAsync(string username, string[] databases) { this.DeleteUserAsync(username, databases, null); } /// <remarks/> public void DeleteUserAsync(string username, string[] databases, object userState) { if ((this.DeleteUserOperationCompleted == null)) { this.DeleteUserOperationCompleted = new System.Threading.SendOrPostCallback(this.OnDeleteUserOperationCompleted); } this.InvokeAsync("DeleteUser", new object[] { username, databases}, this.DeleteUserOperationCompleted, userState); } private void OnDeleteUserOperationCompleted(object arg) { if ((this.DeleteUserCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.DeleteUserCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> [System.Web.Services.Protocols.SoapHeaderAttribute("ServiceProviderSettingsSoapHeaderValue")] [System.Web.Services.Protocols.SoapDocumentMethodAttribute("http://smbsaas/websitepanel/server/ChangeUserPassword", RequestNamespace = "http://smbsaas/websitepanel/server/", ResponseNamespace = "http://smbsaas/websitepanel/server/", Use = System.Web.Services.Description.SoapBindingUse.Literal, ParameterStyle = System.Web.Services.Protocols.SoapParameterStyle.Wrapped)] public void ChangeUserPassword(string username, string password) { this.Invoke("ChangeUserPassword", new object[] { username, password}); } /// <remarks/> public System.IAsyncResult BeginChangeUserPassword(string username, string password, System.AsyncCallback callback, object asyncState) { return this.BeginInvoke("ChangeUserPassword", new object[] { 
username, password}, callback, asyncState); } /// <remarks/> public void EndChangeUserPassword(System.IAsyncResult asyncResult) { this.EndInvoke(asyncResult); } /// <remarks/> public void ChangeUserPasswordAsync(string username, string password) { this.ChangeUserPasswordAsync(username, password, null); } /// <remarks/> public void ChangeUserPasswordAsync(string username, string password, object userState) { if ((this.ChangeUserPasswordOperationCompleted == null)) { this.ChangeUserPasswordOperationCompleted = new System.Threading.SendOrPostCallback(this.OnChangeUserPasswordOperationCompleted); } this.InvokeAsync("ChangeUserPassword", new object[] { username, password}, this.ChangeUserPasswordOperationCompleted, userState); } private void OnChangeUserPasswordOperationCompleted(object arg) { if ((this.ChangeUserPasswordCompleted != null)) { System.Web.Services.Protocols.InvokeCompletedEventArgs invokeArgs = ((System.Web.Services.Protocols.InvokeCompletedEventArgs)(arg)); this.ChangeUserPasswordCompleted(this, new System.ComponentModel.AsyncCompletedEventArgs(invokeArgs.Error, invokeArgs.Cancelled, invokeArgs.UserState)); } } /// <remarks/> public new void CancelAsync(object userState) { base.CancelAsync(userState); } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void CheckConnectivityCompletedEventHandler(object sender, CheckConnectivityCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class CheckConnectivityCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal CheckConnectivityCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public bool Result { 
// NOTE(review): wsdl.exe-generated completion delegates and EventArgs wrappers
// (CheckConnectivity, ExecuteSqlQuery[Safe], DatabaseExists, GetDatabases). Each Result getter
// rethrows via RaiseExceptionIfNecessary before unwrapping results[0]. Regenerate, don't edit.
get { this.RaiseExceptionIfNecessary(); return ((bool)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void ExecuteSqlQueryCompletedEventHandler(object sender, ExecuteSqlQueryCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class ExecuteSqlQueryCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal ExecuteSqlQueryCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public System.Data.DataSet Result { get { this.RaiseExceptionIfNecessary(); return ((System.Data.DataSet)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void ExecuteSqlNonQueryCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void ExecuteSqlQuerySafeCompletedEventHandler(object sender, ExecuteSqlQuerySafeCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class ExecuteSqlQuerySafeCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal ExecuteSqlQuerySafeCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public System.Data.DataSet Result { get { this.RaiseExceptionIfNecessary(); return 
((System.Data.DataSet)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void ExecuteSqlNonQuerySafeCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void DatabaseExistsCompletedEventHandler(object sender, DatabaseExistsCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class DatabaseExistsCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal DatabaseExistsCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public bool Result { get { this.RaiseExceptionIfNecessary(); return ((bool)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void GetDatabasesCompletedEventHandler(object sender, GetDatabasesCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class GetDatabasesCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal GetDatabasesCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public string[] Result { get { this.RaiseExceptionIfNecessary(); return ((string[])(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", 
// NOTE(review): wsdl.exe-generated completion delegates and EventArgs wrappers
// (GetDatabase, Create/Update/Delete/TruncateDatabase, GetTempFileBinaryChunk,
// AppendTempFileBinaryChunk, BackupDatabase). Regenerate from the WSDL, don't edit.
"2.0.50727.42")] public delegate void GetDatabaseCompletedEventHandler(object sender, GetDatabaseCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class GetDatabaseCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal GetDatabaseCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public SqlDatabase Result { get { this.RaiseExceptionIfNecessary(); return ((SqlDatabase)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void CreateDatabaseCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void UpdateDatabaseCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void DeleteDatabaseCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void TruncateDatabaseCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void GetTempFileBinaryChunkCompletedEventHandler(object sender, GetTempFileBinaryChunkCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] 
[System.ComponentModel.DesignerCategoryAttribute("code")] public partial class GetTempFileBinaryChunkCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal GetTempFileBinaryChunkCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public byte[] Result { get { this.RaiseExceptionIfNecessary(); return ((byte[])(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void AppendTempFileBinaryChunkCompletedEventHandler(object sender, AppendTempFileBinaryChunkCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class AppendTempFileBinaryChunkCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal AppendTempFileBinaryChunkCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public string Result { get { this.RaiseExceptionIfNecessary(); return ((string)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void BackupDatabaseCompletedEventHandler(object sender, BackupDatabaseCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class BackupDatabaseCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal BackupDatabaseCompletedEventArgs(object[] results, System.Exception 
// NOTE(review): wsdl.exe-generated completion delegates and EventArgs wrappers
// (BackupDatabase tail, RestoreDatabase, UserExists, GetUsers, GetUser,
// Create/Update/DeleteUser, ChangeUserPassword) and the namespace close. Regenerate, don't edit.
exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public string Result { get { this.RaiseExceptionIfNecessary(); return ((string)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void RestoreDatabaseCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void UserExistsCompletedEventHandler(object sender, UserExistsCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class UserExistsCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal UserExistsCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public bool Result { get { this.RaiseExceptionIfNecessary(); return ((bool)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void GetUsersCompletedEventHandler(object sender, GetUsersCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class GetUsersCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal GetUsersCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public string[] Result { get { 
this.RaiseExceptionIfNecessary(); return ((string[])(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void GetUserCompletedEventHandler(object sender, GetUserCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] [System.Diagnostics.DebuggerStepThroughAttribute()] [System.ComponentModel.DesignerCategoryAttribute("code")] public partial class GetUserCompletedEventArgs : System.ComponentModel.AsyncCompletedEventArgs { private object[] results; internal GetUserCompletedEventArgs(object[] results, System.Exception exception, bool cancelled, object userState) : base(exception, cancelled, userState) { this.results = results; } /// <remarks/> public SqlUser Result { get { this.RaiseExceptionIfNecessary(); return ((SqlUser)(this.results[0])); } } } /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void CreateUserCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void UpdateUserCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void DeleteUserCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); /// <remarks/> [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.42")] public delegate void ChangeUserPasswordCompletedEventHandler(object sender, System.ComponentModel.AsyncCompletedEventArgs e); }
namespace StockSharp.Algo.Risk
{
	using System;
	using System.Collections.Generic;
	using System.ComponentModel;
	using System.Runtime.CompilerServices;

	using Ecng.Common;
	using Ecng.Serialization;

	using StockSharp.Messages;
	using StockSharp.Localization;
	using StockSharp.Logging;

	/// <summary>
	/// Base risk-rule.
	/// </summary>
	/// <remarks>
	/// Derived rules implement <see cref="ProcessMessage"/> to inspect the message
	/// stream and return <c>true</c> when the rule is triggered; <see cref="Action"/>
	/// tells the consumer what to do on activation.
	/// </remarks>
	public abstract class RiskRule : BaseLogReceiver, IRiskRule, INotifyPropertyChanged
	{
		/// <summary>
		/// Initialize <see cref="RiskRule"/>.
		/// </summary>
		protected RiskRule()
		{
			// NOTE(review): this invokes the abstract GetTitle() from the base
			// constructor. It works here because derived classes initialize the
			// fields GetTitle() reads via field initializers (which run before
			// this base ctor), but it is fragile — confirm before adding derived
			// state that is assigned inside a derived constructor instead.
			UpdateTitle();
		}

		/// <summary>
		/// Get title.
		/// </summary>
		protected abstract string GetTitle();

		/// <summary>
		/// Update title.
		/// </summary>
		// Recomputes the cached Title from the rule's current parameters and
		// raises PropertyChanged via the Title setter.
		protected void UpdateTitle() => Title = GetTitle();

		private string _title;

		/// <summary>
		/// Header.
		/// </summary>
		[Browsable(false)]
		public string Title
		{
			get => _title;
			private set
			{
				_title = value;
				NotifyChanged();
			}
		}

		private RiskActions _action;

		/// <inheritdoc />
		[DisplayNameLoc(LocalizedStrings.Str722Key)]
		[DescriptionLoc(LocalizedStrings.Str859Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public RiskActions Action
		{
			get => _action;
			set
			{
				_action = value;
				NotifyChanged();
			}
		}

		/// <inheritdoc />
		public virtual void Reset()
		{
		}

		/// <inheritdoc />
		public abstract bool ProcessMessage(Message message);

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			// NOTE(review): Save() persists Action as a string
			// (Action.To<string>()) while Load() reads it back typed as
			// RiskActions; this relies on Ecng's GetValue conversion —
			// presumably intentional, verify against SettingsStorage semantics.
			Action = storage.GetValue<RiskActions>(nameof(Action));

			base.Load(storage);
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			storage.SetValue(nameof(Action), Action.To<string>());

			base.Save(storage);
		}

		private PropertyChangedEventHandler _propertyChanged;

		// Explicit interface implementation keeps the event off the public
		// surface of derived rules while still supporting data binding.
		event PropertyChangedEventHandler INotifyPropertyChanged.PropertyChanged
		{
			add => _propertyChanged += value;
			remove => _propertyChanged -= value;
		}

		private void NotifyChanged([CallerMemberName]string propertyName = null)
		{
			_propertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
		}
	}

	/// <summary>
	/// Risk-rule, tracking profit-loss.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.PnLKey)]
	[DescriptionLoc(LocalizedStrings.Str860Key)]
	public class RiskPnLRule : RiskRule
	{
		// First observed money-position value; the PnL threshold is applied
		// relative to this baseline (unless PnL is a Limit unit).
		private decimal? _initValue;

		/// <inheritdoc />
		public override void Reset()
		{
			base.Reset();

			_initValue = null;
		}

		private Unit _pnL = new();

		/// <summary>
		/// Profit-loss.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.PnLKey)]
		[DescriptionLoc(LocalizedStrings.Str861Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public Unit PnL
		{
			get => _pnL;
			set
			{
				_pnL = value ?? throw new ArgumentNullException(nameof(value));
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _pnL?.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.PositionChange)
				return false;

			var pfMsg = (PositionChangeMessage)message;

			// Only the money (portfolio) position carries PnL.
			if (!pfMsg.IsMoney())
				return false;

			var currValue = pfMsg.TryGetDecimal(PositionChangeTypes.CurrentValue);

			if (currValue == null)
				return false;

			// First value seen becomes the baseline; no trigger yet.
			if (_initValue == null)
			{
				_initValue = currValue.Value;
				return false;
			}

			// Limit units are absolute thresholds; other unit types are
			// offsets from the baseline (_initValue + PnL).
			if (PnL.Type == UnitTypes.Limit)
			{
				if (PnL.Value > 0)
					return PnL.Value <= currValue.Value;
				else
					return PnL.Value >= currValue.Value;
			}

			if (PnL.Value > 0)
				return (_initValue + PnL) <= currValue.Value;
			else
				return (_initValue + PnL) >= currValue.Value;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(PnL), PnL);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			PnL = storage.GetValue<Unit>(nameof(PnL));
		}
	}

	/// <summary>
	/// Risk-rule, tracking position size.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str862Key)]
	[DescriptionLoc(LocalizedStrings.Str863Key)]
	public class RiskPositionSizeRule : RiskRule
	{
		private decimal _position;

		/// <summary>
		/// Position size.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str862Key)]
		[DescriptionLoc(LocalizedStrings.Str864Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Position
		{
			get => _position;
			set
			{
				_position = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _position.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.PositionChange)
				return false;

			var posMsg = (PositionChangeMessage)message;
			var currValue = posMsg.TryGetDecimal(PositionChangeTypes.CurrentValue);

			if (currValue == null)
				return false;

			// The sign of the configured Position selects the direction:
			// positive => trigger on long positions at or above the limit,
			// negative => trigger on short positions at or below the limit.
			if (Position > 0)
				return currValue >= Position;
			else
				return currValue <= Position;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Position), Position);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Position = storage.GetValue<decimal>(nameof(Position));
		}
	}

	/// <summary>
	/// Risk-rule, tracking position lifetime.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str865Key)]
	[DescriptionLoc(LocalizedStrings.Str866Key)]
	public class RiskPositionTimeRule : RiskRule
	{
		// Open time of each non-zero position, keyed by (security, portfolio).
		private readonly Dictionary<Tuple<SecurityId, string>, DateTimeOffset> _posOpenTime = new();
		private TimeSpan _time;

		/// <summary>
		/// Position lifetime.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.TimeKey)]
		[DescriptionLoc(LocalizedStrings.Str867Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public TimeSpan Time
		{
			get => _time;
			set
			{
				if (value < TimeSpan.Zero)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_time = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _time.To<string>();

		/// <inheritdoc />
		public override void Reset()
		{
			base.Reset();

			_posOpenTime.Clear();
		}

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			switch (message.Type)
			{
				case MessageTypes.PositionChange:
				{
					var posMsg = (PositionChangeMessage)message;
					var currValue = posMsg.TryGetDecimal(PositionChangeTypes.CurrentValue);

					if (currValue == null)
						return false;

					var key = Tuple.Create(posMsg.SecurityId, posMsg.PortfolioName);

					// Position fully closed — stop tracking it.
					if (currValue == 0)
					{
						_posOpenTime.Remove(key);
						return false;
					}

					// First non-zero value opens the tracking window.
					if (!_posOpenTime.TryGetValue(key, out var openTime))
					{
						_posOpenTime.Add(key, posMsg.LocalTime);
						return false;
					}

					var diff = posMsg.LocalTime - openTime;

					if (diff < Time)
						return false;

					// Lifetime exceeded: trigger once and forget the position.
					_posOpenTime.Remove(key);
					return true;
				}

				case MessageTypes.Time:
				{
					// Time messages sweep all tracked positions, so a rule can
					// fire even without further position updates.
					List<Tuple<SecurityId, string>> removingPos = null;

					foreach (var pair in _posOpenTime)
					{
						var diff = message.LocalTime - pair.Value;

						if (diff < Time)
							continue;

						// Deferred removal: cannot mutate the dictionary
						// while enumerating it.
						if (removingPos == null)
							removingPos = new List<Tuple<SecurityId, string>>();

						removingPos.Add(pair.Key);
					}

					removingPos?.ForEach(t => _posOpenTime.Remove(t));

					return removingPos != null;
				}
			}

			return false;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Time), Time);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Time = storage.GetValue<TimeSpan>(nameof(Time));
		}
	}

	/// <summary>
	/// Risk-rule, tracking commission size.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str159Key)]
	[DescriptionLoc(LocalizedStrings.Str868Key)]
	public class RiskCommissionRule : RiskRule
	{
		private decimal _commission;

		/// <summary>
		/// Commission size.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str159Key)]
		[DescriptionLoc(LocalizedStrings.Str869Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Commission
		{
			get => _commission;
			set
			{
				_commission = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _commission.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.PositionChange)
				return false;

			var pfMsg = (PositionChangeMessage)message;

			// Commission is reported on the money (portfolio) position.
			if (!pfMsg.IsMoney())
				return false;

			var currValue = pfMsg.TryGetDecimal(PositionChangeTypes.Commission);

			if (currValue == null)
				return false;

			return currValue >= Commission;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Commission), Commission);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Commission = storage.GetValue<decimal>(nameof(Commission));
		}
	}

	/// <summary>
	/// Risk-rule, tracking slippage size.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str163Key)]
	[DescriptionLoc(LocalizedStrings.Str870Key)]
	public class RiskSlippageRule : RiskRule
	{
		private decimal _slippage;

		/// <summary>
		/// Slippage size.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str163Key)]
		[DescriptionLoc(LocalizedStrings.Str871Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Slippage
		{
			get => _slippage;
			set
			{
				_slippage = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _slippage.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.Execution)
				return false;

			var execMsg = (ExecutionMessage)message;
			var currValue = execMsg.Slippage;

			if (currValue == null)
				return false;

			// Sign selects the direction of the check, mirroring the other
			// threshold rules. NOTE(review): comparisons here are strict
			// (> / <) while most sibling rules use >= / <= — presumably
			// intentional, verify against the rule's documented contract.
			if (Slippage > 0)
				return currValue > Slippage;
			else
				return currValue < Slippage;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Slippage), Slippage);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Slippage = storage.GetValue<decimal>(nameof(Slippage));
		}
	}

	/// <summary>
	/// Risk-rule, tracking order price.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str872Key)]
	[DescriptionLoc(LocalizedStrings.Str873Key)]
	public class RiskOrderPriceRule : RiskRule
	{
		private decimal _price;

		/// <summary>
		/// Order price.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.PriceKey)]
		[DescriptionLoc(LocalizedStrings.OrderPriceKey)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Price
		{
			get => _price;
			set
			{
				_price = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _price.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			switch (message.Type)
			{
				case MessageTypes.OrderRegister:
				{
					var orderReg = (OrderRegisterMessage)message;
					return orderReg.Price >= Price;
				}

				case MessageTypes.OrderReplace:
				{
					// Price == 0 on a replace means "price unchanged", so it
					// is excluded from the check.
					var orderReplace = (OrderReplaceMessage)message;
					return orderReplace.Price > 0 && orderReplace.Price >= Price;
				}

				default:
					return false;
			}
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Price), Price);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Price = storage.GetValue<decimal>(nameof(Price));
		}
	}

	/// <summary>
	/// Risk-rule, tracking order volume.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str662Key)]
	[DescriptionLoc(LocalizedStrings.Str874Key)]
	public class RiskOrderVolumeRule : RiskRule
	{
		private decimal _volume;

		/// <summary>
		/// Order volume.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.VolumeKey)]
		[DescriptionLoc(LocalizedStrings.Str875Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Volume
		{
			get => _volume;
			set
			{
				if (value < 0)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_volume = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _volume.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			switch (message.Type)
			{
				case MessageTypes.OrderRegister:
				{
					var orderReg = (OrderRegisterMessage)message;
					return orderReg.Volume >= Volume;
				}

				case MessageTypes.OrderReplace:
				{
					// Volume == 0 on a replace means "volume unchanged".
					var orderReplace = (OrderReplaceMessage)message;
					return orderReplace.Volume > 0 && orderReplace.Volume >= Volume;
				}

				default:
					return false;
			}
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Volume), Volume);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Volume = storage.GetValue<decimal>(nameof(Volume));
		}
	}

	/// <summary>
	/// Risk-rule, tracking orders placing frequency.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str876Key)]
	[DescriptionLoc(LocalizedStrings.Str877Key)]
	public class RiskOrderFreqRule : RiskRule
	{
		// End of the current counting window; null means no window is open.
		private DateTimeOffset? _endTime;
		// Orders counted within the current window.
		private int _current;

		/// <inheritdoc />
		protected override string GetTitle() => Count + " -> " + Interval;

		private int _count;

		/// <summary>
		/// Order count.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str878Key)]
		[DescriptionLoc(LocalizedStrings.Str957Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public int Count
		{
			get => _count;
			set
			{
				if (value < 0)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_count = value;
				UpdateTitle();
			}
		}

		private TimeSpan _interval;

		/// <summary>
		/// Interval, during which orders quantity will be monitored.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str175Key)]
		[DescriptionLoc(LocalizedStrings.Str879Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public TimeSpan Interval
		{
			get => _interval;
			set
			{
				if (value < TimeSpan.Zero)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_interval = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		public override void Reset()
		{
			base.Reset();

			_current = 0;
			_endTime = null;
		}

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			switch (message.Type)
			{
				case MessageTypes.OrderRegister:
				case MessageTypes.OrderReplace:
				case MessageTypes.OrderPairReplace:
				{
					var time = message.LocalTime;

					if (time.IsDefault())
					{
						this.AddWarningLog("Time is null. Msg={0}", message);
						return false;
					}

					// Opening a new window never triggers, even when the
					// first message already reaches Count.
					// NOTE(review): with Count <= 1 the rule therefore cannot
					// fire on the window-opening message — confirm this is the
					// intended semantics.
					if (_endTime == null)
					{
						_endTime = time + Interval;
						_current = 1;

						this.AddDebugLog("EndTime={0}", _endTime);
						return false;
					}

					if (time < _endTime)
					{
						_current++;

						this.AddDebugLog("Count={0} Msg={1}", _current, message);

						if (_current >= Count)
						{
							this.AddInfoLog("Count={0} EndTime={1}", _current, _endTime);

							// Triggered: close the window so counting restarts.
							_endTime = null;
							return true;
						}
					}
					else
					{
						// Window expired without triggering: start a new one
						// seeded with this message.
						_endTime = time + Interval;
						_current = 1;

						this.AddDebugLog("EndTime={0}", _endTime);
					}

					return false;
				}
			}

			return false;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Count), Count);
			storage.SetValue(nameof(Interval), Interval);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Count = storage.GetValue<int>(nameof(Count));
			Interval = storage.GetValue<TimeSpan>(nameof(Interval));
		}
	}

	/// <summary>
	/// Risk-rule, tracking trade price.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str672Key)]
	[DescriptionLoc(LocalizedStrings.Str880Key)]
	public class RiskTradePriceRule : RiskRule
	{
		private decimal _price;

		/// <summary>
		/// Trade price.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.PriceKey)]
		[DescriptionLoc(LocalizedStrings.Str147Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Price
		{
			get => _price;
			set
			{
				_price = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _price.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.Execution)
				return false;

			var execMsg = (ExecutionMessage)message;

			// Only executions that actually carry trade data are relevant.
			if (!execMsg.HasTradeInfo())
				return false;

			return execMsg.TradePrice >= Price;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Price), Price);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Price = storage.GetValue<decimal>(nameof(Price));
		}
	}

	/// <summary>
	/// Risk-rule, tracking trade volume.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str664Key)]
	[DescriptionLoc(LocalizedStrings.Str881Key)]
	public class RiskTradeVolumeRule : RiskRule
	{
		private decimal _volume;

		/// <summary>
		/// Trade volume.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.VolumeKey)]
		[DescriptionLoc(LocalizedStrings.Str882Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public decimal Volume
		{
			get => _volume;
			set
			{
				if (value < 0)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_volume = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		protected override string GetTitle() => _volume.To<string>();

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.Execution)
				return false;

			var execMsg = (ExecutionMessage)message;

			if (!execMsg.HasTradeInfo())
				return false;

			return execMsg.TradeVolume >= Volume;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Volume), Volume);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Volume = storage.GetValue<decimal>(nameof(Volume));
		}
	}

	/// <summary>
	/// Risk-rule, tracking orders execution frequency.
	/// </summary>
	[DisplayNameLoc(LocalizedStrings.Str883Key)]
	[DescriptionLoc(LocalizedStrings.Str884Key)]
	public class RiskTradeFreqRule : RiskRule
	{
		// Same sliding-window scheme as RiskOrderFreqRule, but counting
		// trades (executions with trade info) instead of order registrations.
		private DateTimeOffset? _endTime;
		private int _current;

		/// <inheritdoc />
		protected override string GetTitle() => Count + " -> " + Interval;

		private int _count;

		/// <summary>
		/// Number of trades.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str878Key)]
		[DescriptionLoc(LocalizedStrings.Str232Key, true)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public int Count
		{
			get => _count;
			set
			{
				if (value < 0)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_count = value;
				UpdateTitle();
			}
		}

		private TimeSpan _interval;

		/// <summary>
		/// Interval, during which trades quantity will be monitored.
		/// </summary>
		[DisplayNameLoc(LocalizedStrings.Str175Key)]
		[DescriptionLoc(LocalizedStrings.Str885Key)]
		[CategoryLoc(LocalizedStrings.GeneralKey)]
		public TimeSpan Interval
		{
			get => _interval;
			set
			{
				if (value < TimeSpan.Zero)
					throw new ArgumentOutOfRangeException(nameof(value), value, LocalizedStrings.Str1219);

				_interval = value;
				UpdateTitle();
			}
		}

		/// <inheritdoc />
		public override void Reset()
		{
			base.Reset();

			_current = 0;
			_endTime = null;
		}

		/// <inheritdoc />
		public override bool ProcessMessage(Message message)
		{
			if (message.Type != MessageTypes.Execution)
				return false;

			var execMsg = (ExecutionMessage)message;

			if (!execMsg.HasTradeInfo())
				return false;

			var time = message.LocalTime;

			if (time.IsDefault())
			{
				this.AddWarningLog("Time is null. Msg={0}", message);
				return false;
			}

			// Window handling mirrors RiskOrderFreqRule (see notes there,
			// including the Count <= 1 edge on the window-opening message).
			if (_endTime == null)
			{
				_endTime = time + Interval;
				_current = 1;

				this.AddDebugLog("EndTime={0}", _endTime);
				return false;
			}

			if (time < _endTime)
			{
				_current++;

				this.AddDebugLog("Count={0} Msg={1}", _current, message);

				if (_current >= Count)
				{
					this.AddInfoLog("Count={0} EndTime={1}", _current, _endTime);

					_endTime = null;
					return true;
				}
			}
			else
			{
				_endTime = time + Interval;
				_current = 1;

				this.AddDebugLog("EndTime={0}", _endTime);
			}

			return false;
		}

		/// <inheritdoc />
		public override void Save(SettingsStorage storage)
		{
			base.Save(storage);

			storage.SetValue(nameof(Count), Count);
			storage.SetValue(nameof(Interval), Interval);
		}

		/// <inheritdoc />
		public override void Load(SettingsStorage storage)
		{
			base.Load(storage);

			Count = storage.GetValue<int>(nameof(Count));
			Interval = storage.GetValue<TimeSpan>(nameof(Interval));
		}
	}
}
// Copyright (c) Microsoft.  All Rights Reserved.  Licensed under the MIT license.  See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using Analyzer.Utilities;
using Analyzer.Utilities.Extensions;
using Analyzer.Utilities.FlowAnalysis.Analysis.PropertySetAnalysis;
using Analyzer.Utilities.PooledObjects;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.FlowAnalysis.DataFlow;
using Microsoft.CodeAnalysis.FlowAnalysis.DataFlow.PointsToAnalysis;
using Microsoft.CodeAnalysis.FlowAnalysis.DataFlow.ValueContentAnalysis;
using Microsoft.CodeAnalysis.Operations;
using Microsoft.NetCore.Analyzers.Security.Helpers;

namespace Microsoft.NetCore.Analyzers.Security
{
    using static MicrosoftNetCoreAnalyzersResources;

    /// <summary>
    /// Analyzer for CA5380/CA5381: flags X509Store.Add calls on a store that was
    /// constructed for the Root certificate store. Uses PropertySetAnalysis to
    /// track, via dataflow, whether an X509Store instance was created with
    /// StoreName.Root (or the string "root").
    /// </summary>
    [DiagnosticAnalyzer(LanguageNames.CSharp, LanguageNames.VisualBasic)]
    public sealed class DoNotInstallRootCert : DiagnosticAnalyzer
    {
        // CA5380: the store is definitely the Root store on all paths.
        internal static readonly DiagnosticDescriptor DefinitelyInstallRootCertRule =
            SecurityHelpers.CreateDiagnosticDescriptor(
                "CA5380",
                nameof(DefinitelyInstallRootCert),
                nameof(DefinitelyInstallRootCertMessage),
                RuleLevel.Disabled,
                isPortedFxCopRule: false,
                isDataflowRule: true,
                isReportedAtCompilationEnd: true,
                descriptionResourceStringName: nameof(DoNotInstallRootCertDescription));

        // CA5381: the store might be the Root store on some paths.
        internal static readonly DiagnosticDescriptor MaybeInstallRootCertRule =
            SecurityHelpers.CreateDiagnosticDescriptor(
                "CA5381",
                nameof(MaybeInstallRootCert),
                nameof(MaybeInstallRootCertMessage),
                RuleLevel.Disabled,
                isPortedFxCopRule: false,
                isDataflowRule: true,
                isReportedAtCompilationEnd: true,
                descriptionResourceStringName: nameof(DoNotInstallRootCertDescription));

        public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get; } =
            ImmutableArray.Create(
                DefinitelyInstallRootCertRule,
                MaybeInstallRootCertRule);

        // PropertySetAnalysis requires at least one property mapper, but the
        // flagged state comes solely from the constructor argument, so this
        // mapper deliberately tracks nothing (always Unknown).
        private static readonly PropertyMapperCollection PropertyMappers = new(
            new PropertyMapper(
                "...dummy name",    // There isn't *really* a property for what we're tracking; just the constructor argument.
                (PointsToAbstractValue v) => PropertySetAbstractValueKind.Unknown));

        /// <summary>
        /// Maps the tracked abstract value of an X509Store instance at an
        /// Add() call site to a hazardous-usage verdict.
        /// </summary>
        private static HazardousUsageEvaluationResult HazardousUsageCallback(IMethodSymbol methodSymbol, PropertySetAbstractValue propertySetAbstractValue)
        {
            return propertySetAbstractValue[0] switch
            {
                PropertySetAbstractValueKind.Flagged => HazardousUsageEvaluationResult.Flagged,
                PropertySetAbstractValueKind.MaybeFlagged => HazardousUsageEvaluationResult.MaybeFlagged,
                _ => HazardousUsageEvaluationResult.Unflagged,
            };
        }

        public override void Initialize(AnalysisContext context)
        {
            context.EnableConcurrentExecution();

            // Security analyzer - analyze and report diagnostics on generated code.
            context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.Analyze | GeneratedCodeAnalysisFlags.ReportDiagnostics);

            // "Add" on a flagged (Root) X509Store is the hazardous usage.
            HazardousUsageEvaluatorCollection hazardousUsageEvaluators = new HazardousUsageEvaluatorCollection(
                new HazardousUsageEvaluator("Add", HazardousUsageCallback));

            context.RegisterCompilationStartAction(
                (CompilationStartAnalysisContext compilationStartAnalysisContext) =>
                {
                    var wellKnownTypeProvider = WellKnownTypeProvider.GetOrCreate(compilationStartAnalysisContext.Compilation);

                    // Bail out when the compilation cannot even reference the
                    // relevant types — nothing to analyze.
                    if (!wellKnownTypeProvider.TryGetOrCreateTypeByMetadataName(WellKnownTypeNames.SystemSecurityCryptographyX509CertificatesX509Store, out var x509TypeSymbol))
                    {
                        return;
                    }

                    if (!wellKnownTypeProvider.TryGetOrCreateTypeByMetadataName(WellKnownTypeNames.SystemSecurityCryptographyX509CertificatesStoreName, out var storeNameTypeSymbol))
                    {
                        return;
                    }

                    // If X509Store is initialized with Root store, then that instance is flagged.
                    // Handles both ctor overloads: X509Store(StoreName, ...) where
                    // StoreName.Root has underlying value 6, and X509Store(string, ...)
                    // with the case-insensitive literal "root".
                    var constructorMapper = new ConstructorMapper(
                        (IMethodSymbol constructorMethod, IReadOnlyList<ValueContentAbstractValue> argumentValueContentAbstractValues,
                            IReadOnlyList<PointsToAbstractValue> argumentPointsToAbstractValues) =>
                        {
                            var kind = PropertySetAbstractValueKind.Unflagged;

                            if (!constructorMethod.Parameters.IsEmpty)
                            {
                                if (constructorMethod.Parameters[0].Type.Equals(storeNameTypeSymbol))
                                {
                                    // 6 == StoreName.Root.
                                    kind = PropertySetCallbacks.EvaluateLiteralValues(argumentValueContentAbstractValues[0], o => o != null && o.Equals(6));
                                }
                                else if (constructorMethod.Parameters[0].Type.SpecialType == SpecialType.System_String)
                                {
                                    kind = PropertySetCallbacks.EvaluateLiteralValues(
                                        argumentValueContentAbstractValues[0],
                                        s => s != null && string.Equals(s.ToString(), "root", StringComparison.OrdinalIgnoreCase));
                                }
                            }

                            return PropertySetAbstractValue.GetInstance(kind);
                        });

                    // Roots of operation trees that contain a candidate usage;
                    // collected during operation analysis, batch-analyzed at
                    // compilation end. Guarded by lock because operation
                    // actions can run concurrently.
                    var rootOperationsNeedingAnalysis = PooledHashSet<(IOperation, ISymbol)>.GetInstance();

                    compilationStartAnalysisContext.RegisterOperationBlockStartAction(
                        (OperationBlockStartAnalysisContext operationBlockStartAnalysisContext) =>
                        {
                            var owningSymbol = operationBlockStartAnalysisContext.OwningSymbol;

                            // TODO: Handle case when exactly one of the below rules is configured to skip analysis.
                            if (operationBlockStartAnalysisContext.Options.IsConfiguredToSkipAnalysis(DefinitelyInstallRootCertRule,
                                    owningSymbol, operationBlockStartAnalysisContext.Compilation) &&
                                operationBlockStartAnalysisContext.Options.IsConfiguredToSkipAnalysis(MaybeInstallRootCertRule,
                                    owningSymbol, operationBlockStartAnalysisContext.Compilation))
                            {
                                return;
                            }

                            // Candidate 1: someStore.Add(...) invocations.
                            operationBlockStartAnalysisContext.RegisterOperationAction(
                                (OperationAnalysisContext operationAnalysisContext) =>
                                {
                                    var invocationOperation = (IInvocationOperation)operationAnalysisContext.Operation;
                                    if (x509TypeSymbol.Equals(invocationOperation.Instance?.Type) &&
                                        invocationOperation.TargetMethod.Name == "Add")
                                    {
                                        lock (rootOperationsNeedingAnalysis)
                                        {
                                            rootOperationsNeedingAnalysis.Add((invocationOperation.GetRoot(), operationAnalysisContext.ContainingSymbol));
                                        }
                                    }
                                },
                                OperationKind.Invocation);

                            // Candidate 2: an X509Store passed as an argument
                            // (the callee may install certificates — needs
                            // interprocedural analysis).
                            operationBlockStartAnalysisContext.RegisterOperationAction(
                                (OperationAnalysisContext operationAnalysisContext) =>
                                {
                                    var argumentOperation = (IArgumentOperation)operationAnalysisContext.Operation;
                                    if (x509TypeSymbol.Equals(argumentOperation.Parameter.Type))
                                    {
                                        lock (rootOperationsNeedingAnalysis)
                                        {
                                            rootOperationsNeedingAnalysis.Add((argumentOperation.GetRoot(), operationAnalysisContext.ContainingSymbol));
                                        }
                                    }
                                },
                                OperationKind.Argument);
                        });

                    compilationStartAnalysisContext.RegisterCompilationEndAction(
                        (CompilationAnalysisContext compilationAnalysisContext) =>
                        {
                            PooledDictionary<(Location Location, IMethodSymbol? Method), HazardousUsageEvaluationResult>? allResults = null;
                            try
                            {
                                lock (rootOperationsNeedingAnalysis)
                                {
                                    if (!rootOperationsNeedingAnalysis.Any())
                                    {
                                        return;
                                    }

                                    // Run the property-set dataflow analysis over
                                    // all collected roots in one batch.
                                    allResults = PropertySetAnalysis.BatchGetOrComputeHazardousUsages(
                                        compilationAnalysisContext.Compilation,
                                        rootOperationsNeedingAnalysis,
                                        compilationAnalysisContext.Options,
                                        WellKnownTypeNames.SystemSecurityCryptographyX509CertificatesX509Store,
                                        constructorMapper,
                                        PropertyMappers,
                                        hazardousUsageEvaluators,
                                        InterproceduralAnalysisConfiguration.Create(
                                            compilationAnalysisContext.Options,
                                            SupportedDiagnostics,
                                            rootOperationsNeedingAnalysis.First().Item1,
                                            compilationAnalysisContext.Compilation,
                                            defaultInterproceduralAnalysisKind: InterproceduralAnalysisKind.ContextSensitive));
                                }

                                if (allResults == null)
                                {
                                    return;
                                }

                                foreach (KeyValuePair<(Location Location, IMethodSymbol? Method), HazardousUsageEvaluationResult> kvp
                                    in allResults)
                                {
                                    DiagnosticDescriptor descriptor;
                                    switch (kvp.Value)
                                    {
                                        case HazardousUsageEvaluationResult.Flagged:
                                            descriptor = DefinitelyInstallRootCertRule;
                                            break;

                                        case HazardousUsageEvaluationResult.MaybeFlagged:
                                            descriptor = MaybeInstallRootCertRule;
                                            break;

                                        default:
                                            Debug.Fail($"Unhandled result value {kvp.Value}");
                                            continue;
                                    }

                                    RoslynDebug.Assert(kvp.Key.Method != null);    // HazardousUsageEvaluations only for invocations.
                                    compilationAnalysisContext.ReportDiagnostic(
                                        Diagnostic.Create(
                                            descriptor,
                                            kvp.Key.Location,
                                            kvp.Key.Method.ToDisplayString(
                                                SymbolDisplayFormat.MinimallyQualifiedFormat)));
                                }
                            }
                            finally
                            {
                                // Pooled objects must always be returned, even on
                                // exceptions from the dataflow analysis.
                                rootOperationsNeedingAnalysis.Free(compilationAnalysisContext.CancellationToken);
                                allResults?.Free(compilationAnalysisContext.CancellationToken);
                            }
                        });
                });
        }
    }
}
namespace ModuleZeroSampleProject.Migrations
{
    using System;
    using System.Collections.Generic;
    using System.Data.Entity.Infrastructure.Annotations;
    using System.Data.Entity.Migrations;

    /// <summary>
    /// Initial EF6 migration creating the ASP.NET Boilerplate (module-zero)
    /// core tables: permissions, roles, users, user logins, user-role links,
    /// settings and tenants. Generated code — do not hand-edit the schema;
    /// add a new migration instead.
    /// </summary>
    public partial class Create_AbpZero_Tables : DbMigration
    {
        public override void Up()
        {
            CreateTable(
                "dbo.AbpPermissions",
                c => new
                    {
                        Id = c.Long(nullable: false, identity: true),
                        Name = c.String(nullable: false, maxLength: 128),
                        IsGranted = c.Boolean(nullable: false),
                        CreationTime = c.DateTime(nullable: false),
                        CreatorUserId = c.Long(),
                        RoleId = c.Int(),
                        UserId = c.Long(),
                        // TPH discriminator: permission rows belong either to a role or a user.
                        Discriminator = c.String(nullable: false, maxLength: 128),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.AbpUsers", t => t.UserId, cascadeDelete: true)
                .ForeignKey("dbo.AbpRoles", t => t.RoleId, cascadeDelete: true)
                .Index(t => t.RoleId)
                .Index(t => t.UserId);

            CreateTable(
                "dbo.AbpRoles",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        TenantId = c.Int(),
                        Name = c.String(),
                        DisplayName = c.String(),
                        LastModificationTime = c.DateTime(),
                        LastModifierUserId = c.Long(),
                        CreationTime = c.DateTime(nullable: false),
                        CreatorUserId = c.Long(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.AbpUsers", t => t.CreatorUserId)
                .ForeignKey("dbo.AbpUsers", t => t.LastModifierUserId)
                .ForeignKey("dbo.AbpTenants", t => t.TenantId)
                .Index(t => t.TenantId)
                .Index(t => t.LastModifierUserId)
                .Index(t => t.CreatorUserId);

            CreateTable(
                "dbo.AbpUsers",
                c => new
                    {
                        Id = c.Long(nullable: false, identity: true),
                        TenantId = c.Int(),
                        Name = c.String(nullable: false, maxLength: 30),
                        Surname = c.String(nullable: false, maxLength: 30),
                        UserName = c.String(nullable: false, maxLength: 32),
                        Password = c.String(nullable: false, maxLength: 100),
                        EmailAddress = c.String(nullable: false, maxLength: 100),
                        IsEmailConfirmed = c.Boolean(nullable: false),
                        EmailConfirmationCode = c.String(maxLength: 16),
                        PasswordResetCode = c.String(maxLength: 32),
                        LastLoginTime = c.DateTime(),
                        IsDeleted = c.Boolean(nullable: false),
                        DeleterUserId = c.Long(),
                        DeletionTime = c.DateTime(),
                        LastModificationTime = c.DateTime(),
                        LastModifierUserId = c.Long(),
                        CreationTime = c.DateTime(nullable: false),
                        CreatorUserId = c.Long(),
                    },
                // Custom annotation consumed by ABP to mark this entity as
                // soft-deleted (rows are flagged via IsDeleted, not removed).
                annotations: new Dictionary<string, object>
                {
                    { "Abp_SoftDelete", "True" },
                })
                .PrimaryKey(t => t.Id)
                // Self-referencing audit FKs (creator/deleter/modifier are users too).
                .ForeignKey("dbo.AbpUsers", t => t.CreatorUserId)
                .ForeignKey("dbo.AbpUsers", t => t.DeleterUserId)
                .ForeignKey("dbo.AbpUsers", t => t.LastModifierUserId)
                .ForeignKey("dbo.AbpTenants", t => t.TenantId)
                .Index(t => t.TenantId)
                .Index(t => t.DeleterUserId)
                .Index(t => t.LastModifierUserId)
                .Index(t => t.CreatorUserId);

            CreateTable(
                "dbo.AbpUserLogins",
                c => new
                    {
                        Id = c.Long(nullable: false, identity: true),
                        UserId = c.Long(nullable: false),
                        LoginProvider = c.String(),
                        ProviderKey = c.String(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.AbpUsers", t => t.UserId, cascadeDelete: true)
                .Index(t => t.UserId);

            CreateTable(
                "dbo.AbpUserRoles",
                c => new
                    {
                        Id = c.Long(nullable: false, identity: true),
                        UserId = c.Long(nullable: false),
                        RoleId = c.Int(nullable: false),
                        CreationTime = c.DateTime(nullable: false),
                        CreatorUserId = c.Long(),
                    })
                .PrimaryKey(t => t.Id)
                // NOTE(review): RoleId has no FK/index here, and Down() drops
                // none either — internally consistent, presumably by design in
                // the generated model; confirm before "fixing" in a new migration.
                .ForeignKey("dbo.AbpUsers", t => t.UserId, cascadeDelete: true)
                .Index(t => t.UserId);

            CreateTable(
                "dbo.AbpSettings",
                c => new
                    {
                        Id = c.Long(nullable: false, identity: true),
                        TenantId = c.Int(),
                        UserId = c.Long(),
                        Name = c.String(),
                        Value = c.String(),
                        LastModificationTime = c.DateTime(),
                        LastModifierUserId = c.Long(),
                        CreationTime = c.DateTime(nullable: false),
                        CreatorUserId = c.Long(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.AbpUsers", t => t.UserId)
                .ForeignKey("dbo.AbpTenants", t => t.TenantId)
                .Index(t => t.TenantId)
                .Index(t => t.UserId);

            CreateTable(
                "dbo.AbpTenants",
                c => new
                    {
                        Id = c.Int(nullable: false, identity: true),
                        TenancyName = c.String(),
                        Name = c.String(),
                        LastModificationTime = c.DateTime(),
                        LastModifierUserId = c.Long(),
                        CreationTime = c.DateTime(nullable: false),
                        CreatorUserId = c.Long(),
                    })
                .PrimaryKey(t => t.Id)
                .ForeignKey("dbo.AbpUsers", t => t.CreatorUserId)
                .ForeignKey("dbo.AbpUsers", t => t.LastModifierUserId)
                .Index(t => t.LastModifierUserId)
                .Index(t => t.CreatorUserId);
        }

        public override void Down()
        {
            // Reverse order of Up(): drop FKs first, then indexes, then tables,
            // so no dependent objects remain when a table is removed.
            DropForeignKey("dbo.AbpRoles", "TenantId", "dbo.AbpTenants");
            DropForeignKey("dbo.AbpPermissions", "RoleId", "dbo.AbpRoles");
            DropForeignKey("dbo.AbpRoles", "LastModifierUserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpRoles", "CreatorUserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpUsers", "TenantId", "dbo.AbpTenants");
            DropForeignKey("dbo.AbpSettings", "TenantId", "dbo.AbpTenants");
            DropForeignKey("dbo.AbpTenants", "LastModifierUserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpTenants", "CreatorUserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpSettings", "UserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpUserRoles", "UserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpPermissions", "UserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpUserLogins", "UserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpUsers", "LastModifierUserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpUsers", "DeleterUserId", "dbo.AbpUsers");
            DropForeignKey("dbo.AbpUsers", "CreatorUserId", "dbo.AbpUsers");
            DropIndex("dbo.AbpTenants", new[] { "CreatorUserId" });
            DropIndex("dbo.AbpTenants", new[] { "LastModifierUserId" });
            DropIndex("dbo.AbpSettings", new[] { "UserId" });
            DropIndex("dbo.AbpSettings", new[] { "TenantId" });
            DropIndex("dbo.AbpUserRoles", new[] { "UserId" });
            DropIndex("dbo.AbpUserLogins", new[] { "UserId" });
            DropIndex("dbo.AbpUsers", new[] { "CreatorUserId" });
            DropIndex("dbo.AbpUsers", new[] { "LastModifierUserId" });
            DropIndex("dbo.AbpUsers", new[] { "DeleterUserId" });
            DropIndex("dbo.AbpUsers", new[] { "TenantId" });
            DropIndex("dbo.AbpRoles", new[] { "CreatorUserId" });
            DropIndex("dbo.AbpRoles", new[] { "LastModifierUserId" });
            DropIndex("dbo.AbpRoles", new[] { "TenantId" });
            DropIndex("dbo.AbpPermissions", new[] { "UserId" });
            DropIndex("dbo.AbpPermissions", new[] { "RoleId" });
            DropTable("dbo.AbpTenants");
            DropTable("dbo.AbpSettings");
            DropTable("dbo.AbpUserRoles");
            DropTable("dbo.AbpUserLogins");
            // The annotation added in Up() must be declared as removed here so
            // the migration model stays symmetric.
            DropTable("dbo.AbpUsers",
                removedAnnotations: new Dictionary<string, object>
                {
                    { "Abp_SoftDelete", "True" },
                });
            DropTable("dbo.AbpRoles");
            DropTable("dbo.AbpPermissions");
        }
    }
}
/* Copyright 2010 Google Inc

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/* Change history
* Oct 13 2008  Joe Feser       joseph.feser@gmail.com
* Converted List<object>s and other .NET 1.1 collections to use Generics
* Combined IExtensionElement and IExtensionElementFactory interfaces
*
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml;
using System.Reflection;
using Google.Apis.Util;

namespace Google.Apis
{
    /// <summary> String utilities. </summary>
    public static class Utilities
    {
        /// <summary> True when the Mono-runtime is used to execute this code. </summary>
        public static readonly bool IsRunningOnMono = (Type.GetType("Mono.Runtime") != null);

        /// <summary> xsd version of bool:true. </summary>
        public const string XSDTrue = "true";

        /// <summary> xsd version of bool:false. </summary>
        public const string XSDFalse = "false";

        /// <summary>returns a blank emptyDate. That's the default for an empty string date</summary>
        public static DateTime EmptyDate
        {
            get
            {
                // that's the blank value you get when setting a DateTime to an empty string in the property browser
                return new DateTime(1, 1, 1);
            }
        }

        /// <summary>
        /// Helper to read in a string and Encode it.
        /// Round-trips the string through UTF-8 bytes and back to a string.
        /// </summary>
        /// <param name="content">the xmlreader string</param>
        /// <returns>UTF8 encoded string</returns>
        public static string EncodeString(string content)
        {
            // get the encoding
            Encoding utf8Encoder = Encoding.UTF8;

            Byte[] utf8Bytes = EncodeStringToUtf8(content);

            char[] utf8Chars = new char[utf8Encoder.GetCharCount(utf8Bytes, 0, utf8Bytes.Length)];
            utf8Encoder.GetChars(utf8Bytes, 0, utf8Bytes.Length, utf8Chars, 0);

            String utf8String = new String(utf8Chars);
            return utf8String;
        }

        /// <summary>
        /// returns you a bytearray of UTF8 bytes from the string passed in
        /// the passed in string is assumed to be UTF16
        /// </summary>
        /// <param name="content">UTF16 string</param>
        /// <returns>utf 8 byte array</returns>
        public static Byte[] EncodeStringToUtf8(string content)
        {
            // get the encoding
            Encoding utf8Encoder = Encoding.UTF8;
            Encoding utf16Encoder = Encoding.Unicode;

            Byte[] bytes = utf16Encoder.GetBytes(content);

            Byte[] utf8Bytes = Encoding.Convert(utf16Encoder, utf8Encoder, bytes);
            return utf8Bytes;
        }

        /// <summary>helper to read in a string and Encode it according to
        /// RFC 5023 rules for slugheaders</summary>
        /// <param name="slug">the Unicode string for the slug header; null yields ""</param>
        /// <returns>ASCII encoded string with unsafe bytes percent-escaped as %XX</returns>
        public static string EncodeSlugHeader(string slug)
        {
            if (slug == null)
            {
                return "";
            }

            Byte[] bytes = EncodeStringToUtf8(slug);
            if (bytes == null)
            {
                return "";
            }

            StringBuilder returnString = new StringBuilder(256);

            foreach (byte b in bytes)
            {
                // Escape control chars, '%' itself, and anything above 0x7E,
                // per RFC 5023 section 9.7.
                if ((b < 0x20) || (b == 0x25) || (b > 0x7E))
                {
                    // FIX: percent-escapes must always be two hex digits
                    // ("%0A", not "%A") — "{0:X}" dropped the leading zero
                    // for bytes below 0x10, producing invalid encoding.
                    returnString.AppendFormat(CultureInfo.InvariantCulture, "%{0:X2}", b);
                }
                else
                {
                    returnString.Append((char)b);
                }
            }

            return returnString.ToString();
        }

        /// <summary>
        /// tests an etag for weakness. returns TRUE for weak etags and for null strings
        /// </summary>
        /// <param name="eTag">the entity tag to classify; may be null</param>
        /// <returns>true when the tag is null or carries the RFC 7232 "W/" weak prefix</returns>
        public static bool IsWeakETag(string eTag)
        {
            if (eTag == null)
            {
                return true;
            }

            // FIX: the "W/" prefix is a protocol token, so compare ordinally
            // rather than with the culture-sensitive StartsWith default (CA1310).
            if (eTag.StartsWith("W/", StringComparison.Ordinal))
            {
                return true;
            }

            return false;
        }

        /// <summary>Method to output just the date portion as string</summary>
        /// <param name="dateTime">the DateTime object to output as a string</param>
        /// <returns>an rfc-3339 string</returns>
        public static string LocalDateInUTC(DateTime dateTime)
        {
            // Add "full-date T partial-time"
            return dateTime.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
        }

        /// <summary>Helper method to format a TimeSpan as a string compliant with the "time-offset" format defined in RFC-3339</summary>
        /// <param name="spanFromUtc">the TimeSpan to format</param>
        /// <returns>"Z" for a zero offset, otherwise a signed "+hh:mm"/"-hh:mm" offset</returns>
        public static string FormatTimeOffset(TimeSpan spanFromUtc)
        {
            // Simply return "Z" if there is no offset
            if (spanFromUtc == TimeSpan.Zero)
            {
                return "Z";
            }

            // Return the numeric offset
            TimeSpan absoluteSpan = spanFromUtc.Duration();
            if (spanFromUtc > TimeSpan.Zero)
            {
                return "+" + FormatNumOffset(absoluteSpan);
            }
            else
            {
                return "-" + FormatNumOffset(absoluteSpan);
            }
        }

        /// <summary>Helper method to format a TimeSpan to {HH}:{MM}</summary>
        /// <param name="timeSpan">the TimeSpan to format</param>
        /// <returns>a string in "hh:mm" format.</returns>
        internal static string FormatNumOffset(TimeSpan timeSpan)
        {
            return String.Format(CultureInfo.InvariantCulture, "{0:00}:{1:00}", timeSpan.Hours, timeSpan.Minutes);
        }

        /////////////////////////////////////////////////////////////////////////////

        /// <summary> Returns the version of the Core library. </summary>
        public static string GetLibraryVersion()
        {
            return Regex.Match(typeof(Utilities).Assembly.FullName, "Version=([\\d\\.]+)").Groups[1].ToString();
        }

        /// <summary>
        /// Replaces all the specified characters within the input string with the given replacement
        /// </summary>
        /// <param name="input">the string to scan</param>
        /// <param name="replace">the replacement string substituted for each invalid character</param>
        /// <param name="invalidCharacters">the set of characters to replace; must be non-empty</param>
        public static string Replace(this string input, string replace, params char[] invalidCharacters)
        {
            invalidCharacters.ThrowIfNullOrEmpty("invalidCharacters");

            // Create the resulting string
            var result = new StringBuilder(input.Length);

            foreach (char c in input)
            {
                // Replace invalid characters with the replacement string
                if (invalidCharacters.Contains(c))
                {
                    result.Append(replace);
                    continue;
                }

                result.Append(c);
            }

            return result.ToString();
        }

        #region LINQ extensions

        /// <summary>
        /// Returns the enumerable with the specified element removed
        /// </summary>
        public static IEnumerable<T> Without<T>(this IEnumerable<T> enumerable, T toRemove)
        {
            return enumerable.Except(new[] { toRemove });
        }

        /// <summary>
        /// Returns the enumerable with the specified element added to the end of it.
        /// </summary>
        public static IEnumerable<T> Concat<T>(this IEnumerable<T> enumerable, T toAdd)
        {
            return enumerable.Concat(new[] { toAdd });
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;

namespace Internal.TypeSystem
{
    /// <summary>
    /// Represents a generic type definition instantiated over concrete (or open)
    /// type arguments, e.g. List&lt;int&gt; for the definition List&lt;T&gt;.
    /// Most queries delegate to the uninstantiated definition, substituting this
    /// type's instantiation where needed.
    /// </summary>
    public sealed partial class InstantiatedType : MetadataType
    {
        // The open generic type definition (never itself an InstantiatedType).
        private MetadataType _typeDef;
        // The type arguments this instance is instantiated over.
        private Instantiation _instantiation;

        internal InstantiatedType(MetadataType typeDef, Instantiation instantiation)
        {
            Debug.Assert(!(typeDef is InstantiatedType));
            _typeDef = typeDef;

            Debug.Assert(instantiation.Length > 0);
            _instantiation = instantiation;

            _baseType = this; // Not yet initialized flag
        }

        // Cached hash code; 0 means "not yet computed".
        private int _hashCode;

        public override int GetHashCode()
        {
            // Lazily compute and cache. If the true hash ever equals 0 this
            // recomputes on every call, which is benign (same value each time).
            if (_hashCode == 0)
                _hashCode = _instantiation.ComputeGenericInstanceHashCode(_typeDef.GetHashCode());
            return _hashCode;
        }

        public override TypeSystemContext Context
        {
            get
            {
                return _typeDef.Context;
            }
        }

        public override Instantiation Instantiation
        {
            get
            {
                return _instantiation;
            }
        }

        // Lazily-initialized base type. The sentinel value `this` (set in the
        // constructor) marks "not yet computed"; null is a valid computed result
        // (e.g. for System.Object), so null cannot serve as the sentinel.
        private MetadataType _baseType /* = this */;

        // Computes and caches the instantiated base type by substituting our
        // instantiation into the definition's base type signature.
        private MetadataType InitializeBaseType()
        {
            var uninst = _typeDef.MetadataBaseType;

            return (_baseType = (uninst != null) ? (MetadataType)uninst.InstantiateSignature(_instantiation, new Instantiation()) : null);
        }

        public override DefType BaseType
        {
            get
            {
                // `_baseType == this` means not yet initialized.
                if (_baseType == this)
                    return InitializeBaseType();
                return _baseType;
            }
        }

        public override MetadataType MetadataBaseType
        {
            get
            {
                // Same lazy-init protocol as BaseType.
                if (_baseType == this)
                    return InitializeBaseType();
                return _baseType;
            }
        }

        // Computes the requested subset of type flags by consulting the open
        // definition (category, variance, finalizer presence are all inherited
        // from the definition, not the instantiation).
        protected override TypeFlags ComputeTypeFlags(TypeFlags mask)
        {
            TypeFlags flags = 0;

            if ((mask & TypeFlags.CategoryMask) != 0)
            {
                flags |= _typeDef.Category;
            }

            if ((mask & TypeFlags.HasGenericVarianceComputed) != 0)
            {
                flags |= TypeFlags.HasGenericVarianceComputed;

                if (_typeDef.HasVariance)
                    flags |= TypeFlags.HasGenericVariance;
            }

            if ((mask & TypeFlags.HasFinalizerComputed) != 0)
            {
                flags |= TypeFlags.HasFinalizerComputed;

                if (_typeDef.HasFinalizer)
                    flags |= TypeFlags.HasFinalizer;
            }

            return flags;
        }

        public override string Name
        {
            get
            {
                return _typeDef.Name;
            }
        }

        public override string Namespace
        {
            get
            {
                return _typeDef.Namespace;
            }
        }

        /// <summary>
        /// Enumerates the methods of this type: each typical method definition on
        /// the open type is wrapped as a method on this instantiated type.
        /// </summary>
        public override IEnumerable<MethodDesc> GetMethods()
        {
            foreach (var typicalMethodDef in _typeDef.GetMethods())
            {
                yield return _typeDef.Context.GetMethodForInstantiatedType(typicalMethodDef, this);
            }
        }

        // TODO: Substitutions, generics, modopts, ...
        public override MethodDesc GetMethod(string name, MethodSignature signature)
        {
            MethodDesc typicalMethodDef = _typeDef.GetMethod(name, signature);
            if (typicalMethodDef == null)
                return null;
            return _typeDef.Context.GetMethodForInstantiatedType(typicalMethodDef, this);
        }

        public override MethodDesc GetStaticConstructor()
        {
            MethodDesc typicalCctor = _typeDef.GetStaticConstructor();
            if (typicalCctor == null)
                return null;
            return _typeDef.Context.GetMethodForInstantiatedType(typicalCctor, this);
        }

        public override MethodDesc GetDefaultConstructor()
        {
            MethodDesc typicalCtor = _typeDef.GetDefaultConstructor();
            if (typicalCtor == null)
                return null;
            return _typeDef.Context.GetMethodForInstantiatedType(typicalCtor, this);
        }

        /// <summary>
        /// Returns the finalizer for this instantiated type, walking up the base
        /// hierarchy to find the type that actually declares it, then re-wrapping
        /// the typical definition over that (possibly instantiated) base type.
        /// </summary>
        public override MethodDesc GetFinalizer()
        {
            MethodDesc typicalFinalizer = _typeDef.GetFinalizer();
            if (typicalFinalizer == null)
                return null;

            MetadataType typeInHierarchy = this;

            // Note, we go back to the type definition/typical method definition in this code.
            // If the finalizer is implemented on a base type that is also a generic, then the
            // typicalFinalizer in that case is a MethodForInstantiatedType for an instantiated type
            // which is instantiated over the open type variables of the derived type.
            while (typicalFinalizer.OwningType.GetTypeDefinition() != typeInHierarchy.GetTypeDefinition())
            {
                typeInHierarchy = typeInHierarchy.MetadataBaseType;
            }

            if (typeInHierarchy == typicalFinalizer.OwningType)
            {
                return typicalFinalizer;
            }
            else
            {
                Debug.Assert(typeInHierarchy is InstantiatedType);
                return _typeDef.Context.GetMethodForInstantiatedType(typicalFinalizer.GetTypicalMethodDefinition(), (InstantiatedType)typeInHierarchy);
            }
        }

        /// <summary>
        /// Enumerates the fields of this type, wrapping each field definition of
        /// the open type as a field on this instantiated type.
        /// </summary>
        public override IEnumerable<FieldDesc> GetFields()
        {
            foreach (var fieldDef in _typeDef.GetFields())
            {
                yield return _typeDef.Context.GetFieldForInstantiatedType(fieldDef, this);
            }
        }

        // TODO: Substitutions, generics, modopts, ...
        public override FieldDesc GetField(string name)
        {
            FieldDesc fieldDef = _typeDef.GetField(name);
            if (fieldDef == null)
                return null;
            return _typeDef.Context.GetFieldForInstantiatedType(fieldDef, this);
        }

        /// <summary>
        /// Substitutes the given type/method instantiations into this type's own
        /// instantiation. Returns `this` unchanged when no argument is affected
        /// (copy-on-write: the clone array is only allocated on first change).
        /// </summary>
        public override TypeDesc InstantiateSignature(Instantiation typeInstantiation, Instantiation methodInstantiation)
        {
            TypeDesc[] clone = null;

            for (int i = 0; i < _instantiation.Length; i++)
            {
                TypeDesc uninst = _instantiation[i];
                TypeDesc inst = uninst.InstantiateSignature(typeInstantiation, methodInstantiation);
                if (inst != uninst)
                {
                    if (clone == null)
                    {
                        clone = new TypeDesc[_instantiation.Length];
                        for (int j = 0; j < clone.Length; j++)
                        {
                            clone[j] = _instantiation[j];
                        }
                    }
                    clone[i] = inst;
                }
            }

            return (clone == null) ? this : _typeDef.Context.GetInstantiatedType(_typeDef, new Instantiation(clone));
        }

        /// <summary>
        /// Instantiate an array of TypeDescs over typeInstantiation and methodInstantiation
        /// </summary>
        public static T[] InstantiateTypeArray<T>(T[] uninstantiatedTypes, Instantiation typeInstantiation, Instantiation methodInstantiation) where T : TypeDesc
        {
            T[] clone = null;

            for (int i = 0; i < uninstantiatedTypes.Length; i++)
            {
                T uninst = uninstantiatedTypes[i];
                TypeDesc inst = uninst.InstantiateSignature(typeInstantiation, methodInstantiation);
                if (inst != uninst)
                {
                    // Copy-on-write: allocate the clone only when a substitution occurs.
                    if (clone == null)
                    {
                        clone = new T[uninstantiatedTypes.Length];
                        for (int j = 0; j < clone.Length; j++)
                        {
                            clone[j] = uninstantiatedTypes[j];
                        }
                    }
                    clone[i] = (T)inst;
                }
            }

            return clone != null ? clone : uninstantiatedTypes;
        }

        // Strips instantiation. E.g C<int> -> C<T>
        public override TypeDesc GetTypeDefinition()
        {
            return _typeDef;
        }

        public override string ToString()
        {
            var sb = new StringBuilder(_typeDef.ToString());
            sb.Append('<');
            sb.Append(_instantiation.ToString());
            sb.Append('>');
            return sb.ToString();
        }

        // Properties that are passed through from the type definition

        public override ClassLayoutMetadata GetClassLayout()
        {
            return _typeDef.GetClassLayout();
        }

        public override bool IsExplicitLayout
        {
            get
            {
                return _typeDef.IsExplicitLayout;
            }
        }

        public override bool IsSequentialLayout
        {
            get
            {
                return _typeDef.IsSequentialLayout;
            }
        }

        public override bool IsBeforeFieldInit
        {
            get
            {
                return _typeDef.IsBeforeFieldInit;
            }
        }

        public override bool IsModuleType
        {
            get
            {
                // The global module type cannot be generic.
                return false;
            }
        }

        public override bool IsSealed
        {
            get
            {
                return _typeDef.IsSealed;
            }
        }

        public override bool IsAbstract
        {
            get
            {
                return _typeDef.IsAbstract;
            }
        }

        public override ModuleDesc Module
        {
            get
            {
                return _typeDef.Module;
            }
        }

        public override bool HasCustomAttribute(string attributeNamespace, string attributeName)
        {
            return _typeDef.HasCustomAttribute(attributeNamespace, attributeName);
        }

        public override DefType ContainingType
        {
            get
            {
                // Return the result from the typical type definition.
                return _typeDef.ContainingType;
            }
        }

        public override MetadataType GetNestedType(string name)
        {
            // Return the result from the typical type definition.
            return _typeDef.GetNestedType(name);
        }

        public override IEnumerable<MetadataType> GetNestedTypes()
        {
            // Return the result from the typical type definition.
            return _typeDef.GetNestedTypes();
        }
    }
}
// Copyright (c) 2015 hugula
// direct https://github.com/tenvick/hugula
//
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using LuaInterface;

// Virtualized list/grid for UGUI ScrollRect: keeps only `pageSize` item clones
// alive and repositions/re-renders them (via Lua callbacks) as the container
// scrolls. Data lives in a LuaTable; indices exposed to Lua are 1-based.
[ExecuteInEditMode]
[AddComponentMenu("UGUI/ScrollRectTable")]
public class ScrollRectTable : MonoBehaviour
{
    #region public static
    /// <summary>
    /// insert data (default Lua fallback used when no onDataInsert is assigned)
    /// </summary>
    public static string DataInsertStr = @"return function(data,index,script) if script.data==nil then script.data={} end local lenold=#script.data table.insert(script.data,index,data) end";

    /// <summary>
    /// remove data from table (default Lua fallback used when no onDataRemove is assigned)
    /// </summary>
    public static string DataRemoveStr = @"return function(data,index,script) table.remove(data,index) end";

    /// <summary>
    /// prerender (default Lua fallback: renames and hides the item)
    /// </summary>
    public static string PreRenderStr = @"return function(referScipt,index,dataItem) referScipt.name=""Pre""..tostring(index) referScipt.gameObject:SetActive(false) end";
    #endregion

    #region public attribute
    public enum Direction
    {
        Down,
        Up,
    }

    // Last scroll direction: 1 = forward (left/up), -1 = backward (right/down), 0 = refresh in place.
    public int scrollDirection{get;private set;}

    public Direction direction = Direction.Down;
    public RectTransform moveContainer;
    public ScrollRectItem tileItem;//the template item
    public LuaFunction onItemRender;//function(tileItemClone,index,dataItem)
    public LuaFunction onPreRender;//function(tileItemClone,index,dataItem)
    public LuaFunction onDataRemove;//function(data,index,UIPanelCamackTable)
    public LuaFunction onDataInsert;//function(data,index,UIPanelCamackTable)
    public int pageSize=5;
    // Items rendered per frame; values < 1 accumulate fractionally (render every 1/n frames).
    public float renderPerFrames=1;
    public int recordCount //
    { get; private set; }
    // 0 = horizontal single-row layout; > 0 = vertical grid with this many columns.
    public int columns = 0;
    public Vector2 padding = Vector2.zero;
    public Vector3 tileSize=new Vector3(0,0,1);

    // Assigning data hides all current clones, recalculates bounds/record count
    // and resets the window to the start.
    public LuaTable data{
        get{return _data;}
        set{
            if(_data!=null)
            {
                foreach(var obj in this.repositionTileList) obj.gameObject.SetActive(false);
            }
            _data=value;
            CalcBounds();
            CalcPage();
            this.currFirstIndex=0;
            this.lastEndIndex=0;
        }
    }
    #endregion

    #region private attribute
    LuaTable _data;//data

    public int headIndex{
        get; ////the camera position index
        private set;
    }

    public int currFirstIndex{ get;private set;}//=0;//current pageBegin data index
    int lastEndIndex=0;// last time render last data index
    int currRenderIndex=0;//current render index
    int lastHeadIndex=0;
    List<int> repositionIntList=new List<int>();          // data indices queued for rendering
    List<ScrollRectItem> repositionTileList=new List<ScrollRectItem>();  // live item clones (window)
    List<ScrollRectItem> preRenderList=new List<ScrollRectItem>();//     items queued for rendering
    Vector3 dtmove;
    Vector3 beginPosition;
    Vector3 currPosition;
    bool mStarted = false;
    bool foward=false;//panel true camera
    Rect rect;                // size of one cell (tile size + padding)
    private Vector2 sizeDelta;
    #endregion

    #region public method

    // Maps an item clone back to its current data index, or -1 when not found.
    public int getIndex(ScrollRectItem item)
    {
        int i=this.repositionTileList.IndexOf(item);
        int j=-1;
        if(i>=0)j=this.currFirstIndex+i;
        return j;
    }

    // Fetches the Lua data record for a 0-based index (Lua tables are 1-based).
    public LuaTable getDataFromIndex(int index)
    {
        return (LuaTable)data[index + 1];
    }

    // Removes the data record bound to the given item clone; returns its index or -1.
    public int removeChild(ScrollRectItem item)
    {
        int i=getIndex(item);
        if(i>=0)
        {
            if (onDataRemove == null) onDataRemove = PLua.instance.lua.lua.LoadString(DataRemoveStr, "onDataRemove");
            onDataRemove.Call(this.data,i+1,this);
            this.CalcPage();
        }
        return i;
    }

    // Inserts a record at the (clamped) 0-based index; returns the index used.
    public int insertData(object item,int index)
    {
        if(index<0)index=0;
        if(index>=this.recordCount)index=this.recordCount;
        if (onDataInsert == null) onDataInsert = PLua.instance.lua.lua.LoadString(DataInsertStr, "onDataInsert");
        onDataInsert.Call(item,index+1,this);
        this.CalcPage();
        return index;
    }

    // Removes the record at the given 0-based index; returns the index or -1 if out of range.
    public int removeDataAt(int index)
    {
        if(index>=0 && index<this.recordCount)
        {
            if (onDataRemove == null) onDataRemove = PLua.instance.lua.lua.LoadString(DataRemoveStr, "onDataRemove");
            onDataRemove.Call(data,index+1,this);
            this.CalcPage();
            return index;
        }
        return -1;
    }

    // Hides all live item clones (does not touch the data).
    public void clear()
    {
        foreach(var item in repositionTileList)
        {
            item.gameObject.SetActive(false);
        }
    }

    // Jumps the container so the given data index is at the head position.
    public void scrollTo(int index)
    {
        Vector3 currPos=moveContainer.transform.localPosition;
        if(index<0)index=0;
        if(columns==0)
        {
            float x=index*rect.width;
            currPos.x=beginPosition.x-x;
            currPos.y=beginPosition.y;
            currPos.z=beginPosition.z;
        }else if(columns>0)
        {
            float y=((int)((float)index/(float)columns))*rect.height;
            currPos.x=beginPosition.x;
            currPos.z=beginPosition.z;
            if(this.direction==Direction.Down)
                currPos.y=Math.Abs(beginPosition.y+y+this.padding.y);//pos.y=-(rect.height*y+ this.padding.y);
            else
                currPos.y=beginPosition.y-y-this.padding.y;//pos.y=(rect.height*y+ this.padding.y);
        }
        moveContainer.localPosition = currPos;
        // SpringPanel.Begin(moveContainer,currPos,13f);
    }

    /// <summary>
    /// Refresh the form give begin data Index.
    /// </summary>
    /// <param name='begin'>
    /// Begin. Negative begin resets the view to the top and re-renders the first page.
    /// </param>
    public void Refresh(int begin=-1,int end=-1)
    {
        if(!mStarted)return;
        int bg=0,ed=0;
        if(begin<0)
        {
            bg=0;//Debug.Log("Refresh 0 ");calc
            ed=this.pageSize;
            if(moveContainer!=null) moveContainer.transform.localPosition=this.beginPosition;
            scroll(0,true);
        }
        else
        {
            bg=begin;
            if (end==-1) end=this.currFirstIndex+this.pageSize;
            refresh(bg,end);
        }
    }

    /// <summary>
    /// Refresh the specified item's position.
    /// </summary>
    /// <param name='item'>
    /// Item.
    /// </param>
    public void Refresh(ScrollRectItem item)
    {
        int i=getIndex(item);
        if(i>=0)
        {
            // NOTE(review): getIndex already adds currFirstIndex, so this adds it
            // a second time — presumably intentional for this codebase; verify against callers.
            i=i+currFirstIndex;
            preRefresh(i);
        }
    }
    #endregion

    #region private method

    // Recomputes recordCount from the Lua data table and resizes the container.
    void CalcPage()
    {
        if(this._data!=null)
        {
            recordCount=this._data.Values.Count;
        }
        else
        {
            recordCount=0;
        }
        SetRangeSymbol();
    }

    // Resizes the scroll container to fit all records (width for rows, height for grids).
    void SetRangeSymbol()
    {
        if(moveContainer!=null)
        {
            var delt=moveContainer.sizeDelta;
            if(columns<=0)
                delt.x = recordCount*rect.width+this.padding.x*2;
            else
            {
                int y=(recordCount)/columns+1;
                int x=(recordCount) % columns;
                if(this.direction==Direction.Down)
                    delt.y=(rect.height*y+ this.padding.y*2);
                else
                    delt.y=-(rect.height*y+ this.padding.y*2);
                // delt.x=rect.width*x+this.padding.x*2;
            }
            moveContainer.sizeDelta=delt;
            sizeDelta = delt;
        }
    }

    // Recomputes the last rendered index from the current window start.
    void CalcLastEndIndex()
    {
        int last=this.currFirstIndex+this.pageSize-1;
        if(last>=this.recordCount)last=recordCount-1;
        this.lastEndIndex=last;
    }

    // Derives the per-cell rect from the template item size (or explicit tileSize) plus padding.
    void CalcBounds()
    {
        if( tileItem )
        {
            float wi,he;
            RectTransform rectTrans =tileItem.rectTransform;
            var size=rectTrans.sizeDelta;
            wi=tileSize.x<=0?size.x:tileSize.x;
            he=tileSize.y<=0?size.y:tileSize.y;
            rect = new Rect(0,0,wi+padding.x,he+padding.y);
        }
    }

    float renderframs=0;  // fractional-frame accumulator for renderPerFrames < 1

    // Drains the render queue at the configured rate (fractional or N-per-frame).
    void renderItem()
    {
        if(renderPerFrames<1)
        {
            renderframs+=renderPerFrames;
            if(renderframs>=1)
            {
                renderframs=0;
                render();
            }
        }else
        {
            for(int i=0;i<this.renderPerFrames;i++)
            {
                render();
            }
        }
    }

    // Renders a single queued item via the onItemRender Lua callback.
    void render()
    {
        if(this.preRenderList.Count>0)
        {
            ScrollRectItem item=preRenderList[0];
            currRenderIndex=this.repositionIntList[0];
            preRenderList.RemoveAt(0);
            repositionIntList.RemoveAt(0);
            if(currRenderIndex+1<=recordCount)
            {
                if(onItemRender!=null)onItemRender.Call(item,currRenderIndex,data[currRenderIndex+1]);
            }
        }
    }

    // Places an item transform at the grid cell for the given data index.
    void setPosition(Transform trans,int index)
    {
        if(trans.parent!=this.transform)trans.SetParent(this.transform);
        Vector3 pos=Vector3.zero;
        if(this.columns==0)
        {
            pos.x=rect.width*index+rect.width*.5f+this.padding.x;
            if(this.direction==Direction.Down)
                pos.y=-(this.padding.y+rect.height*.5f);
            else
                pos.y=this.padding.y+rect.height*.5f;
        }
        else
        {
            int y=index/columns;
            int x=index % columns;
            if(this.direction==Direction.Down)
                pos.y=-(rect.height*y+ this.padding.y+rect.height*.5f);
            else
                pos.y=rect.height*y+ this.padding.y+rect.height*.5f;
            pos.x=rect.width*x+rect.width*.5f+this.padding.x;
        }
        trans.localPosition=pos;
    }

    // Positions the item and invokes the onPreRender Lua callback (falls back to PreRenderStr).
    void preRender(ScrollRectItem item,int index)
    {
        setPosition(item.transform,index);
        if (this.onPreRender == null) onPreRender = PLua.instance.lua.lua.LoadString(PreRenderStr, "onPreRenderStr");
        object dataI=index+1<=recordCount?data[index+1]:null;
        onPreRender.Call(item,index,dataI);
    }

    // Ensures a clone exists for the window slot of data index i and queues it for render.
    void preRefresh(int i)
    {
        if(i>=0)
        {
            int Cindex=i-this.currFirstIndex;
            if(Cindex>=0)
            {
                // Lazily instantiate clones until the window holds pageSize items.
                if(repositionTileList.Count<this.pageSize)
                {
                    GameObject obj =this.tileItem.gameObject;
                    GameObject clone=(GameObject)GameObject.Instantiate(obj);
                    ScrollRectItem cloneRefer=clone.GetComponent<ScrollRectItem>();
                    repositionTileList.Add(cloneRefer);
                    Vector3 scale=clone.transform.localScale;
                    clone.transform.SetParent(this.transform);
                    clone.transform.localScale=scale;
                    this.lastEndIndex=i;
                }
                if(Cindex<this.pageSize)
                {
                    ScrollRectItem tile=repositionTileList[Cindex];
                    this.preRenderList.Add(tile);
                    repositionIntList.Add(i);
                    scrollDirection=0;
                    preRender(tile,i);//Debug.Log(String.Format("preRefresh:{0}",i));
                }
            }
        }
    }

    // Recycles the first clone to the end of the window (scrolling forward to index i).
    void preLeftUp(int i)
    {
        if(i>=this.pageSize && !repositionIntList.Contains(i) && i<this.recordCount)
        {
            ScrollRectItem tile=repositionTileList[0];
            repositionTileList.RemoveAt(0);//remove first
            repositionTileList.Add(tile);//to end
            this.preRenderList.Add(tile);//add to preRenderList
            repositionIntList.Add(i);//add data index
            currFirstIndex++;
            if(currFirstIndex+pageSize>recordCount)currFirstIndex=recordCount-this.pageSize;
            this.lastEndIndex=i;//recorde last render data index
            scrollDirection=1;
            preRender(tile,i);//call preRender,set Postion
        }
    }

    // Recycles the last clone to the front of the window (scrolling backward to index i).
    void preRightDown(int i)
    {
        if(i>=0 && !repositionIntList.Contains(i) && i+pageSize<=recordCount) //i>pageSize)//
        {
            int end1=repositionTileList.Count-1;
            ScrollRectItem tile=repositionTileList[end1];
            repositionTileList.RemoveAt(end1);//remove end
            repositionTileList.Insert(0,tile);//to first
            this.preRenderList.Add(tile);//add to preRenderList
            repositionIntList.Add(i);//add data index
            currFirstIndex--;
            if(currFirstIndex<0)currFirstIndex=0;
            CalcLastEndIndex();
            scrollDirection=-1;
            preRender(tile,i);
        }
    }

    // Moves the visible window so its head is newHead; `force` re-renders the whole page.
    void scroll(int newHead ,bool force)
    {
        if(newHead<0)newHead=0;
        int step=newHead-currFirstIndex;
        if((step!=0 && this.headIndex!=lastHeadIndex) || force)
        {
            if(step>0)
            {
                int begin=lastEndIndex+1;
                int end=begin+step;
                for(int i=begin;i<end;i++)
                {
                    preLeftUp(i);
                }
            }else if(step<0)
            {
                int begin=currFirstIndex-1;
                int end=begin+step;
                if(begin<0)begin=0;
                for(int i=begin;i>end;i--)
                {
                    preRightDown(i);
                }
            }else
            {
                scrollDirection=0;
                int begin=newHead;//lastHeadIndex;
                int end=begin+this.pageSize;
                if(end>this.recordCount)end=recordCount;
                for(int i=begin;i<end;i++)preRefresh(i);
                if(begin==0)
                {
                    CalcLastEndIndex();
                }
            }
        }
    }

    // Re-queues every index in [begin, end] that is inside the current window.
    void refresh(int begin,int end)
    {
        for(int i=begin;i<=end;i++)
        {
            if(i>=this.currFirstIndex)preRefresh(i);
        }
    }
    #endregion

    /// <summary>
    /// Position the grid's contents when the script starts.
    /// </summary>
    void Start ()
    {
        mStarted = true;
        if (moveContainer == null) moveContainer = this.GetComponent<RectTransform> ();
        if (moveContainer!=null)
        {
            Vector3 bg=moveContainer.transform.localPosition;
            if(direction== Direction.Down)
                beginPosition=new Vector3(bg.x,bg.y,bg.z);
            else
                beginPosition=new Vector3(bg.x,-bg.y,bg.z);
            foward = false;
        }
        CalcBounds();
        scroll(0,true);
    }

    // Derives the head index from the container offset each frame and scrolls the window.
    void Update()
    {
        if(moveContainer!=null && data!=null)
        {
            currPosition=moveContainer.localPosition;
            dtmove=beginPosition-currPosition;
            if(columns==0 )
            {
                headIndex=(int)(dtmove.x/rect.width);
                scroll(headIndex,false);
            }else if(columns>0)
            {
                int cloumnIndex=(int)(dtmove.y/rect.height);
                headIndex= (int) Mathf.Ceil((float)(Mathf.Abs(cloumnIndex)*this.columns)/(float)this.columns)*columns;//
                if(headIndex!=lastHeadIndex)
                {
                    scroll(headIndex,false);
                }
            }
            lastHeadIndex=headIndex;
        }
    }

    /// <summary>
    /// Is it time to reposition? Do so now.
    /// </summary>
    void LateUpdate ()
    {
        if(this.repositionIntList.Count>0)renderItem();
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Text;
using System.Reflection;
using System.Collections.Generic;

using Debug = System.Diagnostics.Debug;

using global::Internal.Metadata.NativeFormat;

namespace Internal.Runtime.TypeLoader
{
    /// <summary>
    /// Extension methods that format native-format metadata handles into
    /// human-readable full names (primarily for diagnostics). All GetFullName
    /// overloads return null when a required name record is missing.
    /// </summary>
    internal static class MetadataNameExtentions
    {
        /// <summary>
        /// Dispatches on the handle type and formats the full name of the
        /// referenced entity. Returns null for unsupported handle types.
        /// </summary>
        public static string GetFullName(this Handle handle, MetadataReader reader)
        {
            switch (handle.HandleType)
            {
                case HandleType.TypeDefinition:
                    return handle.ToTypeDefinitionHandle(reader).GetFullName(reader);
                case HandleType.TypeReference:
                    return handle.ToTypeReferenceHandle(reader).GetFullName(reader);
                case HandleType.NamespaceDefinition:
                    return handle.ToNamespaceDefinitionHandle(reader).GetFullName(reader);
                case HandleType.NamespaceReference:
                    return handle.ToNamespaceReferenceHandle(reader).GetFullName(reader);
                case HandleType.TypeSpecification:
                    return handle.ToTypeSpecificationHandle(reader).GetFullName(reader);
                case HandleType.TypeInstantiationSignature:
                    return handle.ToTypeInstantiationSignatureHandle(reader).GetFullName(reader);
                case HandleType.ArraySignature:
                    return handle.ToArraySignatureHandle(reader).GetFullName(reader);
                case HandleType.SZArraySignature:
                    return handle.ToSZArraySignatureHandle(reader).GetFullName(reader);
                case HandleType.PointerSignature:
                    return handle.ToPointerSignatureHandle(reader).GetFullName(reader);
                case HandleType.ByReferenceSignature:
                    return handle.ToByReferenceSignatureHandle(reader).GetFullName(reader);
                case HandleType.ScopeDefinition:
                    return handle.ToScopeDefinitionHandle(reader).GetFullName(reader);
                case HandleType.ScopeReference:
                    return handle.ToScopeReferenceHandle(reader).GetFullName(reader);
            }
            return null;
        }

        /// <summary>Formats a byref type as "ElementType&amp;".</summary>
        public static string GetFullName(this ByReferenceSignatureHandle handle, MetadataReader reader)
        {
            var result = handle.GetByReferenceSignature(reader).Type.GetFullName(reader);
            if (result == null)
                return null;
            return result + "&";
        }

        /// <summary>Formats a pointer type as "ElementType*".</summary>
        public static string GetFullName(this PointerSignatureHandle handle, MetadataReader reader)
        {
            var result = handle.GetPointerSignature(reader).Type.GetFullName(reader);
            if (result == null)
                return null;
            return result + "*";
        }

        /// <summary>Formats a multi-dimensional array as "ElementType[,,…]" (rank-1 commas).</summary>
        public static string GetFullName(this ArraySignatureHandle handle, MetadataReader reader)
        {
            ArraySignature array = handle.GetArraySignature(reader);
            var result = array.ElementType.GetFullName(reader);
            if (result == null)
                return null;
            return result + "[" + (new string(',', array.Rank - 1)) + "]";
        }

        /// <summary>Formats a single-dimensional zero-based array as "ElementType[]".</summary>
        public static string GetFullName(this SZArraySignatureHandle handle, MetadataReader reader)
        {
            var result = handle.GetSZArraySignature(reader).ElementType.GetFullName(reader);
            if (result == null)
                return null;
            return result + "[]";
        }

        /// <summary>Formats the type named by a type specification's signature.</summary>
        public static string GetFullName(this TypeSpecificationHandle typeSpecHandle, MetadataReader reader)
        {
            var typeSpec = typeSpecHandle.GetTypeSpecification(reader);
            if (typeSpec.Signature.IsNull(reader))
                return null;
            return typeSpec.Signature.GetFullName(reader);
        }

        /// <summary>
        /// Formats an instantiated generic as "GenericType&lt;Arg1,Arg2,…&gt;". If any
        /// argument cannot be named, falls back to the bare generic type name.
        /// </summary>
        public static string GetFullName(this TypeInstantiationSignatureHandle typeInstSigHandle, MetadataReader reader)
        {
            var typeInstSig = typeInstSigHandle.GetTypeInstantiationSignature(reader);
            if (typeInstSig.GenericType.IsNull(reader))
                return null;

            var name = typeInstSig.GenericType.GetFullName(reader);
            if (name == null)
                return null;

            var index = 0;
            string argsString = null;
            foreach (var argHandle in typeInstSig.GenericTypeArguments)
            {
                if (index > 0)
                    argsString += ",";
                var argName = argHandle.GetFullName(reader);
                if (argName == null)
                    return name;
                argsString += argName;
                index++;
            }
            return name + "<" + argsString + ">";
        }

        /// <summary>
        /// Decomposes a type definition into its simple name, the full name of its
        /// enclosing type (null when not nested), and its namespace full name
        /// (null when there is no namespace record).
        /// </summary>
        public static void GetFullName(this TypeDefinitionHandle typeDefHandle, MetadataReader reader, out string name, out string enclosing, out string nspace)
        {
            var typeDef = typeDefHandle.GetTypeDefinition(reader);
            Debug.Assert(!typeDef.Name.IsNull(reader));
            name = typeDef.Name.GetConstantStringValue(reader).Value;
            enclosing = typeDef.EnclosingType.IsNull(reader) ? null : typeDef.EnclosingType.GetFullName(reader);
            nspace = typeDef.NamespaceDefinition.IsNull(reader) ? null : typeDef.NamespaceDefinition.GetFullName(reader);
        }

        /// <summary>Formats "Enclosing+Name" for nested types, else "Namespace.Name", else the bare name.</summary>
        public static string GetFullName(this TypeDefinitionHandle typeDefHandle, MetadataReader reader)
        {
            string name;
            string enclosing;
            string nspace;
            typeDefHandle.GetFullName(reader, out name, out enclosing, out nspace);

            if (enclosing != null && name != null)
                return enclosing + "+" + name;
            else if (nspace != null && name != null)
                return nspace + "." + name;
            return name;
        }

        /// <summary>
        /// Walks outward from a type definition (through enclosing types, then the
        /// namespace chain) until the containing scope (module) is reached, and
        /// returns that scope's full name. Returns "?" for unexpected handle types.
        /// </summary>
        public static string GetContainingModuleName(this TypeDefinitionHandle typeDefHandle, MetadataReader reader)
        {
            var typeDef = typeDefHandle.GetTypeDefinition(reader);
            Handle currentHandle = !typeDef.EnclosingType.IsNull(reader) ? (Handle)typeDef.EnclosingType : (Handle)typeDef.NamespaceDefinition;
            Debug.Assert(!currentHandle.IsNull(reader));

            while (!currentHandle.IsNull(reader))
            {
                switch (currentHandle.HandleType)
                {
                    case HandleType.TypeDefinition:
                        typeDef = currentHandle.ToTypeDefinitionHandle(reader).GetTypeDefinition(reader);
                        currentHandle = !typeDef.EnclosingType.IsNull(reader) ? (Handle)typeDef.EnclosingType : (Handle)typeDef.NamespaceDefinition;
                        break;

                    case HandleType.NamespaceDefinition:
                        currentHandle = currentHandle.ToNamespaceDefinitionHandle(reader).GetNamespaceDefinition(reader).ParentScopeOrNamespace;
                        break;

                    case HandleType.ScopeDefinition:
                        return currentHandle.GetFullName(reader);

                    default:
                        return "?";
                }
            }
            return "?";
        }

        /// <summary>Formats a namespace definition as "Parent.Child…". Null when unnamed (root).</summary>
        public static string GetFullName(this NamespaceDefinitionHandle namespaceHandle, MetadataReader reader)
        {
            var nspace = namespaceHandle.GetNamespaceDefinition(reader);
            if (nspace.Name.IsNull(reader))
                return null;
            var name = nspace.Name.GetConstantStringValue(reader).Value;
            var containingNamespace = nspace.ParentScopeOrNamespace.IsNull(reader) ? null : nspace.ParentScopeOrNamespace.GetFullName(reader);
            if (containingNamespace != null)
                return containingNamespace + "." + name;
            return name;
        }

        /// <summary>
        /// Decomposes a type reference into its simple name, enclosing-type full
        /// name (when the parent is a TypeReference), and namespace full name
        /// (when the parent is a NamespaceReference).
        /// </summary>
        public static void GetFullName(this TypeReferenceHandle typeRefHandle, MetadataReader reader, out string name, out string enclosing, out string nspace)
        {
            var typeRef = typeRefHandle.GetTypeReference(reader);
            Debug.Assert(!typeRef.TypeName.IsNull(reader));
            name = typeRef.TypeName.GetConstantStringValue(reader).Value;
            enclosing = typeRef.ParentNamespaceOrType.HandleType == HandleType.TypeReference ? typeRef.ParentNamespaceOrType.GetFullName(reader) : null;
            nspace = typeRef.ParentNamespaceOrType.HandleType == HandleType.NamespaceReference ? typeRef.ParentNamespaceOrType.GetFullName(reader) : null;
        }

        /// <summary>Formats "Enclosing+Name" for nested references, else "Namespace.Name", else the bare name.</summary>
        public static string GetFullName(this TypeReferenceHandle typeRefHandle, MetadataReader reader)
        {
            string name;
            string enclosing;
            string nspace;
            typeRefHandle.GetFullName(reader, out name, out enclosing, out nspace);

            if (enclosing != null && name != null)
                return enclosing + "+" + name;
            else if (nspace != null && name != null)
                return nspace + "." + name;
            return name;
        }

        /// <summary>
        /// Walks outward from a type reference (through enclosing type references,
        /// then the namespace-reference chain) until the containing scope reference
        /// is reached, and returns that scope's full name. Returns "?" for
        /// unexpected handle types.
        /// </summary>
        public static string GetContainingModuleName(this TypeReferenceHandle typeRefHandle, MetadataReader reader)
        {
            var typeRef = typeRefHandle.GetTypeReference(reader);
            Handle currentHandle = typeRef.ParentNamespaceOrType;
            Debug.Assert(!currentHandle.IsNull(reader));

            while (!currentHandle.IsNull(reader))
            {
                switch (currentHandle.HandleType)
                {
                    case HandleType.TypeReference:
                        // BUGFIX: resolve the *current* handle's parent. The previous
                        // code re-read typeRef.ParentNamespaceOrType, which never
                        // changes after the first iteration, so any parent chain going
                        // through a TypeReference or NamespaceReference looped forever.
                        currentHandle = currentHandle.ToTypeReferenceHandle(reader).GetTypeReference(reader).ParentNamespaceOrType;
                        break;

                    case HandleType.NamespaceReference:
                        // Mirror of the TypeDefinition overload: step to the parent
                        // scope-or-namespace of the current namespace reference.
                        currentHandle = currentHandle.ToNamespaceReferenceHandle(reader).GetNamespaceReference(reader).ParentScopeOrNamespace;
                        break;

                    case HandleType.ScopeReference:
                        return currentHandle.GetFullName(reader);

                    default:
                        return "?";
                }
            }
            return "?";
        }

        /// <summary>Formats a namespace reference as "Parent.Child…". Null when unnamed (root).</summary>
        public static string GetFullName(this NamespaceReferenceHandle namespaceHandle, MetadataReader reader)
        {
            var nspace = namespaceHandle.GetNamespaceReference(reader);
            if (nspace.Name.IsNull(reader))
                return null;
            var name = nspace.Name.GetConstantStringValue(reader).Value;
            var containingNamespace = nspace.ParentScopeOrNamespace.IsNull(reader) ? null : nspace.ParentScopeOrNamespace.GetFullName(reader);
            if (containingNamespace != null)
                return containingNamespace + "." + name;
            return name;
        }

        /// <summary>
        /// Formats a scope definition as a display assembly name (name, culture,
        /// 4-part version, public key token).
        /// </summary>
        public static string GetFullName(this ScopeDefinitionHandle scopeDefHandle, MetadataReader reader)
        {
            var scopeDef = scopeDefHandle.GetScopeDefinition(reader);
            Debug.Assert(!scopeDef.Name.IsNull(reader));

            var assemblyName = new AssemblyName
            {
                Name = scopeDef.Name.GetConstantStringValue(reader).Value,
                CultureName = scopeDef.Culture.IsNull(reader) ? null : scopeDef.Culture.GetConstantStringValue(reader).Value,
                Version = new Version(scopeDef.MajorVersion, scopeDef.MinorVersion, scopeDef.BuildNumber, scopeDef.RevisionNumber)
            };

            if (scopeDef.PublicKey.Count > 0)
            {
                var pkt = new byte[scopeDef.PublicKey.Count];
                int index = 0;
                foreach (var b in scopeDef.PublicKey)
                    pkt[index++] = b;
                assemblyName.SetPublicKeyToken(pkt);
            }
            else
            {
                assemblyName.SetPublicKeyToken(Array.Empty<byte>());
            }

            return assemblyName.FullName;
        }

        /// <summary>
        /// Formats a scope reference as a display assembly name (name, culture,
        /// 4-part version, public key or token).
        /// </summary>
        public static string GetFullName(this ScopeReferenceHandle scopeRefHandle, MetadataReader reader)
        {
            var scopeRef = scopeRefHandle.GetScopeReference(reader);
            Debug.Assert(!scopeRef.Name.IsNull(reader));

            var assemblyName = new AssemblyName
            {
                Name = scopeRef.Name.GetConstantStringValue(reader).Value,
                CultureName = scopeRef.Culture.IsNull(reader) ? null : scopeRef.Culture.GetConstantStringValue(reader).Value,
                Version = new Version(scopeRef.MajorVersion, scopeRef.MinorVersion, scopeRef.BuildNumber, scopeRef.RevisionNumber)
            };

            if (scopeRef.PublicKeyOrToken.Count > 0)
            {
                var pkt = new byte[scopeRef.PublicKeyOrToken.Count];
                int index = 0;
                foreach (var b in scopeRef.PublicKeyOrToken)
                    pkt[index++] = b;
                assemblyName.SetPublicKeyToken(pkt);
            }
            else
            {
                assemblyName.SetPublicKeyToken(Array.Empty<byte>());
            }

            return assemblyName.FullName;
        }
    }
}
// InflaterDynHeader.cs // Copyright (C) 2001 Mike Krueger // // This file was translated from java, it was part of the GNU Classpath // Copyright (C) 2001 Free Software Foundation, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
// 2010-08-13 Sky Sanders - Modified for Silverlight 3/4 and Windows Phone 7

using System;

using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

namespace ICSharpCode.SharpZipLib.Zip.Compression
{
	/// <summary>
	/// Decoder for the dynamic Huffman header of a compressed block.
	/// It reads the literal/length and distance code counts, the bit-length
	/// code lengths, and then the run-length-encoded literal/distance code
	/// lengths from which the two Huffman trees can be built.
	/// Decoding is resumable: <see cref="Decode"/> returns false when no more
	/// input bits are available and may be called again once more data has
	/// been supplied; the <c>mode</c> field records where to resume.
	/// </summary>
	class InflaterDynHeader
	{
		#region Constants

		// Values of the 'mode' field: the current step of Decode.
		const int LNUM = 0;      // reading the number of literal/length codes
		const int DNUM = 1;      // reading the number of distance codes
		const int BLNUM = 2;     // reading the number of bit-length codes
		const int BLLENS = 3;    // reading the bit-length code lengths
		const int LENS = 4;      // reading literal/distance code lengths
		const int REPS = 5;      // expanding a repeat symbol (16, 17 or 18)

		// Minimum repeat count and number of extra bits for the three repeat
		// symbols (index 0 -> symbol 16, 1 -> symbol 17, 2 -> symbol 18).
		static readonly int[] repMin = { 3, 3, 11 };
		static readonly int[] repBits = { 2, 3, 7 };

		// Order in which the bit-length code lengths are transmitted.
		static readonly int[] BL_ORDER =
		{ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };

		#endregion

		#region Constructors

		public InflaterDynHeader()
		{
		}

		#endregion

		/// <summary>
		/// Continues decoding the header from the given bit stream.
		/// </summary>
		/// <param name="input">The bit stream to read from.</param>
		/// <returns>
		/// true when the complete header has been decoded; false when the input
		/// is exhausted and decoding must be resumed later with more data.
		/// </returns>
		/// <exception cref="SharpZipBaseException">
		/// Thrown when a repeat symbol is invalid: "repeat previous length"
		/// with no previous length, or a repeat running past the total count.
		/// </exception>
		public bool Decode(StreamManipulator input)
		{
		decode_loop:
			for (;;) {
				switch (mode) {
					case LNUM:
						// 5 bits: number of literal/length codes minus 257.
						lnum = input.PeekBits(5);
						if (lnum < 0) {
							return false;      // not enough input bits yet
						}
						lnum += 257;
						input.DropBits(5);
						mode = DNUM;
						goto case DNUM; // fall through

					case DNUM:
						// 5 bits: number of distance codes minus 1.
						dnum = input.PeekBits(5);
						if (dnum < 0) {
							return false;
						}
						dnum++;
						input.DropBits(5);
						// Total number of code lengths still to be read.
						num = lnum+dnum;
						litdistLens = new byte[num];
						mode = BLNUM;
						goto case BLNUM; // fall through

					case BLNUM:
						// 4 bits: number of bit-length codes minus 4.
						blnum = input.PeekBits(4);
						if (blnum < 0) {
							return false;
						}
						blnum += 4;
						input.DropBits(4);
						blLens = new byte[19];
						ptr = 0;
						mode = BLLENS;
						goto case BLLENS; // fall through

					case BLLENS:
						// 3 bits each, stored in BL_ORDER permutation;
						// entries past blnum stay zero (unused codes).
						while (ptr < blnum) {
							int len = input.PeekBits(3);
							if (len < 0) {
								return false;
							}
							input.DropBits(3);
							blLens[BL_ORDER[ptr]] = (byte) len;
							ptr++;
						}
						blTree = new InflaterHuffmanTree(blLens);
						blLens = null;     // no longer needed once the tree is built
						ptr = 0;
						mode = LENS;
						goto case LENS; // fall through

					case LENS:
						{
							int symbol;
							while (((symbol = blTree.GetSymbol(input)) & ~15) == 0) {
								/* Normal case: symbol in [0..15] is a literal code length. */
								litdistLens[ptr++] = lastLen = (byte)symbol;

								if (ptr == num) {
									/* Finished */
									return true;
								}
							}

							/* need more input ? */
							if (symbol < 0) {
								return false;
							}

							/* otherwise repeat code */
							if (symbol >= 17) {
								/* repeat zero */
								lastLen = 0;
							} else {
								if (ptr == 0) {
									// Symbol 16 repeats the previous length,
									// but no length has been emitted yet.
									throw new SharpZipBaseException();
								}
							}
							repSymbol = symbol-16;
						}
						mode = REPS;
						goto case REPS; // fall through

					case REPS:
						{
							// Read the repeat count: repMin extra bits wide,
							// offset by the symbol's minimum repeat count.
							int bits = repBits[repSymbol];
							int count = input.PeekBits(bits);
							if (count < 0) {
								return false;
							}
							input.DropBits(bits);
							count += repMin[repSymbol];

							if (ptr + count > num) {
								// Repeat would overflow the length table.
								throw new SharpZipBaseException();
							}
							while (count-- > 0) {
								litdistLens[ptr++] = lastLen;
							}

							if (ptr == num) {
								/* Finished */
								return true;
							}
						}
						mode = LENS;
						goto decode_loop;
				}
			}
		}

		/// <summary>
		/// Builds the literal/length Huffman tree from the first lnum decoded
		/// code lengths. Only valid after <see cref="Decode"/> returned true.
		/// </summary>
		public InflaterHuffmanTree BuildLitLenTree()
		{
			byte[] litlenLens = new byte[lnum];
			Array.Copy(litdistLens, 0, litlenLens, 0, lnum);
			return new InflaterHuffmanTree(litlenLens);
		}

		/// <summary>
		/// Builds the distance Huffman tree from the dnum code lengths that
		/// follow the literal/length ones. Only valid after <see cref="Decode"/>
		/// returned true.
		/// </summary>
		public InflaterHuffmanTree BuildDistTree()
		{
			byte[] distLens = new byte[dnum];
			Array.Copy(litdistLens, lnum, distLens, 0, dnum);
			return new InflaterHuffmanTree(distLens);
		}

		#region Instance Fields
		byte[] blLens;                 // bit-length code lengths; released once blTree is built
		byte[] litdistLens;            // combined literal/length + distance code lengths
		InflaterHuffmanTree blTree;    // tree used to decode the code lengths themselves

		/// <summary>
		/// The current decode mode
		/// </summary>
		int mode;
		int lnum, dnum, blnum, num;    // literal, distance, bit-length and total code counts
		int repSymbol;                 // pending repeat symbol minus 16 (0..2)
		byte lastLen;                  // last length emitted; repeated by symbol 16
		int ptr;                       // write position into blLens / litdistLens
		#endregion
	}
}
// ---------------------------------------------------------------------------
// Copyright (C) 2006 Microsoft Corporation All Rights Reserved
// ---------------------------------------------------------------------------

#define CODE_ANALYSIS
using System.CodeDom;
using System.ComponentModel;
using System.ComponentModel.Design;
using System.Diagnostics.CodeAnalysis;
using System.Drawing;
using System.Globalization;
using System.Reflection;
using System.Windows.Forms;
using System.Windows.Forms.Design;
using System.Workflow.ComponentModel;
using System.Workflow.ComponentModel.Compiler;
using System.Workflow.ComponentModel.Design;

namespace System.Workflow.Activities.Rules.Design
{
    /// <summary>
    /// Dialog for editing a rule condition expression as text. The entered text
    /// is parsed with the rule <see cref="Parser"/>; parse errors are shown via
    /// an ErrorProvider and prevent closing the dialog with OK.
    /// </summary>
    public partial class RuleConditionDialog : Form
    {
        // Parsed condition currently being edited; its Expression is the dialog result.
        RuleExpressionCondition ruleExpressionCondition = new RuleExpressionCondition();
        private IServiceProvider serviceProvider;      // may be null when constructed without a designer host
        private Parser ruleParser;                     // parser / completion engine bound to the validation context
        private Exception syntaxException;             // last parse error; null when the text parses cleanly
        private bool wasOKed;                          // true once OK was clicked; checked in FormClosing

        /// <summary>
        /// Creates the dialog for an activity, resolving the type provider (and
        /// dialog font) from the activity's designer site when available.
        /// </summary>
        /// <param name="activity">Activity whose rule condition is edited.</param>
        /// <param name="expression">Initial condition expression; may be null.</param>
        /// <exception cref="ArgumentNullException">activity is null.</exception>
        /// <exception cref="InvalidOperationException">
        /// A designer site exists but offers no ITypeProvider service.
        /// </exception>
        public RuleConditionDialog(Activity activity, CodeExpression expression)
        {
            if (activity == null)
                throw (new ArgumentNullException("activity"));

            InitializeComponent();

            ITypeProvider typeProvider;
            serviceProvider = activity.Site;
            if (serviceProvider != null)
            {
                IUIService uisvc = serviceProvider.GetService(typeof(IUIService)) as IUIService;
                if (uisvc != null)
                    this.Font = (Font)uisvc.Styles["DialogFont"];

                typeProvider = (ITypeProvider)serviceProvider.GetService(typeof(ITypeProvider));
                if (typeProvider == null)
                {
                    string message = string.Format(CultureInfo.CurrentCulture, Messages.MissingService, typeof(ITypeProvider).FullName);
                    throw new InvalidOperationException(message);
                }

                // Flush pending designer edits so validation sees current state.
                WorkflowDesignerLoader loader = serviceProvider.GetService(typeof(WorkflowDesignerLoader)) as WorkflowDesignerLoader;
                if (loader != null)
                    loader.Flush();
            }
            else
            {
                // no service provider, so make a TypeProvider that has all loaded Assemblies
                TypeProvider newProvider = new TypeProvider(null);
                foreach (Assembly a in AppDomain.CurrentDomain.GetAssemblies())
                    newProvider.AddAssembly(a);
                typeProvider = newProvider;
            }

            RuleValidation validation = new RuleValidation(activity, typeProvider, false);
            this.ruleParser = new Parser(validation);

            InitializeDialog(expression);
        }

        /// <summary>
        /// Creates the dialog for an activity type with an explicit type provider
        /// (no designer services involved).
        /// </summary>
        /// <param name="activityType">Activity type used to build the validation context.</param>
        /// <param name="typeProvider">Type provider for rule validation.</param>
        /// <param name="expression">Initial condition expression; may be null.</param>
        /// <exception cref="ArgumentNullException">activityType is null.</exception>
        public RuleConditionDialog(Type activityType, ITypeProvider typeProvider, CodeExpression expression)
        {
            if (activityType == null)
                throw (new ArgumentNullException("activityType"));

            InitializeComponent();

            RuleValidation validation = new RuleValidation(activityType, typeProvider);
            this.ruleParser = new Parser(validation);

            InitializeDialog(expression);
        }

        // Wires up help and auto-completion, seeds the text box from the initial
        // expression and shows any initial parse error in the error provider.
        private void InitializeDialog(CodeExpression expression)
        {
            HelpRequested += new HelpEventHandler(OnHelpRequested);
            HelpButtonClicked += new CancelEventHandler(OnHelpClicked);

            if (expression != null)
            {
                // Clone so edits do not mutate the caller's expression tree.
                this.ruleExpressionCondition.Expression = RuleExpressionWalker.Clone(expression);
                // ToString emits "\n"; the text box needs Windows line endings.
                this.conditionTextBox.Text = ruleExpressionCondition.ToString().Replace("\n", "\r\n");
            }
            else
                this.conditionTextBox.Text = string.Empty;

            this.conditionTextBox.PopulateAutoCompleteList += new EventHandler<AutoCompletionEventArgs>(ConditionTextBox_PopulateAutoCompleteList);
            this.conditionTextBox.PopulateToolTipList += new EventHandler<AutoCompletionEventArgs>(ConditionTextBox_PopulateAutoCompleteList);

            try
            {
                // Parse once up front so a pre-existing invalid condition is flagged immediately.
                this.ruleParser.ParseCondition(this.conditionTextBox.Text);
                conditionErrorProvider.SetError(this.conditionTextBox, string.Empty);
            }
            catch (RuleSyntaxException ex)
            {
                conditionErrorProvider.SetError(this.conditionTextBox, ex.Message);
            }
        }

        /// <summary>
        /// Gets the condition expression as last successfully parsed.
        /// </summary>
        public CodeExpression Expression
        {
            get
            {
                return this.ruleExpressionCondition.Expression;
            }
        }

        // Supplies completion candidates (and tool tips) for the current prefix.
        private void ConditionTextBox_PopulateAutoCompleteList(object sender, AutoCompletionEventArgs e)
        {
            e.AutoCompleteValues = this.ruleParser.GetExpressionCompletions(e.Prefix);
        }

        // Re-parses the text on validation; on success normalizes the text to the
        // parsed form, on failure records the exception for FormClosing to report.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
        private void conditionTextBox_Validating(object sender, CancelEventArgs e)
        {
            try
            {
                this.ruleExpressionCondition =
                    (RuleExpressionCondition)this.ruleParser.ParseCondition(this.conditionTextBox.Text);
                if (!string.IsNullOrEmpty(this.conditionTextBox.Text))
                    this.conditionTextBox.Text = this.ruleExpressionCondition.ToString().Replace("\n", "\r\n");
                conditionErrorProvider.SetError(this.conditionTextBox, string.Empty);
                syntaxException = null;
            }
            catch (Exception ex)
            {
                // Deliberately broad: any parse/validation failure is surfaced in
                // the error provider instead of crashing the dialog (see CA1031
                // suppression above).
                syntaxException = ex;
                conditionErrorProvider.SetError(this.conditionTextBox, ex.Message);
            }
        }

        private void OnHelpClicked(object sender, CancelEventArgs e)
        {
            // Cancel so the framework does not also open its own help.
            e.Cancel = true;
            ShowHelp();
        }

        private void OnHelpRequested(object sender, HelpEventArgs e)
        {
            ShowHelp();
        }

        // Shows keyword help via IHelpService if available, otherwise reports
        // through IUIService that no help exists.
        private void ShowHelp()
        {
            if (serviceProvider != null)
            {
                IHelpService helpService = serviceProvider.GetService(typeof(IHelpService)) as IHelpService;
                if (helpService != null)
                {
                    helpService.ShowHelpFromKeyword(this.GetType().FullName + ".UI");
                }
                else
                {
                    IUIService uisvc = serviceProvider.GetService(typeof(IUIService)) as IUIService;
                    if (uisvc != null)
                        uisvc.ShowError(Messages.NoHelp);
                }
            }
            else
            {
                IUIService uisvc = (IUIService)GetService(typeof(IUIService));
                if (uisvc != null)
                    uisvc.ShowError(Messages.NoHelp);
            }
        }

        private void okButton_Click(object sender, EventArgs e)
        {
            wasOKed = true;
        }

        // Blocks closing via OK while the text has a parse error; moves the caret
        // to the error position when it is a RuleSyntaxException.
        private void RuleConditionDialog_FormClosing(object sender, FormClosingEventArgs e)
        {
            if (wasOKed && syntaxException != null)
            {
                e.Cancel = true;
                DesignerHelpers.DisplayError(Messages.Error_ConditionParser + "\n" + syntaxException.Message, this.Text, this.serviceProvider);
                if (syntaxException is RuleSyntaxException)
                    this.conditionTextBox.SelectionStart = ((RuleSyntaxException)syntaxException).Position;
                this.conditionTextBox.SelectionLength = 0;
                this.conditionTextBox.ScrollToCaret();
                wasOKed = false;
            }
        }
    }
}
//-------------------------------------------------------------------------------
// <copyright file="StateMachine.cs" company="Appccelerate">
//   Copyright (c) 2008-2019 Appccelerate
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//       http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
// </copyright>
//-------------------------------------------------------------------------------

namespace Appccelerate.StateMachine.Machine
{
    using System;
    using Events;
    using Infrastructure;
    using States;

    /// <summary>
    /// Base implementation of a state machine.
    /// </summary>
    /// <typeparam name="TState">The type of the state.</typeparam>
    /// <typeparam name="TEvent">The type of the event.</typeparam>
    public class StateMachine<TState, TEvent> : INotifier<TState, TEvent>
        where TState : IComparable
        where TEvent : IComparable
    {
        // Creates transition contexts and state machine initializers.
        private readonly IFactory<TState, TEvent> factory;

        // Executes transitions against state definitions.
        private readonly IStateLogic<TState, TEvent> stateLogic;

        /// <summary>
        /// Initializes a new instance of the <see cref="StateMachine{TState,TEvent}"/> class.
        /// </summary>
        /// <param name="factory">The factory used to create internal instances.</param>
        /// <param name="stateLogic">The state logic used to handle state changes.</param>
        public StateMachine(IFactory<TState, TEvent> factory, IStateLogic<TState, TEvent> stateLogic)
        {
            this.factory = factory;
            this.stateLogic = stateLogic;
        }

        /// <summary>
        /// Occurs when no transition could be executed.
        /// </summary>
        public event EventHandler<TransitionEventArgs<TState, TEvent>> TransitionDeclined;

        /// <summary>
        /// Occurs when an exception was thrown inside a transition of the state machine.
        /// </summary>
        public event EventHandler<TransitionExceptionEventArgs<TState, TEvent>> TransitionExceptionThrown;

        /// <summary>
        /// Occurs when a transition begins.
        /// </summary>
        public event EventHandler<TransitionEventArgs<TState, TEvent>> TransitionBegin;

        /// <summary>
        /// Occurs when a transition completed.
        /// </summary>
        public event EventHandler<TransitionCompletedEventArgs<TState, TEvent>> TransitionCompleted;

        // Records the new current state in the container and notifies all
        // registered extensions about the switch.
        private static void SwitchStateTo(
            IStateDefinition<TState, TEvent> newState,
            StateContainer<TState, TEvent> stateContainer,
            IStateDefinitionDictionary<TState, TEvent> stateDefinitions)
        {
            var oldState = stateContainer
                .CurrentStateId
                .Map(x => stateDefinitions[x])
                .ExtractOr(null);      // null before any state has been entered

            stateContainer.CurrentStateId = Initializable<TState>.Initialized(newState.Id);

            stateContainer.ForEach(extension => extension.SwitchedState(oldState, newState));
        }

        /// <summary>
        /// Enters the initial state as specified with <paramref name="initialState"/>.
        /// </summary>
        /// <param name="stateContainer">Contains all mutable state of the state machine.</param>
        /// <param name="stateDefinitions">The definitions for all states of this state machine.</param>
        /// <param name="initialState">The initial state the state machine should enter.</param>
        public void EnterInitialState(
            StateContainer<TState, TEvent> stateContainer,
            IStateDefinitionDictionary<TState, TEvent> stateDefinitions,
            TState initialState)
        {
            stateContainer.ForEach(extension => extension.EnteringInitialState(initialState));

            // Entering the initial state is not a regular transition:
            // there is no source state and no triggering event.
            var context = this.factory.CreateTransitionContext(null, new Missable<TEvent>(), Missing.Value, this);
            this.EnterInitialState(context, stateContainer, stateDefinitions, initialState);

            stateContainer.ForEach(extension => extension.EnteredInitialState(initialState, context));
        }

        /// <summary>
        /// Fires the specified event.
        /// </summary>
        /// <param name="eventId">The event.</param>
        /// <param name="stateContainer">Contains all mutable state of the state machine.</param>
        /// <param name="stateDefinitions">The definitions for all states of this state machine.</param>
        public void Fire(
            TEvent eventId,
            StateContainer<TState, TEvent> stateContainer,
            IStateDefinitionDictionary<TState, TEvent> stateDefinitions)
        {
            this.Fire(eventId, Missing.Value, stateContainer, stateDefinitions);
        }

        /// <summary>
        /// Fires the specified event.
        /// </summary>
        /// <param name="eventId">The event.</param>
        /// <param name="eventArgument">The event argument.</param>
        /// <param name="stateContainer">Contains all mutable state of the state machine.</param>
        /// <param name="stateDefinitions">The definitions for all states of this state machine.</param>
        /// <exception cref="InvalidOperationException">
        /// The state machine has not yet entered its initial state.
        /// </exception>
        public void Fire(
            TEvent eventId,
            object eventArgument,
            StateContainer<TState, TEvent> stateContainer,
            IStateDefinitionDictionary<TState, TEvent> stateDefinitions)
        {
            CheckThatStateMachineHasEnteredInitialState(stateContainer);

            // Extensions may replace the event id and argument before firing.
            stateContainer.ForEach(extension => extension.FiringEvent(ref eventId, ref eventArgument));

            var currentState = stateContainer
                .CurrentStateId
                .Map(x => stateDefinitions[x])
                .ExtractOrThrow();

            var context = this.factory.CreateTransitionContext(currentState, new Missable<TEvent>(eventId), eventArgument, this);

            var result = this.stateLogic.Fire(currentState, context, stateContainer, stateDefinitions);

            if (!result.Fired)
            {
                // No transition handled the event.
                this.OnTransitionDeclined(context);
                return;
            }

            var newState = stateDefinitions[result.NewState];

            SwitchStateTo(newState, stateContainer, stateDefinitions);

            stateContainer.ForEach(extension => extension.FiredEvent(context));

            this.OnTransitionCompleted(context, stateContainer.CurrentStateId.ExtractOrThrow());
        }

        /// <summary>
        /// Fires the <see cref="TransitionExceptionThrown"/> event. When no handler
        /// is registered, the exception is rethrown wrapped in a
        /// <see cref="StateMachineException"/> instead.
        /// </summary>
        /// <param name="context">The transition context.</param>
        /// <param name="exception">The exception that was thrown.</param>
        public void OnExceptionThrown(ITransitionContext<TState, TEvent> context, Exception exception)
        {
            RethrowExceptionIfNoHandlerRegistered(exception, this.TransitionExceptionThrown);

            this.RaiseEvent(this.TransitionExceptionThrown, new TransitionExceptionEventArgs<TState, TEvent>(context, exception), context, false);
        }

        /// <summary>
        /// Fires the <see cref="TransitionBegin"/> event.
        /// </summary>
        /// <param name="transitionContext">The transition context.</param>
        public void OnTransitionBegin(ITransitionContext<TState, TEvent> transitionContext)
        {
            this.RaiseEvent(this.TransitionBegin, new TransitionEventArgs<TState, TEvent>(transitionContext), transitionContext, true);
        }

        // Ensures exceptions are never silently lost: without a registered
        // TransitionExceptionThrown handler the exception is wrapped and rethrown.
        // ReSharper disable once UnusedParameter.Local
        private static void RethrowExceptionIfNoHandlerRegistered<T>(Exception exception, EventHandler<T> exceptionHandler)
            where T : EventArgs
        {
            if (exceptionHandler == null)
            {
                throw new StateMachineException("No exception listener is registered. Exception: ", exception);
            }
        }

        /// <summary>
        /// Fires the <see cref="TransitionDeclined"/> event.
        /// </summary>
        /// <param name="transitionContext">The transition event context.</param>
        private void OnTransitionDeclined(ITransitionContext<TState, TEvent> transitionContext)
        {
            this.RaiseEvent(this.TransitionDeclined, new TransitionEventArgs<TState, TEvent>(transitionContext), transitionContext, true);
        }

        // Fires the TransitionCompleted event with the state that is current
        // after the transition.
        private void OnTransitionCompleted(ITransitionContext<TState, TEvent> transitionContext, TState currentStateId)
        {
            this.RaiseEvent(
                this.TransitionCompleted,
                new TransitionCompletedEventArgs<TState, TEvent>(
                    currentStateId,
                    transitionContext),
                transitionContext,
                true);
        }

        // Enters the initial state via an initializer and switches to the state the
        // initializer reports as actually entered (which may differ from initialStateId).
        private void EnterInitialState(
            ITransitionContext<TState, TEvent> context,
            StateContainer<TState, TEvent> stateContainer,
            IStateDefinitionDictionary<TState, TEvent> stateDefinitions,
            TState initialStateId)
        {
            var initialState = stateDefinitions[initialStateId];
            var initializer = this.factory.CreateStateMachineInitializer(initialState, context);
            var newStateId = initializer.EnterInitialState(this.stateLogic, stateContainer, stateDefinitions);
            var newStateDefinition = stateDefinitions[newStateId];
            SwitchStateTo(newStateDefinition, stateContainer, stateDefinitions);
        }

        // Invokes the given event handler, if any. When the handler itself throws:
        // with raiseEventOnException the exception is routed to OnExceptionThrown
        // (raising TransitionExceptionThrown); otherwise it propagates to the caller.
        private void RaiseEvent<T>(EventHandler<T> eventHandler, T arguments, ITransitionContext<TState, TEvent> context, bool raiseEventOnException)
            where T : EventArgs
        {
            try
            {
                if (eventHandler == null)
                {
                    return;
                }

                eventHandler(this, arguments);
            }
            catch (Exception e)
            {
                if (!raiseEventOnException)
                {
                    throw;
                }

                ((INotifier<TState, TEvent>)this).OnExceptionThrown(context, e);
            }
        }

        // Guards Fire against being called before EnterInitialState.
        private static void CheckThatStateMachineHasEnteredInitialState(StateContainer<TState, TEvent> stateContainer)
        {
            if (!stateContainer.CurrentStateId.IsInitialized)
            {
                throw new InvalidOperationException(ExceptionMessages.StateMachineHasNotYetEnteredInitialState);
            }
        }
    }
}
// // Message Definitions for ERAM // // Header // Message Definitions // // Values are 32 bit values laid out as follows: // // 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1 // 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 // +---+-+-+-----------------------+-------------------------------+ // |Sev|C|R| Facility | Code | // +---+-+-+-----------------------+-------------------------------+ // // where // // Sev - is the severity code // // 00 - Success // 01 - Informational // 10 - Warning // 11 - Error // // C - is the Customer code flag // // R - is a reserved bit // // Facility - is the facility code // // Code - is the facility's status code // // // Define the facility codes // // // Define the severity codes // // // MessageId: ERAM_ERROR_FUNCTIONERROR // // MessageText: // // Function %2 returned a failure.%0 // #define ERAM_ERROR_FUNCTIONERROR ((NTSTATUS)0xC0FF0001L) // // MessageId: ERAM_ERROR_MAXMEM_ALREADY_FREE // // MessageText: // // OS-Unmanaged Memory is already released.%0 // #define ERAM_ERROR_MAXMEM_ALREADY_FREE ((NTSTATUS)0xC0FF0002L) // // MessageId: ERAM_ERROR_MAXMEM_MAP_FAILED // // MessageText: // // Failed to map OS-Unmanaged Memory.%0 // #define ERAM_ERROR_MAXMEM_MAP_FAILED ((NTSTATUS)0xC0FF0003L) // // MessageId: ERAM_ERROR_MAXMEM_REPORT_USAGE_FAILED // // MessageText: // // Failed to notify usage of OS-Unmanaged Memory.%0 // #define ERAM_ERROR_MAXMEM_REPORT_USAGE_FAILED ((NTSTATUS)0xC0FF0004L) // // MessageId: ERAM_ERROR_MAXMEM_REPORT_USAGE_CONFLICT // // MessageText: // // OS-Unmanaged Memory is being partially used by another device. 
Please shrink the disk size.%0 // #define ERAM_ERROR_MAXMEM_REPORT_USAGE_CONFLICT ((NTSTATUS)0xC0FF0005L) // // MessageId: ERAM_ERROR_CREATE_THREAD // // MessageText: // // Failed to create a system thread.%0 // #define ERAM_ERROR_CREATE_THREAD ((NTSTATUS)0xC0FF0006L) // // MessageId: ERAM_ERROR_MAXMEM_NO_MEMORY // // MessageText: // // OS-Unmanaged Memory was not detected.%0 // #define ERAM_ERROR_MAXMEM_NO_MEMORY ((NTSTATUS)0xC0FF0007L) // // MessageId: ERAM_ERROR_MAXMEM_NOT_DETECTED // // MessageText: // // OS-Unmanaged Memory was not found.%0 // #define ERAM_ERROR_MAXMEM_NOT_DETECTED ((NTSTATUS)0xC0FF0008L) // // MessageId: ERAM_ERROR_OPTION_WORK_ALLOC_FAILED // // MessageText: // // Failed to allocate the OS startup option(s).%0 // #define ERAM_ERROR_OPTION_WORK_ALLOC_FAILED ((NTSTATUS)0xC0FF0009L) // // MessageId: ERAM_ERROR_OPTION_GET_FAILED // // MessageText: // // Failed to get the OS startup option(s).%0 // #define ERAM_ERROR_OPTION_GET_FAILED ((NTSTATUS)0xC0FF000AL) // // MessageId: ERAM_ERROR_MAXMEM_NO_OPTION // // MessageText: // // There was no %bMAXMEM=n%b in startup options.%0 // #define ERAM_ERROR_MAXMEM_NO_OPTION ((NTSTATUS)0xC0FF000BL) // // MessageId: ERAM_ERROR_MAXMEM_CAPITAL_FAILED // // MessageText: // // Failed to capitalize the startup option(s).%0 // #define ERAM_ERROR_MAXMEM_CAPITAL_FAILED ((NTSTATUS)0xC0FF000CL) // // MessageId: ERAM_ERROR_MAXMEM_ATOU // // MessageText: // // Failed to numerize MAXMEM.%0 // #define ERAM_ERROR_MAXMEM_ATOU ((NTSTATUS)0xC0FF000DL) // // MessageId: ERAM_ERROR_MAXMEM_TOO_SMALL // // MessageText: // // Please specify 17 MB or more for MAXMEM.%0 // #define ERAM_ERROR_MAXMEM_TOO_SMALL ((NTSTATUS)0xC0FF000EL) // // MessageId: ERAM_ERROR_MAXMEM_TOO_BIG // // MessageText: // // Please specify a value smaller than 4095 MB for MAXMEM.%0 // #define ERAM_ERROR_MAXMEM_TOO_BIG ((NTSTATUS)0xC0FF000FL) // // MessageId: ERAM_ERROR_MAXMEM_INVALID // // MessageText: // // The startup option(s) has no MAXMEM=n or has an invalid 
MAXMEM value.%0 // #define ERAM_ERROR_MAXMEM_INVALID ((NTSTATUS)0xC0FF0010L) // // MessageId: ERAM_ERROR_EXTSTART_TOO_BIG // // MessageText: // // Please specify a value smaller than 4095 MB for ExtStart.%0 // #define ERAM_ERROR_EXTSTART_TOO_BIG ((NTSTATUS)0xC0FF0011L) // // MessageId: ERAM_ERROR_WORK_ALLOC_FAILED // // MessageText: // // Failed to allocate the work-area memory.%0 // #define ERAM_ERROR_WORK_ALLOC_FAILED ((NTSTATUS)0xC0FF0012L) // // MessageId: ERAM_ERROR_REG_KEY_APPEND_FAILED // // MessageText: // // Failed to combine the registry key strings.%0 // #define ERAM_ERROR_REG_KEY_APPEND_FAILED ((NTSTATUS)0xC0FF0013L) // // MessageId: ERAM_ERROR_CREATE_DEVICE_FAILED // // MessageText: // // Failed to create a device.%0 // #define ERAM_ERROR_CREATE_DEVICE_FAILED ((NTSTATUS)0xC0FF0014L) // // MessageId: ERAM_ERROR_DISK_SIZE_TOO_SMALL // // MessageText: // // The assigned disk size is too small.%0 // #define ERAM_ERROR_DISK_SIZE_TOO_SMALL ((NTSTATUS)0xC0FF0015L) // // MessageId: ERAM_ERROR_DEVICE_NAME_ALLOC_FAILED // // MessageText: // // Failed to allocate the device name area.%0 // #define ERAM_ERROR_DEVICE_NAME_ALLOC_FAILED ((NTSTATUS)0xC0FF0016L) // // MessageId: ERAM_ERROR_CREATE_SYMBOLIC_LINK_FAILED // // MessageText: // // Failed to create a symbolic link.%0 // #define ERAM_ERROR_CREATE_SYMBOLIC_LINK_FAILED ((NTSTATUS)0xC0FF0017L) // // MessageId: ERAM_ERROR_DISK_ALLOC_FAILED // // MessageText: // // Failed to allocate the memory to be used for the disk.%0 // #define ERAM_ERROR_DISK_ALLOC_FAILED ((NTSTATUS)0xC0FF0018L) // // MessageId: ERAM_ERROR_DISK_SIZE_IS_0 // // MessageText: // // The size of the disk cannot be 0.%0 // #define ERAM_ERROR_DISK_SIZE_IS_0 ((NTSTATUS)0xC0FF0019L) // // MessageId: ERAM_ERROR_TRANSLATE_ADDRESS_FAILED // // MessageText: // // Failed to convert to OS-Managed Address.%0 // #define ERAM_ERROR_TRANSLATE_ADDRESS_FAILED ((NTSTATUS)0xC0FF001AL) // // MessageId: ERAM_ERROR_PORT_MAPPED // // MessageText: // // No support of I/O 
space mapping.%0 // #define ERAM_ERROR_PORT_MAPPED ((NTSTATUS)0xC0FF001BL) // // MessageId: ERAM_ERROR_CREATE_EXT_FILE // // MessageText: // // Failed to create the external file.%0 // #define ERAM_ERROR_CREATE_EXT_FILE ((NTSTATUS)0xC0FF001CL) // // MessageId: ERAM_ERROR_SET_INFO_EXT_FILE // // MessageText: // // Failed to adjust the size of the external file.%0 // #define ERAM_ERROR_SET_INFO_EXT_FILE ((NTSTATUS)0xC0FF001DL) // // MessageId: ERAM_ERROR_CREATE_EXT_FILE_SECTION // // MessageText: // // Failed to create a section of the external file.%0 // #define ERAM_ERROR_CREATE_EXT_FILE_SECTION ((NTSTATUS)0xC0FF001EL) // // MessageId: ERAM_ERROR_MAP_EXT_FILE // // MessageText: // // Failed to map the external file.%0 // #define ERAM_ERROR_MAP_EXT_FILE ((NTSTATUS)0xC0FF001FL) // // MessageId: ERAM_ERROR_GET_THREAD_OBJECT // // MessageText: // // Failed to get the object of the system thread.%0 // #define ERAM_ERROR_GET_THREAD_OBJECT ((NTSTATUS)0xC0FF0020L) // // MessageId: ERAM_WARN_MAXMEM_DISK_SIZE_FIXED // // MessageText: // // The RAM disk was shrinked because OS-Unmanaged Memory was smaller than expected.%0 // #define ERAM_WARN_MAXMEM_DISK_SIZE_FIXED ((NTSTATUS)0x80FF0006L) // // MessageId: ERAM_INF_MEMORY_SIZE // // MessageText: // // %2KB is able to be allocated.%0 // #define ERAM_INF_MEMORY_SIZE ((NTSTATUS)0x40FF001CL)
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using System.Linq;
using Xunit;
using Moq;
using Microsoft.DotNet.Cli.Utils;
using Microsoft.DotNet.ProjectModel;
using Microsoft.DotNet.Tools.Test.Utilities;
using Microsoft.Extensions.PlatformAbstractions;
using System.Threading;
using FluentAssertions;
using NuGet.Frameworks;

namespace Microsoft.DotNet.Cli.Utils.Tests
{
    /// <summary>
    /// Tests for ProjectDependenciesCommandResolver: argument validation,
    /// command lookup among project dependencies, argument escaping and
    /// deps-file path resolution.
    /// </summary>
    public class GivenAProjectDependenciesCommandResolver
    {
        // Test project containing a direct dependency that provides "dotnet-hello".
        private static readonly string s_liveProjectDirectory =
            Path.Combine(AppContext.BaseDirectory, "TestAssets/TestProjects/AppWithDirectDependency");

        [Fact]
        public void It_returns_null_when_CommandName_is_null()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = null,
                CommandArguments = new[] { "" },
                ProjectDirectory = "/some/directory",
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            resolver.Resolve(arguments).Should().BeNull();
        }

        [Fact]
        public void It_returns_null_when_ProjectDirectory_is_null()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "command",
                CommandArguments = new[] { "" },
                ProjectDirectory = null,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            resolver.Resolve(arguments).Should().BeNull();
        }

        [Fact]
        public void It_returns_null_when_Framework_is_null()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "command",
                CommandArguments = new[] { "" },
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = null
            };

            resolver.Resolve(arguments).Should().BeNull();
        }

        [Fact]
        public void It_returns_null_when_Configuration_is_null()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "command",
                CommandArguments = new[] { "" },
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = null,
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            resolver.Resolve(arguments).Should().BeNull();
        }

        [Fact]
        public void It_returns_null_when_CommandName_does_not_exist_in_ProjectDependencies()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "nonexistent-command",
                CommandArguments = null,
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            resolver.Resolve(arguments).Should().BeNull();
        }

        [Fact]
        public void It_returns_a_CommandSpec_with_CoreHost_as_FileName_and_CommandName_in_Args_when_CommandName_exists_in_ProjectDependencies()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "dotnet-hello",
                CommandArguments = null,
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            var spec = resolver.Resolve(arguments);

            spec.Should().NotBeNull();
            Path.GetFileNameWithoutExtension(spec.Path).Should().Be("corehost");
            spec.Args.Should().Contain(arguments.CommandName);
        }

        [Fact]
        public void It_escapes_CommandArguments_when_returning_a_CommandSpec()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "dotnet-hello",
                CommandArguments = new[] { "arg with space" },
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            var spec = resolver.Resolve(arguments);

            spec.Should().NotBeNull();
            spec.Args.Should().Contain("\"arg with space\"");
        }

        [Fact]
        public void It_passes_depsfile_arg_to_corehost_when_returning_a_commandspec()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "dotnet-hello",
                CommandArguments = null,
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            var spec = resolver.Resolve(arguments);

            spec.Should().NotBeNull();
            spec.Args.Should().Contain("--depsfile");
        }

        [Fact]
        public void It_sets_depsfile_based_on_output_path_when_returning_a_commandspec()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "dotnet-hello",
                CommandArguments = null,
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15,
                OutputPath = AppContext.BaseDirectory
            };

            var projectContext = ProjectContext.Create(
                s_liveProjectDirectory,
                FrameworkConstants.CommonFrameworks.NetStandardApp15,
                PlatformServices.Default.Runtime.GetAllCandidateRuntimeIdentifiers());

            // Expected deps.json location when only an output path is supplied.
            var expectedDepsJson = projectContext
                .GetOutputPaths("Debug", outputPath: AppContext.BaseDirectory)
                .RuntimeFiles
                .DepsJson;

            var spec = resolver.Resolve(arguments);

            spec.Should().NotBeNull();
            spec.Args.Should().Contain($"--depsfile {expectedDepsJson}");
        }

        [Fact]
        public void It_sets_depsfile_based_on_build_base_path_when_returning_a_commandspec()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "dotnet-hello",
                CommandArguments = null,
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15,
                BuildBasePath = AppContext.BaseDirectory
            };

            var projectContext = ProjectContext.Create(
                s_liveProjectDirectory,
                FrameworkConstants.CommonFrameworks.NetStandardApp15,
                PlatformServices.Default.Runtime.GetAllCandidateRuntimeIdentifiers());

            // Expected deps.json location when a build base path is supplied.
            var expectedDepsJson = projectContext
                .GetOutputPaths("Debug", AppContext.BaseDirectory)
                .RuntimeFiles
                .DepsJson;

            var spec = resolver.Resolve(arguments);

            spec.Should().NotBeNull();
            spec.Args.Should().Contain($"--depsfile {expectedDepsJson}");
        }

        [Fact]
        public void It_returns_a_CommandSpec_with_CommandName_in_Args_when_returning_a_CommandSpec_and_CommandArguments_are_null()
        {
            var resolver = SetupProjectDependenciesCommandResolver();

            var arguments = new CommandResolverArguments
            {
                CommandName = "dotnet-hello",
                CommandArguments = null,
                ProjectDirectory = s_liveProjectDirectory,
                Configuration = "Debug",
                Framework = FrameworkConstants.CommonFrameworks.NetStandardApp15
            };

            var spec = resolver.Resolve(arguments);

            spec.Should().NotBeNull();
            spec.Args.Should().Contain("dotnet-hello");
        }

        // Builds the resolver under test, defaulting any dependency that the
        // caller does not supply to its production implementation.
        private ProjectDependenciesCommandResolver SetupProjectDependenciesCommandResolver(
            IEnvironmentProvider environment = null,
            IPackagedCommandSpecFactory packagedCommandSpecFactory = null)
        {
            if (environment == null)
            {
                environment = new EnvironmentProvider();
            }

            if (packagedCommandSpecFactory == null)
            {
                packagedCommandSpecFactory = new PackagedCommandSpecFactory();
            }

            return new ProjectDependenciesCommandResolver(environment, packagedCommandSpecFactory);
        }
    }
}
// <copyright file="GPGSUtil.cs" company="Google Inc.">
// Copyright (C) 2014 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>

namespace GooglePlayGames
{
    using System;
    using System.IO;
    using UnityEditor;
    using UnityEngine;

    /// <summary>
    /// Editor-side utilities for the Google Play Games plugin: template and file
    /// I/O, settings validation, Android SDK discovery, and generation of
    /// AndroidManifest.xml / GameInfo.cs from their templates.
    /// </summary>
    public static class GPGSUtil
    {
        /// <summary>Template placeholder replaced with the Nearby Connections service id.</summary>
        public const string SERVICEIDPLACEHOLDER = "__NEARBY_SERVICE_ID__";

        // NOTE(review): "Nearbd" looks like a typo, but this key is persisted in
        // project settings — renaming it would orphan values already saved by
        // users, so it is kept as-is.
        /// <summary>Project-settings key for the Nearby Connections service id.</summary>
        public const string SERVICEIDKEY = "App.NearbdServiceId";

        /// <summary>Template placeholder replaced with the Play Games application id.</summary>
        public const string APPIDPLACEHOLDER = "___APP_ID___";

        /// <summary>Project-settings key for the Play Games application id.</summary>
        public const string APPIDKEY = "proj.AppId";

        /// <summary>Template placeholder replaced with the Android OAuth client id.</summary>
        public const string ANDROIDCLIENTIDPLACEHOLDER = "__ANDROID_CLIENTID__";

        /// <summary>Project-settings key for the Android OAuth client id.</summary>
        public const string ANDROIDCLIENTIDKEY = "and.ClientId";

        /// <summary>Template placeholder replaced with the iOS OAuth client id.</summary>
        public const string IOSCLIENTIDPLACEHOLDER = "__IOS_CLIENTID__";

        /// <summary>Project-settings key for the iOS OAuth client id.</summary>
        public const string IOSCLIENTIDKEY = "ios.ClientId";

        /// <summary>Template placeholder replaced with the iOS bundle id.</summary>
        public const string IOSBUNDLEIDPLACEHOLDER = "__BUNDLEID__";

        /// <summary>Project-settings key for the iOS bundle id.</summary>
        public const string IOSBUNDLEIDKEY = "ios.BundleId";

        // Where the generated GameInfo.cs is written, and the template it is built from.
        private const string GameInfoPath = "Assets/GooglePlayGames/GameInfo.cs";
        private const string GameInfoTemplatePath = "Assets/GooglePlayGames/Editor/GameInfo.template";

        /// <summary>
        /// Converts forward slashes in <paramref name="path"/> to the platform's
        /// directory separator so hard-coded asset paths work on Windows too.
        /// </summary>
        public static string SlashesToPlatformSeparator(string path)
        {
            return path.Replace("/", System.IO.Path.DirectorySeparatorChar.ToString());
        }

        /// <summary>
        /// Reads the whole file at <paramref name="filePath"/> (slashes converted
        /// to the platform separator). Shows an alert and returns null when the
        /// file does not exist.
        /// </summary>
        public static string ReadFile(string filePath)
        {
            filePath = SlashesToPlatformSeparator(filePath);
            if (!File.Exists(filePath))
            {
                Alert("Plugin error: file not found: " + filePath);
                return null;
            }

            // BUGFIX: dispose the reader even if ReadToEnd throws (was Close()
            // without a using/finally).
            using (StreamReader sr = new StreamReader(filePath))
            {
                return sr.ReadToEnd();
            }
        }

        /// <summary>Reads an editor template by name from Assets/GooglePlayGames/Editor/&lt;name&gt;.txt.</summary>
        public static string ReadEditorTemplate(string name)
        {
            return ReadFile(SlashesToPlatformSeparator("Assets/GooglePlayGames/Editor/" + name + ".txt"));
        }

        /// <summary>Reads the whole file at <paramref name="path"/> (slash-converted).</summary>
        public static string ReadFully(string path)
        {
            return ReadFile(SlashesToPlatformSeparator(path));
        }

        /// <summary>Writes <paramref name="body"/> to <paramref name="file"/>, overwriting any existing content.</summary>
        public static void WriteFile(string file, string body)
        {
            file = SlashesToPlatformSeparator(file);
            using (var wr = new StreamWriter(file, false))
            {
                wr.Write(body);
            }
        }

        /// <summary>
        /// Checks that a Nearby service id is at least 3 characters of letters,
        /// digits, or dots.
        /// </summary>
        public static bool LooksLikeValidServiceId(string s)
        {
            if (s.Length < 3)
            {
                return false;
            }

            foreach (char c in s)
            {
                if (!char.IsLetterOrDigit(c) && c != '.')
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>Checks that an app id is at least 5 characters and all digits.</summary>
        public static bool LooksLikeValidAppId(string s)
        {
            if (s.Length < 5)
            {
                return false;
            }

            foreach (char c in s)
            {
                if (c < '0' || c > '9')
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>Checks that an OAuth client id has the expected Google suffix.</summary>
        public static bool LooksLikeValidClientId(string s)
        {
            return s.EndsWith(".googleusercontent.com");
        }

        /// <summary>Sanity-checks a bundle id (anything longer than 3 characters passes).</summary>
        public static bool LooksLikeValidBundleId(string s)
        {
            return s.Length > 3;
        }

        /// <summary>Checks that a package name has no spaces and at least two dot-separated parts.</summary>
        public static bool LooksLikeValidPackageName(string s)
        {
            return !s.Contains(" ") && s.Split(new char[] { '.' }).Length > 1;
        }

        /// <summary>Shows an error alert dialog with the given message.</summary>
        public static void Alert(string s)
        {
            Alert(GPGSStrings.Error, s);
        }

        /// <summary>Shows an alert dialog with the given title and message.</summary>
        public static void Alert(string title, string s)
        {
            EditorUtility.DisplayDialog(title, s, GPGSStrings.Ok);
        }

        /// <summary>
        /// Returns the Android SDK root from Unity's editor prefs, with any
        /// trailing slash/backslash stripped. May be null/empty if unset.
        /// </summary>
        public static string GetAndroidSdkPath()
        {
            string sdkPath = EditorPrefs.GetString("AndroidSdkRoot");
            if (sdkPath != null && (sdkPath.EndsWith("/") || sdkPath.EndsWith("\\")))
            {
                sdkPath = sdkPath.Substring(0, sdkPath.Length - 1);
            }

            return sdkPath;
        }

        /// <summary>Returns true when an Android SDK path is configured and exists on disk.</summary>
        public static bool HasAndroidSdk()
        {
            string sdkPath = GetAndroidSdkPath();
            return sdkPath != null && sdkPath.Trim() != string.Empty && System.IO.Directory.Exists(sdkPath);
        }

        /// <summary>
        /// Copies the Google Play Services library project and the Android v4
        /// support jar from the Android SDK into Assets/Plugins/Android,
        /// alerting the user if either cannot be found.
        /// </summary>
        public static void CopySupportLibs()
        {
            string sdkPath = GetAndroidSdkPath();
            string supportJarPath = sdkPath + GPGSUtil.SlashesToPlatformSeparator(
                "/extras/android/support/v4/android-support-v4.jar");
            string supportJarDest =
                GPGSUtil.SlashesToPlatformSeparator("Assets/Plugins/Android/libs/android-support-v4.jar");
            string libProjPath = sdkPath + GPGSUtil.SlashesToPlatformSeparator(
                "/extras/google/google_play_services/libproject/google-play-services_lib");
            string libProjAM = libProjPath + GPGSUtil.SlashesToPlatformSeparator("/AndroidManifest.xml");
            string libProjDestDir = GPGSUtil.SlashesToPlatformSeparator(
                "Assets/Plugins/Android/google-play-services_lib");

            // check that the Google Play Services lib project is there
            if (!System.IO.Directory.Exists(libProjPath) || !System.IO.File.Exists(libProjAM))
            {
                Debug.LogError("Google Play Services lib project not found at: " + libProjPath);
                EditorUtility.DisplayDialog(
                    GPGSStrings.AndroidSetup.LibProjNotFound,
                    GPGSStrings.AndroidSetup.LibProjNotFoundBlurb,
                    GPGSStrings.Ok);
                return;
            }

            // clear out the destination library project
            GPGSUtil.DeleteDirIfExists(libProjDestDir);

            // Copy Google Play Services library
            FileUtil.CopyFileOrDirectory(libProjPath, libProjDestDir);

            if (!System.IO.File.Exists(supportJarPath))
            {
                // check for the new location
                supportJarPath = sdkPath + GPGSUtil.SlashesToPlatformSeparator(
                    "/extras/android/support/v7/appcompat/libs/android-support-v4.jar");
                if (!System.IO.File.Exists(supportJarPath))
                {
                    // BUGFIX: only report failure after both locations have been
                    // tried; previously the error was logged even when the jar
                    // was found at the fallback location.
                    Debug.LogError("Android support library v4 not found at: " + supportJarPath);
                    EditorUtility.DisplayDialog(
                        GPGSStrings.AndroidSetup.SupportJarNotFound,
                        GPGSStrings.AndroidSetup.SupportJarNotFoundBlurb,
                        GPGSStrings.Ok);
                    return;
                }
            }

            // create needed directories
            GPGSUtil.EnsureDirExists("Assets/Plugins");
            GPGSUtil.EnsureDirExists("Assets/Plugins/Android");

            // Clear out any stale version of the support jar.
            File.Delete(supportJarDest);

            // Copy Android Support Library
            FileUtil.CopyFileOrDirectory(supportJarPath, supportJarDest);
        }

        /// <summary>
        /// Generates Assets/Plugins/Android/MainLibProj/AndroidManifest.xml from
        /// its template, substituting the configured app id and Nearby service id,
        /// then regenerates GameInfo.cs.
        /// </summary>
        public static void GenerateAndroidManifest()
        {
            string destFilename = GPGSUtil.SlashesToPlatformSeparator(
                "Assets/Plugins/Android/MainLibProj/AndroidManifest.xml");

            // Generate AndroidManifest.xml
            string appId = GPGSProjectSettings.Instance.Get(APPIDKEY, string.Empty);
            string nearbyServiceId = GPGSProjectSettings.Instance.Get(SERVICEIDKEY, string.Empty);
            string manifestBody = GPGSUtil.ReadEditorTemplate("template-AndroidManifest");
            manifestBody = manifestBody.Replace(APPIDPLACEHOLDER, appId);
            manifestBody = manifestBody.Replace(SERVICEIDPLACEHOLDER, nearbyServiceId);
            GPGSUtil.WriteFile(destFilename, manifestBody);
            GPGSUtil.UpdateGameInfo();
        }

        /// <summary>
        /// Regenerates GameInfo.cs from its template, substituting every
        /// configured setting; placeholders for unset values are left intact.
        /// </summary>
        public static void UpdateGameInfo()
        {
            string fileBody = GPGSUtil.ReadFully(GameInfoTemplatePath);

            var appId = GPGSProjectSettings.Instance.Get(APPIDKEY, null);
            if (appId != null)
            {
                fileBody = fileBody.Replace(APPIDPLACEHOLDER, appId);
            }

            var nearbyServiceId = GPGSProjectSettings.Instance.Get(SERVICEIDKEY, null);
            if (nearbyServiceId != null)
            {
                // BUGFIX: previously substituted appId here, which wrote the app
                // id into the nearby-service-id slot of GameInfo.cs.
                fileBody = fileBody.Replace(SERVICEIDPLACEHOLDER, nearbyServiceId);
            }

            var iosClientId = GPGSProjectSettings.Instance.Get(IOSCLIENTIDKEY, null);
            if (iosClientId != null)
            {
                fileBody = fileBody.Replace(IOSCLIENTIDPLACEHOLDER, iosClientId);
            }

            var andClientId = GPGSProjectSettings.Instance.Get(ANDROIDCLIENTIDKEY, null);
            if (andClientId != null)
            {
                fileBody = fileBody.Replace(ANDROIDCLIENTIDPLACEHOLDER, andClientId);
            }

            var bundleId = GPGSProjectSettings.Instance.Get(IOSBUNDLEIDKEY, null);
            if (bundleId != null)
            {
                fileBody = fileBody.Replace(IOSBUNDLEIDPLACEHOLDER, bundleId);
            }

            GPGSUtil.WriteFile(GameInfoPath, fileBody);
        }

        /// <summary>Creates the directory (slash-converted) if it does not already exist.</summary>
        public static void EnsureDirExists(string dir)
        {
            dir = dir.Replace("/", System.IO.Path.DirectorySeparatorChar.ToString());
            if (!System.IO.Directory.Exists(dir))
            {
                System.IO.Directory.CreateDirectory(dir);
            }
        }

        /// <summary>Recursively deletes the directory if it exists.</summary>
        public static void DeleteDirIfExists(string dir)
        {
            if (System.IO.Directory.Exists(dir))
            {
                System.IO.Directory.Delete(dir, true);
            }
        }
    }
}
/* Generated SBE (Simple Binary Encoding) message codec */
// NOTE(review): this file is normally emitted by the SBE code generator; two
// generator-output defects are patched by hand below (see BUGFIX comments).

#pragma warning disable 1591 // disable warning on missing comments
using System;
using Adaptive.SimpleBinaryEncoding;

namespace Adaptive.SimpleBinaryEncoding.PerfTests.Bench.SBE.FIX
{
    /// <summary>
    /// Flyweight codec for the MarketDataIncrementalRefreshTrades message
    /// (templateId 2): an 11-byte fixed root block followed by the repeating
    /// MDIncGrp group (34-byte entries preceded by a 3-byte group header).
    /// </summary>
    public sealed partial class MarketDataIncrementalRefreshTrades
    {
        public const ushort BlockLength = (ushort)11;
        public const ushort TemplateId = (ushort)2;
        public const ushort SchemaId = (ushort)2;
        public const ushort Schema_Version = (ushort)0;

        // NOTE(review): "SematicType" typo originates in the generator; the
        // identifier is public API, so it is kept for source compatibility.
        public const string SematicType = "X";

        private readonly MarketDataIncrementalRefreshTrades _parentMessage;
        private DirectBuffer _buffer;
        private int _offset;
        private int _limit;
        private int _actingBlockLength;
        private int _actingVersion;

        /// <summary>Buffer offset this flyweight is currently wrapped at.</summary>
        public int Offset { get { return _offset; } }

        public MarketDataIncrementalRefreshTrades()
        {
            _parentMessage = this;
        }

        /// <summary>Wraps <paramref name="buffer"/> for encoding a new message at <paramref name="offset"/>.</summary>
        public void WrapForEncode(DirectBuffer buffer, int offset)
        {
            _buffer = buffer;
            _offset = offset;
            _actingBlockLength = BlockLength;
            _actingVersion = Schema_Version;
            Limit = offset + _actingBlockLength;
        }

        /// <summary>
        /// Wraps <paramref name="buffer"/> for decoding a message that was written
        /// with the given acting block length and schema version.
        /// </summary>
        public void WrapForDecode(DirectBuffer buffer, int offset, int actingBlockLength, int actingVersion)
        {
            _buffer = buffer;
            _offset = offset;
            _actingBlockLength = actingBlockLength;
            _actingVersion = actingVersion;
            Limit = offset + _actingBlockLength;
        }

        /// <summary>Number of bytes spanned so far, from the wrap offset to the current limit.</summary>
        public int Size
        {
            get { return _limit - _offset; }
        }

        /// <summary>
        /// Current encode/decode limit: the index one past the highest byte
        /// accessed so far. Advanced as groups/fields are encoded or decoded.
        /// </summary>
        public int Limit
        {
            get { return _limit; }
            set
            {
                // BUGFIX: validate the limit being assigned. The generated code
                // called CheckLimit(_limit) — the stale current value — so an
                // out-of-range new limit was never caught.
                _buffer.CheckLimit(value);
                _limit = value;
            }
        }

        public const int TransactTimeId = 60;

        public static string TransactTimeMetaAttribute(MetaAttribute metaAttribute)
        {
            switch (metaAttribute)
            {
                case MetaAttribute.Epoch: return "unix";
                // BUGFIX: was "nanossecond"; every sibling field reports "nanosecond".
                case MetaAttribute.TimeUnit: return "nanosecond";
                case MetaAttribute.SemanticType: return "";
            }

            return "";
        }

        public const ulong TransactTimeNullValue = 0x8000000000000000UL;
        public const ulong TransactTimeMinValue = 0x0UL;
        public const ulong TransactTimeMaxValue = 0x7fffffffffffffffUL;

        /// <summary>uint64 at root-block offset 0.</summary>
        public ulong TransactTime
        {
            get { return _buffer.Uint64GetLittleEndian(_offset + 0); }
            set { _buffer.Uint64PutLittleEndian(_offset + 0, value); }
        }

        public const int EventTimeDeltaId = 37704;

        public static string EventTimeDeltaMetaAttribute(MetaAttribute metaAttribute)
        {
            switch (metaAttribute)
            {
                case MetaAttribute.Epoch: return "unix";
                case MetaAttribute.TimeUnit: return "nanosecond";
                case MetaAttribute.SemanticType: return "";
            }

            return "";
        }

        public const ushort EventTimeDeltaNullValue = (ushort)65535;
        public const ushort EventTimeDeltaMinValue = (ushort)0;
        public const ushort EventTimeDeltaMaxValue = (ushort)65534;

        /// <summary>uint16 at root-block offset 8.</summary>
        public ushort EventTimeDelta
        {
            get { return _buffer.Uint16GetLittleEndian(_offset + 8); }
            set { _buffer.Uint16PutLittleEndian(_offset + 8, value); }
        }

        public const int MatchEventIndicatorId = 5799;

        public static string MatchEventIndicatorMetaAttribute(MetaAttribute metaAttribute)
        {
            switch (metaAttribute)
            {
                case MetaAttribute.Epoch: return "unix";
                case MetaAttribute.TimeUnit: return "nanosecond";
                case MetaAttribute.SemanticType: return "";
            }

            return "";
        }

        /// <summary>char enum at root-block offset 10.</summary>
        public MatchEventIndicator MatchEventIndicator
        {
            get { return (MatchEventIndicator)_buffer.CharGet(_offset + 10); }
            set { _buffer.CharPut(_offset + 10, (byte)value); }
        }

        private readonly MdIncGrpGroup _mdIncGrp = new MdIncGrpGroup();

        public const long MdIncGrpId = 268;

        /// <summary>Wraps the repeating group for decoding, positioned after the root block.</summary>
        public MdIncGrpGroup MdIncGrp
        {
            get
            {
                _mdIncGrp.WrapForDecode(_parentMessage, _buffer, _actingVersion);
                return _mdIncGrp;
            }
        }

        /// <summary>Begins encoding the repeating group with <paramref name="count"/> entries.</summary>
        public MdIncGrpGroup MdIncGrpCount(int count)
        {
            _mdIncGrp.WrapForEncode(_parentMessage, _buffer, count);
            return _mdIncGrp;
        }

        /// <summary>
        /// Flyweight for one entry of the MDIncGrp repeating group. Iterate with
        /// <see cref="HasNext"/>/<see cref="Next"/>; each call advances the
        /// parent message's limit by the entry block length.
        /// </summary>
        public sealed partial class MdIncGrpGroup
        {
            private readonly GroupSizeEncoding _dimensions = new GroupSizeEncoding();
            private MarketDataIncrementalRefreshTrades _parentMessage;
            private DirectBuffer _buffer;
            private int _blockLength;
            private int _actingVersion;
            private int _count;
            private int _index;
            private int _offset;

            public void WrapForDecode(MarketDataIncrementalRefreshTrades parentMessage, DirectBuffer buffer, int actingVersion)
            {
                _parentMessage = parentMessage;
                _buffer = buffer;
                // Read the group header (count + block length) at the current limit.
                _dimensions.Wrap(buffer, parentMessage.Limit, actingVersion);
                _count = _dimensions.NumInGroup;
                _blockLength = _dimensions.BlockLength;
                _actingVersion = actingVersion;
                _index = -1;
                _parentMessage.Limit = parentMessage.Limit + SbeHeaderSize;
            }

            public void WrapForEncode(MarketDataIncrementalRefreshTrades parentMessage, DirectBuffer buffer, int count)
            {
                _parentMessage = parentMessage;
                _buffer = buffer;
                // NOTE(review): _actingVersion is not assigned on the encode path
                // (generator artifact); it defaults to 0, which equals
                // Schema_Version for this schema.
                _dimensions.Wrap(buffer, parentMessage.Limit, _actingVersion);
                _dimensions.NumInGroup = (byte)count;
                _dimensions.BlockLength = (ushort)34;
                _index = -1;
                _count = count;
                _blockLength = 34;
                parentMessage.Limit = parentMessage.Limit + SbeHeaderSize;
            }

            public const int SbeBlockLength = 34;
            public const int SbeHeaderSize = 3;

            public int ActingBlockLength { get { return _blockLength; } }

            public int Count { get { return _count; } }

            public bool HasNext
            {
                get { return (_index + 1) < _count; }
            }

            /// <summary>Positions the flyweight over the next entry and advances the parent limit.</summary>
            public MdIncGrpGroup Next()
            {
                if (_index + 1 >= _count)
                {
                    throw new InvalidOperationException();
                }

                _offset = _parentMessage.Limit;
                _parentMessage.Limit = _offset + _blockLength;
                ++_index;

                return this;
            }

            public const int TradeIdId = 1003;

            public static string TradeIdMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            public const ulong TradeIdNullValue = 0x8000000000000000UL;
            public const ulong TradeIdMinValue = 0x0UL;
            public const ulong TradeIdMaxValue = 0x7fffffffffffffffUL;

            /// <summary>uint64 at entry offset 0.</summary>
            public ulong TradeId
            {
                get { return _buffer.Uint64GetLittleEndian(_offset + 0); }
                set { _buffer.Uint64PutLittleEndian(_offset + 0, value); }
            }

            public const int SecurityIdId = 48;

            public static string SecurityIdMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            public const ulong SecurityIdNullValue = 0x8000000000000000UL;
            public const ulong SecurityIdMinValue = 0x0UL;
            public const ulong SecurityIdMaxValue = 0x7fffffffffffffffUL;

            /// <summary>uint64 at entry offset 8.</summary>
            public ulong SecurityId
            {
                get { return _buffer.Uint64GetLittleEndian(_offset + 8); }
                set { _buffer.Uint64PutLittleEndian(_offset + 8, value); }
            }

            public const int MdEntryPxId = 270;

            public static string MdEntryPxMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            private readonly Decimal64 _mdEntryPx = new Decimal64();

            /// <summary>Decimal64 composite at entry offset 16.</summary>
            public Decimal64 MdEntryPx
            {
                get
                {
                    _mdEntryPx.Wrap(_buffer, _offset + 16, _actingVersion);
                    return _mdEntryPx;
                }
            }

            public const int MdEntrySizeId = 271;

            public static string MdEntrySizeMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            private readonly IntQty32 _mdEntrySize = new IntQty32();

            /// <summary>IntQty32 composite at entry offset 24.</summary>
            public IntQty32 MdEntrySize
            {
                get
                {
                    _mdEntrySize.Wrap(_buffer, _offset + 24, _actingVersion);
                    return _mdEntrySize;
                }
            }

            public const int NumberOfOrdersId = 346;

            public static string NumberOfOrdersMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            public const ushort NumberOfOrdersNullValue = (ushort)65535;
            public const ushort NumberOfOrdersMinValue = (ushort)0;
            public const ushort NumberOfOrdersMaxValue = (ushort)65534;

            /// <summary>uint16 at entry offset 28.</summary>
            public ushort NumberOfOrders
            {
                get { return _buffer.Uint16GetLittleEndian(_offset + 28); }
                set { _buffer.Uint16PutLittleEndian(_offset + 28, value); }
            }

            public const int MdUpdateActionId = 279;

            public static string MdUpdateActionMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            /// <summary>uint8 enum at entry offset 30.</summary>
            public MDUpdateAction MdUpdateAction
            {
                get { return (MDUpdateAction)_buffer.Uint8Get(_offset + 30); }
                set { _buffer.Uint8Put(_offset + 30, (byte)value); }
            }

            public const int RptSeqId = 83;

            public static string RptSeqMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            public const byte RptSeqNullValue = (byte)255;
            public const byte RptSeqMinValue = (byte)0;
            public const byte RptSeqMaxValue = (byte)254;

            /// <summary>uint8 at entry offset 31.</summary>
            public byte RptSeq
            {
                get { return _buffer.Uint8Get(_offset + 31); }
                set { _buffer.Uint8Put(_offset + 31, value); }
            }

            public const int AggressorSideId = 5797;

            public static string AggressorSideMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            /// <summary>char enum at entry offset 32.</summary>
            public Side AggressorSide
            {
                get { return (Side)_buffer.CharGet(_offset + 32); }
                set { _buffer.CharPut(_offset + 32, (byte)value); }
            }

            public const int MdEntryTypeId = 269;

            public static string MdEntryTypeMetaAttribute(MetaAttribute metaAttribute)
            {
                switch (metaAttribute)
                {
                    case MetaAttribute.Epoch: return "unix";
                    case MetaAttribute.TimeUnit: return "nanosecond";
                    case MetaAttribute.SemanticType: return "";
                }

                return "";
            }

            /// <summary>char enum at entry offset 33.</summary>
            public MDEntryType MdEntryType
            {
                get { return (MDEntryType)_buffer.CharGet(_offset + 33); }
                set { _buffer.CharPut(_offset + 33, (byte)value); }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using AutoMapper.Execution;

namespace AutoMapper
{
    /// <summary>
    /// Context information regarding resolution of a destination value
    /// </summary>
    /// <remarks>
    /// Wraps an inner <see cref="IRuntimeMapper"/> and forwards every mapping
    /// call to it while passing itself along as the active context, so that
    /// per-operation state (options, instance cache, type-depth tracking)
    /// travels through nested maps. Members guarded by CheckDefault() throw on
    /// the mapper's shared default context, which must not accumulate state.
    /// </remarks>
    public class ResolutionContext : IRuntimeMapper
    {
        // Both caches are created lazily on first access (see the properties below).
        private Dictionary<ContextCacheKey, object> _instanceCache;
        private Dictionary<TypePair, int> _typeDepth;
        private readonly IRuntimeMapper _inner;

        public ResolutionContext(IMappingOperationOptions options, IRuntimeMapper mapper)
        {
            Options = options;
            _inner = mapper;
        }

        /// <summary>
        /// Mapping operation options
        /// </summary>
        public IMappingOperationOptions Options { get; }

        /// <summary>
        /// Context items from <see cref="Options"/>
        /// </summary>
        public IDictionary<string, object> Items
        {
            get
            {
                CheckDefault();
                return Options.Items;
            }
        }

        /// <summary>
        /// Current mapper
        /// </summary>
        public IRuntimeMapper Mapper => this;

        public IConfigurationProvider ConfigurationProvider => _inner.ConfigurationProvider;

        Func<Type, object> IMapper.ServiceCtor => _inner.ServiceCtor;

        ResolutionContext IRuntimeMapper.DefaultContext => _inner.DefaultContext;

        /// <summary>
        /// Instance cache for resolving circular references
        /// </summary>
        public Dictionary<ContextCacheKey, object> InstanceCache
        {
            get
            {
                CheckDefault();
                if(_instanceCache != null)
                {
                    return _instanceCache;
                }
                _instanceCache = new Dictionary<ContextCacheKey, object>();
                return _instanceCache;
            }
        }

        /// <summary>
        /// Instance cache for resolving keeping track of depth
        /// </summary>
        private Dictionary<TypePair, int> TypeDepth
        {
            get
            {
                CheckDefault();
                if(_typeDepth != null)
                {
                    return _typeDepth;
                }
                _typeDepth = new Dictionary<TypePair, int>();
                return _typeDepth;
            }
        }

        // Runtime source type is used when available; a null source maps as object.
        TDestination IMapper.Map<TDestination>(object source)
            => (TDestination)_inner.Map(source, null, source?.GetType() ?? typeof(object), typeof(TDestination), this);

        TDestination IRuntimeMapper.Map<TDestination>(object source, Action<IMappingOperationOptions> opts)
            => ((IMapper)this).Map<TDestination>(source, opts);

        // Applies the caller's option mutations to this context's Options before mapping.
        TDestination IMapper.Map<TDestination>(object source, Action<IMappingOperationOptions> opts)
        {
            opts(Options);
            return ((IMapper)this).Map<TDestination>(source);
        }

        TDestination IMapper.Map<TSource, TDestination>(TSource source)
            => _inner.Map(source, default(TDestination), this);

        TDestination IRuntimeMapper.Map<TSource, TDestination>(TSource source, Action<IMappingOperationOptions<TSource, TDestination>> opts)
            => ((IMapper)this).Map<TSource, TDestination>(source, opts);

        // Builds typed options, then runs BeforeMapAction -> inner map -> AfterMapAction.
        // NOTE(review): BeforeMapAction receives the default (pre-map) destination here.
        TDestination IMapper.Map<TSource, TDestination>(TSource source, Action<IMappingOperationOptions<TSource, TDestination>> opts)
        {
            var typedOptions = new MappingOperationOptions<TSource, TDestination>(_inner.ServiceCtor);

            opts(typedOptions);

            var destination = default(TDestination);

            typedOptions.BeforeMapAction(source, destination);

            destination = _inner.Map(source, destination, this);

            typedOptions.AfterMapAction(source, destination);

            return destination;
        }

        TDestination IMapper.Map<TSource, TDestination>(TSource source, TDestination destination)
            => _inner.Map(source, destination, this);

        TDestination IRuntimeMapper.Map<TSource, TDestination>(TSource source, TDestination destination, Action<IMappingOperationOptions<TSource, TDestination>> opts)
            => ((IMapper)this).Map<TSource, TDestination>(source, destination, opts);

        // Same Before/After sequencing as above, but mapping onto an existing destination.
        TDestination IMapper.Map<TSource, TDestination>(TSource source, TDestination destination, Action<IMappingOperationOptions<TSource, TDestination>> opts)
        {
            var typedOptions = new MappingOperationOptions<TSource, TDestination>(_inner.ServiceCtor);

            opts(typedOptions);

            typedOptions.BeforeMapAction(source, destination);

            destination = _inner.Map(source, destination, this);

            typedOptions.AfterMapAction(source, destination);

            return destination;
        }

        object IMapper.Map(object source, Type sourceType, Type destinationType)
            => _inner.Map(source, null, sourceType, destinationType, this);

        object IRuntimeMapper.Map(object source, Type sourceType, Type destinationType, Action<IMappingOperationOptions> opts)
            => ((IMapper)this).Map(source, sourceType, destinationType, opts);

        object IMapper.Map(object source, Type sourceType, Type destinationType, Action<IMappingOperationOptions> opts)
        {
            opts(Options);
            return ((IMapper)this).Map(source, sourceType, destinationType);
        }

        object IMapper.Map(object source, object destination, Type sourceType, Type destinationType)
            => _inner.Map(source, destination, sourceType, destinationType, this);

        object IRuntimeMapper.Map(object source, object destination, Type sourceType, Type destinationType, Action<IMappingOperationOptions> opts)
            => ((IMapper)this).Map(source, destination, sourceType, destinationType, opts);

        object IMapper.Map(object source, object destination, Type sourceType, Type destinationType, Action<IMappingOperationOptions> opts)
        {
            opts(Options);
            return ((IMapper)this).Map(source, destination, sourceType, destinationType);
        }

        // Member-map-aware overloads forward straight to the inner mapper.
        object IRuntimeMapper.Map(object source, object destination, Type sourceType, Type destinationType, ResolutionContext context, IMemberMap memberMap)
            => _inner.Map(source, destination, sourceType, destinationType, context, memberMap);

        TDestination IRuntimeMapper.Map<TSource, TDestination>(TSource source, TDestination destination, ResolutionContext context, IMemberMap memberMap)
            => _inner.Map(source, destination, context, memberMap);

        // LINQ projection is delegated untouched to the inner mapper.
        IQueryable<TDestination> IMapper.ProjectTo<TDestination>(IQueryable source, object parameters, params Expression<Func<TDestination, object>>[] membersToExpand)
            => _inner.ProjectTo(source, parameters, membersToExpand);

        IQueryable<TDestination> IMapper.ProjectTo<TDestination>(IQueryable source, IDictionary<string, object> parameters, params string[] membersToExpand)
            => _inner.ProjectTo<TDestination>(source, parameters, membersToExpand);

        IQueryable IMapper.ProjectTo(IQueryable source, Type destinationType, IDictionary<string, object> parameters, params string[] membersToExpand)
            => _inner.ProjectTo(source, destinationType, parameters, membersToExpand);

        // Returns the cached destination for (source, destinationType), or null when absent.
        internal object GetDestination(object source, Type destinationType)
        {
            InstanceCache.TryGetValue(new ContextCacheKey(source, destinationType), out object destination);
            return destination;
        }

        internal void CacheDestination(object source, Type destinationType, object destination)
        {
            InstanceCache[new ContextCacheKey(source, destinationType)] = destination;
        }

        // NOTE(review): assumes the key already exists (seeded by GetTypeDepth);
        // calling this for an unseen pair would throw KeyNotFoundException.
        internal void IncrementTypeDepth(TypePair types)
        {
            TypeDepth[types]++;
        }

        internal void DecrementTypeDepth(TypePair types)
        {
            TypeDepth[types]--;
        }

        // Seeds the depth at 1 on first sight of the pair, then reports the current depth.
        internal int GetTypeDepth(TypePair types)
        {
            if (!TypeDepth.ContainsKey(types))
                TypeDepth[types] = 1;
            return TypeDepth[types];
        }

        internal void ValidateMap(TypeMap typeMap)
            => ConfigurationProvider.AssertConfigurationIsValid(typeMap);

        // True when this instance IS the mapper's shared default context.
        internal bool IsDefault => this == _inner.DefaultContext;

        internal TDestination Map<TSource, TDestination>(TSource source, TDestination destination, IMemberMap memberMap)
            => _inner.Map(source, destination, this, memberMap);

        internal object Map(object source, object destination, Type sourceType, Type destinationType, IMemberMap memberMap)
            => _inner.Map(source, destination, sourceType, destinationType, this, memberMap);

        // Guard: stateful members (Items, caches) are illegal on the default context.
        private void CheckDefault()
        {
            if (IsDefault)
            {
                throw new InvalidOperationException("You must use a Map overload that takes Action<IMappingOperationOptions>!");
            }
        }
    }

    /// <summary>
    /// Cache key pairing a source instance with a destination type; used by
    /// <see cref="ResolutionContext.InstanceCache"/> for circular-reference resolution.
    /// </summary>
    public struct ContextCacheKey : IEquatable<ContextCacheKey>
    {
        public static bool operator ==(ContextCacheKey left, ContextCacheKey right) => left.Equals(right);
        public static bool operator !=(ContextCacheKey left, ContextCacheKey right) => !left.Equals(right);

        private readonly Type _destinationType;

        public ContextCacheKey(object source, Type destinationType)
        {
            Source = source;
            _destinationType = destinationType;
        }

        public object Source { get; }

        public override int GetHashCode() => HashCodeCombiner.Combine(Source, _destinationType);

        // Source is compared with == (reference equality for non-overloading
        // types), so the cache is keyed per source instance.
        public bool Equals(ContextCacheKey other) =>
            Source == other.Source && _destinationType == other._destinationType;

        public override bool Equals(object other) =>
            other is ContextCacheKey && Equals((ContextCacheKey)other);
    }
}
/*
 * @author Valentin Simonov / http://va.lent.in/
 */

using System.Collections.Generic;
using TouchScript.Utils;
using UnityEngine;

namespace TouchScript.Clusters
{
    /// <summary>
    /// Represents a pool of points separated into two clusters.
    /// </summary>
    public sealed class Clusters
    {
        #region Constants

        /// <summary>The first cluster.</summary>
        public const int CLUSTER1 = 0;

        /// <summary>The second cluster.</summary>
        public const int CLUSTER2 = 1;

        #endregion

        #region Public properties

        /// <summary>Gets the total number of points in clusters represented by this object.</summary>
        public int PointsCount
        {
            get { return points.Count; }
        }

        /// <summary>Gets or sets minimum distance in pixels between clusters to treat them as two separate clusters.</summary>
        /// <value>Minimum distance in pixels which must be between cluster centers to consider them as separate clusters.</value>
        /// <remarks>This value is used to set the limit of how close cluster can be. Sometimes very close points shouldn't be treated as being in separate clusters.</remarks>
        public float MinPointsDistance
        {
            get { return minPointDistance; }
            set
            {
                minPointDistance = value;
                // Squared distance is cached so comparisons can avoid sqrt.
                minPointDistanceSqr = value * value;
            }
        }

        /// <summary>Indicates that this cluster instance has two valid clusters.</summary>
        /// <value><c>true</c> if this instance has clusters; otherwise, <c>false</c>.</value>
        public bool HasClusters
        {
            get
            {
                // Lazily recluster only when the point set changed since last time.
                if (dirty) distributePoints();
                return hasClusters;
            }
        }

        #endregion

        #region Private variables

        // All points in the pool; cluster1/cluster2 hold the current partition.
        private List<ITouch> points = new List<ITouch>();
        // True when points changed and clusters must be recomputed.
        private bool dirty;
        private List<ITouch> cluster1 = new List<ITouch>();
        private List<ITouch> cluster2 = new List<ITouch>();
        private float minPointDistance, minPointDistanceSqr;
        private bool hasClusters = false;

        #endregion

        /// <summary>Initializes a new instance of the <see cref="Clusters"/> class.</summary>
        public Clusters()
        {
            MinPointsDistance = 0;
            markDirty();
        }

        #region Public methods

        /// <summary>Calculates the center position of one of the clusters.</summary>
        /// <param name="id">Cluster id. Either <see cref="CLUSTER1"/> or <see cref="CLUSTER2"/>.</param>
        /// <returns>Cluster's centroid position or <see cref="TouchManager.INVALID_POSITION"/> if cluster contains no points.</returns>
        public Vector2 GetCenterPosition(int id)
        {
            if (!HasClusters) return TouchManager.INVALID_POSITION;

            Vector2 result;
            switch (id)
            {
                case CLUSTER1:
                    result = ClusterUtils.Get2DCenterPosition(cluster1);
                    break;
                case CLUSTER2:
                    result = ClusterUtils.Get2DCenterPosition(cluster2);
                    break;
                default:
                    return TouchManager.INVALID_POSITION;
            }
            return result;
        }

        /// <summary>Calculates previous center position of one of the clusters.</summary>
        /// <param name="id">Cluster id. Either <see cref="CLUSTER1"/> or <see cref="CLUSTER2"/>.</param>
        /// <returns>Cluster's centroid previous position or <see cref="TouchManager.INVALID_POSITION"/> if cluster contains no points.</returns>
        public Vector2 GetPreviousCenterPosition(int id)
        {
            if (!HasClusters) return TouchManager.INVALID_POSITION;

            Vector2 result;
            switch (id)
            {
                case CLUSTER1:
                    result = ClusterUtils.GetPrevious2DCenterPosition(cluster1);
                    break;
                case CLUSTER2:
                    result = ClusterUtils.GetPrevious2DCenterPosition(cluster2);
                    break;
                default:
                    return TouchManager.INVALID_POSITION;
            }
            return result;
        }

        /// <summary>Adds a point to cluster.</summary>
        /// <param name="point">A point.</param>
        public void AddPoint(ITouch point)
        {
            // Duplicates are silently ignored.
            if (points.Contains(point)) return;

            points.Add(point);
            markDirty();
        }

        /// <summary>Adds a list of points to cluster.</summary>
        /// <param name="points">List of points.</param>
        public void AddPoints(IList<ITouch> points)
        {
            foreach (var point in points) AddPoint(point);
        }

        /// <summary>Removes a point from cluster.</summary>
        /// <param name="point">A point.</param>
        public void RemovePoint(ITouch point)
        {
            // Unknown points are silently ignored.
            if (!points.Contains(point)) return;

            points.Remove(point);
            markDirty();
        }

        /// <summary>Removes a list of points from cluster.</summary>
        /// <param name="points">List of points.</param>
        public void RemovePoints(IList<ITouch> points)
        {
            foreach (var point in points) RemovePoint(point);
        }

        /// <summary>Removes all points from cluster.</summary>
        public void RemoveAllPoints()
        {
            points.Clear();
            markDirty();
        }

        /// <summary>Invalidates cluster state. Call this method to recalculate cluster properties.</summary>
        public void Invalidate()
        {
            markDirty();
        }

        #endregion

        #region Private functions

        // Iteratively partitions the point pool into cluster1/cluster2
        // (2-means-style refinement). Repeats until the membership of both
        // clusters stops changing, detected by comparing point-set hashes
        // between consecutive iterations.
        private void distributePoints()
        {
            cluster1.Clear();
            cluster2.Clear();

            hasClusters = checkClusters();
            if (!hasClusters) return;

            // Seed each cluster with one arbitrary point.
            cluster1.Add(points[0]);
            cluster2.Add(points[1]);

            var total = points.Count;
            if (total == 2) return;

            var oldHash1 = "";
            var oldHash2 = "";
            var hash1 = "#";
            var hash2 = "#";

            // Loop until neither cluster's membership hash changed.
            while (oldHash1 != hash1 || oldHash2 != hash2)
            {
                var center1 = ClusterUtils.Get2DCenterPosition(cluster1);
                var center2 = ClusterUtils.Get2DCenterPosition(cluster2);
                ITouch obj1 = null;
                ITouch obj2 = null;

                // Take most distant points from cluster1 and cluster2
                // (the point farthest from center1 becomes cluster2's new seed
                // and vice versa — the cross-assignment below is intentional).
                var maxDist1 = -float.MaxValue;
                var maxDist2 = -float.MaxValue;
                for (var i = 0; i < total; i++)
                {
                    var obj = points[i];
                    var dist = (center1 - obj.Position).sqrMagnitude;
                    if (dist > maxDist2)
                    {
                        maxDist2 = dist;
                        obj2 = obj;
                    }
                    dist = (center2 - obj.Position).sqrMagnitude;
                    if (dist > maxDist1)
                    {
                        maxDist1 = dist;
                        obj1 = obj;
                    }
                }

                // If it is the same point it means that this point is too far away from both clusters and has to be in a separate cluster
                if (obj1 == obj2)
                {
                    center1 = (center1 + center2) * .5f;
                    center2 = obj2.Position;
                }
                else
                {
                    center1 = obj1.Position;
                    center2 = obj2.Position;
                }

                // Reassign every point to whichever center is closer.
                cluster1.Clear();
                cluster2.Clear();

                for (var i = 0; i < total; i++)
                {
                    var obj = points[i];
                    if ((center1 - obj.Position).sqrMagnitude < (center2 - obj.Position).sqrMagnitude)
                    {
                        cluster1.Add(obj);
                    }
                    else
                    {
                        cluster2.Add(obj);
                    }
                }

                oldHash1 = hash1;
                oldHash2 = hash2;
                hash1 = ClusterUtils.GetPointsHash(cluster1);
                hash2 = ClusterUtils.GetPointsHash(cluster2);
            }

            markClean();
        }

        // Returns true when the pool has at least two points and at least one
        // pair of points lies MinPointsDistance or farther apart.
        private bool checkClusters()
        {
            var length = points.Count - 1;
            if (length < 1) return false;
            if (length == 1)
            {
                // Exactly two points: a single distance check suffices.
                var p1 = points[0].Position;
                var p2 = points[1].Position;
                var dx = p1.x - p2.x;
                var dy = p1.y - p2.y;
                if (dx * dx + dy * dy >= minPointDistanceSqr) return true;
                return false;
            }
            // Pairwise scan; early-out on the first sufficiently distant pair.
            for (var i = 0; i < length; i++)
            {
                for (var j = i + 1; j <= length; j++)
                {
                    var p1 = points[i].Position;
                    var p2 = points[j].Position;
                    var dx = p1.x - p2.x;
                    var dy = p1.y - p2.y;
                    if (dx * dx + dy * dy >= minPointDistanceSqr) return true;
                }
            }
            return false;
        }

        private void markDirty()
        {
            dirty = true;
        }

        private void markClean()
        {
            dirty = false;
        }

        #endregion
    }
}
using System;
using fomm.Scripting;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Text;
using System.Text.RegularExpressions;
using System.IO;
using System.Windows.Forms;
using System.Globalization;
using System.Collections.Generic; // NOTE(review): duplicate of the directive above (CS0105 warning) — safe to remove.

// FOMM/NMM installer script for SkyUI. Before installing, it checks for stale
// loose files that would shadow the SkyUI.bsa contents and for the presence,
// version and script files of SKSE; when problems are found it shows a WinForms
// report dialog with Refresh/Install/Cancel buttons.
class Script : SkyrimBaseScript
{
    // Minimum SKSE version required by this SkyUI release.
    static Version SKSE_MIN_VERSION = new Version("0.1.7.3");

    // Main dialog
    static Form mainInstallForm;
    static TextBox textArea;
    static Button refreshButton;
    static Button installButton;
    static Button cancelButton;

    // Loose files in Data/ that would override newer versions packed in SkyUI.bsa.
    static string[] checkedLooseFiles =
    {
        "Scripts/SKI_ActiveEffectsWidget.pex",
        "Scripts/SKI_ConfigBase.pex",
        "Scripts/SKI_ConfigManager.pex",
        "Scripts/SKI_Main.pex",
        "Scripts/SKI_MeterWidget.pex",
        "Scripts/SKI_PlayerLoadGameAlias.pex",
        "Scripts/SKI_QuestBase.pex",
        "Scripts/SKI_StatusWidget.pex",
        "Scripts/SKI_WidgetBase.pex",
        "Scripts/SKI_WidgetManager.pex",
        "Interface/skyui_cfg.txt",
        "Interface/skyui_translate.txt",
        "Interface/bartermenu.swf",
        "Interface/containermenu.swf",
        "Interface/inventorymenu.swf",
        "Interface/magicmenu.swf",
        "Interface/skyui/inventorylists.swf",
        "Interface/skyui/tabbedinventorylists.swf",
        "Interface/skyui/skyui_icons_cat.swf",
        "Interface/skyui/skyui_icons_inv.swf",
        "Interface/skyui/skyui_icons_magic.swf"
    };

    static bool install = false;                              // set true when installation should proceed
    static int problemCount = 0;                              // number of problems found by DetectProblems
    static List<string> foundLooseFiles = new List<string>(); // loose files detected in Data/
    static bool noSKSE = false;                               // SKSE script extender not present
    static bool noSKSEScripts = false;                        // Scripts/SKSE.pex missing
    static Version skseVersion;                               // detected SKSE version (may be null)

    /// <summary>
    /// Mod-manager entry point. Runs the problem checks; shows the report dialog
    /// when anything is wrong, otherwise installs directly.
    /// </summary>
    /// <returns>true when the mod was installed, false when the user cancelled.</returns>
    public static bool OnActivate()
    {
        DetectProblems();

        // Detected problems? Show report. Otherwise just install
        if (problemCount > 0)
        {
            InitializeComponents();
            mainInstallForm.ShowDialog();
        }
        else
        {
            install = true;
        }

        if (install)
            PerformBasicInstall();

        return install;
    }

    // Re-runs all installation checks and updates the static problem state.
    // Called both on activation and from the dialog's Refresh button.
    static void DetectProblems()
    {
        // Clean up previous data
        problemCount = 0;
        foundLooseFiles.Clear();
        noSKSE = false;
        noSKSEScripts = false;
        skseVersion = new Version();

        // 1. Check Loose files
        foreach (string file in checkedLooseFiles)
            if (GetExistingDataFile(file) != null)
                foundLooseFiles.Add(file);

        if (foundLooseFiles.Count > 0)
            problemCount++;

        // 2. Check if skse is present
        noSKSE = !ScriptExtenderPresent();
        if (noSKSE)
            problemCount++;

        // 3. Check SKSE version
        skseVersion = GetSkseVersion();
        if (skseVersion == null || skseVersion < SKSE_MIN_VERSION)
            problemCount++;

        // 4. Check missing SKSE.pex
        if (GetExistingDataFile("Scripts/SKSE.pex") == null)
        {
            noSKSEScripts = true;
            problemCount++;
        }
    }

    // Rebuilds the report text from the current problem state.
    // NOTE(review): the SKSE checks below are chained with else-if, so the printed
    // problem numbers can total less than problemCount when several SKSE issues
    // coexist — presumably intentional (later issues are implied by earlier ones),
    // but worth confirming.
    static void GenerateReport()
    {
        int c = 0; // printed problem number

        textArea.Clear();

        if (problemCount == 0)
        {
            PrintReport("All problems have been resolved.");
            return;
        }

        PrintReport("This report informs you about potential problems with your SkyUI installation.");
        PrintReport("");
        PrintReport("Fix these problems , then press 'Refresh' to confirm that they're gone.");
        PrintReport("After all problems have been resolved, you can continue with the installation.");
        PrintReport("");

        if (foundLooseFiles.Count > 0)
        {
            c++;
            PrintReport("-----------");
            PrintReport("Problem #" + c + ":");
            PrintReport("-----------");
            PrintReport("There are files in your 'Data/' folder, which override newer versions from the SkyUI.bsa archive.");
            PrintReport("");
            PrintReport("These files are:");
            foreach (string file in foundLooseFiles)
                PrintReport("\tData/" + file);
            PrintReport("");
            PrintReport("Potential causes:");
            PrintReport("* An old SkyUI version was not uninstalled before installing the new one.");
            PrintReport("");
            PrintReport("Solution:");
            PrintReport("1. If you have an old SkyUI version installed in NMM, uninstall it, then try again.");
            PrintReport("2. If there are still problematic files reported, delete them manually.");
            PrintReport("");
        }

        if (noSKSE)
        {
            c++;
            PrintReport("-----------");
            PrintReport("Problem #" + c + ":");
            PrintReport("-----------");
            PrintReport("The Skyrim Script Extender (SKSE) is not installed.");
            PrintReport("");
            PrintReport("Solution:");
            PrintReport("Get the latest SKSE version from 'http://skse.silverlock.org/' and install it.");
        }
        else if (skseVersion == null || skseVersion < SKSE_MIN_VERSION)
        {
            c++;
            PrintReport("-----------");
            PrintReport("Problem #" + c + ":");
            PrintReport("-----------");
            PrintReport("Your SKSE version is too old.");
            PrintReport("");
            PrintReport("Detected version: " + skseVersion);
            PrintReport("Required version: " + SKSE_MIN_VERSION + " (or newer)");
            PrintReport("");
            PrintReport("Solution:");
            PrintReport("Get the latest SKSE version from 'http://skse.silverlock.org/' and install it.");
        }
        else if (noSKSEScripts)
        {
            c++;
            PrintReport("-----------");
            PrintReport("Problem #" + c + ":");
            PrintReport("-----------");
            PrintReport("The SKSE scripts are missing.");
            PrintReport("");
            PrintReport("Potential causes:");
            PrintReport("* You didn't install the scripts with the rest of SKSE.");
            PrintReport("");
            PrintReport("Solution:");
            PrintReport("Get the latest SKSE version from 'http://skse.silverlock.org/' and install it.");
        }
    }

    // Appends one line (plus newline) to the report text box.
    static void PrintReport(string line)
    {
        textArea.AppendText(line + "\n");
    }

    // Builds the problem-report dialog: a read-only text area plus
    // Cancel / Refresh / Install buttons (designer-style layout code).
    static void InitializeComponents()
    {
        textArea = new System.Windows.Forms.TextBox();
        refreshButton = new System.Windows.Forms.Button();
        installButton = new System.Windows.Forms.Button();
        cancelButton = new System.Windows.Forms.Button();
        //
        // textArea
        //
        textArea.BackColor = System.Drawing.SystemColors.ControlLightLight;
        textArea.Location = new System.Drawing.Point(12, 12);
        textArea.Font = new System.Drawing.Font("Courier New", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
        textArea.Multiline = true;
        textArea.Name = "textArea";
        textArea.ReadOnly = true;
        textArea.ScrollBars = System.Windows.Forms.ScrollBars.Both;
        textArea.Size = new System.Drawing.Size(610, 439);
        textArea.TabIndex = 0;
        //
        // refreshButton
        //
        refreshButton.Location = new System.Drawing.Point(280, 457);
        refreshButton.Name = "refreshButton";
        refreshButton.Size = new System.Drawing.Size(75, 23);
        refreshButton.TabIndex = 1;
        refreshButton.Text = "Refresh";
        refreshButton.UseVisualStyleBackColor = true;
        refreshButton.Click += new System.EventHandler(refreshButton_Click);
        //
        // installButton
        //
        installButton.Location = new System.Drawing.Point(547, 457);
        installButton.Name = "installButton";
        installButton.Size = new System.Drawing.Size(75, 23);
        installButton.TabIndex = 2;
        installButton.Text = "Install";
        installButton.UseVisualStyleBackColor = true;
        installButton.Click += new System.EventHandler(installButton_Click);
        //
        // cancelButton
        //
        cancelButton.Location = new System.Drawing.Point(12, 457);
        cancelButton.Name = "cancelButton";
        cancelButton.Size = new System.Drawing.Size(75, 23);
        cancelButton.TabIndex = 3;
        cancelButton.Text = "Cancel";
        cancelButton.UseVisualStyleBackColor = true;
        cancelButton.Click += new System.EventHandler(cancelButton_Click);
        //
        // mainInstallForm
        //
        mainInstallForm = CreateCustomForm();
        mainInstallForm.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
        mainInstallForm.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        mainInstallForm.ClientSize = new System.Drawing.Size(634, 492);
        mainInstallForm.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
        mainInstallForm.MaximizeBox = false;
        mainInstallForm.MinimizeBox = false;
        mainInstallForm.Controls.Add(installButton);
        mainInstallForm.Controls.Add(refreshButton);
        mainInstallForm.Controls.Add(cancelButton);
        mainInstallForm.Controls.Add(textArea);
        mainInstallForm.Name = "mainInstallForm";
        mainInstallForm.Text = "SkyUI Problem Report";
        mainInstallForm.Load += new System.EventHandler(mainInstallForm_Load);
    }

    // Install button: accept and close the dialog; OnActivate then installs.
    static void installButton_Click(object sender, EventArgs e)
    {
        install = true;
        mainInstallForm.Close();
    }

    // Refresh button: re-run the checks and regenerate the report in place.
    static void refreshButton_Click(object sender, EventArgs e)
    {
        DetectProblems();
        GenerateReport();
    }

    // Cancel button: reject and close the dialog; nothing is installed.
    static void cancelButton_Click(object sender, EventArgs e)
    {
        install = false;
        mainInstallForm.Close();
    }

    // Initial dialog load: populate the report from the already-run checks.
    static void mainInstallForm_Load(object sender, EventArgs e)
    {
        GenerateReport();
    }

    // Case-insensitive check whether a plugin is in the active load order.
    // NOTE(review): not referenced anywhere in this script — presumably kept for
    // use by derived/edited install scripts.
    static bool IsPluginActive(String pluginName)
    {
        string[] loadOrder = GetActivePlugins();

        for (int i = 0; i < loadOrder.Length; ++i)
        {
            if (loadOrder[i].Equals(pluginName, StringComparison.InvariantCultureIgnoreCase))
            {
                return true;
            }
        }

        return false;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Drawing.Printing
{
    using System.Diagnostics;
    using System.Runtime.InteropServices;
    using System.Security;

    /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController"]/*' />
    /// <devdoc>
    ///     <para>Controls how a document is printed.</para>
    /// </devdoc>
    public abstract class PrintController
    {
        // DEVMODEs are pretty expensive, so we cache one here and share it with the
        // Standard and Preview print controllers.  If it weren't for all the rules about API changes,
        // I'd consider making this protected.

        #region SafeDeviceModeHandle Class

        /// <summary>
        /// Represents a SafeHandle for a Printer's Device Mode struct handle (DEVMODE)
        /// </summary>
        /// <SecurityNote>
        ///     Critical: base class SafeHandle is critical
        /// </SecurityNote>
        [SecurityCritical]
        internal sealed class SafeDeviceModeHandle : SafeHandle
        {
            // This constructor is used by the P/Invoke marshaling layer
            // to allocate a SafeHandle instance.  P/Invoke then does the
            // appropriate method call, storing the handle in this class.
            private SafeDeviceModeHandle() : base(IntPtr.Zero, true)
            {
                return;
            }

            internal SafeDeviceModeHandle(IntPtr handle)
                : base(IntPtr.Zero, true)  // "true" means "owns the handle"
            {
                SetHandle(handle);
            }

            // A zero handle is the invalid (unallocated) state for a DEVMODE global.
            public override bool IsInvalid
            {
                get
                {
                    return handle == IntPtr.Zero;
                }
            }

            // Specifies how to free the handle.
            // The boolean returned should be true for success and false if the runtime
            // should fire a SafeHandleCriticalFailure MDA (CustomerDebugProbe) if that
            // MDA is enabled.
            [SecurityCritical]
            protected override bool ReleaseHandle()
            {
                if (!IsInvalid)
                {
                    // DEVMODE handles are GlobalAlloc'd memory; free them with GlobalFree.
                    SafeNativeMethods.GlobalFree(new HandleRef(this, handle));
                }
                handle = IntPtr.Zero;

                return true;
            }

            // Convenience conversion back to the raw handle; null maps to IntPtr.Zero.
            public static implicit operator IntPtr(SafeDeviceModeHandle handle)
            {
                return (handle == null) ? IntPtr.Zero : handle.handle;
            }

            // Wraps a raw DEVMODE handle; the new SafeDeviceModeHandle takes ownership.
            public static explicit operator SafeDeviceModeHandle(IntPtr handle)
            {
                return new SafeDeviceModeHandle(handle);
            }
        }

        #endregion

        // Cached DEVMODE handle, created in OnStartPrint and released in OnEndPrint.
        internal SafeDeviceModeHandle modeHandle = null;

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.PrintController"]/*' />
        /// <devdoc>
        ///    <para>
        ///       Initializes a new instance of the <see cref='System.Drawing.Printing.PrintController'/> class.
        ///    </para>
        /// </devdoc>
        protected PrintController()
        {
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.IsPreview"]/*' />
        /// <devdoc>
        ///    <para>
        ///       This is new public property which notifies if this controller is used for PrintPreview.
        ///    </para>
        /// </devdoc>
        public virtual bool IsPreview
        {
            get
            {
                return false;
            }
        }

        // WARNING: if you have nested PrintControllers, this method won't get called on the inner one.
        // Add initialization code to StartPrint or StartPage instead.
        //
        // Drives one complete print job: raises BeginPrint, runs the page loop
        // (optimized variant behind an AppContext switch), and guarantees that
        // EndPrint/OnEndPrint run even when the loop throws.
        internal void Print(PrintDocument document)
        {
            //
            // Get the PrintAction for this event
            PrintAction printAction;
            if (IsPreview)
            {
                printAction = PrintAction.PrintToPreview;
            }
            else
            {
                printAction = document.PrinterSettings.PrintToFile ? PrintAction.PrintToFile : PrintAction.PrintToPrinter;
            }

            // Check that user has permission to print to this particular printer
            PrintEventArgs printEvent = new PrintEventArgs(printAction);
            document._OnBeginPrint(printEvent);
            if (printEvent.Cancel)
            {
                document._OnEndPrint(printEvent);
                return;
            }

            OnStartPrint(document, printEvent);
            if (printEvent.Cancel)
            {
                // BeginPrint already ran, so EndPrint must still be raised on cancel.
                document._OnEndPrint(printEvent);
                OnEndPrint(document, printEvent);
                return;
            }

            // "canceled" defaults to true so an exception in the loop is reported
            // as a canceled job to the EndPrint handlers.
            bool canceled = true;

            try
            {
                // To enable optimization of the preview dialog, add the following to the config file:
                // <runtime >
                //    <!-- AppContextSwitchOverrides values are in the form of 'key1=true|false;key2=true|false  -->
                //    <AppContextSwitchOverrides value = "Switch.System.Drawing.Printing.OptimizePrintPreview=true" />
                // </runtime >
                canceled = LocalAppContextSwitches.OptimizePrintPreview ? PrintLoopOptimized(document) : PrintLoop(document);
            }
            finally
            {
                try
                {
                    document._OnEndPrint(printEvent);
                    printEvent.Cancel = canceled | printEvent.Cancel;
                }
                finally
                {
                    OnEndPrint(document, printEvent);
                }
            }
        }

        // Returns true if print was aborted.
        // WARNING: if you have nested PrintControllers, this method won't get called on the inner one
        // Add initialization code to StartPrint or StartPage instead.
        private bool PrintLoop(PrintDocument document)
        {
            QueryPageSettingsEventArgs queryEvent = new QueryPageSettingsEventArgs((PageSettings)document.DefaultPageSettings.Clone());
            for (;;)
            {
                document._OnQueryPageSettings(queryEvent);
                if (queryEvent.Cancel)
                {
                    return true;
                }

                PrintPageEventArgs pageEvent = CreatePrintPageEvent(queryEvent.PageSettings);
                Graphics graphics = OnStartPage(document, pageEvent);
                pageEvent.SetGraphics(graphics);

                try
                {
                    document._OnPrintPage(pageEvent);
                    OnEndPage(document, pageEvent);
                }
                finally
                {
                    pageEvent.Dispose();
                }

                if (pageEvent.Cancel)
                {
                    return true;
                }
                else if (!pageEvent.HasMorePages)
                {
                    return false;
                }
                else
                {
                    // loop
                }
            }
        }

        // Optimized page loop: reuses the PrintPageEventArgs (and skips re-applying
        // page settings to the DEVMODE) on pages where the QueryPageSettings handler
        // did not change anything.  Returns true if print was aborted.
        private bool PrintLoopOptimized(PrintDocument document)
        {
            PrintPageEventArgs pageEvent = null;
            PageSettings documentPageSettings = (PageSettings)document.DefaultPageSettings.Clone();
            QueryPageSettingsEventArgs queryEvent = new QueryPageSettingsEventArgs(documentPageSettings);
            for (;;)
            {
                queryEvent.PageSettingsChanged = false;
                document._OnQueryPageSettings(queryEvent);
                if (queryEvent.Cancel)
                {
                    return true;
                }

                if (!queryEvent.PageSettingsChanged)
                {
                    // QueryPageSettings event handler did not change the page settings,
                    // thus we use default page settings from the document object.
                    if (pageEvent == null)
                    {
                        pageEvent = CreatePrintPageEvent(queryEvent.PageSettings);
                    }
                    else
                    {
                        // This is not the first page and the settings had not changed since the previous page, 
                        // thus don't re-apply them.
                        pageEvent.CopySettingsToDevMode = false;
                    }

                    Graphics graphics = OnStartPage(document, pageEvent);
                    pageEvent.SetGraphics(graphics);
                }
                else
                {
                    // Page settings were customized, so use the customized ones in the start page event.
                    pageEvent = CreatePrintPageEvent(queryEvent.PageSettings);
                    Graphics graphics = OnStartPage(document, pageEvent);
                    pageEvent.SetGraphics(graphics);
                }

                try
                {
                    document._OnPrintPage(pageEvent);
                    OnEndPage(document, pageEvent);
                }
                finally
                {
                    // Dispose only the per-page Graphics; the event args object itself
                    // is reused across iterations (unlike PrintLoop, which disposes it).
                    pageEvent.Graphics.Dispose();
                    pageEvent.SetGraphics(null);
                }

                if (pageEvent.Cancel)
                {
                    return true;
                }
                else if (!pageEvent.HasMorePages)
                {
                    return false;
                }
            }
        }

        // Builds the per-page event args, deriving the page bounds from the cached
        // DEVMODE handle and the margin bounds from the page's Margins.
        private PrintPageEventArgs CreatePrintPageEvent(PageSettings pageSettings)
        {
            Debug.Assert((modeHandle != null), "modeHandle is null.  Someone must have forgot to call base.StartPrint");

            Rectangle pageBounds = pageSettings.GetBounds(modeHandle);
            Rectangle marginBounds = new Rectangle(pageSettings.Margins.Left,
                                                   pageSettings.Margins.Top,
                                                   pageBounds.Width - (pageSettings.Margins.Left + pageSettings.Margins.Right),
                                                   pageBounds.Height - (pageSettings.Margins.Top + pageSettings.Margins.Bottom));

            PrintPageEventArgs pageEvent = new PrintPageEventArgs(null, marginBounds, pageBounds, pageSettings);
            return pageEvent;
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnStartPrint"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, begins the control sequence of when and how to print a document.</para>
        /// </devdoc>
        public virtual void OnStartPrint(PrintDocument document, PrintEventArgs e)
        {
            // Cache the printer's DEVMODE for the duration of the job; released in OnEndPrint.
            modeHandle = (SafeDeviceModeHandle)document.PrinterSettings.GetHdevmode(document.DefaultPageSettings);
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnStartPage"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, begins the control
        ///       sequence of when and how to print a page in a document.</para>
        /// </devdoc>
        public virtual Graphics OnStartPage(PrintDocument document, PrintPageEventArgs e)
        {
            return null;
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnEndPage"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, completes the control sequence of when and how
        ///       to print a page in a document.</para>
        /// </devdoc>
        public virtual void OnEndPage(PrintDocument document, PrintPageEventArgs e)
        {
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnEndPrint"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, completes the
        ///       control sequence of when and how to print a document.</para>
        /// </devdoc>
        public virtual void OnEndPrint(PrintDocument document, PrintEventArgs e)
        {
            Debug.Assert((modeHandle != null), "modeHandle is null.  Someone must have forgot to call base.StartPrint");
            if (modeHandle != null)
            {
                modeHandle.Close();
            }
        }
    }
}
using J2N.Collections.Generic.Extensions;
using J2N.Text;
using Antlr.Runtime;
using Antlr.Runtime.Tree;
using Lucene.Net.Queries.Function;
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
using JCG = J2N.Collections.Generic;
using J2N;
#if NETSTANDARD
using System.IO;
#else
using System.Configuration;
#endif

namespace Lucene.Net.Expressions.JS
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>An expression compiler for javascript expressions.</summary>
    /// <remarks>
    /// An expression compiler for javascript expressions.
    /// <para/>
    /// Example:
    /// <code>
    /// Expression foo = JavascriptCompiler.Compile("((0.3*popularity)/10.0)+(0.7*score)");
    /// </code>
    /// <para/>
    /// See the <see cref="Lucene.Net.Expressions.JS">package documentation</see> for
    /// the supported syntax and default functions.
    /// <para>
    /// You can compile with an alternate set of functions via <see cref="Compile(string, IDictionary{string, MethodInfo})"/>.
    /// For example:
    /// <code>
    /// IDictionary&lt;string, MethodInfo&gt; functions = new Dictionary&lt;string, MethodInfo&gt;();
    /// // add all the default functions
    /// functions.PutAll(JavascriptCompiler.DEFAULT_FUNCTIONS);
    /// // add sqrt()
    /// functions.Put("sqrt", (typeof(Math)).GetMethod("Sqrt", new Type[] { typeof(double) }));
    /// // call compile with customized function map
    /// Expression foo = JavascriptCompiler.Compile("sqrt(score)+ln(popularity)", functions);
    /// </code>
    /// </para>
    /// @lucene.experimental
    /// </remarks>
    public class JavascriptCompiler
    {
        // Class name used for every dynamically generated expression type.
        private static readonly string COMPILED_EXPRESSION_CLASS = typeof(Expression).Namespace + ".CompiledExpression";

        // JVM-internal form of the class name (kept from the Java port; unused by the .NET emitter).
        private static readonly string COMPILED_EXPRESSION_INTERNAL = COMPILED_EXPRESSION_CLASS.Replace('.', '/');

        private static readonly Type EXPRESSION_TYPE = Type.GetType(typeof(Expression).FullName);

        private static readonly Type FUNCTION_VALUES_TYPE = typeof(FunctionValues);

        // Expression(string, string[]) base constructor, called by the generated ctor.
        private static readonly ConstructorInfo EXPRESSION_CTOR = typeof(Expression).
            GetConstructor(new Type[] { typeof(string), typeof(string[]) });

        // Expression.Evaluate(int, FunctionValues[]) — the virtual the generated type overrides.
        private static readonly MethodInfo EVALUATE_METHOD = GetMethod(EXPRESSION_TYPE, "Evaluate",
            new[] { typeof(int), typeof(FunctionValues[]) });

        // FunctionValues.DoubleVal(int) — invoked for every external variable reference.
        private static readonly MethodInfo DOUBLE_VAL_METHOD = GetMethod(FUNCTION_VALUES_TYPE, "DoubleVal",
            new[] { typeof(int) });

        // We use the same class name for all generated classes as they all have their own class loader.
        // The source code is displayed as "source file name" in stack trace.
        // to work around import clash:
        private static MethodInfo GetMethod(Type type, string method, Type[] parms)
        {
            return type.GetMethod(method, parms);
        }

        private readonly string sourceText;                // expression being compiled

        // Maps external variable names to their index in the FunctionValues[] argument,
        // in first-seen order (LinkedDictionary preserves insertion order).
        private readonly IDictionary<string, int> externalsMap = new JCG.LinkedDictionary<string, int>();

        private TypeBuilder dynamicType;

        private readonly IDictionary<string, MethodInfo> functions;  // name -> callable function

        private ILGenerator gen;                           // IL stream of the Evaluate override
        private AssemblyBuilder asmBuilder;
        private MethodBuilder evalMethod;
        private ModuleBuilder modBuilder;

        // This maximum length is theoretically 65535 bytes, but as its CESU-8 encoded we dont know how large it is in bytes, so be safe
        // rcmuir: "If your ranking function is that large you need to check yourself into a mental institution!"

        /// <summary>Compiles the given expression.</summary>
        /// <param name="sourceText">The expression to compile</param>
        /// <returns>A new compiled expression</returns>
        /// <exception cref="ParseException">on failure to compile</exception>
        // LUCENENET TODO: ParseException not being thrown here - need to check
        // where this is thrown in Java and throw the equivalent in .NET
        public static Expression Compile(string sourceText)
        {
            return new JavascriptCompiler(sourceText).CompileExpression();
        }

        /// <summary>Compiles the given expression with the supplied custom functions.</summary>
        /// <remarks>
        /// Compiles the given expression with the supplied custom functions.
        /// <para/>
        /// Functions must be <c>public static</c>, return <see cref="double"/> and
        /// can take from zero to 256 <see cref="double"/> parameters.
        /// </remarks>
        /// <param name="sourceText">The expression to compile</param>
        /// <param name="functions">map of <see cref="string"/> names to functions</param>
        /// <returns>A new compiled expression</returns>
        /// <exception cref="ParseException">on failure to compile</exception>
        public static Expression Compile(string sourceText, IDictionary<string, MethodInfo> functions)
        {
            foreach (MethodInfo m in functions.Values)
            {
                CheckFunction(m);
            }
            return new JavascriptCompiler(sourceText, functions).CompileExpression();
        }

        /// <summary>This method is unused, it is just here to make sure that the function signatures don't change.</summary>
        /// <remarks>
        /// This method is unused, it is just here to make sure that the function signatures don't change.
        /// If this method fails to compile, you also have to change the byte code generator to correctly
        /// use the <see cref="FunctionValues"/> class.
        /// </remarks>
        private static void UnusedTestCompile()
        {
            FunctionValues f = null;
            double ret = f.DoubleVal(2);
        }

        /// <summary>Constructs a compiler for expressions.</summary>
        /// <param name="sourceText">The expression to compile</param>
        private JavascriptCompiler(string sourceText)
            : this(sourceText, DEFAULT_FUNCTIONS)
        {
        }

        /// <summary>Constructs a compiler for expressions with specific set of functions</summary>
        /// <param name="sourceText">The expression to compile</param>
        /// <param name="functions">The set of functions to compile with</param>
        private JavascriptCompiler(string sourceText, IDictionary<string, MethodInfo> functions)
        {
            if (sourceText == null)
            {
                // NOTE(review): parameterless ArgumentNullException loses the parameter
                // name; nameof(sourceText) would be more informative.
                throw new ArgumentNullException();
            }
            this.sourceText = sourceText;
            this.functions = functions;
        }

        /// <summary>Compiles the given expression with the specified parent classloader</summary>
        /// <returns>A new compiled expression</returns>
        /// <exception cref="ParseException">on failure to compile</exception>
        private Expression CompileExpression()
        {
            try
            {
                // Parse -> emit IL over the tree -> bake the type -> instantiate it
                // with the source text and the external variable names (in slot order).
                ITree antlrTree = GetAntlrComputedExpressionTree();
                BeginCompile();
                RecursiveCompile(antlrTree, typeof(double));
                EndCompile();
                return
                    (Expression)
                        Activator.CreateInstance(dynamicType.CreateTypeInfo().AsType(), sourceText, externalsMap.Keys.ToArray());
            }
            catch (MemberAccessException exception)
            {
                throw new InvalidOperationException("An internal error occurred attempting to compile the expression (" + sourceText + ").", exception);
            }
            catch (TargetInvocationException exception)
            {
                throw new InvalidOperationException("An internal error occurred attempting to compile the expression (" + sourceText + ").", exception);
            }
        }

        // Sets up the dynamic assembly/module/type, emits the pass-through
        // constructor, and opens the IL stream for the Evaluate override.
        private void BeginCompile()
        {
            // NOTE(review): new Random().Next() as a uniqueness source can collide
            // under rapid successive calls; each compilation does get its own
            // collectible assembly regardless.
            var assemblyName = new AssemblyName("Lucene.Net.Expressions.Dynamic" + new Random().Next());
            asmBuilder = AssemblyBuilder.DefineDynamicAssembly(assemblyName, AssemblyBuilderAccess.RunAndCollect);
            modBuilder = asmBuilder.DefineDynamicModule(assemblyName.Name + ".dll");
            dynamicType = modBuilder.DefineType(COMPILED_EXPRESSION_CLASS,
                TypeAttributes.AnsiClass | TypeAttributes.AutoClass | TypeAttributes.Public | TypeAttributes.Class |
                TypeAttributes.BeforeFieldInit | TypeAttributes.AutoLayout, EXPRESSION_TYPE);

            // ctor(string, string[]) simply forwards both arguments to the Expression base ctor.
            ConstructorBuilder constructorBuilder = dynamicType.DefineConstructor(MethodAttributes.Public,
                CallingConventions.HasThis,
                new[] { typeof(string), typeof(string[]) });

            ILGenerator ctorGen = constructorBuilder.GetILGenerator();
            ctorGen.Emit(OpCodes.Ldarg_0);
            ctorGen.Emit(OpCodes.Ldarg_1);
            ctorGen.Emit(OpCodes.Ldarg_2);
            ctorGen.Emit(OpCodes.Call, EXPRESSION_CTOR);
            ctorGen.Emit(OpCodes.Nop);
            ctorGen.Emit(OpCodes.Nop);
            ctorGen.Emit(OpCodes.Ret);

            evalMethod = dynamicType.DefineMethod("Evaluate", MethodAttributes.Public | MethodAttributes.Virtual,
                typeof(double), new[] { typeof(int), typeof(FunctionValues[]) });
            gen = evalMethod.GetILGenerator();
        }

        // Walks the ANTLR tree depth-first, emitting IL that leaves the subtree's
        // value on the evaluation stack. "expected" is the numeric type the parent
        // wants (double for arithmetic, int/long for shifts and bitwise ops).
        private void RecursiveCompile(ITree current, Type expected)
        {
            int type = current.Type;
            string text = current.Text;
            switch (type)
            {
                case JavascriptParser.AT_CALL:
                {
                    // Named function call: compile each argument, then call the mapped static method.
                    ITree identifier = current.GetChild(0);
                    string call = identifier.Text;
                    int arguments = current.ChildCount - 1;
                    MethodInfo method;
                    if (!functions.TryGetValue(call, out method) || method == null)
                    {
                        throw new ArgumentException("Unrecognized method call (" + call + ").");
                    }
                    int arity = method.GetParameters().Length;
                    if (arguments != arity)
                    {
                        throw new ArgumentException("Expected (" + arity + ") arguments for method call (" +
                            call + "), but found (" + arguments + ").");
                    }
                    for (int argument = 1; argument <= arguments; ++argument)
                    {
                        RecursiveCompile(current.GetChild(argument), typeof(double));
                    }
                    gen.Emit(OpCodes.Call, method);
                    break;
                }

                case JavascriptParser.NAMESPACE_ID:
                {
                    // External variable: assign it the next FunctionValues[] slot on
                    // first sight, then emit functionValues[index].DoubleVal(doc).
                    if (!externalsMap.TryGetValue(text, out int index))
                    {
                        externalsMap[text] = index = externalsMap.Count;
                    }
                    gen.Emit(OpCodes.Nop);
                    gen.Emit(OpCodes.Ldarg_2);
                    gen.Emit(OpCodes.Ldc_I4, index);
                    gen.Emit(OpCodes.Ldelem_Ref);
                    gen.Emit(OpCodes.Ldarg_1);
                    gen.Emit(OpCodes.Callvirt, DOUBLE_VAL_METHOD);
                    break;
                }

                case JavascriptParser.HEX:
                {
                    PushInt64(Convert.ToInt64(text, 16));
                    break;
                }

                case JavascriptParser.OCTAL:
                {
                    PushInt64(Convert.ToInt64(text, 8));
                    break;
                }

                case JavascriptParser.DECIMAL:
                {
                    //.NET Port. This is a bit hack-y but was needed since .NET can't perform bitwise ops on longs & doubles
                    // When the whole expression contains any bitwise/shift operator,
                    // literals are loaded as integers instead of doubles.
                    var bitwiseOps = new[]{ ">>","<<","&","~","|","^"};
                    if (bitwiseOps.Any(s => sourceText.Contains(s)))
                    {
                        int val;
                        if (int.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out val))
                        {
                            gen.Emit(OpCodes.Ldc_I4, val);
                        }
                        else
                        {
                            gen.Emit(OpCodes.Ldc_I8,long.Parse(text, CultureInfo.InvariantCulture));
                            gen.Emit(OpCodes.Conv_Ovf_U4_Un);
                        }
                    }
                    else
                    {
                        gen.Emit(OpCodes.Ldc_R8, double.Parse(text, CultureInfo.InvariantCulture));
                    }
                    break;
                }

                case JavascriptParser.AT_NEGATE:
                {
                    RecursiveCompile(current.GetChild(0), typeof(double));
                    gen.Emit(OpCodes.Neg);
                    break;
                }

                // Arithmetic: both operands as double, result converted back to double.
                case JavascriptParser.AT_ADD:
                {
                    PushArith(OpCodes.Add, current, expected);
                    break;
                }

                case JavascriptParser.AT_SUBTRACT:
                {
                    PushArith(OpCodes.Sub, current, expected);
                    break;
                }

                case JavascriptParser.AT_MULTIPLY:
                {
                    PushArith(OpCodes.Mul, current, expected);
                    break;
                }

                case JavascriptParser.AT_DIVIDE:
                {
                    PushArith(OpCodes.Div, current, expected);
                    break;
                }

                case JavascriptParser.AT_MODULO:
                {
                    PushArith(OpCodes.Rem, current, expected);
                    break;
                }

                // Shifts: operands compiled as int, result widened to double.
                case JavascriptParser.AT_BIT_SHL:
                {
                    PushShift(OpCodes.Shl, current);
                    break;
                }

                case JavascriptParser.AT_BIT_SHR:
                {
                    PushShift(OpCodes.Shr, current);
                    break;
                }

                case JavascriptParser.AT_BIT_SHU:
                {
                    PushShift(OpCodes.Shr_Un, current);
                    break;
                }

                // Bitwise logic: operands compiled as long, result widened to double.
                case JavascriptParser.AT_BIT_AND:
                {
                    PushBitwise(OpCodes.And, current);
                    break;
                }

                case JavascriptParser.AT_BIT_OR:
                {
                    PushBitwise(OpCodes.Or, current);
                    break;
                }

                case JavascriptParser.AT_BIT_XOR:
                {
                    PushBitwise(OpCodes.Xor, current);
                    break;
                }

                case JavascriptParser.AT_BIT_NOT:
                {
                    RecursiveCompile(current.GetChild(0), typeof(long));
                    gen.Emit(OpCodes.Not);
                    gen.Emit(OpCodes.Conv_R8);
                    break;
                }

                // Comparisons yield 0.0/1.0; <=, >= and != are emitted as the
                // negation (Xor with 1) of the opposite comparison.
                case JavascriptParser.AT_COMP_EQ:
                {
                    PushCond(OpCodes.Ceq, current, expected);
                    break;
                }

                case JavascriptParser.AT_COMP_NEQ:
                {
                    PushCondEq(OpCodes.Ceq, current, expected);
                    break;
                }

                case JavascriptParser.AT_COMP_LT:
                {
                    PushCond(OpCodes.Clt, current, expected);
                    break;
                }

                case JavascriptParser.AT_COMP_GT:
                {
                    PushCond(OpCodes.Cgt, current, expected);
                    break;
                }

                case JavascriptParser.AT_COMP_LTE:
                {
                    PushCondEq(OpCodes.Cgt, current, expected);
                    break;
                }

                case JavascriptParser.AT_COMP_GTE:
                {
                    PushCondEq(OpCodes.Clt, current, expected);
                    break;
                }

                case JavascriptParser.AT_BOOL_NOT:
                {
                    // !x  ==  (x == 0), widened to double.
                    RecursiveCompile(current.GetChild(0), typeof(int));
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Ceq);
                    gen.Emit(OpCodes.Conv_R8);
                    break;
                }

                case JavascriptParser.AT_BOOL_AND:
                {
                    // a && b via De Morgan: !( !a || !b ) — note both operands are
                    // always evaluated (no short-circuit in the emitted IL).
                    RecursiveCompile(current.GetChild(0), typeof(int));
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Ceq);
                    RecursiveCompile(current.GetChild(1), typeof(int));
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Ceq);
                    gen.Emit(OpCodes.Or);
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Ceq);
                    gen.Emit(OpCodes.Conv_R8);
                    break;
                }

                case JavascriptParser.AT_BOOL_OR:
                {
                    // a || b as (a != 0) | (b != 0); both operands always evaluated.
                    RecursiveCompile(current.GetChild(0), typeof(int));
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Ceq);
                    gen.Emit(OpCodes.Ldc_I4_1);
                    gen.Emit(OpCodes.Xor);
                    RecursiveCompile(current.GetChild(1), typeof(int));
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Ceq);
                    gen.Emit(OpCodes.Ldc_I4_1);
                    gen.Emit(OpCodes.Xor);
                    gen.Emit(OpCodes.Or);
                    gen.Emit(OpCodes.Ldc_I4_1);
                    gen.Emit(OpCodes.Ceq);
                    gen.Emit(OpCodes.Conv_R8);
                    break;
                }

                case JavascriptParser.AT_COND_QUE:
                {
                    // cond ? a : b with branch labels; only the taken branch executes.
                    Label condFalse = gen.DefineLabel();
                    Label condEnd = gen.DefineLabel();
                    RecursiveCompile(current.GetChild(0), typeof(int));
                    gen.Emit(OpCodes.Ldc_I4_0);
                    gen.Emit(OpCodes.Beq,condFalse);
                    RecursiveCompile(current.GetChild(1), expected);
                    gen.Emit(OpCodes.Br_S,condEnd);
                    gen.MarkLabel(condFalse);
                    RecursiveCompile(current.GetChild(2), expected);
                    gen.MarkLabel(condEnd);
                    break;
                }

                default:
                {
                    throw new InvalidOperationException("Unknown operation specified: (" + current.Text + ").");
                }
            }
        }

        // Emits the given comparison, then inverts its 0/1 result (Xor 1) and
        // widens to double — used for !=, <= and >=.
        private void PushCondEq(OpCode opCode, ITree current, Type expected)
        {
            RecursiveCompile(current.GetChild(0), expected);
            RecursiveCompile(current.GetChild(1), expected);
            gen.Emit(opCode);
            gen.Emit(OpCodes.Ldc_I4_1);
            gen.Emit(OpCodes.Xor);
            gen.Emit(OpCodes.Conv_R8);
        }

        // Binary arithmetic over doubles.
        private void PushArith(OpCode op, ITree current, Type expected)
        {
            PushBinaryOp(op, current, typeof(double), typeof(double));
        }

        // Shift operator: both operands as int.
        private void PushShift(OpCode op, ITree current)
        {
            PushBinaryShiftOp(op, current, typeof(int), typeof(int));
        }

        // Compiles both operands with the given target types, applies the shift,
        // and widens the result to double.
        private void PushBinaryShiftOp(OpCode op, ITree current, Type arg1, Type arg2)
        {
            gen.Emit(OpCodes.Nop);
            RecursiveCompile(current.GetChild(0), arg1);
            RecursiveCompile(current.GetChild(1), arg2);
            gen.Emit(op);
            gen.Emit(OpCodes.Conv_R8);
        }

        // Bitwise operator: both operands as long.
        private void PushBitwise(OpCode op, ITree current)
        {
            PushBinaryOp(op, current, typeof(long), typeof(long));
        }

        // Generic binary operator emitter; result is always widened to double.
        private void PushBinaryOp(OpCode op, ITree current, Type arg1, Type arg2)
        {
            gen.Emit(OpCodes.Nop);
            RecursiveCompile(current.GetChild(0), arg1);
            RecursiveCompile(current.GetChild(1), arg2);
            gen.Emit(op);
            gen.Emit(OpCodes.Conv_R8);
        }

        // Emits a comparison whose 0/1 result is widened to double.
        private void PushCond(OpCode opCode, ITree current, Type expected)
        {
            RecursiveCompile(current.GetChild(0), expected);
            RecursiveCompile(current.GetChild(1), expected);
            gen.Emit(opCode);
            gen.Emit(OpCodes.Conv_R8);
        }

        /// <summary>
        /// NOTE: This was pushLong() in Lucene
        /// </summary>
        // Loads a 64-bit literal; left as an integer when the expression uses "<<"
        // (so shift operands stay integral), otherwise widened to double.
        private void PushInt64(long i)
        {
            gen.Emit(OpCodes.Ldc_I8,i);
            if (!sourceText.Contains("<<"))
            {
                gen.Emit(OpCodes.Conv_R8);
            }
        }

        // Closes the Evaluate IL stream and registers it as the override of
        // Expression.Evaluate on the dynamic type.
        private void EndCompile()
        {
            gen.Emit(OpCodes.Ret);
            dynamicType.DefineMethodOverride(evalMethod, EVALUATE_METHOD);
        }

        // Runs the ANTLR lexer/parser over the source text and returns the AST.
        // Parse failures surface as ArgumentException wrapping the RecognitionException.
        private ITree GetAntlrComputedExpressionTree()
        {
            ICharStream input = new ANTLRStringStream(sourceText);
            JavascriptLexer lexer = new JavascriptLexer(input);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            JavascriptParser parser = new JavascriptParser(tokens);
            try
            {
                return parser.Expression().Tree;
            }
            catch (RecognitionException re)
            {
                throw new ArgumentException(re.Message, re);
            }
        }

        /// <summary>The default set of functions available to expressions.</summary>
        /// <remarks>
        /// The default set of functions available to expressions.
        /// <para/>
        /// See the <see cref="Lucene.Net.Expressions.JS">package documentation</see> for a list.
        /// </remarks>
        public static readonly IDictionary<string, MethodInfo> DEFAULT_FUNCTIONS = LoadDefaultFunctions();

        // Builds the default function table from the bundled settings: each entry is
        // "TypeName, MethodName, Arity" and must resolve to a public static method
        // taking `arity` doubles and returning double.
        private static IDictionary<string, MethodInfo> LoadDefaultFunctions() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            IDictionary<string, MethodInfo> map = new Dictionary<string, MethodInfo>();
            try
            {
                foreach (var property in GetDefaultSettings())
                {
                    string[] vals = property.Value.Split(',').TrimEnd();
                    if (vals.Length != 3)
                    {
                        // NOTE(review): throwing the base Exception type here (and below)
                        // makes these failures hard to catch selectively.
                        throw new Exception("Error reading Javascript functions from settings");
                    }
                    string typeName = vals[0];
                    Type clazz;

                    if (vals[0].Contains("Lucene.Net"))
                    {
                        // Lucene types need an explicit assembly qualifier to resolve.
                        clazz = GetType(vals[0] + ", Lucene.Net");
                    }
                    else
                    {
                        clazz = GetType(typeName);
                    }

                    string methodName = vals[1].Trim();
                    int arity = int.Parse(vals[2], CultureInfo.InvariantCulture);
                    Type[] args = new Type[arity];
                    Arrays.Fill(args, typeof(double));
                    MethodInfo method = clazz.GetMethod(methodName, args);
                    CheckFunction(method);
                    map[property.Key] = method;
                }
            }
            catch (Exception e)
            {
                throw new Exception("Cannot resolve function", e);
            }
            return map.AsReadOnly();
        }

        // Resolves a type by name; swallows resolution failures and returns null
        // (the subsequent GetMethod call will then fail inside the try above).
        private static Type GetType(string typeName)
        {
            try
            {
                return Type.GetType(typeName, true);
            }
            catch
            {
                return null;
            }
        }

        // Reads the default-function definitions: from an embedded .properties
        // resource on .NET Standard, from app settings otherwise.
        private static IDictionary<string, string> GetDefaultSettings()
        {
#if NETSTANDARD
            var settings = new Dictionary<string, string>();

            var type = typeof(JavascriptCompiler);
            using (var reader = new StreamReader(type.FindAndGetManifestResourceStream(type.Name + ".properties")))
            {
                string line;
                // NOTE(review): parsing stops at the first blank line, not only at EOF —
                // presumably the resource has no interior blank lines; confirm if edited.
                while(!string.IsNullOrWhiteSpace(line = reader.ReadLine()))
                {
                    if (line.StartsWith("#", StringComparison.Ordinal) || !line.Contains('='))
                    {
                        continue;
                    }

                    var parts = line.Split('=').Select(x => x.Trim()).ToArray();
                    settings[parts[0]] = parts[1];
                }
            }

            return settings;
#else
            var props = Properties.Settings.Default;
            return props.Properties
                .Cast<SettingsProperty>()
                .ToDictionary(key => key.Name, value => props[value.Name].ToString());
#endif
        }

        // Validates that a candidate function has the signature the emitter
        // requires: public static, all-double parameters, double return.
        private static void CheckFunction(MethodInfo method)
        {
            // do some checks if the signature is "compatible":
            if (!(method.IsStatic))
            {
                throw new ArgumentException(method + " is not static.");
            }
            if (!(method.IsPublic))
            {
                throw new ArgumentException(method + " is not public.");
            }
            if (!method.DeclaringType.IsPublic)
            {
                //.NET Port. Inner class is being returned as not public even when declared public
                if (method.DeclaringType.IsNestedAssembly)
                {
                    throw new ArgumentException(method.DeclaringType.FullName + " is not public.");
                }
            }
            if (method.GetParameters().Any(parmType => parmType.ParameterType != (typeof(double))))
            {
                throw new ArgumentException(method + " must take only double parameters");
            }
            if (method.ReturnType != typeof(double))
            {
                throw new ArgumentException(method + " does not return a double.");
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Fixtures.AcceptanceTestsBodyComplex { using Microsoft.Rest; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.IO; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// Array operations. /// </summary> public partial class Array : IServiceOperations<AutoRestComplexTestService>, IArray { /// <summary> /// Initializes a new instance of the Array class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public Array(AutoRestComplexTestService client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the AutoRestComplexTestService /// </summary> public AutoRestComplexTestService Client { get; private set; } /// <summary> /// Get complex types with array property /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<HttpOperationResponse<ArrayWrapper>> GetValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetValid", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "complex/array/valid").ToString(); // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", 
_statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse<ArrayWrapper>(); _result.Request = _httpRequest; _result.Response = _httpResponse; // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<ArrayWrapper>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Put complex types with array property /// </summary> /// <param name='array'> /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<HttpOperationResponse> PutValidWithHttpMessagesAsync(IList<string> array = default(IList<string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { ArrayWrapper complexBody = new ArrayWrapper(); if (array != null) { complexBody.Array = array; } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("complexBody", complexBody); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "PutValid", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "complex/array/valid").ToString(); // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("PUT"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(complexBody != null) { _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(complexBody, Client.SerializationSettings); _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Send Request if (_shouldTrace) { 
ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Get complex types with array property which is empty /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<HttpOperationResponse<ArrayWrapper>> GetEmptyWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetEmpty", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "complex/array/empty").ToString(); // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse<ArrayWrapper>(); _result.Request = _httpRequest; 
_result.Response = _httpResponse; // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<ArrayWrapper>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Put complex types with array property which is empty /// </summary> /// <param name='array'> /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <return> /// A response object containing the response body and response headers. 
/// </return> public async Task<HttpOperationResponse> PutEmptyWithHttpMessagesAsync(IList<string> array = default(IList<string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { ArrayWrapper complexBody = new ArrayWrapper(); if (array != null) { complexBody.Array = array; } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("complexBody", complexBody); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "PutEmpty", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "complex/array/empty").ToString(); // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("PUT"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(complexBody != null) { _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(complexBody, Client.SerializationSettings); _httpRequest.Content = new System.Net.Http.StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Send Request if (_shouldTrace) { 
ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Get complex types with array property while server doesn't provide a /// response payload /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="ErrorException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<HttpOperationResponse<ArrayWrapper>> GetNotProvidedWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetNotProvided", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "complex/array/notprovided").ToString(); // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new HttpOperationResponse<ArrayWrapper>(); _result.Request = 
_httpRequest; _result.Response = _httpResponse; // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<ArrayWrapper>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
using MySqlConnector; using System; using System.Collections.Generic; using System.Data; using System.Data.Common; using System.Threading.Tasks; using Telegram.Bot.Types; using Telegram.Bot.Types.Enums; namespace MagicNewCardsBot { public class Database { private static String _connectionString; public static readonly Int32 MAX_CARDS = 200; public enum CardStatus { NotFound = 0, Complete = 1, WithoutRarity = 2, NotSent = 3 } public static void SetConnectionString(String connectionString) { _connectionString = connectionString; } #region Update Methods async public static Task UpdateIsSentAsync(Card card, Boolean isSent) { using MySqlConnection conn = new(_connectionString); if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using (MySqlCommand cmd = conn.CreateCommand()) { cmd.CommandText = @"UPDATE ScryfallCard SET IsCardSent = @IsCardSent WHERE FullUrlWebSite = @FullUrlWebSite"; cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@FullUrlWebSite", DbType = DbType.StringFixedLength, Value = card.FullUrlWebSite, }); cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@IsCardSent", DbType = DbType.Boolean, Value = isSent, }); await cmd.ExecuteNonQueryAsync(); } if (conn.State == ConnectionState.Open) { conn.Close(); } } async public static Task UpdateHasRarityAsync(Card card, Boolean hasRarity) { using MySqlConnection conn = new(_connectionString); if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using (MySqlCommand cmd = conn.CreateCommand()) { cmd.CommandText = @"UPDATE ScryfallCard SET HasRarity = @HasRarity WHERE FullUrlWebSite = @FullUrlWebSite"; cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@FullUrlWebSite", DbType = DbType.StringFixedLength, Value = card.FullUrlWebSite, }); cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@HasRarity", DbType = DbType.Boolean, Value = hasRarity, }); await cmd.ExecuteNonQueryAsync(); } if (conn.State == ConnectionState.Open) { conn.Close(); } } async 
public static Task UpdateWantedRaritiesForChatAsync(Chat chat, string wantedRarities) { using MySqlConnection conn = new(_connectionString); if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using (MySqlCommand cmd = conn.CreateCommand()) { cmd.CommandText = @"UPDATE Chat SET WantedRarities = @WantedRarities WHERE ChatId = @ChatId"; if (wantedRarities == "ALL") { cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@WantedRarities", DbType = DbType.StringFixedLength, Value = DBNull.Value, }); } else { cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@WantedRarities", DbType = DbType.String, Value = wantedRarities, }); } cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@ChatId", DbType = DbType.Int64, Value = chat.Id, }); await cmd.ExecuteNonQueryAsync(); } if (conn.State == ConnectionState.Open) { conn.Close(); } } #endregion #region Is In Methods async public static Task<Boolean> IsExtraSideInDatabase(Card mainCard, Boolean isSent) { using MySqlConnection conn = new(_connectionString); Int64 count = -1; if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } foreach (Card card in mainCard.ExtraSides) { if (!string.IsNullOrWhiteSpace(card.FullUrlWebSite)) { using MySqlCommand cmd = conn.CreateCommand(); cmd.CommandText = @"SELECT count(1) FROM ScryfallCard WHERE FullUrlWebSite = @FullUrlWebSite AND IsCardSent = @IsCardSent AND Date > @Date"; cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@FullUrlWebSite", DbType = DbType.StringFixedLength, Value = card.FullUrlWebSite, }); cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@IsCardSent", DbType = DbType.Boolean, Value = isSent, }); //dominaria workaround cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Date", MySqlDbType = MySqlDbType.DateTime, Value = new DateTime(2018, 03, 11, 0, 0, 0), //the day that scryfall sent all the new card }); using DbDataReader reader = await cmd.ExecuteReaderAsync(); while (await reader.ReadAsync()) { 
count = await reader.GetFieldValueAsync<Int64>(0); if (count > 0) return true; } } } if (conn.State == ConnectionState.Open) { conn.Close(); } return false; } async public static Task<CardStatus> GetCardStatus(Card card) { using MySqlConnection conn = new(_connectionString); int? hasRarity = null; int? isSent = null; if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using MySqlCommand cmd = conn.CreateCommand(); cmd.CommandText = @"SELECT HasRarity, IsCardSent FROM ScryfallCard WHERE FullUrlWebSite = @FullUrlWebSite AND Date > @Date"; cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@FullUrlWebSite", DbType = DbType.StringFixedLength, Value = card.FullUrlWebSite, }); //dominaria workaround cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Date", MySqlDbType = MySqlDbType.DateTime, Value = new DateTime(2018, 03, 11, 0, 0, 0), //the day that scryfall sent all the new card }); using (DbDataReader reader = await cmd.ExecuteReaderAsync()) { while (await reader.ReadAsync()) { hasRarity = await reader.GetFieldValueAsync<int>(0); isSent = await reader.GetFieldValueAsync<int>(1); } } if (conn.State == ConnectionState.Open) { conn.Close(); } if(isSent.HasValue && isSent.Value == 0) { return CardStatus.NotSent; } if (hasRarity.HasValue) { if (hasRarity.Value == 1) { return CardStatus.Complete; } else { return CardStatus.WithoutRarity; } } return CardStatus.NotFound; } async public static Task<Boolean> ChatExistsAsync(Chat chat) { using MySqlConnection conn = new(_connectionString); Int64 count = -1; if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using MySqlCommand cmd = conn.CreateCommand(); cmd.CommandText = @"SELECT count(1) FROM Chat WHERE ChatId = @ChatId"; cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@ChatId", DbType = DbType.Int64, Value = chat.Id, }); using (DbDataReader reader = await cmd.ExecuteReaderAsync()) { while (await reader.ReadAsync()) { count = await reader.GetFieldValueAsync<Int64>(0); 
} } if (conn.State == ConnectionState.Open) { conn.Close(); } return count > 0; } #endregion #region Get Methods async public static Task<List<Set>> GetAllCrawlableSetsAsync() { using MySqlConnection conn = new(_connectionString); List<Set> retList = new(); if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using (MySqlCommand cmd = conn.CreateCommand()) { cmd.CommandText = @"SELECT SetID, ifNull(SetURL,''), ifNull(SetName,'') FROM Sets WHERE ShouldCrawl = 1 ORDER BY SetID desc"; using DbDataReader reader = await cmd.ExecuteReaderAsync(); while (await reader.ReadAsync()) { Set set = new() { ID = await reader.GetFieldValueAsync<Int64>(0), URL = await reader.GetFieldValueAsync<String>(1), Name = await reader.GetFieldValueAsync<String>(2), }; retList.Add(set); } } if (conn.State == ConnectionState.Open) { conn.Close(); } return retList; } async public static IAsyncEnumerable<Chat> GetChatsAsync(string wantedRarities = null) { using MySqlConnection conn = new(_connectionString); List<Chat> retList = new(); if (conn.State != ConnectionState.Open) { await conn.OpenAsync(); } using (MySqlCommand cmd = conn.CreateCommand()) { cmd.CommandText = @"SELECT ChatId, ifNull(Title,''), ifNull(FirstName,''), ifNull(Type,'') FROM Chat WHERE IsBlocked = 0 "; if (!String.IsNullOrWhiteSpace(wantedRarities)) { cmd.CommandText += "AND WantedRarities like @WantedRarities"; cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@WantedRarities", DbType = DbType.String, Value = $"%{wantedRarities}%", }); } else { cmd.CommandText += "AND WantedRarities IS NULL"; } using DbDataReader reader = await cmd.ExecuteReaderAsync(); while (await reader.ReadAsync()) { ChatType type = (ChatType)Enum.Parse(typeof(ChatType), await reader.GetFieldValueAsync<String>(3)); Chat chat = new() { Id = await reader.GetFieldValueAsync<Int64>(0), Title = await reader.GetFieldValueAsync<String>(1), FirstName = await reader.GetFieldValueAsync<String>(2), Type = type, }; yield return chat; } } if 
(conn.State == ConnectionState.Open)
            {
                conn.Close();
            }
        }

        #endregion

        #region Insert Methods

        /// <summary>
        /// Inserts a card into the ScryfallCard table, or - when the card is
        /// already stored - refreshes its IsCardSent/HasRarity flags instead.
        /// </summary>
        /// <param name="card">Card to persist. When it is a <see cref="ScryfallCard"/> its own id is used as key, otherwise a new GUID is generated.</param>
        /// <param name="isSent">Value for the IsCardSent column.</param>
        /// <param name="hasRarity">Value for the HasRarity column.</param>
        public static async Task InsertScryfallCardAsync(Card card, bool isSent, bool hasRarity)
        {
            // Already known card: only update the flags, never insert a duplicate row.
            if (await GetCardStatus(card) != CardStatus.NotFound)
            {
                await UpdateIsSentAsync(card, isSent);
                await UpdateHasRarityAsync(card, hasRarity);
                return;
            }

            // A freshly created connection is always closed, so it can be opened
            // unconditionally; the using declaration disposes (and thereby closes)
            // it on every exit path, so no explicit Close() is needed.
            using MySqlConnection conn = new(_connectionString);
            await conn.OpenAsync();

            using (MySqlCommand cmd = conn.CreateCommand())
            {
                cmd.CommandText = @"INSERT INTO ScryfallCard (ScryfallCardId, Name, FullUrlWebSite, IsCardSent, HasRarity) VALUES (@ScryfallCardId, @Name, @FullUrlWebSite, @IsCardSent, @HasRarity )";

                // Cards that do not come from Scryfall get a synthetic primary key.
                String id = card is ScryfallCard scryfallCard ? scryfallCard.id : Guid.NewGuid().ToString();

                // NOTE(review): StringFixedLength is kept as-is for driver compatibility,
                // but these columns look variable-length - DbType.String is probably the
                // more accurate choice; confirm against the schema before changing.
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@ScryfallCardId", DbType = DbType.StringFixedLength, Value = id });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Name", DbType = DbType.StringFixedLength, Value = card.Name });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@FullUrlWebSite", DbType = DbType.StringFixedLength, Value = card.FullUrlWebSite });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@IsCardSent", DbType = DbType.Boolean, Value = isSent });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@HasRarity", DbType = DbType.Boolean, Value = hasRarity });

                await cmd.ExecuteNonQueryAsync();
            }
        }

        /// <summary>
        /// Insert new log info.
        /// </summary>
        /// <param name="methodName">Name of the method that produced the entry.</param>
        /// <param name="spoilName">Name of the spoil (if any).</param>
        /// <param name="message">Message of the log.</param>
        /// <returns>ID of the saved log row, or -1 when the insert failed.</returns>
        public static async Task<Int32> InsertLogAsync(String methodName, String spoilName, String message)
        {
            try
            {
                Int32 result = -1;
                using MySqlConnection conn = new(_connectionString);
                await conn.OpenAsync();

                using (MySqlCommand cmd = conn.CreateCommand())
                {
                    cmd.CommandText = @"INSERT INTO Log (Message, Method, SpoilName ) VALUES (@Message, @Method, @SpoilName)";
                    cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Message", DbType = DbType.StringFixedLength, Value = message });
                    cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Method", DbType = DbType.StringFixedLength, Value = methodName });
                    cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@SpoilName", DbType = DbType.StringFixedLength, Value = spoilName });

                    await cmd.ExecuteNonQueryAsync();
                    result = (Int32)cmd.LastInsertedId;
                }

                return result;
            }
            catch (Exception)
            {
                // Deliberate best-effort: logging must never take the application down.
                Console.WriteLine("Error inserting log, possible that the server was offline");
                return -1;
            }
        }

        /// <summary>
        /// Inserts a Telegram chat into the Chat table.
        /// </summary>
        /// <param name="chat">Chat to persist.</param>
        /// <returns>The auto-generated id of the inserted row.</returns>
        public static async Task<Int64> InsertChatAsync(Chat chat)
        {
            Int64 ret = -1;
            using MySqlConnection conn = new(_connectionString);
            await conn.OpenAsync();

            using (MySqlCommand cmd = conn.CreateCommand())
            {
                cmd.CommandText = @"INSERT INTO Chat (ChatId, Title, FirstName, Type) VALUES (@ChatId, @Title, @FirstName, @Type)";
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@ChatId", DbType = DbType.Int64, Value = chat.Id });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Title", DbType = DbType.StringFixedLength, Value = chat.Title });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@FirstName", DbType = DbType.StringFixedLength, Value = chat.FirstName });
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@Type", DbType = DbType.StringFixedLength, Value = chat.Type.ToString() });

                await cmd.ExecuteNonQueryAsync();
                ret = (Int64)cmd.LastInsertedId;
            }

            return ret;
        }

        #endregion

        #region Delete Methods

        /// <summary>
        /// Deletes the given chat from the Chat table.
        /// </summary>
        /// <param name="chat">Chat whose row should be removed.</param>
        /// <returns>Number of affected rows.</returns>
        public static async Task<int> DeleteFromChatAsync(Chat chat)
        {
            return await DeleteFromChatAsync(chat.Id);
        }

        /// <summary>
        /// Deletes the chat with the given id from the Chat table.
        /// </summary>
        /// <param name="chatId">Chat id of the row to remove.</param>
        /// <returns>Number of affected rows.</returns>
        public static async Task<int> DeleteFromChatAsync(long chatId)
        {
            int result;
            using MySqlConnection conn = new(_connectionString);
            await conn.OpenAsync();

            using (MySqlCommand cmd = conn.CreateCommand())
            {
                cmd.CommandText = @"DELETE FROM Chat WHERE ChatId = @ChatId";
                cmd.Parameters.Add(new MySqlParameter() { ParameterName = "@ChatId", DbType = DbType.Int64, Value = chatId });
                result = await cmd.ExecuteNonQueryAsync();
            }

            return result;
        }

        #endregion
    }
}
// Copyright 2009 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.

using System;
using System.Collections.Generic;
using NodaTime.TimeZones;
using NodaTime.Utility;

namespace NodaTime.TzdbCompiler.Tzdb
{
    /// <summary>
    /// Provides a means of programmatically creating complex time zones. Currently internal, but we
    /// may want to make it public again eventually.
    /// </summary>
    /// <remarks>
    /// <para>
    /// DateTimeZoneBuilder allows complex DateTimeZones to be constructed. Since creating a new
    /// DateTimeZone this way is a relatively expensive operation, built zones can be written to a
    /// file. Reading back the encoded data is a quick operation.
    /// </para>
    /// <para>
    /// DateTimeZoneBuilder itself is mutable and not thread-safe, but the DateTimeZone objects that
    /// it builds are thread-safe and immutable.
    /// </para>
    /// <para>
    /// It is intended that {@link NodaTime.TzdbCompiler} be used to read time zone data files,
    /// indirectly calling DateTimeZoneBuilder. The following complex example defines the
    /// America/Los_Angeles time zone, with all historical transitions:
    /// </para>
    /// <para>
    /// <example>
    ///     DateTimeZone America_Los_Angeles = new DateTimeZoneBuilder()
    ///         .AddCutover(-2147483648, 'w', 1, 1, 0, false, 0)
    ///         .SetStandardOffset(-28378000)
    ///         .SetFixedSavings("LMT", 0)
    ///         .AddCutover(1883, 'w', 11, 18, 0, false, 43200000)
    ///         .SetStandardOffset(-28800000)
    ///         .AddRecurringSavings("PDT", 3600000, 1918, 1919, 'w',  3, -1, 7, false, 7200000)
    ///         .AddRecurringSavings("PST",       0, 1918, 1919, 'w', 10, -1, 7, false, 7200000)
    ///         .AddRecurringSavings("PWT", 3600000, 1942, 1942, 'w',  2,  9, 0, false, 7200000)
    ///         .AddRecurringSavings("PPT", 3600000, 1945, 1945, 'u',  8, 14, 0, false, 82800000)
    ///         .AddRecurringSavings("PST",       0, 1945, 1945, 'w',  9, 30, 0, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1948, 1948, 'w',  3, 14, 0, false, 7200000)
    ///         .AddRecurringSavings("PST",       0, 1949, 1949, 'w',  1,  1, 0, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1950, 1966, 'w',  4, -1, 7, false, 7200000)
    ///         .AddRecurringSavings("PST",       0, 1950, 1961, 'w',  9, -1, 7, false, 7200000)
    ///         .AddRecurringSavings("PST",       0, 1962, 1966, 'w', 10, -1, 7, false, 7200000)
    ///         .AddRecurringSavings("PST",       0, 1967, 2147483647, 'w', 10, -1, 7, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1967, 1973, 'w', 4, -1,  7, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1974, 1974, 'w', 1,  6,  0, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1975, 1975, 'w', 2, 23,  0, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1976, 1986, 'w', 4, -1,  7, false, 7200000)
    ///         .AddRecurringSavings("PDT", 3600000, 1987, 2147483647, 'w', 4, 1, 7, true, 7200000)
    ///         .ToDateTimeZone("America/Los_Angeles");
    /// </example>
    /// </para>
    /// <para>
    /// Original name: DateTimeZoneBuilder.
    /// </para>
    /// </remarks>
    internal sealed class DateTimeZoneBuilder
    {
        // Each rule set covers the period from the previous cutover (or the beginning
        // of time) up to its own upper limit; the last one extends to the end of time.
        private readonly IList<ZoneRecurrenceCollection> ruleSets = new List<ZoneRecurrenceCollection>();

        /// <summary>
        /// Gets the last rule set in this builder. If there are currently no rule sets,
        /// one that spans all of time is created and returned.
        /// </summary>
        /// <value>The last rule set.</value>
        private ZoneRecurrenceCollection LastRuleSet
        {
            get
            {
                if (ruleSets.Count == 0)
                {
                    AddEndOfTimeRuleSet();
                }
                return ruleSets[ruleSets.Count - 1];
            }
        }

        /// <summary>
        /// Adds a cutover for added rules.
        /// </summary>
        /// <remarks>
        /// A cutover is a point where the standard offset from GMT/UTC changed. This occurs mostly
        /// pre-1900. The standard offset at the cutover defaults to 0.
        /// Call <see cref="DateTimeZoneBuilder.SetStandardOffset"/> afterwards to change it.
        /// </remarks>
        /// <param name="year">The year of cutover.</param>
        /// <param name="yearOffset">The offset into the year of the cutover.</param>
        /// <returns>This <see cref="DateTimeZoneBuilder"/> for chaining.</returns>
        public DateTimeZoneBuilder AddCutover(int year, ZoneYearOffset yearOffset)
        {
            Preconditions.CheckNotNull(yearOffset, "yearOffset");
            // The cutover terminates the previous rule set (if any); a fresh
            // end-of-time rule set then collects subsequently added rules.
            if (ruleSets.Count > 0)
            {
                LastRuleSet.SetUpperLimit(year, yearOffset);
            }
            AddEndOfTimeRuleSet();
            return this;
        }

        /// <summary>
        /// Sets the standard offset to use for newly added rules until the next cutover is added.
        /// </summary>
        /// <param name="standardOffset">The standard offset.</param>
        /// <returns>This <see cref="DateTimeZoneBuilder"/> for chaining.</returns>
        public DateTimeZoneBuilder SetStandardOffset(Offset standardOffset)
        {
            LastRuleSet.StandardOffset = standardOffset;
            return this;
        }

        /// <summary>
        /// Sets a fixed savings rule at the cutover.
        /// </summary>
        /// <param name="nameKey">The name key of new rule.</param>
        /// <param name="savings">The <see cref="Duration"/> to add to standard offset.</param>
        /// <returns>This <see cref="DateTimeZoneBuilder"/> for chaining.</returns>
        public DateTimeZoneBuilder SetFixedSavings(String nameKey, Offset savings)
        {
            LastRuleSet.SetFixedSavings(nameKey, savings);
            return this;
        }

        /// <summary>
        /// Adds a recurring daylight saving time rule.
        /// </summary>
        /// <param name="recurrence">The zone recurrence defining the recurring savings.</param>
        /// <returns>This <see cref="DateTimeZoneBuilder"/> for chaining.</returns>
        public DateTimeZoneBuilder AddRecurringSavings(ZoneRecurrence recurrence)
        {
            Preconditions.CheckNotNull(recurrence, "recurrence");
            // A recurrence whose year range is empty (FromYear > ToYear) is silently ignored.
            if (recurrence.FromYear <= recurrence.ToYear)
            {
                LastRuleSet.AddRule(recurrence);
            }
            return this;
        }

        /// <summary>
        /// Processes all the rules and builds a DateTimeZone.
        /// </summary>
        /// <param name="zoneId">Time zone ID to assign</param>
        public DateTimeZone ToDateTimeZone(String zoneId)
        {
            Preconditions.CheckNotNull(zoneId, "zoneId");

            var transitions = new List<ZoneTransition>();
            DateTimeZone tailZone = null;
            Instant instant = Instant.BeforeMinValue;

            // TODO: See whether PartialZoneIntervalMap would help to tidy this up.
            int ruleSetCount = ruleSets.Count;
            bool tailZoneSeamValid = false;
            // Walk each rule set in order, materializing its transitions from the point
            // where the previous rule set stopped.
            for (int i = 0; i < ruleSetCount; i++)
            {
                var ruleSet = ruleSets[i];
                var transitionIterator = ruleSet.Iterator(instant);
                ZoneTransition nextTransition = transitionIterator.First();
                if (nextTransition == null)
                {
                    continue;
                }
                AddTransition(transitions, nextTransition);

                while ((nextTransition = transitionIterator.Next()) != null)
                {
                    if (AddTransition(transitions, nextTransition))
                    {
                        if (tailZone != null)
                        {
                            // Got the extra transition before DaylightSavingsTimeZone.
                            // This final transition has a valid start point and offset, but
                            // we don't know where it ends - which is fine, as the tail zone will
                            // take over.
                            tailZoneSeamValid = true;
                            break;
                        }
                    }
                    if (tailZone == null && i == ruleSetCount - 1)
                    {
                        tailZone = transitionIterator.BuildTailZone(zoneId);
                        // If tailZone is not null, don't break out of main loop until at least one
                        // more transition is calculated. This ensures a correct 'seam' to the
                        // DaylightSavingsTimeZone.
                    }
                }

                instant = ruleSet.GetUpperLimit(transitionIterator.Savings);
            }

            // Simple case where we don't have a trailing daylight saving zone.
            if (tailZone == null)
            {
                switch (transitions.Count)
                {
                    case 0:
                        return new FixedDateTimeZone(zoneId, Offset.Zero);
                    case 1:
                        return new FixedDateTimeZone(zoneId, transitions[0].WallOffset, transitions[0].Name);
                    default:
                        var ret = CreatePrecalculatedDateTimeZone(zoneId, transitions, Instant.AfterMaxValue, null);
                        return ret.IsCachable() ? CachedDateTimeZone.ForZone(ret) : ret;
                }
            }

            // Sanity check
            if (!tailZoneSeamValid)
            {
                throw new InvalidOperationException("Invalid time zone data for id " + zoneId + "; no valid transition before tail zone");
            }

            // The final transition should not be used for a zone interval,
            // although it should have the same offset etc as the tail zone for its starting point.
            var lastTransition = transitions[transitions.Count - 1];
            var firstTailZoneInterval = tailZone.GetZoneInterval(lastTransition.Instant);
            if (lastTransition.StandardOffset != firstTailZoneInterval.StandardOffset ||
                lastTransition.WallOffset != firstTailZoneInterval.WallOffset ||
                lastTransition.Savings != firstTailZoneInterval.Savings ||
                lastTransition.Name != firstTailZoneInterval.Name)
            {
                throw new InvalidOperationException(
                    string.Format("Invalid seam to tail zone in time zone {0}; final transition {1} different to first tail zone interval {2}",
                        zoneId, lastTransition, firstTailZoneInterval));
            }

            transitions.RemoveAt(transitions.Count - 1);
            var zone = CreatePrecalculatedDateTimeZone(zoneId, transitions, lastTransition.Instant, tailZone);
            return zone.IsCachable() ? CachedDateTimeZone.ForZone(zone) : zone;
        }

        /// <summary>
        /// Converts the accumulated transitions into a <see cref="PrecalculatedDateTimeZone"/>:
        /// each interval runs from its transition to the next one (or to the tail zone start for the last).
        /// </summary>
        private static PrecalculatedDateTimeZone CreatePrecalculatedDateTimeZone(string id, IList<ZoneTransition> transitions,
            Instant tailZoneStart, DateTimeZone tailZone)
        {
            // Convert the transitions to intervals
            int size = transitions.Count;
            var intervals = new ZoneInterval[size];
            for (int i = 0; i < size; i++)
            {
                var transition = transitions[i];
                var endInstant = i == size - 1 ? tailZoneStart : transitions[i + 1].Instant;
                intervals[i] = new ZoneInterval(transition.Name, transition.Instant, endInstant, transition.WallOffset, transition.Savings);
            }
            return new PrecalculatedDateTimeZone(id, intervals, tailZone);
        }

        /// <summary>
        /// Adds the given transition to the transition list if it represents a new transition.
        /// </summary>
        /// <param name="transitions">The list of <see cref="ZoneTransition"/> to add to.</param>
        /// <param name="transition">The transition to add.</param>
        /// <returns><c>true</c> if the transition was added.</returns>
        private static bool AddTransition(IList<ZoneTransition> transitions, ZoneTransition transition)
        {
            int transitionCount = transitions.Count;
            if (transitionCount == 0)
            {
                transitions.Add(transition);
                return true;
            }

            ZoneTransition lastTransition = transitions[transitionCount - 1];
            if (!transition.IsTransitionFrom(lastTransition))
            {
                return false;
            }

            // A transition after the "beginning of time" one will always be valid.
            if (lastTransition.Instant == Instant.BeforeMinValue)
            {
                transitions.Add(transition);
                return true;
            }

            Offset lastOffset = transitions.Count < 2 ? Offset.Zero : transitions[transitions.Count - 2].WallOffset;
            Offset newOffset = lastTransition.WallOffset;
            // If the local time just before the new transition is the same as the local time just
            // before the previous one, just replace the last transition with new one.
            // TODO(Post-V1): It's not clear what this is doing... work it out and give an example
            LocalInstant lastLocalStart = lastTransition.Instant.Plus(lastOffset);
            LocalInstant newLocalStart = transition.Instant.Plus(newOffset);
            if (lastLocalStart == newLocalStart)
            {
                // Note: recursion re-runs the validity checks against the new "last" transition.
                transitions.RemoveAt(transitionCount - 1);
                return AddTransition(transitions, transition);
            }
            transitions.Add(transition);
            return true;
        }

        /// <summary>
        /// Adds a rule set that spans from the last one to the end of time.
        /// </summary>
        private void AddEndOfTimeRuleSet()
        {
            ruleSets.Add(new ZoneRecurrenceCollection());
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using System.IO;

namespace Trionic5Tools
{
    /// <summary>
    /// Scans a Trionic 5 binary for map values that cannot be right (fuel vs. knock
    /// map inconsistencies, boost requests above the fuel-cut limiters, axis ranges)
    /// and can optionally patch some of them in place.
    /// </summary>
    public class Trionic5Anomalies
    {
        // Symbol table of the binary under inspection; replaced on every CheckBinForAnomalies call.
        IECUFileInformation m_fileInformation = new Trionic5FileInformation();

        /// <summary>
        /// Check binary file for anomalies that cannot be right.
        /// </summary>
        /// <param name="filename">Path of the binary to inspect.</param>
        /// <param name="resume">Report sink the findings are written to.</param>
        /// <param name="fixproblems">When true, correctable anomalies are patched into the file.</param>
        /// <param name="showreport">When true, progress and findings are added to <paramref name="resume"/>.</param>
        /// <param name="fileInformation">Symbol information for the binary.</param>
        public void CheckBinForAnomalies(string filename, Trionic5Resume resume, bool fixproblems, bool showreport, IECUFileInformation fileInformation)
        {
            m_fileInformation = fileInformation;
            if (showreport)
            {
                resume.AddToResumeTable("Checking file " + Path.GetFileName(filename));
                resume.AddToResumeTable("Checking injection map against fuel knock map");
            }
            CheckInjectionMapAgainstFuelKnockMap(filename, showreport, fixproblems, resume);
            if (showreport)
            {
                resume.AddToResumeTable("Checking injection constant value");
            }
            CheckInjectionConstant(filename, showreport, resume);
            if (showreport)
            {
                // report text typo fixed ("agains" -> "against")
                resume.AddToResumeTable("Checking boost request maps against boost limiters");
            }
            CheckBoostRequestMapAgainstBoostLimiters(filename, showreport, resume);
            if (showreport)
            {
                resume.AddToResumeTable("Checking axis against maximum requested boost level");
            }
            try
            {
                CheckBoostRequestAgainstAxisRanges(filename, true, resume);
            }
            catch (Exception E)
            {
                // Best effort: a failing axis check should not abort the whole scan.
                Console.WriteLine("CheckBoostRequestAgainstAxisRanges: " + E.Message);
            }
            if (showreport)
            {
                // separator for next file... maybe
                resume.AddToResumeTable("");
            }
        }

        /// <summary>
        /// Reads <paramref name="length"/> bytes from <paramref name="filename"/> starting
        /// at <paramref name="address"/>. Addresses beyond the end of the file are wrapped
        /// back into range first.
        /// </summary>
        /// <param name="filename">Binary file to read from.</param>
        /// <param name="address">Start offset; reduced modulo the file length when too large.</param>
        /// <param name="length">Number of bytes to read.</param>
        /// <returns>The bytes read.</returns>
        private byte[] readdatafromfile(string filename, int address, int length)
        {
            byte[] retval = new byte[length];
            // using blocks guarantee the handles are released even when a read throws
            // (the original code leaked the stream on exception).
            using (FileStream fsi1 = File.OpenRead(filename))
            using (BinaryReader br1 = new BinaryReader(fsi1))
            {
                // NOTE(review): repeated subtraction wraps the address into the file;
                // presumably this maps mirrored flash addresses to file offsets - confirm.
                while (address > fsi1.Length) address -= (int)fsi1.Length;
                fsi1.Position = address;
                for (int i = 0; i < length; i++)
                {
                    retval[i] = br1.ReadByte();
                }
            }
            return retval;
        }

        /// <summary>
        /// Returns the length in bytes of the given symbol, or 1 when the symbol is unknown.
        /// </summary>
        private int GetSymbolLength(string symbolname)
        {
            // The per-cylinder knock counters are always treated as 2 bytes,
            // regardless of what the symbol table says.
            if (symbolname == "Knock_count_cyl1" || symbolname == "Knock_count_cyl2" ||
                symbolname == "Knock_count_cyl3" || symbolname == "Knock_count_cyl4")
            {
                return 2;
            }
            foreach (SymbolHelper sh in m_fileInformation.SymbolCollection)
            {
                if (sh.Varname == symbolname)
                {
                    return sh.Length;
                }
            }
            return 1;
        }

        /// <summary>
        /// Returns the file offset of the given symbol, or 0 when the symbol is unknown.
        /// </summary>
        private int GetSymbolAddress(string symbolname)
        {
            int retval = 0;
            foreach (SymbolHelper sh in m_fileInformation.SymbolCollection)
            {
                if (sh.Varname == symbolname)
                {
                    // Translate the flash address to a file offset, then wrap into range.
                    retval = sh.Flash_start_address - m_fileInformation.Filelength;
                    while (retval > m_fileInformation.Filelength) retval -= m_fileInformation.Filelength;
                }
            }
            return retval;
        }

        /// <summary>
        /// Returns the highest raw byte value found in either boost request map
        /// (manual Tryck_mat or automatic Tryck_mat_a).
        /// </summary>
        private int GetMaxBoostValue(string filename)
        {
            byte[] boost_request_map = readdatafromfile(filename, GetSymbolAddress("Tryck_mat!"), GetSymbolLength("Tryck_mat!"));
            byte[] boost_request_map_aut = readdatafromfile(filename, GetSymbolAddress("Tryck_mat_a!"), GetSymbolLength("Tryck_mat_a!"));
            int retval = 0;
            foreach (byte b in boost_request_map)
            {
                if (b > retval) retval = b;
            }
            foreach (byte b in boost_request_map_aut)
            {
                if (b > retval) retval = b;
            }
            return retval;
        }

        /// <summary>
        /// Check maximum boost request against maximum in fuel_map_x_axis and ign_map_0_x_axis
        /// </summary>
        /// <param name="filename"></param>
        /// <param name="showreport"></param>
        private void CheckBoostRequestAgainstAxisRanges(string filename, bool showreport, Trionic5Resume resume)
        {
            // Detect_map_x_axis
            //Temp_reduce_x_st
            //Ign_map_0_x_axis
            //Ign_map_2_x_axis
            //Ign_map_3_x_axis
            //Ign_map_6_x_axis
            //Ign_map_7_x_axis
            // Misfire_map_x_axis
            //Fuel_knock_xaxis
            //Fuel_map_xaxis
            //Overs_tab_xaxis
            // Every pressure axis must reach at least the maximum requested boost,
            // otherwise the tables cannot represent the requested operating point.
            int maxboostvalue = GetMaxBoostValue(filename);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Detect_map_x_axis!", resume);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Ign_map_0_x_axis!", resume);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Ign_map_2_x_axis!", resume);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Ign_map_3_x_axis!", resume);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Ign_map_6_x_axis!", resume);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Ign_map_7_x_axis!", resume);
            CheckSixteenBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Misfire_map_x_axis!", resume);
            CheckEigthBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Fuel_knock_xaxis!", resume);
            CheckEigthBitAxisAgainstBoostPressure(filename, showreport, maxboostvalue, "Fuel_map_xaxis!", resume);
        }

        /// <summary>
        /// Reports when no entry of an 8-bit axis reaches the maximum boost request value.
        /// </summary>
        private void CheckEigthBitAxisAgainstBoostPressure(string filename, bool showreport, int maxboostvalue, string symbolname, Trionic5Resume resume)
        {
            byte[] axis = readdatafromfile(filename, GetSymbolAddress(symbolname), GetSymbolLength(symbolname));
            bool found = false;
            foreach (byte b in axis)
            {
                int i = (int)b;
                if (i >= maxboostvalue) found = true;
            }
            if (!found && showreport)
            {
                resume.AddToResumeTable(symbolname + " does not support the maximum boost request value!");
            }
        }

        /// <summary>
        /// Reports when no entry of a 16-bit big-endian axis reaches the maximum boost request value.
        /// </summary>
        private void CheckSixteenBitAxisAgainstBoostPressure(string filename, bool showreport, int maxboostvalue, string symbolname, Trionic5Resume resume)
        {
            byte[] axis = readdatafromfile(filename, GetSymbolAddress(symbolname), GetSymbolLength(symbolname));
            if (axis.Length < 2) return;
            bool found = false;
            // NOTE(review): assumes an even axis length; an odd length would read past
            // the end at t + 1 - confirm symbol lengths are always even.
            for (int t = 0; t < axis.Length; t += 2)
            {
                // big-endian 16-bit value: high byte first
                int i = Convert.ToInt32(axis.GetValue(t));
                i *= 256;
                i += Convert.ToInt32(axis.GetValue(t + 1));
                if (i >= maxboostvalue) found = true;
            }
            if (!found && showreport)
            {
                resume.AddToResumeTable(symbolname + " does not support the maximum boost request value!");
            }
        }

        /// <summary>
        /// Check boost request maps (Tryck_mat &amp; Tryck_mat_a) against limiters
        /// </summary>
        /// <param name="filename"></param>
        private void CheckBoostRequestMapAgainstBoostLimiters(string filename, bool showreport, Trionic5Resume resume)
        {
            byte[] boost_request_map = readdatafromfile(filename, GetSymbolAddress("Tryck_mat!"), GetSymbolLength("Tryck_mat!"));
            byte[] boost_request_map_aut = readdatafromfile(filename, GetSymbolAddress("Tryck_mat_a!"), GetSymbolLength("Tryck_mat_a!"));
            byte[] fuel_cut_map = readdatafromfile(filename, GetSymbolAddress("Tryck_vakt_tab!"), GetSymbolLength("Tryck_vakt_tab!"));
            byte[] boost_maps_axis = readdatafromfile(filename, GetSymbolAddress("Pwm_ind_rpm!"), GetSymbolLength("Pwm_ind_rpm!"));
            // rpm axis entries are 16 bit, so two bytes per row
            int numberofrows = (boost_maps_axis.Length / 2);
            int numberofcolumns = boost_request_map.Length / numberofrows;
            // NOTE(review): fct indexes the fuel-cut table and is also used as the row
            // index into the request maps - assumes both have the same row count.
            for (int fct = 0; fct < fuel_cut_map.Length; fct++)
            {
                byte boostlimit = (byte)fuel_cut_map.GetValue(fct);
                for (int ct = 0; ct < numberofcolumns; ct++)
                {
                    // look in tryck_mat & tryck_mat_a
                    byte boost_req_value = (byte)boost_request_map.GetValue((fct * numberofcolumns) + ct);
                    if (boost_req_value >= boostlimit)
                    {
                        if (showreport)
                        {
                            resume.AddToResumeTable("Found anomaly! Boost request value higher than boost limiter (fuel cut value) in Tryck_mat");
                            resume.AddToResumeTable("--> row: " + fct.ToString() + " column: " + ct.ToString());
                        }
                    }
                    boost_req_value = (byte)boost_request_map_aut.GetValue((fct * numberofcolumns) + ct);
                    if (boost_req_value >= boostlimit)
                    {
                        if (showreport)
                        {
                            resume.AddToResumeTable("Found anomaly! Boost request value higher than boost limiter (fuel cut value) in Tryck_mat_a");
                            resume.AddToResumeTable("--> row: " + fct.ToString() + " column: " + ct.ToString());
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Checks whether fuel knock map values are higher for every load/site than the
        /// injection map; optionally raises the knock map values that are not.
        /// </summary>
        /// <param name="filename"></param>
        private void CheckInjectionMapAgainstFuelKnockMap(string filename, bool showreport, bool fixproblems, Trionic5Resume resume)
        {
            bool changes_made = false;
            byte[] fuel_injection_map = readdatafromfile(filename, GetSymbolAddress("Insp_mat!"), GetSymbolLength("Insp_mat!"));
            byte[] fuel_knock_map = readdatafromfile(filename, GetSymbolAddress("Fuel_knock_mat!"), GetSymbolLength("Fuel_knock_mat!"));
            byte[] fuel_injection_xaxis = readdatafromfile(filename, GetSymbolAddress("Fuel_map_xaxis!"), GetSymbolLength("Fuel_map_xaxis!"));
            byte[] fuel_knock_xaxis = readdatafromfile(filename, GetSymbolAddress("Fuel_knock_xaxis!"), GetSymbolLength("Fuel_knock_xaxis!"));
            byte[] fuel_knock_yaxis = readdatafromfile(filename, GetSymbolAddress("Fuel_map_yaxis!"), GetSymbolLength("Fuel_map_yaxis!"));
            // t5.2 insp_mat = 16 * 16
            // t5.2 fuel_knock_mat = 8 * 16
            // t5.5 insp_mat = 16 * 16
            // t5.5 fuel_knock_mat = 12 * 16
            int fuel_numberrows = 16; // always 16 rows
            int fuel_numbercolumns = fuel_injection_map.Length / fuel_numberrows;
            int numberrows = 16; // always 16 rows
            int numbercolumns = fuel_knock_map.Length / numberrows;
            // handle all rows
            for (int rt = 0; rt < numberrows; rt++)
            {
                // and all columns
                for (int ct = 0; ct < numbercolumns; ct++)
                {
                    byte fuel_knock_byte = (byte)fuel_knock_map.GetValue((rt * numbercolumns) + ct);
                    // fetch pressure & rpm for this cell
                    byte fuel_knock_pressure = (byte)fuel_knock_xaxis.GetValue(ct);
                    // now find the nearest column in fuel_map_xaxis
                    double diff = 255;
                    int idx_found = -1;
                    for (int xt = 0; xt < fuel_injection_xaxis.Length; xt++)
                    {
                        byte fuel_map_pressure_temp = (byte)fuel_injection_xaxis.GetValue(xt);
                        double tempdiff = Math.Abs((double)fuel_knock_pressure - (double)fuel_map_pressure_temp);
                        if (tempdiff < diff)
                        {
                            idx_found = xt;
                            diff = tempdiff;
                        }
                    }
                    if (idx_found >= 0)
                    {
                        // found it, we can compare
                        byte fuel_map_byte = (byte)fuel_injection_map.GetValue((rt * fuel_numbercolumns) + idx_found);
                        if (fuel_map_byte >= fuel_knock_byte)
                        {
                            // ANOMALY!!
                            if (showreport)
                            {
                                resume.AddToResumeTable("Found anomaly! Fuel injection map value larger than or equal to knock map");
                            }
                            if (fixproblems)
                            {
                                if (fuel_knock_byte < 255)
                                {
                                    // Raise the knock value to the injection value plus a
                                    // margin of up to 5 counts, saturating at 255.
                                    fuel_knock_byte = fuel_map_byte;
                                    for (int corrt = 0; corrt < 5; corrt++)
                                    {
                                        if (fuel_knock_byte < 255) fuel_knock_byte++;
                                    }
                                    // write to symbol
                                    fuel_knock_map.SetValue(fuel_knock_byte, (rt * numbercolumns) + ct);
                                    resume.AddToResumeTable("Adjusted a value in the knock fuel matrix");
                                    changes_made = true;
                                }
                            }
                            // Convert the raw axis value to a displayable pressure using the
                            // boost map scaling.
                            double pressure = (double)fuel_knock_pressure;
                            pressure *= GetMapCorrectionFactor("Tryck_mat!");
                            pressure += GetMapCorrectionOffset("Tryck_mat!");
                            try
                            {
                                // y axis holds 16-bit big-endian rpm/10 values
                                int rpm = Convert.ToInt32(fuel_knock_yaxis.GetValue(rt * 2)) * 256;
                                rpm += Convert.ToInt32(fuel_knock_yaxis.GetValue((rt * 2) + 1));
                                rpm *= 10;
                                if (showreport)
                                {
                                    resume.AddToResumeTable("--> pressure = " + pressure.ToString() + " bar, rpm = " + rpm.ToString());
                                }
                            }
                            catch (Exception E)
                            {
                                Console.WriteLine(E.Message);
                            }
                        }
                    }
                }
            }
            if (fixproblems && changes_made)
            {
                // Persist the adjusted knock map back into the binary.
                savedatatobinary(GetSymbolAddress("Fuel_knock_mat!"), GetSymbolLength("Fuel_knock_mat!"), fuel_knock_map, filename);
            }
        }

        /// <summary>
        /// Writes <paramref name="length"/> bytes of <paramref name="data"/> to
        /// <paramref name="filename"/> at <paramref name="address"/>. A non-positive
        /// address (unknown symbol) is silently ignored.
        /// </summary>
        private void savedatatobinary(int address, int length, byte[] data, string filename)
        {
            if (address <= 0) return;
            FileStream fsi1 = File.OpenWrite(filename);
            BinaryWriter bw1 = new BinaryWriter(fsi1);
            fsi1.Position = address;
            for (int i = 0; i < length; i++)
            {
                bw1.Write((byte)data.GetValue(i));
            }
            fsi1.Flush();
            bw1.Close();
            fsi1.Close();
            fsi1.Dispose();
        }

        /// <summary>
        /// Returns the display offset applied to raw values of the given symbol.
        /// NOTE(review): branch order matters - longer/more specific prefixes are
        /// checked before shorter ones; keep the order when editing.
        /// </summary>
        private double GetMapCorrectionOffset(string symbolname)
        {
            double returnvalue = 0;
            if (symbolname.StartsWith("Ign_map_0!")) returnvalue = 0;
            else if (symbolname.StartsWith("Insp_mat!")) returnvalue
= 0.5; // 128/256
            else if (symbolname.StartsWith("Idle_fuel_korr!")) returnvalue = 0.5; // 128/256
            //else if (symbolname.StartsWith("Accel_konst!")) returnvalue = 0.75; // 128/256
            else if (symbolname.StartsWith("Fuel_knock_mat!")) returnvalue = 0.5; // 128/256
            else if (symbolname.StartsWith("Adapt_korr!")) returnvalue = 0.75; // 384/512
            else if (symbolname == "Adapt_korr") returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_ref!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_ref")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_injfaktor!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_inj_imat!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_injfaktor_high!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_injfaktor_low!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_korr_high!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Adapt_korr_low!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Cyl_komp!")) returnvalue = 0.75; // 384/512
            else if (symbolname.StartsWith("Lambdaint!")) returnvalue = 0.75; // 1/512
            else if (symbolname.StartsWith("Lacc_konst!")) returnvalue = 1; // 256/256
            else if (symbolname.StartsWith("Lret_konst!")) returnvalue = 1; // 256/256
            else if (symbolname.StartsWith("Accel_konst!")) returnvalue = 1; // 128/256
            else if (symbolname.StartsWith("Retard_konst!")) returnvalue = 1; // 128/256
            else if (symbolname.StartsWith("Hot_start_fak!")) returnvalue = 1; // 128/256
            else if (symbolname.StartsWith("Ret_fuel_fak!")) returnvalue = 1; // 128/256
            else if (symbolname.StartsWith("Ret_fuel_tab!")) returnvalue = 1; // 128/256
            else if (symbolname.StartsWith("Ign_map_4!")) returnvalue = 0;
            //else if (symbolname.StartsWith("Insp_mat!")) returnvalue = 0;
            //else if (symbolname.StartsWith("Accel_konst!")) returnvalue = 0;
            else if (symbolname.StartsWith("Del_mat!")) returnvalue = 0;
            // NOTE(review): "Tryck_mat_a!" must stay before "Tryck_mat!" - StartsWith
            // on the shorter prefix would otherwise swallow the automatic-gearbox map.
            else if (symbolname.StartsWith("Tryck_mat_a!")) returnvalue = -1;
            else if (symbolname.StartsWith("P_Manifold")) returnvalue = -1;
            else if (symbolname.StartsWith("Eftersta_fak!")) returnvalue = 1;
            else if (symbolname.StartsWith("Eftersta_fak2!")) returnvalue = 1;
            else if (symbolname.StartsWith("After_fcut_tab!")) returnvalue = 1;
            else if (symbolname.StartsWith("Hot_tab!")) returnvalue = 1;
            //else if (symbolname.StartsWith("Fload_tab!")) returnvalue = 1;
            else if (symbolname.StartsWith("Fload_tab!")) returnvalue = 1;
            else if (symbolname.StartsWith("Tryck_mat!")) returnvalue = -1;
            else if (symbolname.StartsWith("Knock_press_tab!")) returnvalue = -1;
            else if (symbolname.StartsWith("Knock_press!")) returnvalue = -1;
            else if (symbolname.StartsWith("Limp_tryck_konst!")) returnvalue = -1;
            else if (symbolname.StartsWith("Idle_tryck!")) returnvalue = -1;
            else if (symbolname.StartsWith("Tryck_vakt_tab!")) returnvalue = -1;
            else if (symbolname.StartsWith("Regl_tryck")) returnvalue = -1;
            else if (symbolname.StartsWith("Pressure map scaled for 3 bar mapsensor")) returnvalue = -1;
            else if (symbolname.StartsWith("Pressure map (AUT) scaled for 3 bar mapsensor")) returnvalue = -1;
            else if (symbolname.StartsWith("Knock_press_lim")) returnvalue = -1; // bar
            else if (symbolname.StartsWith("Turbo_knock_press")) returnvalue = -1; // bar
            else if (symbolname.StartsWith("Turbo_knock_tab")) returnvalue = -1;
            else if (symbolname.StartsWith("Open_loop_knock")) returnvalue = -1;
            else if (symbolname.StartsWith("Open_loop")) returnvalue = -1;
            else if (symbolname.StartsWith("Sond_heat_tab")) returnvalue = -1;
            else if (symbolname.StartsWith("Reg_last!")) returnvalue = 0;
            else if (symbolname.StartsWith("Idle_st_last!")) returnvalue = -1;
            //else if (symbolname.StartsWith("Last_temp_st!")) returnvalue = -1;
            else if (symbolname.StartsWith("Lam_minlast!")) returnvalue = -1;
            else if (symbolname.StartsWith("Lam_laststeg!")) returnvalue = -1;
            else if (symbolname.StartsWith("Grund_last!")) returnvalue = -1;
            else if (symbolname.StartsWith("Max_ratio_aut!")) returnvalue = -1;
            else if (symbolname.StartsWith("Diag_speed_load!")) returnvalue = -1;
            return returnvalue;
        }

        /// <summary>
        /// Returns the scale factor applied to raw values of the given symbol
        /// (1 when the symbol is unknown). As in <c>GetMapCorrectionOffset</c>,
        /// the order of the prefix checks is significant.
        /// </summary>
        private double GetMapCorrectionFactor(string symbolname)
        {
            double returnvalue = 1;
            if (symbolname.StartsWith("Ign_map_0!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Apc_knock_tab!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Knock_lim_tab!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Knock_average")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Knock_lim")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Knock_ang_dec!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Knock_average_tab!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Knock_press_lim")) returnvalue = 0.01; // bar
            else if (symbolname.StartsWith("Turbo_knock_press")) returnvalue = 0.01; // bar
            else if (symbolname.StartsWith("Ign_idle_angle_start")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Batt_korr_tab!")) returnvalue = 0.004; // 1/250
            else if (symbolname.StartsWith("Gear_st!")) returnvalue = 0.1; // 1/ ((256*256) / 260)
            else if (symbolname.StartsWith("Start_insp!")) returnvalue = 0.004; // 1/ ((256*256) / 260)
            //else if (symbolname.StartsWith("AC_wait_on!")) returnvalue = 0.25; // 1/ ((256*256) / 260)
            //else if (symbolname.StartsWith("AC_wait_off!")) returnvalue = 0.25; // 1/ ((256*256) / 260)
            else if (symbolname.StartsWith("Startvev_fak!")) returnvalue = 0.125; // 1/8
            else if (symbolname.StartsWith("After_fcut_tab!")) returnvalue = 0.0009765625; // 1/1024
            else if (symbolname.StartsWith("Hot_tab!")) returnvalue = 0.0009765625; // 1/1024
            //else if (symbolname.StartsWith("Hot_decr!")) returnvalue = 10; // 1/1024
            else if (symbolname.StartsWith("Idle_fuel_korr!")) returnvalue = 0.00390625; // 1/256
            else if (symbolname.StartsWith("Insp_mat!")) returnvalue = 0.00390625; // 1/256
            else if (symbolname.StartsWith("Fuel_knock_mat!")) returnvalue = 0.00390625; // 1/256
            else if (symbolname.StartsWith("Fload_tab!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname == "Adapt_korr") returnvalue = 0.001953125; // 1/512
            else if (symbolname == "Adapt_korr!") returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_ref!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_ref")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Accel_konst!")) returnvalue = 0.00390625;//returnvalue = 0.0078125; // 1/128
            else if (symbolname.StartsWith("Start_proc!")) returnvalue = 0.0078125; // 1/128
            else if (symbolname.StartsWith("Cyl_komp!")) returnvalue = 0.001953125; // 1/512 //Cylinder Compensation: (Cyl_komp+384)/512
            else if (symbolname.StartsWith("Retard_konst!")) returnvalue = 0.00390625;//returnvalue = 0.0078125; // 1/128
            else if (symbolname.StartsWith("Lacc_konst!")) returnvalue = 0.00390625; // 1/256 //0.0078125; // 1/128
            else if (symbolname.StartsWith("Lret_konst!")) returnvalue = 0.00390625; // 1/256 //0.0078125; // 1/128
            else if (symbolname.StartsWith("Hot_start_fak!")) returnvalue = 0.0009765625; // 128/256
            else if (symbolname.StartsWith("Ret_fuel_fak!")) returnvalue = 0.0009765625; // 128/256
            else if (symbolname.StartsWith("Ret_fuel_tab!")) returnvalue = 0.0009765625; // 128/256
            else if (symbolname.StartsWith("Adapt_injfaktor!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_inj_imat!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_injfaktor_high!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_injfaktor_low!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_korr_high!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Adapt_korr_low!")) returnvalue = 0.001953125; // 1/512
            else if (symbolname.StartsWith("Eftersta_fak!")) returnvalue = 0.0078125;// 0.01;
            else if (symbolname.StartsWith("Eftersta_fak2!")) returnvalue = 0.0078125;//0.01;
            else if (symbolname.StartsWith("Ign_map_1!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_2!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_3!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_4!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_5!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_6!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_7!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Ign_map_8!")) returnvalue = 0.1;
            else if (symbolname.StartsWith("Lamd_tid!")) returnvalue = 10;
            else if (symbolname.StartsWith("Del_mat!")) returnvalue = 3;
            else if (symbolname.StartsWith("Reg_kon_mat"))
            {
                // Scaling depends on the map size (0x80-byte variant is unscaled).
                if (GetSymbolLength(symbolname) == 0x80)
                {
                    returnvalue = 1;
                }
                else
                {
                    returnvalue = 0.1;
                }
            }
            //else if (symbolname.StartsWith("Insp_mat!")) returnvalue = 1;
            //else if (symbolname.StartsWith("Del_mat!")) returnvalue = 1;
            else if (symbolname.StartsWith("Tryck_mat_a!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Tryck_mat!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("P_Manifold10")) returnvalue = 0.001;
            else if (symbolname.StartsWith("P_Manifold")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Max_ratio_aut!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Diag_speed_load!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Reg_last!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Idle_st_last!")) returnvalue = 0.01;
            //else if (symbolname.StartsWith("Last_temp_st!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Lam_minlast!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Lam_laststeg!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Grund_last!")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Turbo_knock_tab")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Open_loop_knock")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Open_loop")) returnvalue = 0.01;
            else if (symbolname.StartsWith("Sond_heat_tab")) returnvalue = 0.01;
else if (symbolname.StartsWith("Knock_press_tab!")) returnvalue = 0.01; else if (symbolname.StartsWith("Knock_press!")) returnvalue = 0.01; else if (symbolname.StartsWith("Limp_tryck_konst!")) returnvalue = 0.01; else if (symbolname.StartsWith("Idle_tryck!")) returnvalue = 0.01; else if (symbolname.StartsWith("Tryck_vakt_tab!")) returnvalue = 0.01; else if (symbolname.StartsWith("Regl_tryck")) returnvalue = 0.01; else if (symbolname.StartsWith("Pressure map scaled for 3 bar mapsensor")) returnvalue = 0.012; else if (symbolname.StartsWith("Pressure map (AUT) scaled for 3 bar mapsensor")) returnvalue = 0.012; else if (symbolname.StartsWith("Rpm_max!")) returnvalue = 10; else if (symbolname.StartsWith("Kadapt_rpm_high!")) returnvalue = 10; else if (symbolname.StartsWith("Kadapt_rpm_low!")) returnvalue = 10; else if (symbolname.StartsWith("Derivata_grans!")) returnvalue = 10; else if (symbolname.StartsWith("Min_rpm_closed_loop!")) returnvalue = 10; else if (symbolname.StartsWith("Min_rpm_gadapt!")) returnvalue = 10; else if (symbolname.StartsWith("Max_rpm_gadapt!")) returnvalue = 10; else if (symbolname.StartsWith("Ign_idle_angle!")) returnvalue = 0.1; else if (symbolname.StartsWith("Derivata_fuel_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Ret_delta_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Ret_down_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Ret_up_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Open_all_varv!")) returnvalue = 10; else if (symbolname.StartsWith("Open_varv!")) returnvalue = 10; else if (symbolname.StartsWith("Start_detekt_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Press_rpm_lim!")) returnvalue = 10; else if (symbolname.StartsWith("Rpm_dif!")) returnvalue = 10; else if (symbolname.StartsWith("Rpm_perf_max!")) returnvalue = 10; else if (symbolname.StartsWith("Rpm_perf_min!")) returnvalue = 10; else if (symbolname.StartsWith("Diag_speed_rpm!")) returnvalue = 10; else if 
(symbolname.StartsWith("Dash_rpm_axis!")) returnvalue = 10; else if (symbolname.StartsWith("Idle_st_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Idle_rpm_tab!")) returnvalue = 10; else if (symbolname.StartsWith("Knock_wind_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("PMCal_RpmIdleNomRefLim!")) returnvalue = 10; else if (symbolname.StartsWith("Pwm_ind_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Ap_max_rpm!")) returnvalue = 10; else if (symbolname.StartsWith("Lam_rpm_steg!")) returnvalue = 10; // ?? else if (symbolname.StartsWith("Last_varv_st!")) returnvalue = 10; // ?? else if (symbolname.StartsWith("Lambdaint!")) returnvalue = 0.001953125; // 1/512 ///else if (symbolname.StartsWith("Pulses_per_rev!")) returnvalue = 0.1; return returnvalue; } /// <summary> /// checks injector constant value: should be over 5 and under 25 /// </summary> /// <param name="filename"></param> private void CheckInjectionConstant(string filename, bool showreport, Trionic5Resume resume) { byte[] injector_constant = readdatafromfile(filename, GetSymbolAddress( "Inj_konst!"), GetSymbolLength( "Inj_konst!")); byte b = (byte)injector_constant.GetValue(0); if (b <= 5 || b > 25) { if (showreport) { resume.AddToResumeTable("Found anomaly! Injector constant has an invalid value: " + b.ToString()); } } } } }
using System;
using System.Collections.Generic;
using System.Text;
using System.Text; // NOTE(review): duplicate using directive — harmless, could be removed
using System.Runtime.InteropServices;
using gView.Framework.Data;
using gView.Framework.Geometry;
using gView.SDEWrapper.x64;

namespace gView.Interoperability.Sde.x64
{
    /// <summary>
    /// Builds and owns the native ArcSDE query objects (SE_QUERYINFO, SE_SHAPE,
    /// SE_FILTER) for one table query. All native allocation happens in the
    /// constructor; failures never throw — instead the handles are freed via
    /// Cleanup() and <see cref="ErrorMessage"/> is set. Callers must Dispose().
    /// </summary>
    internal class SdeQueryInfo : IDisposable
    {
        private SE_QUERYINFO_64 _queryInfo = new SE_QUERYINFO_64();
        private SE_SHAPE_64 _shape = new SE_SHAPE_64();
        private System.Int64 _err_no = 0;                 // last SDE status code; 0 == success
        private string _errMsg = "";
        private SE_FILTER _seFilter = new SE_FILTER();     // spatial constraint, only filled when _isSpatial
        private bool _isSpatial = false;
        private List<IField> _queryFields = new List<IField>();  // fields in the same order they were added to the query

        /// <summary>
        /// Translates a gView query filter into native SDE query structures.
        /// On error the object is left cleaned up and ErrorMessage describes the cause.
        /// </summary>
        public SdeQueryInfo(ArcSdeConnection connection, ITableClass tc, IQueryFilter filter)
        {
            if (tc == null) return;
            try
            {
                // Spatial constraint: only when the filter carries a geometry and the
                // table is a feature class hosted in an SDE dataset.
                if (filter is ISpatialFilter && ((ISpatialFilter)filter).Geometry != null &&
                    tc is IFeatureClass && tc.Dataset is SdeDataset)
                {
                    SE_ENVELOPE maxExtent = new SE_ENVELOPE();
                    // Coordinate reference of the layer; maxExtent receives the layer's maximum envelope.
                    SE_COORDREF_64 coordRef = ((SdeDataset)tc.Dataset).GetSeCoordRef(connection, tc.Name, ((IFeatureClass)tc).ShapeFieldName, ref maxExtent);
                    if (((SdeDataset)tc.Dataset).lastErrorMsg != "") return;

                    _isSpatial = true;
                    _err_no = Wrapper92_64.SE_shape_create(coordRef, ref _shape);
                    // coordRef is only needed to create the shape; release it immediately.
                    ((SdeDataset)tc.Dataset).FreeSeCoordRef(coordRef);
                    if (_err_no != 0) return;

                    // Historical envelope-based implementation, superseded by SE_GenerateGeometry below:
                    //IEnvelope env = ((ISpatialFilter)filter).Geometry.Envelope;
                    //SE_ENVELOPE seEnvelope = new SE_ENVELOPE();
                    //seEnvelope.minx = Math.Max(env.minx, maxExtent.minx);
                    //seEnvelope.miny = Math.Max(env.miny, maxExtent.miny);
                    //seEnvelope.maxx = Math.Min(env.maxx, maxExtent.maxx);
                    //seEnvelope.maxy = Math.Min(env.maxy, maxExtent.maxy);
                    //if (seEnvelope.minx == seEnvelope.maxx && seEnvelope.miny == seEnvelope.maxy)
                    //{
                    //    /* fudge a rectangle so we have a valid one for generate_rectangle */
                    //    /* FIXME: use the real shape for the query and set the filter_type
                    //       to be an appropriate type */
                    //    seEnvelope.minx = seEnvelope.minx - 0.001;
                    //    seEnvelope.maxx = seEnvelope.maxx + 0.001;
                    //    seEnvelope.miny = seEnvelope.miny - 0.001;
                    //    seEnvelope.maxy = seEnvelope.maxy + 0.001;
                    //}
                    //_err_no = Wrapper92_64.SE_shape_generate_rectangle(ref seEnvelope, _shape);

                    // Convert the filter geometry into the native shape (clipped to maxExtent).
                    _err_no = gView.SDEWrapper.x64.Functions.SE_GenerateGeometry(_shape, ((ISpatialFilter)filter).Geometry, maxExtent);
                    if (_err_no != 0) return;

                    _seFilter.shape = _shape;

                    /* set spatial constraint column and table */
                    // Fixed-length native character buffers — pad with NULs to the SDE limits.
                    _seFilter.table = tc.Name.PadRight(CONST.SE_QUALIFIED_TABLE_NAME, '\0'); ; // NOTE(review): stray empty statement, harmless
                    _seFilter.column = ((IFeatureClass)tc).ShapeFieldName.PadRight(CONST.SE_MAX_COLUMN_LEN, '\0');

                    /* set a couple of other spatial constraint properties */
                    // Envelope-only intersection can use the cheaper grid test; otherwise full geometry test.
                    _seFilter.method = (((ISpatialFilter)filter).SpatialRelation == spatialRelation.SpatialRelationEnvelopeIntersects) ? CONST.SM_ENVP_BY_GRID /*CONST.SM_AI*/ : CONST.SM_AI;
                    _seFilter.filter_type = CONST.SE_SHAPE_FILTER;
                    _seFilter.truth = true; // True;
                }

                _err_no = Wrapper92_64.SE_queryinfo_create(ref _queryInfo);
                if (_err_no != 0) return;

                _err_no = Wrapper92_64.SE_queryinfo_set_tables(_queryInfo, 1, new string[] { tc.Name }, null);
                if (_err_no != 0) return;

                string[] fields;
                if (filter.SubFields == "" || filter.SubFields == "*" || filter.SubFields == null)
                {
                    // No explicit sub-field list: query every field, qualified as "table.field".
                    StringBuilder subFields = new StringBuilder();
                    foreach (IField field in tc.Fields)
                    {
                        if (subFields.Length != 0) subFields.Append(" ");
                        subFields.Append(tc.Name + "." + field.name);
                        _queryFields.Add(field);
                    }
                    fields = subFields.ToString().Split(' ');
                }
                else
                {
                    // Space-separated sub-field list; "distinct(<name>)" wrappers are unwrapped
                    // before field lookup.
                    fields = filter.SubFields.Split(' ');
                    foreach (string fieldname in fields)
                    {
                        string fname = fieldname;
                        if (fieldname.ToLower().IndexOf("distinct(") == 0)
                        {
                            fname = fieldname.Substring(9, fieldname.IndexOf(")") - 9);
                        }
                        IField field = tc.FindField(fname);
                        if (field == null)
                        {
                            _errMsg = "Can't get Field " + fname;
                            Cleanup();
                            return;
                        }
                        _queryFields.Add(field);
                    }
                }
                _err_no = Wrapper92_64.SE_queryinfo_set_columns(_queryInfo, fields.Length, fields);
                if (_err_no != 0) return;

                // Where clause: row-ID filters provide their own clause; otherwise use the filter's.
                string where = "";
                if (filter != null)
                {
                    if (filter is IRowIDFilter)
                    {
                        where = ((IRowIDFilter)filter).RowIDWhereClause;
                    }
                    else
                    {
                        where = filter.WhereClause;
                    }
                }
                if (where != "")
                {
                    _err_no = Wrapper92_64.SE_queryinfo_set_where_clause(_queryInfo, where);
                    if (_err_no != 0) return;
                }
                _err_no = Wrapper92_64.SE_queryinfo_set_query_type(_queryInfo, CONST.SE_QUERYTYPE_JSFA);
                if (_err_no != 0) return;
            }
            catch (Exception ex)
            {
                _errMsg = "SeQueryInfo:" + ex.Message + "\n" + ex.StackTrace;
                _err_no = -1;
            }
            finally
            {
                if (_err_no != 0)
                {
                    // NOTE(review): when the catch block above ran, _err_no is -1 and this
                    // line overwrites the detailed exception text with a generic SDE error
                    // message for code -1 — confirm that is intended.
                    _errMsg = Wrapper92_64.GetErrorMsg(new SE_CONNECTION_64(), _err_no);
                    Cleanup();
                }
            }
        }

        // Frees both native handles; idempotent (handles are zeroed after free).
        private void Cleanup()
        {
            if (_shape.handle != 0)
            {
                Wrapper92_64.SE_shape_free(_shape);
                _shape.handle = 0;
            }
            if (_queryInfo.handle != 0)
            {
                Wrapper92_64.SE_queryinfo_free(_queryInfo);
                _queryInfo.handle = 0;
            }
        }

        // Empty on success; otherwise the SDE or exception error text.
        public string ErrorMessage
        {
            get { return _errMsg; }
        }
        public SE_QUERYINFO_64 SeQueryInfo
        {
            get { return _queryInfo; }
        }
        // True when a spatial constraint (Filter_Shape) was built.
        public bool IsSpatial
        {
            get { return _isSpatial; }
        }
        public SE_FILTER Filter_Shape
        {
            get { return _seFilter; }
        }
        // NOTE(review): returns the same filter struct as Filter_Shape.
        public SE_FILTER Filter_Id
        {
            get { return _seFilter; }
        }
        // Fields in the order they were registered with SE_queryinfo_set_columns.
        public List<IField> QueryFields
        {
            get { return _queryFields; }
        }

        #region IDisposable Member

        public void Dispose()
        {
            Cleanup();
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Entry point for the Sse41.TestC(Vector128<Int64>, Vector128<Int64>) test:
        // runs every scenario on supported hardware, otherwise verifies the
        // PlatformNotSupportedException path.
        private static void TestCInt64()
        {
            var test = new BooleanBinaryOpTest__TestCInt64();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local works
                test.RunLclFldScenario();

                // Validates passing an instance member works
                test.RunFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    // Holds the operands (static, instance, and heap-allocated data-table copies)
    // and the validation logic for one Sse41.TestC test over Vector128<Int64>.
    public sealed unsafe class BooleanBinaryOpTest__TestCInt64
    {
        private const int VectorSize = 16;                       // bytes in a Vector128

        private const int Op1ElementCount = VectorSize / sizeof(Int64);
        private const int Op2ElementCount = VectorSize / sizeof(Int64);

        private static Int64[] _data1 = new Int64[Op1ElementCount];
        private static Int64[] _data2 = new Int64[Op2ElementCount];

        private static Vector128<Int64> _clsVar1;
        private static Vector128<Int64> _clsVar2;

        private Vector128<Int64> _fld1;
        private Vector128<Int64> _fld2;

        private BooleanBinaryOpTest__DataTable<Int64, Int64> _dataTable;

        // Populates the static operand vectors once with random data.
        static BooleanBinaryOpTest__TestCInt64()
        {
            var random = new Random();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref _clsVar1), ref Unsafe.As<Int64, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref _clsVar2), ref Unsafe.As<Int64, byte>(ref _data2[0]), VectorSize);
        }

        // Fills the instance fields, then re-randomizes _data1/_data2 so the
        // data table holds operands independent of the fields.
        public BooleanBinaryOpTest__TestCInt64()
        {
            Succeeded = true;

            var random = new Random();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref _fld1), ref Unsafe.As<Int64, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref _fld2), ref Unsafe.As<Int64, byte>(ref _data2[0]), VectorSize);

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
            _dataTable = new BooleanBinaryOpTest__DataTable<Int64, Int64>(_data1, _data2, VectorSize);
        }

        public bool IsSupported => Sse41.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            var result = Sse41.TestC(
                Unsafe.Read<Vector128<Int64>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Int64>>(_dataTable.inArray2Ptr)
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        public void RunBasicScenario_Load()
        {
            var result = Sse41.TestC(
                Sse2.LoadVector128((Int64*)(_dataTable.inArray1Ptr)),
                Sse2.LoadVector128((Int64*)(_dataTable.inArray2Ptr))
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        public void RunBasicScenario_LoadAligned()
        {
            var result = Sse41.TestC(
                Sse2.LoadAlignedVector128((Int64*)(_dataTable.inArray1Ptr)),
                Sse2.LoadAlignedVector128((Int64*)(_dataTable.inArray2Ptr))
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            var method = typeof(Sse41).GetMethod(nameof(Sse41.TestC), new Type[] { typeof(Vector128<Int64>), typeof(Vector128<Int64>) });

            if (method != null)
            {
                var result = method.Invoke(null, new object[] {
                                    Unsafe.Read<Vector128<Int64>>(_dataTable.inArray1Ptr),
                                    Unsafe.Read<Vector128<Int64>>(_dataTable.inArray2Ptr)
                             });

                ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
            }
        }

        public void RunReflectionScenario_Load()
        {
            var method = typeof(Sse41).GetMethod(nameof(Sse41.TestC), new Type[] { typeof(Vector128<Int64>), typeof(Vector128<Int64>) });

            if (method != null)
            {
                var result = method.Invoke(null, new object[] {
                                    Sse2.LoadVector128((Int64*)(_dataTable.inArray1Ptr)),
                                    Sse2.LoadVector128((Int64*)(_dataTable.inArray2Ptr))
                             });

                ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
            }
        }

        public void RunReflectionScenario_LoadAligned()
        {
            var method = typeof(Sse41).GetMethod(nameof(Sse41.TestC), new Type[] { typeof(Vector128<Int64>), typeof(Vector128<Int64>) });

            if (method != null)
            {
                var result = method.Invoke(null, new object[] {
                                    Sse2.LoadAlignedVector128((Int64*)(_dataTable.inArray1Ptr)),
                                    Sse2.LoadAlignedVector128((Int64*)(_dataTable.inArray2Ptr))
                             });

                ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
            }
        }

        public void RunClsVarScenario()
        {
            var result = Sse41.TestC(
                _clsVar1,
                _clsVar2
            );

            ValidateResult(_clsVar1, _clsVar2, result);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            var left = Unsafe.Read<Vector128<Int64>>(_dataTable.inArray1Ptr);
            var right = Unsafe.Read<Vector128<Int64>>(_dataTable.inArray2Ptr);
            var result = Sse41.TestC(left, right);

            ValidateResult(left, right, result);
        }

        public void RunLclVarScenario_Load()
        {
            var left = Sse2.LoadVector128((Int64*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadVector128((Int64*)(_dataTable.inArray2Ptr));
            var result = Sse41.TestC(left, right);

            ValidateResult(left, right, result);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            var left = Sse2.LoadAlignedVector128((Int64*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadAlignedVector128((Int64*)(_dataTable.inArray2Ptr));
            var result = Sse41.TestC(left, right);

            ValidateResult(left, right, result);
        }

        public void RunLclFldScenario()
        {
            var test = new BooleanBinaryOpTest__TestCInt64();
            var result = Sse41.TestC(test._fld1, test._fld2);

            ValidateResult(test._fld1, test._fld2, result);
        }

        public void RunFldScenario()
        {
            var result = Sse41.TestC(_fld1, _fld2);

            ValidateResult(_fld1, _fld2, result);
        }

        // On unsupported hardware the intrinsic call itself must throw.
        public void RunUnsupportedScenario()
        {
            Succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                Succeeded = true;
            }
        }

        private void ValidateResult(Vector128<Int64> left, Vector128<Int64> right, bool result, [CallerMemberName] string method = "")
        {
            Int64[] inArray1 = new Int64[Op1ElementCount];
            Int64[] inArray2 = new Int64[Op2ElementCount];

            // NOTE(review): writes through Unsafe.AsPointer on a managed array without
            // pinning — relies on no GC move between AsPointer and Write; pattern used
            // throughout these generated tests.
            Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
            Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);

            ValidateResult(inArray1, inArray2, result, method);
        }

        private void ValidateResult(void* left, void* right, bool result, [CallerMemberName] string method = "")
        {
            Int64[] inArray1 = new Int64[Op1ElementCount];
            Int64[] inArray2 = new Int64[Op2ElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);

            ValidateResult(inArray1, inArray2, result, method);
        }

        private void ValidateResult(Int64[] left, Int64[] right, bool result, [CallerMemberName] string method = "")
        {
            // Expected TestC (carry flag) semantics per this template: true exactly
            // when right has no bits set outside left, i.e. (~left & right) == 0 for
            // every element.
            var expectedResult = true;

            for (var i = 0; i < Op1ElementCount; i++)
            {
                expectedResult &= ((~left[i] & right[i]) == 0);
            }

            if (expectedResult != result)
            {
                Succeeded = false;

                Console.WriteLine($"{nameof(Sse41)}.{nameof(Sse41.TestC)}<Int64>(Vector128<Int64>, Vector128<Int64>): {method} failed:");
                Console.WriteLine($"    left: ({string.Join(", ", left)})");
                Console.WriteLine($"   right: ({string.Join(", ", right)})");
                Console.WriteLine($"  result: ({string.Join(", ", result)})");
                Console.WriteLine();
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.Azure.AcceptanceTestsHead
{
    using System.Linq;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;

    /// <summary>
    /// Test Infrastructure for AutoRest
    /// </summary>
    public partial class AutoRestHeadTestService : Microsoft.Rest.ServiceClient<AutoRestHeadTestService>, IAutoRestHeadTestService, IAzureClient
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public System.Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Credentials needed for the client to connect to Azure.
        /// </summary>
        public Microsoft.Rest.ServiceClientCredentials Credentials { get; private set; }

        /// <summary>
        /// Gets or sets the preferred language for the response.
        /// </summary>
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// Gets or sets the retry timeout in seconds for Long Running Operations.
        /// Default value is 30.
        /// </summary>
        public int? LongRunningOperationRetryTimeout { get; set; }

        /// <summary>
        /// When set to true a unique x-ms-client-request-id value is generated and
        /// included in each request. Default is true.
        /// </summary>
        public bool? GenerateClientRequestId { get; set; }

        /// <summary>
        /// Gets the IHttpSuccessOperations.
        /// </summary>
        public virtual IHttpSuccessOperations HttpSuccess { get; private set; }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestHeadTestService(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestHeadTestService(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestHeadTestService(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            // Assign after Initialize() (run by the chained ctor) so the explicit
            // base URI overrides the generated default.
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestHeadTestService(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadTestService(Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadTestService(Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadTestService(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadTestService(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// An optional partial-method to perform custom initialization.
        /// </summary>
        partial void CustomInitialize();

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            this.HttpSuccess = new HttpSuccessOperations(this);
            this.BaseUri = new System.Uri("http://localhost");
            this.AcceptLanguage = "en-US";
            this.LongRunningOperationRetryTimeout = 30;
            this.GenerateClientRequestId = true;
            SerializationSettings = new Newtonsoft.Json.JsonSerializerSettings
            {
                Formatting = Newtonsoft.Json.Formatting.Indented,
                DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
                NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
                ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
                ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
                Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
                    {
                        new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
                    }
            };
            DeserializationSettings = new Newtonsoft.Json.JsonSerializerSettings
            {
                DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
                NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
                ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
                ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
                Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
                    {
                        new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
                    }
            };
            CustomInitialize();
            // CloudError converter is added after CustomInitialize so user code cannot
            // accidentally drop Azure error deserialization.
            DeserializationSettings.Converters.Add(new Microsoft.Rest.Azure.CloudErrorJsonConverter());
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: A1023Response.txt
// NOTE(review): generated by protobuf-csharp-port. Do not hand-edit; change the
// source .proto (ErrorInfo = field 1, string; ErrorCode = field 2, int32) and
// regenerate instead.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.ProtocolBuffers;
using pbc = global::Google.ProtocolBuffers.Collections;
using pbd = global::Google.ProtocolBuffers.Descriptors;
using scg = global::System.Collections.Generic;
namespace DolphinServer.ProtoEntity {
  namespace Proto {
    // Static holder for the file descriptor and reflection accessors.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    public static partial class A1023Response {
      #region Extension registration
      public static void RegisterAllExtensions(pb::ExtensionRegistry registry) {
      }
      #endregion
      #region Static variables
      internal static pbd::MessageDescriptor internal__static_A1023Response__Descriptor;
      internal static pb::FieldAccess.FieldAccessorTable<global::DolphinServer.ProtoEntity.A1023Response, global::DolphinServer.ProtoEntity.A1023Response.Builder> internal__static_A1023Response__FieldAccessorTable;
      #endregion
      #region Descriptor
      public static pbd::FileDescriptor Descriptor {
        get { return descriptor; }
      }
      private static pbd::FileDescriptor descriptor;
      static A1023Response() {
        // Serialized FileDescriptorProto for A1023Response.txt, base64-encoded.
        byte[] descriptorData = global::System.Convert.FromBase64String(
            string.Concat(
              "ChFBMTAyM1Jlc3BvbnNlLnR4dCI1Cg1BMTAyM1Jlc3BvbnNlEhEKCUVycm9y",
              "SW5mbxgBIAEoCRIRCglFcnJvckNvZGUYAiABKAVCHKoCGURvbHBoaW5TZXJ2",
              "ZXIuUHJvdG9FbnRpdHk="));
        pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) {
          descriptor = root;
          internal__static_A1023Response__Descriptor = Descriptor.MessageTypes[0];
          internal__static_A1023Response__FieldAccessorTable =
              new pb::FieldAccess.FieldAccessorTable<global::DolphinServer.ProtoEntity.A1023Response, global::DolphinServer.ProtoEntity.A1023Response.Builder>(internal__static_A1023Response__Descriptor,
                  new string[] { "ErrorInfo", "ErrorCode", });
          pb::ExtensionRegistry registry = pb::ExtensionRegistry.CreateInstance();
          RegisterAllExtensions(registry);
          return registry;
        };
        pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
            new pbd::FileDescriptor[] {
            }, assigner);
      }
      #endregion
    }
  }
  #region Messages
  // Immutable message with ErrorInfo (string, field 1) and ErrorCode (int32, field 2).
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class A1023Response : pb::GeneratedMessage<A1023Response, A1023Response.Builder> {
    private A1023Response() { }
    private static readonly A1023Response defaultInstance = new A1023Response().MakeReadOnly();
    // Field names sorted for binary search in MergeFrom; tags array is parallel to it.
    private static readonly string[] _a1023ResponseFieldNames = new string[] { "ErrorCode", "ErrorInfo" };
    private static readonly uint[] _a1023ResponseFieldTags = new uint[] { 16, 10 };
    public static A1023Response DefaultInstance {
      get { return defaultInstance; }
    }
    public override A1023Response DefaultInstanceForType {
      get { return DefaultInstance; }
    }
    protected override A1023Response ThisMessage {
      get { return this; }
    }
    public static pbd::MessageDescriptor Descriptor {
      get { return global::DolphinServer.ProtoEntity.Proto.A1023Response.internal__static_A1023Response__Descriptor; }
    }
    protected override pb::FieldAccess.FieldAccessorTable<A1023Response, A1023Response.Builder> InternalFieldAccessors {
      get { return global::DolphinServer.ProtoEntity.Proto.A1023Response.internal__static_A1023Response__FieldAccessorTable; }
    }
    public const int ErrorInfoFieldNumber = 1;
    private bool hasErrorInfo;
    private string errorInfo_ = "";
    public bool HasErrorInfo {
      get { return hasErrorInfo; }
    }
    public string ErrorInfo {
      get { return errorInfo_; }
    }
    public const int ErrorCodeFieldNumber = 2;
    private bool hasErrorCode;
    private int errorCode_;
    public bool HasErrorCode {
      get { return hasErrorCode; }
    }
    public int ErrorCode {
      get { return errorCode_; }
    }
    public override bool IsInitialized {
      get {
        // No required fields, so any instance is initialized.
        return true;
      }
    }
    public override void WriteTo(pb::ICodedOutputStream output) {
      // Ensure the size is memoized before writing (generated-code convention).
      CalcSerializedSize();
      string[] field_names = _a1023ResponseFieldNames;
      if (hasErrorInfo) {
        output.WriteString(1, field_names[1], ErrorInfo);
      }
      if (hasErrorCode) {
        output.WriteInt32(2, field_names[0], ErrorCode);
      }
      UnknownFields.WriteTo(output);
    }
    private int memoizedSerializedSize = -1;
    public override int SerializedSize {
      get {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
        return CalcSerializedSize();
      }
    }
    private int CalcSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasErrorInfo) {
        size += pb::CodedOutputStream.ComputeStringSize(1, ErrorInfo);
      }
      if (hasErrorCode) {
        size += pb::CodedOutputStream.ComputeInt32Size(2, ErrorCode);
      }
      size += UnknownFields.SerializedSize;
      memoizedSerializedSize = size;
      return size;
    }
    public static A1023Response ParseFrom(pb::ByteString data) {
      return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
    }
    public static A1023Response ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
    }
    public static A1023Response ParseFrom(byte[] data) {
      return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
    }
    public static A1023Response ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
    }
    public static A1023Response ParseFrom(global::System.IO.Stream input) {
      return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
    }
    public static A1023Response ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
    }
    public static A1023Response ParseDelimitedFrom(global::System.IO.Stream input) {
      return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
    }
    public static A1023Response ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
      return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
    }
    public static A1023Response ParseFrom(pb::ICodedInputStream input) {
      return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
    }
    public static A1023Response ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
    }
    private A1023Response MakeReadOnly() {
      return this;
    }
    public static Builder CreateBuilder() { return new Builder(); }
    public override Builder ToBuilder() { return CreateBuilder(this); }
    public override Builder CreateBuilderForType() { return new Builder(); }
    public static Builder CreateBuilder(A1023Response prototype) {
      return new Builder(prototype);
    }
    // Mutable builder; copy-on-write against the shared read-only result.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    public sealed partial class Builder : pb::GeneratedBuilder<A1023Response, Builder> {
      protected override Builder ThisBuilder {
        get { return this; }
      }
      public Builder() {
        result = DefaultInstance;
        resultIsReadOnly = true;
      }
      internal Builder(A1023Response cloneFrom) {
        result = cloneFrom;
        resultIsReadOnly = true;
      }
      private bool resultIsReadOnly;
      private A1023Response result;
      // Clones the read-only result into a fresh mutable instance on first write.
      private A1023Response PrepareBuilder() {
        if (resultIsReadOnly) {
          A1023Response original = result;
          result = new A1023Response();
          resultIsReadOnly = false;
          MergeFrom(original);
        }
        return result;
      }
      public override bool IsInitialized {
        get { return result.IsInitialized; }
      }
      protected override A1023Response MessageBeingBuilt {
        get { return PrepareBuilder(); }
      }
      public override Builder Clear() {
        result = DefaultInstance;
        resultIsReadOnly = true;
        return this;
      }
      public override Builder Clone() {
        if (resultIsReadOnly) {
          return new Builder(result);
        } else {
          return new Builder().MergeFrom(result);
        }
      }
      public override pbd::MessageDescriptor DescriptorForType {
        get { return global::DolphinServer.ProtoEntity.A1023Response.Descriptor; }
      }
      public override A1023Response DefaultInstanceForType {
        get { return global::DolphinServer.ProtoEntity.A1023Response.DefaultInstance; }
      }
      public override A1023Response BuildPartial() {
        if (resultIsReadOnly) {
          return result;
        }
        resultIsReadOnly = true;
        return result.MakeReadOnly();
      }
      public override Builder MergeFrom(pb::IMessage other) {
        if (other is A1023Response) {
          return MergeFrom((A1023Response) other);
        } else {
          base.MergeFrom(other);
          return this;
        }
      }
      public override Builder MergeFrom(A1023Response other) {
        if (other == global::DolphinServer.ProtoEntity.A1023Response.DefaultInstance) return this;
        PrepareBuilder();
        if (other.HasErrorInfo) {
          ErrorInfo = other.ErrorInfo;
        }
        if (other.HasErrorCode) {
          ErrorCode = other.ErrorCode;
        }
        this.MergeUnknownFields(other.UnknownFields);
        return this;
      }
      public override Builder MergeFrom(pb::ICodedInputStream input) {
        return MergeFrom(input, pb::ExtensionRegistry.Empty);
      }
      public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
        PrepareBuilder();
        pb::UnknownFieldSet.Builder unknownFields = null;
        uint tag;
        string field_name;
        while (input.ReadTag(out tag, out field_name)) {
          // tag == 0 with a field name means a text-format field: map it to a
          // wire tag via binary search over the sorted field-name table.
          if(tag == 0 && field_name != null) {
            int field_ordinal = global::System.Array.BinarySearch(_a1023ResponseFieldNames, field_name, global::System.StringComparer.Ordinal);
            if(field_ordinal >= 0)
              tag = _a1023ResponseFieldTags[field_ordinal];
            else {
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
              continue;
            }
          }
          // Generated switch intentionally places default: before the field cases.
          switch (tag) {
            case 0: {
              throw pb::InvalidProtocolBufferException.InvalidTag();
            }
            default: {
              if (pb::WireFormat.IsEndGroupTag(tag)) {
                if (unknownFields != null) {
                  this.UnknownFields = unknownFields.Build();
                }
                return this;
              }
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
              break;
            }
            case 10: {
              result.hasErrorInfo = input.ReadString(ref result.errorInfo_);
              break;
            }
            case 16: {
              result.hasErrorCode = input.ReadInt32(ref result.errorCode_);
              break;
            }
          }
        }
        if (unknownFields != null) {
          this.UnknownFields = unknownFields.Build();
        }
        return this;
      }
      public bool HasErrorInfo {
        get { return result.hasErrorInfo; }
      }
      public string ErrorInfo {
        get { return result.ErrorInfo; }
        set { SetErrorInfo(value); }
      }
      public Builder SetErrorInfo(string value) {
        pb::ThrowHelper.ThrowIfNull(value, "value");
        PrepareBuilder();
        result.hasErrorInfo = true;
        result.errorInfo_ = value;
        return this;
      }
      public Builder ClearErrorInfo() {
        PrepareBuilder();
        result.hasErrorInfo = false;
        result.errorInfo_ = "";
        return this;
      }
      public bool HasErrorCode {
        get { return result.hasErrorCode; }
      }
      public int ErrorCode {
        get { return result.ErrorCode; }
        set { SetErrorCode(value); }
      }
      public Builder SetErrorCode(int value) {
        PrepareBuilder();
        result.hasErrorCode = true;
        result.errorCode_ = value;
        return this;
      }
      public Builder ClearErrorCode() {
        PrepareBuilder();
        result.hasErrorCode = false;
        result.errorCode_ = 0;
        return this;
      }
    }
    static A1023Response() {
      // Touches the descriptor holder so its static constructor runs first.
      object.ReferenceEquals(global::DolphinServer.ProtoEntity.Proto.A1023Response.Descriptor, null);
    }
  }
  #endregion
}
#endregion Designer generated code
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.DotNet.XUnitExtensions;
using System.Linq;
using System.Net.Sockets;
using System.Net.Test.Common;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;

namespace System.Net.NetworkInformation.Tests
{
    // Functional tests for System.Net.NetworkInformation.Ping. Each Send*/SendPing*
    // test pings a local address (or the local host name) and validates the reply
    // via PingResultValidator. Windows/Unix variants differ because non-root Unix
    // pings cannot carry an arbitrary payload buffer.
    public class PingTest
    {
        // xUnit diagnostic sink. NOTE(review): a public readonly field with an
        // underscore-prefixed name is unconventional; kept as-is for compatibility.
        public readonly ITestOutputHelper _output;

        // Ping subclass that records whether it was disposed via the finalizer
        // (disposing == false); presumably used by finalization tests defined
        // later in this file — confirm against the rest of the class.
        private class FinalizingPing : Ping
        {
            public static volatile bool WasFinalized;

            public static void CreateAndRelease()
            {
                new FinalizingPing();
            }

            protected override void Dispose(bool disposing)
            {
                if (!disposing)
                {
                    WasFinalized = true;
                }

                base.Dispose(disposing);
            }
        }

        public PingTest(ITestOutputHelper output)
        {
            _output = output;
        }

        // Convenience overload: validate against a single expected local address.
        private void PingResultValidator(PingReply pingReply, IPAddress localIpAddress)
        {
            PingResultValidator(pingReply, new IPAddress[] { localIpAddress });
        }

        // Asserts the reply succeeded and came from one of the expected local
        // addresses; on mismatch, logs the addresses before failing.
        private void PingResultValidator(PingReply pingReply, IPAddress[] localIpAddresses)
        {
            if (pingReply.Status == IPStatus.TimedOut && pingReply.Address.AddressFamily == AddressFamily.InterNetworkV6 && PlatformDetection.IsOSX)
            {
                // Workaround OSX ping6 bug, refer issue #15018
                return;
            }

            Assert.Equal(IPStatus.Success, pingReply.Status);
            if (localIpAddresses.Any(addr => pingReply.Address.Equals(addr)))
            {
                // response did come from expected address. Test will pass.
                return;
            }
            // We did not find response address in given list.
            // Test is going to fail. Collect some more info.
            _output.WriteLine($"Reply address {pingReply.Address} is not expected local address.");
            foreach (IPAddress address in localIpAddresses)
            {
                _output.WriteLine($"Local address {address}");
            }

            Assert.Contains(pingReply.Address, localIpAddresses);
        }

        [Fact]
        public async Task SendPingAsync_InvalidArgs()
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();
            Ping p = new Ping();

            // Null address
            AssertExtensions.Throws<ArgumentNullException>("address", () => { p.SendPingAsync((IPAddress)null); });
            AssertExtensions.Throws<ArgumentNullException>("hostNameOrAddress", () => { p.SendPingAsync((string)null); });
            AssertExtensions.Throws<ArgumentNullException>("address", () => { p.SendAsync((IPAddress)null, null); });
            AssertExtensions.Throws<ArgumentNullException>("hostNameOrAddress", () => { p.SendAsync((string)null, null); });
            AssertExtensions.Throws<ArgumentNullException>("address", () => { p.Send((IPAddress)null); });
            AssertExtensions.Throws<ArgumentNullException>("hostNameOrAddress", () => { p.Send((string)null); });

            // Invalid address
            AssertExtensions.Throws<ArgumentException>("address", () => { p.SendPingAsync(IPAddress.Any); });
            AssertExtensions.Throws<ArgumentException>("address", () => { p.SendPingAsync(IPAddress.IPv6Any); });
            AssertExtensions.Throws<ArgumentException>("address", () => { p.SendAsync(IPAddress.Any, null); });
            AssertExtensions.Throws<ArgumentException>("address", () => { p.SendAsync(IPAddress.IPv6Any, null); });
            AssertExtensions.Throws<ArgumentException>("address", () => { p.Send(IPAddress.Any); });
            AssertExtensions.Throws<ArgumentException>("address", () => { p.Send(IPAddress.IPv6Any); });

            // Negative timeout
            AssertExtensions.Throws<ArgumentOutOfRangeException>("timeout", () => { p.SendPingAsync(localIpAddress, -1); });
            AssertExtensions.Throws<ArgumentOutOfRangeException>("timeout", () => { p.SendPingAsync(TestSettings.LocalHost, -1); });
            AssertExtensions.Throws<ArgumentOutOfRangeException>("timeout", () => { p.SendAsync(localIpAddress, -1, null); });
            AssertExtensions.Throws<ArgumentOutOfRangeException>("timeout", () => { p.SendAsync(TestSettings.LocalHost, -1, null); });
            AssertExtensions.Throws<ArgumentOutOfRangeException>("timeout", () => { p.Send(localIpAddress, -1); });
            AssertExtensions.Throws<ArgumentOutOfRangeException>("timeout", () => { p.Send(TestSettings.LocalHost, -1); });

            // Null byte[]
            AssertExtensions.Throws<ArgumentNullException>("buffer", () => { p.SendPingAsync(localIpAddress, 0, null); });
            AssertExtensions.Throws<ArgumentNullException>("buffer", () => { p.SendPingAsync(TestSettings.LocalHost, 0, null); });
            AssertExtensions.Throws<ArgumentNullException>("buffer", () => { p.SendAsync(localIpAddress, 0, null, null); });
            AssertExtensions.Throws<ArgumentNullException>("buffer", () => { p.SendAsync(TestSettings.LocalHost, 0, null, null); });
            AssertExtensions.Throws<ArgumentNullException>("buffer", () => { p.Send(localIpAddress, 0, null); });
            AssertExtensions.Throws<ArgumentNullException>("buffer", () => { p.Send(TestSettings.LocalHost, 0, null); });

            // Too large byte[]
            AssertExtensions.Throws<ArgumentException>("buffer", () => { p.SendPingAsync(localIpAddress, 1, new byte[65501]); });
            AssertExtensions.Throws<ArgumentException>("buffer", () => { p.SendPingAsync(TestSettings.LocalHost, 1, new byte[65501]); });
            AssertExtensions.Throws<ArgumentException>("buffer", () => { p.SendAsync(localIpAddress, 1, new byte[65501], null); });
            AssertExtensions.Throws<ArgumentException>("buffer", () => { p.SendAsync(TestSettings.LocalHost, 1, new byte[65501], null); });
            AssertExtensions.Throws<ArgumentException>("buffer", () => { p.Send(localIpAddress, 1, new byte[65501]); });
            AssertExtensions.Throws<ArgumentException>("buffer", () => { p.Send(TestSettings.LocalHost, 1, new byte[65501]); });
        }

        [Theory]
        [InlineData(AddressFamily.InterNetwork)]
        [InlineData(AddressFamily.InterNetworkV6)]
        public void SendPingWithIPAddress(AddressFamily addressFamily)
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress(addressFamily);
            if (localIpAddress == null)
            {
                // No local address for given address family.
                return;
            }

            SendBatchPing(
                (ping) => ping.Send(localIpAddress),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                });
        }

        [Theory]
        [InlineData(AddressFamily.InterNetwork)]
        [InlineData(AddressFamily.InterNetworkV6)]
        public async Task SendPingAsyncWithIPAddress(AddressFamily addressFamily)
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync(addressFamily);
            if (localIpAddress == null)
            {
                // No local address for given address family.
                return;
            }

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                });
        }

        [Theory]
        [InlineData(AddressFamily.InterNetwork)]
        [InlineData(AddressFamily.InterNetworkV6)]
        public void SendPingWithIPAddress_AddressAsString(AddressFamily addressFamily)
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress(addressFamily);
            if (localIpAddress == null)
            {
                // No local address for given address family.
                return;
            }

            SendBatchPing(
                (ping) => ping.Send(localIpAddress.ToString()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                });
        }

        [Fact]
        public async Task SendPingAsyncWithIPAddress_AddressAsString()
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress.ToString()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                });
        }

        [Fact]
        public void SendPingWithIPAddressAndTimeout()
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress();

            SendBatchPing(
                (ping) => ping.Send(localIpAddress, TestSettings.PingTimeout),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                });
        }

        [Fact]
        public async Task SendPingAsyncWithIPAddressAndTimeout()
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress, TestSettings.PingTimeout),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithIPAddressAndTimeoutAndBuffer()
        {
            byte[] buffer = TestSettings.PayloadAsBytes;
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress();

            SendBatchPing(
                (ping) => ping.Send(localIpAddress, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithIPAddressAndTimeoutAndBuffer()
        {
            byte[] buffer = TestSettings.PayloadAsBytes;
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithIPAddressAndTimeoutAndBuffer_Unix()
        {
            byte[] buffer = TestSettings.PayloadAsBytes;
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress();

            SendBatchPing(
                (ping) => ping.Send(localIpAddress, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(localIpAddress.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithIPAddressAndTimeoutAndBuffer_Unix()
        {
            byte[] buffer = TestSettings.PayloadAsBytes;
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(localIpAddress.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithIPAddressAndTimeoutAndBufferAndPingOptions()
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress();

            var options = new PingOptions();
            byte[] buffer = TestSettings.PayloadAsBytes;
            SendBatchPing(
                (ping) => ping.Send(localIpAddress, TestSettings.PingTimeout, buffer, options),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                    Assert.InRange(pingReply.RoundtripTime, 0, long.MaxValue);
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithIPAddressAndTimeoutAndBufferAndPingOptions()
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            var options = new PingOptions();
            byte[] buffer = TestSettings.PayloadAsBytes;
            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress, TestSettings.PingTimeout, buffer, options),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                    Assert.InRange(pingReply.RoundtripTime, 0, long.MaxValue);
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Theory]
        [InlineData(AddressFamily.InterNetwork)]
        [InlineData(AddressFamily.InterNetworkV6)]
        public void SendPingWithIPAddressAndTimeoutAndBufferAndPingOptions_Unix(AddressFamily addressFamily)
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress(addressFamily);
            if (localIpAddress == null)
            {
                // No local address for given address family.
                return;
            }

            byte[] buffer = TestSettings.PayloadAsBytes;
            SendBatchPing(
                (ping) => ping.Send(localIpAddress, TestSettings.PingTimeout, buffer, new PingOptions()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(localIpAddress.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Theory]
        [InlineData(AddressFamily.InterNetwork)]
        [InlineData(AddressFamily.InterNetworkV6)]
        public async Task SendPingAsyncWithIPAddressAndTimeoutAndBufferAndPingOptions_Unix(AddressFamily addressFamily)
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync(addressFamily);
            if (localIpAddress == null)
            {
                // No local address for given address family.
                return;
            }

            byte[] buffer = TestSettings.PayloadAsBytes;
            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(localIpAddress, TestSettings.PingTimeout, buffer, new PingOptions()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(localIpAddress.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [Fact]
        public void SendPingWithHost()
        {
            IPAddress[] localIpAddresses = TestSettings.GetLocalIPAddresses();

            SendBatchPing(
                (ping) => ping.Send(TestSettings.LocalHost),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);
                });
        }

        [Fact]
        public async Task SendPingAsyncWithHost()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(TestSettings.LocalHost),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);
                });
        }

        [Fact]
        public void SendPingWithHostAndTimeout()
        {
            IPAddress[] localIpAddresses = TestSettings.GetLocalIPAddresses();

            SendBatchPing(
                (ping) => ping.Send(TestSettings.LocalHost, TestSettings.PingTimeout),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);
                });
        }

        [Fact]
        public async Task SendPingAsyncWithHostAndTimeout()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(TestSettings.LocalHost, TestSettings.PingTimeout),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithHostAndTimeoutAndBuffer()
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress();

            byte[] buffer = TestSettings.PayloadAsBytes;
            SendBatchPing(
                (ping) => ping.Send(TestSettings.LocalHost, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithHostAndTimeoutAndBuffer()
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            byte[] buffer = TestSettings.PayloadAsBytes;
            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(TestSettings.LocalHost, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithHostAndTimeoutAndBuffer_Unix()
        {
            IPAddress[] localIpAddresses = TestSettings.GetLocalIPAddresses();

            byte[] buffer = TestSettings.PayloadAsBytes;
            SendBatchPing(
                (ping) => ping.Send(TestSettings.LocalHost, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(pingReply.Address.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithHostAndTimeoutAndBuffer_Unix()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            byte[] buffer = TestSettings.PayloadAsBytes;
            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(TestSettings.LocalHost, TestSettings.PingTimeout, buffer),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(pingReply.Address.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithHostAndTimeoutAndBufferAndPingOptions()
        {
            IPAddress localIpAddress = TestSettings.GetLocalIPAddress();

            byte[] buffer = TestSettings.PayloadAsBytes;
            SendBatchPing(
                (ping) => ping.Send(TestSettings.LocalHost, TestSettings.PingTimeout, buffer, new PingOptions()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                });
        }

        [PlatformSpecific(TestPlatforms.Windows)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithHostAndTimeoutAndBufferAndPingOptions()
        {
            IPAddress localIpAddress = await TestSettings.GetLocalIPAddressAsync();

            byte[] buffer = TestSettings.PayloadAsBytes;
            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(TestSettings.LocalHost, TestSettings.PingTimeout, buffer, new PingOptions()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddress);
                    Assert.Equal(buffer, pingReply.Buffer);
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public void SendPingWithHostAndTimeoutAndBufferAndPingOptions_Unix()
        {
            IPAddress[] localIpAddresses = TestSettings.GetLocalIPAddresses();

            byte[] buffer = TestSettings.PayloadAsBytes;
            SendBatchPing(
                (ping) => ping.Send(TestSettings.LocalHost, TestSettings.PingTimeout, buffer, new PingOptions()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(pingReply.Address.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [PlatformSpecific(TestPlatforms.AnyUnix)]  // On Unix, Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
        [Fact]
        public async Task SendPingAsyncWithHostAndTimeoutAndBufferAndPingOptions_Unix()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            byte[] buffer = TestSettings.PayloadAsBytes;
            await SendBatchPingAsync(
                (ping) => ping.SendPingAsync(TestSettings.LocalHost, TestSettings.PingTimeout, buffer, new PingOptions()),
                (pingReply) =>
                {
                    PingResultValidator(pingReply, localIpAddresses);

                    // Non-root pings cannot send arbitrary data in the buffer, and do not receive it back in the PingReply.
                    if (Capability.CanUseRawSockets(pingReply.Address.AddressFamily))
                    {
                        Assert.Equal(buffer, pingReply.Buffer);
                    }
                    else
                    {
                        Assert.Equal(Array.Empty<byte>(), pingReply.Buffer);
                    }
                });
        }

        [Fact]
        public async Task SendPings_ReuseInstance_Hostname()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            using (Ping p = new Ping())
            {
                for (int i = 0; i < 3; i++)
                {
                    PingReply pingReply = await p.SendPingAsync(TestSettings.LocalHost);
                    PingResultValidator(pingReply, localIpAddresses);
                }
            }
        }

        [Fact]
        public async Task Sends_ReuseInstance_Hostname()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            using (Ping p = new Ping())
            {
                for (int i = 0; i < 3; i++)
                {
                    PingReply pingReply = p.Send(TestSettings.LocalHost);
                    PingResultValidator(pingReply, localIpAddresses);
                }
            }
        }

        [Fact]
        public async Task SendAsyncs_ReuseInstance_Hostname()
        {
            IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync();

            using (Ping p = new Ping())
            {
                TaskCompletionSource<bool> tcs = null;
                PingCompletedEventArgs ea = null;
                p.PingCompleted += (s, e) =>
                {
                    ea = e;
                    tcs.TrySetResult(true);
                };
                Action reset = () =>
                {
                    ea = null;
                    tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
                };

                // Several normal iterations
                for (int i = 0; i < 3; i++)
                {
                    reset();
                    p.SendAsync(TestSettings.LocalHost, null);
                    await tcs.Task;

                    Assert.NotNull(ea);
                    PingResultValidator(ea.Reply, localIpAddresses);
                }

                // Several canceled iterations
                for (int i = 0; i < 3; i++)
                {
                    reset();
                    p.SendAsync(TestSettings.LocalHost, null);
                    p.SendAsyncCancel(); // will block until operation can be started again
                    await tcs.Task;

                    bool cancelled = ea.Cancelled;
                    Exception error = ea.Error;
                    PingReply reply = ea.Reply;
                    // Exactly one of the three outcomes (cancelled, error, reply) must hold.
                    Assert.True(cancelled ^ (error != null) ^ (reply != null),
                        "Cancelled: " + cancelled +
                        (error == null ? "" : (Environment.NewLine + "Error Message: " + error.Message + Environment.NewLine + "Error Inner Exception: " + error.InnerException)) +
                        (reply == null ? "" : (Environment.NewLine + "Reply Address: " + reply.Address + Environment.NewLine + "Reply Status: " + reply.Status)));
                }
            }
        }

        [Fact]
        public static void Ping_DisposeAfterSend_Success()
        {
            Ping p = new Ping();
            p.Send(TestSettings.LocalHost);
            p.Dispose();
        }

        [Fact]
        public static async Task PingAsync_DisposeAfterSend_Success()
        {
            Ping p = new Ping();
            await p.SendPingAsync(TestSettings.LocalHost);
            p.Dispose();
        }

        [Fact]
        public static void Ping_DisposeMultipletimes_Success()
        {
            Ping p = new Ping();
            p.Dispose();
            p.Dispose();
        }

        [Fact]
        public static void Ping_SendAfterDispose_ThrowsSynchronously()
        {
            Ping p = new Ping();
            p.Dispose();
            Assert.Throws<ObjectDisposedException>(() => { p.Send(TestSettings.LocalHost); });
        }

        [Fact]
        public static void PingAsync_SendAfterDispose_ThrowsSynchronously()
        {
            Ping p = new Ping();
            p.Dispose();
            Assert.Throws<ObjectDisposedException>(() => { p.SendPingAsync(TestSettings.LocalHost); });
        }

        // Number of pings each batch helper issues per test.
        private static readonly int s_pingcount = 4;

        // Runs sendPing s_pingcount times, validating each reply synchronously.
        private static void SendBatchPing(Func<Ping, PingReply> sendPing, Action<PingReply> pingResultValidator)
        {
            for (int i = 0; i < s_pingcount; i++)
            {
                SendPing(sendPing, pingResultValidator);
            }
} private static Task SendBatchPingAsync(Func<Ping, Task<PingReply>> sendPing, Action<PingReply> pingResultValidator) { // create several concurrent pings Task[] pingTasks = new Task[s_pingcount]; for (int i = 0; i < s_pingcount; i++) { pingTasks[i] = SendPingAsync(sendPing, pingResultValidator); } return Task.WhenAll(pingTasks); } private static void SendPing(Func<Ping, PingReply> sendPing, Action<PingReply> pingResultValidator) { var pingResult = sendPing(new Ping()); pingResultValidator(pingResult); } private static async Task SendPingAsync(Func<Ping, Task<PingReply>> sendPing, Action<PingReply> pingResultValidator) { var pingResult = await sendPing(new Ping()); pingResultValidator(pingResult); } [Fact] [SkipOnTargetFramework(TargetFrameworkMonikers.Mono, "GC has different behavior on Mono")] public void CanBeFinalized() { FinalizingPing.CreateAndRelease(); GC.Collect(); GC.WaitForPendingFinalizers(); Assert.True(FinalizingPing.WasFinalized); } [Theory] [InlineData(true)] [InlineData(false)] public async Task SendPingAsyncWithHostAndTtlAndFragmentPingOptions(bool fragment) { IPAddress[] localIpAddresses = await TestSettings.GetLocalIPAddressesAsync(); byte[] buffer = TestSettings.PayloadAsBytes; PingOptions options = new PingOptions(); options.Ttl = 32; options.DontFragment = fragment; await SendBatchPingAsync( (ping) => ping.SendPingAsync(TestSettings.LocalHost, TestSettings.PingTimeout, buffer, options), (pingReply) => { PingResultValidator(pingReply, localIpAddresses); }); } [ConditionalFact] [OuterLoop] // Depends on external host and assumption that network respects and does not change TTL public async Task SendPingToExternalHostWithLowTtlTest() { string host = System.Net.Test.Common.Configuration.Ping.PingHost; PingReply pingReply; PingOptions options = new PingOptions(); bool reachable = false; Ping ping = new Ping(); for (int i = 0; i < s_pingcount; i++) { pingReply = await ping.SendPingAsync(host, TestSettings.PingTimeout, 
TestSettings.PayloadAsBytesShort); if (pingReply.Status == IPStatus.Success) { reachable = true; break; } } if (!reachable) { throw new SkipTestException($"Host {host} is not reachable. Skipping test."); } options.Ttl = 1; // This should always fail unless host is one IP hop away. pingReply = await ping.SendPingAsync(host, TestSettings.PingTimeout, TestSettings.PayloadAsBytesShort, options); Assert.NotEqual(IPStatus.Success, pingReply.Status); } [Fact] [OuterLoop] public void Ping_TimedOut_Sync_Success() { var sender = new Ping(); PingReply reply = sender.Send(TestSettings.UnreachableAddress); Assert.Equal(IPStatus.TimedOut, reply.Status); } [Fact] [OuterLoop] public async Task Ping_TimedOut_EAP_Success() { var sender = new Ping(); sender.PingCompleted += (s, e) => { var tcs = (TaskCompletionSource<PingReply>)e.UserState; if (e.Cancelled) { tcs.TrySetCanceled(); } else if (e.Error != null) { tcs.TrySetException(e.Error); } else { tcs.TrySetResult(e.Reply); } }; var tcs = new TaskCompletionSource<PingReply>(); sender.SendAsync(TestSettings.UnreachableAddress, tcs); PingReply reply = await tcs.Task; Assert.Equal(IPStatus.TimedOut, reply.Status); } [Fact] [OuterLoop] public async Task Ping_TimedOut_TAP_Success() { var sender = new Ping(); PingReply reply = await sender.SendPingAsync(TestSettings.UnreachableAddress); Assert.Equal(IPStatus.TimedOut, reply.Status); } } }
/******************************************************************************************** Copyright (c) Microsoft Corporation All rights reserved. Microsoft Public License: This license governs use of the accompanying software. If you use the software, you accept this license. If you do not accept the license, do not use the software. 1. Definitions The terms "reproduce," "reproduction," "derivative works," and "distribution" have the same meaning here as under U.S. copyright law. A "contribution" is the original software, or any additions or changes to the software. A "contributor" is any person that distributes its contribution under this license. "Licensed patents" are a contributor's patent claims that read directly on its contribution. 2. Grant of Rights (A) Copyright Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free copyright license to reproduce its contribution, prepare derivative works of its contribution, and distribute its contribution or any derivative works that you create. (B) Patent Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free license under its licensed patents to make, have made, use, sell, offer for sale, import, and/or otherwise dispose of its contribution in the software or derivative works of the contribution in the software. 3. Conditions and Limitations (A) No Trademark License- This license does not grant you rights to use any contributors' name, logo, or trademarks. (B) If you bring a patent claim against any contributor over patents that you claim are infringed by the software, your patent license from such contributor to the software ends automatically. 
(C) If you distribute any portion of the software, you must retain all copyright, patent, trademark, and attribution notices that are present in the software. (D) If you distribute any portion of the software in source code form, you may do so only under this license by including a complete copy of this license with your distribution. If you distribute any portion of the software in compiled or object code form, you may only do so under a license that complies with this license. (E) The software is licensed "as-is." You bear the risk of using it. The contributors give no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this license cannot change. To the extent permitted under your local laws, the contributors exclude the implied warranties of merchantability, fitness for a particular purpose and non-infringement. ********************************************************************************************/ using System; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.IO; using System.Runtime.InteropServices; using Microsoft.VisualStudio; using Microsoft.VisualStudio.OLE.Interop; using Microsoft.VisualStudio.Shell; using Microsoft.VisualStudio.Shell.Interop; using ErrorHandler = Microsoft.VisualStudio.ErrorHandler; using ShellConstants = Microsoft.VisualStudio.Shell.Interop.Constants; namespace Microsoft.VisualStudio.Project { [CLSCompliant(false), ComVisible(true)] public class NestedProjectNode : HierarchyNode, IPropertyNotifySink { #region fields private IVsHierarchy nestedHierarchy; Guid projectInstanceGuid = Guid.Empty; private string projectName = String.Empty; private string projectPath = String.Empty; private ImageHandler imageHandler; /// <summary> /// Defines an object that will be a mutex for this object for synchronizing thread calls. 
/// </summary> private static volatile object Mutex = new object(); /// <summary> /// Sets the dispose flag on the object. /// </summary> private bool isDisposed; // A cooike retrieved when advising on property chnanged events. private uint projectPropertyNotifySinkCookie; #endregion #region properties internal IVsHierarchy NestedHierarchy { get { return this.nestedHierarchy; } } #endregion #region virtual properties /// <summary> /// Returns the __VSADDVPFLAGS that will be passed in when calling AddVirtualProjectEx /// </summary> protected virtual uint VirtualProjectFlags { get { return 0; } } #endregion #region overridden properties /// <summary> /// The path of the nested project. /// </summary> public override string Url { get { return this.projectPath; } } /// <summary> /// The Caption of the nested project. /// </summary> public override string Caption { get { return Path.GetFileNameWithoutExtension(this.projectName); } } public override Guid ItemTypeGuid { get { return VSConstants.GUID_ItemType_SubProject; } } /// <summary> /// Defines whether a node can execute a command if in selection. /// We do this in order to let the nested project to handle the execution of its own commands. /// </summary> public override bool CanExecuteCommand { get { return false; } } public override int SortPriority { get { return DefaultSortOrderNode.NestedProjectNode; } } protected bool IsDisposed { get { return this.isDisposed; } set { this.isDisposed = value; } } #endregion #region ctor protected NestedProjectNode() { } public NestedProjectNode(ProjectNode root, ProjectElement element) : base(root, element) { this.IsExpanded = true; } #endregion #region IPropertyNotifySink Members /// <summary> /// Notifies a sink that the [bindable] property specified by dispID has changed. /// If dispID is DISPID_UNKNOWN, then multiple properties have changed together. 
/// The client (owner of the sink) should then retrieve the current value of each property of interest from the object that generated the notification. /// In our case we will care about the VSLangProj80.VsProjPropId.VBPROJPROPID_FileName and update the changes in the parent project file. /// </summary> /// <param name="dispid">Dispatch identifier of the property that is about to change or DISPID_UNKNOWN if multiple properties are about to change.</param> public virtual void OnChanged(int dispid) { if (dispid == (int)VSLangProj80.VsProjPropId.VBPROJPROPID_FileName) { // Get the filename of the nested project. Inetead of asking the label on the nested we ask the filename, since the label might not yet been set. IVsProject3 nestedProject = this.nestedHierarchy as IVsProject3; if (nestedProject != null) { string document; ErrorHandler.ThrowOnFailure(nestedProject.GetMkDocument(VSConstants.VSITEMID_ROOT, out document)); this.RenameNestedProjectInParentProject(Path.GetFileNameWithoutExtension(document)); // We need to redraw the caption since for some reason, by intervining to the OnChanged event the Caption is not updated. this.ReDraw(UIHierarchyElement.Caption); } } } /// <summary> /// Notifies a sink that a [requestedit] property is about to change and that the object is asking the sink how to proceed. 
/// </summary> /// <param name="dispid">Dispatch identifier of the property that is about to change or DISPID_UNKNOWN if multiple properties are about to change.</param> public virtual void OnRequestEdit(int dispid) { } #endregion #region public methods #endregion #region overridden methods /// <summary> /// Get the automation object for the NestedProjectNode /// </summary> /// <returns>An instance of the Automation.OANestedProjectItem type if succeded</returns> public override object GetAutomationObject() { //Validate that we are not disposed or the project is closing if (this.isDisposed || this.ProjectMgr == null || this.ProjectMgr.IsClosed) { return null; } return new Automation.OANestedProjectItem(this.ProjectMgr.GetAutomationObject() as Automation.OAProject, this); } /// <summary> /// Gets properties of a given node or of the hierarchy. /// </summary> /// <param name="propId">Identifier of the hierarchy property</param> /// <returns>It return an object which type is dependent on the propid.</returns> public override object GetProperty(int propId) { __VSHPROPID vshPropId = (__VSHPROPID)propId; switch (vshPropId) { default: return base.GetProperty(propId); case __VSHPROPID.VSHPROPID_Expandable: return true; case __VSHPROPID.VSHPROPID_BrowseObject: case __VSHPROPID.VSHPROPID_HandlesOwnReload: return this.DelegateGetPropertyToNested(propId); } } /// <summary> /// Gets properties whose values are GUIDs. /// </summary> /// <param name="propid">Identifier of the hierarchy property</param> /// <param name="guid"> Pointer to a GUID property specified in propid</param> /// <returns>If the method succeeds, it returns S_OK. 
If it fails, it returns an error code.</returns> public override int GetGuidProperty(int propid, out Guid guid) { guid = Guid.Empty; switch ((__VSHPROPID)propid) { case __VSHPROPID.VSHPROPID_ProjectIDGuid: guid = this.projectInstanceGuid; break; default: return base.GetGuidProperty(propid, out guid); } CCITracing.TraceCall(String.Format(CultureInfo.CurrentCulture, "Guid for {0} property", propid)); if (guid.CompareTo(Guid.Empty) == 0) { return VSConstants.DISP_E_MEMBERNOTFOUND; } return VSConstants.S_OK; } /// <summary> /// Determines whether the hierarchy item changed. /// </summary> /// <param name="itemId">Item identifier of the hierarchy item contained in VSITEMID</param> /// <param name="punkDocData">Pointer to the IUnknown interface of the hierarchy item. </param> /// <param name="pfDirty">TRUE if the hierarchy item changed.</param> /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns> public override int IsItemDirty(uint itemId, IntPtr punkDocData, out int pfDirty) { Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method"); Debug.Assert(punkDocData != IntPtr.Zero, "docData intptr was zero"); // Get an IPersistFileFormat object from docData object IPersistFileFormat persistFileFormat = Marshal.GetTypedObjectForIUnknown(punkDocData, typeof(IPersistFileFormat)) as IPersistFileFormat; Debug.Assert(persistFileFormat != null, "The docData object does not implement the IPersistFileFormat interface"); // Call IsDirty on the IPersistFileFormat interface ErrorHandler.ThrowOnFailure(persistFileFormat.IsDirty(out pfDirty)); return VSConstants.S_OK; } /// <summary> /// Saves the hierarchy item to disk. /// </summary> /// <param name="dwSave">Flags whose values are taken from the VSSAVEFLAGS enumeration.</param> /// <param name="silentSaveAsName">File name to be applied when dwSave is set to VSSAVE_SilentSave. 
</param> /// <param name="itemid">Item identifier of the hierarchy item saved from VSITEMID. </param> /// <param name="punkDocData">Pointer to the IUnknown interface of the hierarchy item saved.</param> /// <param name="pfCancelled">TRUE if the save action was canceled. </param> /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns> public override int SaveItem(VSSAVEFLAGS dwSave, string silentSaveAsName, uint itemid, IntPtr punkDocData, out int pfCancelled) { // Don't ignore/unignore file changes // Use Advise/Unadvise to work around rename situations try { this.StopObservingNestedProjectFile(); Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method"); Debug.Assert(punkDocData != IntPtr.Zero, "docData intptr was zero"); // Get an IPersistFileFormat object from docData object (we don't call release on punkDocData since did not increment its ref count) IPersistFileFormat persistFileFormat = Marshal.GetTypedObjectForIUnknown(punkDocData, typeof(IPersistFileFormat)) as IPersistFileFormat; Debug.Assert(persistFileFormat != null, "The docData object does not implement the IPersistFileFormat interface"); IVsUIShell uiShell = this.GetService(typeof(SVsUIShell)) as IVsUIShell; string newName; ErrorHandler.ThrowOnFailure(uiShell.SaveDocDataToFile(dwSave, persistFileFormat, silentSaveAsName, out newName, out pfCancelled)); // When supported do a rename of the nested project here } finally { // Succeeded or not we must hook to the file change events // Don't ignore/unignore file changes // Use Advise/Unadvise to work around rename situations this.ObserveNestedProjectFile(); } return VSConstants.S_OK; } /// <summary> /// Gets the icon handle. It tries first the nested to get the icon handle. If that is not supported it will get it from /// the image list of the nested if that is supported. If neither of these is supported a default image will be shown. 
/// </summary> /// <returns>An object representing the icon.</returns> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA1806:DoNotIgnoreMethodResults", MessageId = "Microsoft.VisualStudio.Shell.Interop.IVsHierarchy.GetProperty(System.UInt32,System.Int32,System.Object@)")] public override object GetIconHandle(bool open) { Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method"); object iconHandle = null; this.nestedHierarchy.GetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_IconHandle, out iconHandle); if (iconHandle == null) { if (null == imageHandler) { InitImageHandler(); } // Try to get an icon from the nested hierrachy image list. if (imageHandler.ImageList != null) { object imageIndexAsObject = null; if (this.nestedHierarchy.GetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_IconIndex, out imageIndexAsObject) == VSConstants.S_OK && imageIndexAsObject != null) { int imageIndex = (int)imageIndexAsObject; if (imageIndex < imageHandler.ImageList.Images.Count) { iconHandle = imageHandler.GetIconHandle(imageIndex); } } } if (null == iconHandle) { iconHandle = this.ProjectMgr.ImageHandler.GetIconHandle((int)ProjectNode.ImageName.Application); } } return iconHandle; } /// <summary> /// Return S_OK. Implementation of Closing a nested project is done in CloseNestedProject which is called by CloseChildren. /// </summary> /// <returns>S_OK</returns> public override int Close() { return VSConstants.S_OK; } /// <summary> /// Returns the moniker of the nested project. 
/// </summary> /// <returns></returns> public override string GetMkDocument() { Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method"); if (this.isDisposed || this.ProjectMgr == null || this.ProjectMgr.IsClosed) { return String.Empty; } return this.projectPath; } /// <summary> /// Called by the shell when a node has been renamed from the GUI /// </summary> /// <param name="label">The name of the new label.</param> /// <returns>A success or failure value.</returns> public override int SetEditLabel(string label) { int result = this.DelegateSetPropertyToNested((int)__VSHPROPID.VSHPROPID_EditLabel, label); if (ErrorHandler.Succeeded(result)) { this.RenameNestedProjectInParentProject(label); } return result; } /// <summary> /// Called by the shell to get the node caption when the user tries to rename from the GUI /// </summary> /// <returns>the node cation</returns> public override string GetEditLabel() { return (string)this.DelegateGetPropertyToNested((int)__VSHPROPID.VSHPROPID_EditLabel); } /// <summary> /// This is temporary until we have support for re-adding a nested item /// </summary> protected override bool CanDeleteItem(__VSDELETEITEMOPERATION deleteOperation) { return false; } /// <summary> /// Delegates the call to the inner hierarchy. 
/// </summary> /// <param name="reserved">Reserved parameter defined at the IVsPersistHierarchyItem2::ReloadItem parameter.</param> protected internal override void ReloadItem(uint reserved) { #region precondition if (this.isDisposed || this.ProjectMgr == null || this.ProjectMgr.IsClosed) { throw new InvalidOperationException(); } Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method"); #endregion IVsPersistHierarchyItem2 persistHierachyItem = this.nestedHierarchy as IVsPersistHierarchyItem2; // We are expecting that if we get called then the nestedhierarchy supports IVsPersistHierarchyItem2, since then hierrachy should support handling its own reload. // There should be no errormessage to the user since this is an internal error, that it cannot be fixed at user level. if (persistHierachyItem == null) { throw new InvalidOperationException(); } ErrorHandler.ThrowOnFailure(persistHierachyItem.ReloadItem(VSConstants.VSITEMID_ROOT, reserved)); } /// <summary> /// Flag indicating that changes to a file can be ignored when item is saved or reloaded. /// </summary> /// <param name="ignoreFlag">Flag indicating whether or not to ignore changes (1 to ignore, 0 to stop ignoring).</param> protected internal override void IgnoreItemFileChanges(bool ignoreFlag) { #region precondition if (this.isDisposed || this.ProjectMgr == null || this.ProjectMgr.IsClosed) { throw new InvalidOperationException(); } Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method"); #endregion this.IgnoreNestedProjectFile(ignoreFlag); IVsPersistHierarchyItem2 persistHierachyItem = this.nestedHierarchy as IVsPersistHierarchyItem2; // If the IVsPersistHierarchyItem2 is not implemented by the nested just return if (persistHierachyItem == null) { return; } ErrorHandler.ThrowOnFailure(persistHierachyItem.IgnoreItemFileChanges(VSConstants.VSITEMID_ROOT, ignoreFlag ? 
1 : 0)); } /// <summary> /// Sets the VSADDFILEFLAGS that will be used to call the IVsTrackProjectDocumentsEvents2 OnAddFiles /// </summary> /// <param name="files">The files to which an array of VSADDFILEFLAGS has to be specified.</param> /// <returns></returns> protected internal override VSADDFILEFLAGS[] GetAddFileFlags(string[] files) { if (files == null || files.Length == 0) { return new VSADDFILEFLAGS[1] { VSADDFILEFLAGS.VSADDFILEFLAGS_NoFlags }; } VSADDFILEFLAGS[] addFileFlags = new VSADDFILEFLAGS[files.Length]; for (int i = 0; i < files.Length; i++) { addFileFlags[i] = VSADDFILEFLAGS.VSADDFILEFLAGS_IsNestedProjectFile; } return addFileFlags; } /// <summary> /// Sets the VSQUERYADDFILEFLAGS that will be used to call the IVsTrackProjectDocumentsEvents2 OnQueryAddFiles /// </summary> /// <param name="files">The files to which an array of VSADDFILEFLAGS has to be specified.</param> /// <returns></returns> protected internal override VSQUERYADDFILEFLAGS[] GetQueryAddFileFlags(string[] files) { if (files == null || files.Length == 0) { return new VSQUERYADDFILEFLAGS[1] { VSQUERYADDFILEFLAGS.VSQUERYADDFILEFLAGS_NoFlags }; } VSQUERYADDFILEFLAGS[] queryAddFileFlags = new VSQUERYADDFILEFLAGS[files.Length]; for (int i = 0; i < files.Length; i++) { queryAddFileFlags[i] = VSQUERYADDFILEFLAGS.VSQUERYADDFILEFLAGS_IsNestedProjectFile; } return queryAddFileFlags; } /// <summary> /// Sets the VSREMOVEFILEFLAGS that will be used to call the IVsTrackProjectDocumentsEvents2 OnRemoveFiles /// </summary> /// <param name="files">The files to which an array of VSREMOVEFILEFLAGS has to be specified.</param> /// <returns></returns> protected internal override VSREMOVEFILEFLAGS[] GetRemoveFileFlags(string[] files) { if (files == null || files.Length == 0) { return new VSREMOVEFILEFLAGS[1] { VSREMOVEFILEFLAGS.VSREMOVEFILEFLAGS_NoFlags }; } VSREMOVEFILEFLAGS[] removeFileFlags = new VSREMOVEFILEFLAGS[files.Length]; for (int i = 0; i < files.Length; i++) { removeFileFlags[i] = 
VSREMOVEFILEFLAGS.VSREMOVEFILEFLAGS_IsNestedProjectFile; } return removeFileFlags; } /// <summary> /// Sets the VSQUERYREMOVEFILEFLAGS that will be used to call the IVsTrackProjectDocumentsEvents2 OnQueryRemoveFiles /// </summary> /// <param name="files">The files to which an array of VSQUERYREMOVEFILEFLAGS has to be specified.</param> /// <returns></returns> protected internal override VSQUERYREMOVEFILEFLAGS[] GetQueryRemoveFileFlags(string[] files) { if (files == null || files.Length == 0) { return new VSQUERYREMOVEFILEFLAGS[1] { VSQUERYREMOVEFILEFLAGS.VSQUERYREMOVEFILEFLAGS_NoFlags }; } VSQUERYREMOVEFILEFLAGS[] queryRemoveFileFlags = new VSQUERYREMOVEFILEFLAGS[files.Length]; for (int i = 0; i < files.Length; i++) { queryRemoveFileFlags[i] = VSQUERYREMOVEFILEFLAGS.VSQUERYREMOVEFILEFLAGS_IsNestedProjectFile; } return queryRemoveFileFlags; } #endregion #region virtual methods /// <summary> /// Initialize the nested hierarhy node. /// </summary> /// <param name="fileName">The file name of the nested project.</param> /// <param name="destination">The location of the nested project.</param> /// <param name="projectName">The name of the project.</param> /// <param name="createFlags">The nested project creation flags </param> /// <remarks>This methos should be called just after a NestedProjectNode object is created.</remarks> public virtual void Init(string fileName, string destination, string projectName, __VSCREATEPROJFLAGS createFlags) { if (String.IsNullOrEmpty(fileName)) { throw new ArgumentException(SR.GetString(SR.ParameterCannotBeNullOrEmpty, CultureInfo.CurrentUICulture), "fileName"); } if (String.IsNullOrEmpty(destination)) { throw new ArgumentException(SR.GetString(SR.ParameterCannotBeNullOrEmpty, CultureInfo.CurrentUICulture), "destination"); } this.projectName = Path.GetFileName(fileName); this.projectPath = Path.Combine(destination, this.projectName); // get the IVsSolution interface from the global service provider IVsSolution solution = 
this.GetService(typeof(IVsSolution)) as IVsSolution; Debug.Assert(solution != null, "Could not get the IVsSolution object from the services exposed by this project"); if (solution == null) { throw new InvalidOperationException(); } // Get the project type guid from project element string typeGuidString = this.ItemNode.GetMetadataAndThrow(ProjectFileConstants.TypeGuid, new InvalidOperationException()); Guid projectFactoryGuid = Guid.Empty; if (!String.IsNullOrEmpty(typeGuidString)) { projectFactoryGuid = new Guid(typeGuidString); } // Get the project factory. IVsProjectFactory projectFactory; ErrorHandler.ThrowOnFailure(solution.GetProjectFactory((uint)0, new Guid[] { projectFactoryGuid }, fileName, out projectFactory)); this.CreateProjectDirectory(); //Create new project using factory int cancelled; Guid refiid = NativeMethods.IID_IUnknown; IntPtr projectPtr = IntPtr.Zero; try { ErrorHandler.ThrowOnFailure(projectFactory.CreateProject(fileName, destination, projectName, (uint)createFlags, ref refiid, out projectPtr, out cancelled)); if (projectPtr != IntPtr.Zero) { this.nestedHierarchy = Marshal.GetTypedObjectForIUnknown(projectPtr, typeof(IVsHierarchy)) as IVsHierarchy; Debug.Assert(this.nestedHierarchy != null, "Nested hierarchy could not be created"); Debug.Assert(cancelled == 0); } } finally { if (projectPtr != IntPtr.Zero) { // We created a new instance of the project, we need to call release to decrement the ref count // the RCW (this.nestedHierarchy) still has a reference to it which will keep it alive Marshal.Release(projectPtr); } } if (cancelled != 0 && this.nestedHierarchy == null) { ErrorHandler.ThrowOnFailure(VSConstants.OLE_E_PROMPTSAVECANCELLED); } // Link into the nested VS hierarchy. 
ErrorHandler.ThrowOnFailure(this.nestedHierarchy.SetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ParentHierarchy, this.ProjectMgr)); ErrorHandler.ThrowOnFailure(this.nestedHierarchy.SetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ParentHierarchyItemid, (object)(int)this.ID)); this.LockRDTEntry(); this.ConnectPropertyNotifySink(); } /// <summary> /// Links a nested project as a virtual project to the solution. /// </summary> protected internal virtual void AddVirtualProject() { // This is the second step in creating and adding a nested project. The inner hierarchy must have been // already initialized at this point. #region precondition if (this.nestedHierarchy == null) { throw new InvalidOperationException(); } #endregion // get the IVsSolution interface from the global service provider IVsSolution solution = this.GetService(typeof(IVsSolution)) as IVsSolution; Debug.Assert(solution != null, "Could not get the IVsSolution object from the services exposed by this project"); if (solution == null) { throw new InvalidOperationException(); } this.InitializeInstanceGuid(); // Add virtual project to solution. ErrorHandler.ThrowOnFailure(solution.AddVirtualProjectEx(this.nestedHierarchy, this.VirtualProjectFlags, ref this.projectInstanceGuid)); // Now set up to listen on file changes on the nested project node. this.ObserveNestedProjectFile(); } /// <summary> /// The method that does the cleanup. /// </summary> /// <param name="disposing"></param> protected override void Dispose(bool disposing) { // Everybody can go here. if (!this.isDisposed) { try { // Synchronize calls to the Dispose simulteniously. lock (Mutex) { if (disposing) { this.DisconnectPropertyNotifySink(); this.StopObservingNestedProjectFile(); // If a project cannot load it may happen that the imagehandler is not instantiated. 
if (this.imageHandler != null) { this.imageHandler.Close(); } } } } finally { base.Dispose(disposing); this.isDisposed = true; } } } /// <summary> /// Creates the project directory if it does not exist. /// </summary> /// <returns></returns> protected virtual void CreateProjectDirectory() { string directoryName = Path.GetDirectoryName(this.projectPath); if (!Directory.Exists(directoryName)) { Directory.CreateDirectory(directoryName); } } /// <summary> /// Lock the RDT Entry for the nested project. /// By default this document is marked as "Dont Save as". That means the menu File->SaveAs is disabled for the /// nested project node. /// </summary> [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "RDT")] protected virtual void LockRDTEntry() { // Define flags for the nested project document _VSRDTFLAGS flags = _VSRDTFLAGS.RDT_VirtualDocument | _VSRDTFLAGS.RDT_ProjSlnDocument; ; // Request the RDT service IVsRunningDocumentTable rdt = this.GetService(typeof(SVsRunningDocumentTable)) as IVsRunningDocumentTable; Debug.Assert(rdt != null, " Could not get running document table from the services exposed by this project"); if (rdt == null) { throw new InvalidOperationException(); } // First we see if someone else has opened the requested view of the file. 
uint itemid; IntPtr docData = IntPtr.Zero; IVsHierarchy ivsHierarchy; uint docCookie; IntPtr projectPtr = IntPtr.Zero; try { ErrorHandler.ThrowOnFailure(rdt.FindAndLockDocument((uint)flags, this.projectPath, out ivsHierarchy, out itemid, out docData, out docCookie)); flags |= _VSRDTFLAGS.RDT_EditLock; if (ivsHierarchy != null && docCookie != (uint)ShellConstants.VSDOCCOOKIE_NIL) { if (docCookie != this.DocCookie) { this.DocCookie = docCookie; } } else { // get inptr for hierarchy projectPtr = Marshal.GetIUnknownForObject(this.nestedHierarchy); Debug.Assert(projectPtr != IntPtr.Zero, " Project pointer for the nested hierarchy has not been initialized"); ErrorHandler.ThrowOnFailure(rdt.RegisterAndLockDocument((uint)flags, this.projectPath, this.ProjectMgr.InteropSafeIVsHierarchy, this.ID, projectPtr, out docCookie)); this.DocCookie = docCookie; Debug.Assert(this.DocCookie != (uint)ShellConstants.VSDOCCOOKIE_NIL, "Invalid cookie when registering document in the running document table."); //we must also set the doc cookie on the nested hier this.SetDocCookieOnNestedHier(this.DocCookie); } } finally { // Release all Inptr's that that were given as out pointers if (docData != IntPtr.Zero) { Marshal.Release(docData); } if (projectPtr != IntPtr.Zero) { Marshal.Release(projectPtr); } } } /// <summary> /// Unlock the RDT entry for the nested project /// </summary> [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "RDT")] protected virtual void UnlockRDTEntry() { if (this.isDisposed || this.ProjectMgr == null || this.ProjectMgr.IsClosed) { return; } // First we see if someone else has opened the requested view of the file. 
IVsRunningDocumentTable rdt = this.GetService(typeof(SVsRunningDocumentTable)) as IVsRunningDocumentTable; if (rdt != null && this.DocCookie != (int)ShellConstants.VSDOCCOOKIE_NIL) { _VSRDTFLAGS flags = _VSRDTFLAGS.RDT_EditLock; ErrorHandler.ThrowOnFailure(rdt.UnlockDocument((uint)flags, (uint)this.DocCookie)); } this.DocCookie = (int)ShellConstants.VSDOCCOOKIE_NIL; } /// <summary> /// Renames the project file in the parent project structure. /// </summary> /// <param name="label">The new label.</param> protected virtual void RenameNestedProjectInParentProject(string label) { string existingLabel = this.Caption; if (String.Compare(existingLabel, label, StringComparison.Ordinal) == 0) { return; } string oldFileName = this.projectPath; string oldPath = this.Url; try { this.StopObservingNestedProjectFile(); this.ProjectMgr.SuspendMSBuild(); // Check out the project file if necessary. if (!this.ProjectMgr.QueryEditProjectFile(false)) { throw Marshal.GetExceptionForHR(VSConstants.OLE_E_PROMPTSAVECANCELLED); } string newFileName = label + Path.GetExtension(oldFileName); this.SaveNestedProjectItemInProjectFile(newFileName); string projectDirectory = Path.GetDirectoryName(oldFileName); // update state. this.projectName = newFileName; this.projectPath = Path.Combine(projectDirectory, this.projectName); // Unload and lock the RDT entries this.UnlockRDTEntry(); this.LockRDTEntry(); // Since actually this is a rename in our hierarchy notify the tracker that a rename has happened. this.ProjectMgr.Tracker.OnItemRenamed(oldPath, this.projectPath, VSRENAMEFILEFLAGS.VSRENAMEFILEFLAGS_IsNestedProjectFile); } finally { this.ObserveNestedProjectFile(); this.ProjectMgr.ResumeMSBuild(this.ProjectMgr.ReEvaluateProjectFileTargetName); } } /// <summary> /// Saves the nested project information in the project file. 
/// </summary>
/// <param name="newFileName">The new file name (with extension) for the nested project item.</param>
protected virtual void SaveNestedProjectItemInProjectFile(string newFileName)
{
    string existingInclude = this.ItemNode.Item.EvaluatedInclude;
    string existingRelativePath = Path.GetDirectoryName(existingInclude);
    // Keep the original relative directory; only the file name changes.
    string newRelativePath = Path.Combine(existingRelativePath, newFileName);
    this.ItemNode.Rename(newRelativePath);
}
#endregion

#region helper methods
/// <summary>
/// Closes a nested project and releases the nested hierarchy pointer.
/// </summary>
internal void CloseNestedProjectNode()
{
    if (this.isDisposed || this.ProjectMgr == null || this.ProjectMgr.IsClosed)
    {
        return;
    }

    uint itemid = VSConstants.VSITEMID_NIL;
    try
    {
        this.DisconnectPropertyNotifySink();

        IVsUIHierarchy hier;
        IVsWindowFrame windowFrame;
        // Only release the RDT edit lock when no editor window still has the document open.
        VsShellUtilities.IsDocumentOpen(this.ProjectMgr.Site, this.projectPath, Guid.Empty, out hier, out itemid, out windowFrame);

        if (itemid == VSConstants.VSITEMID_NIL)
        {
            this.UnlockRDTEntry();
        }

        IVsSolution solution = this.GetService(typeof(IVsSolution)) as IVsSolution;
        if (solution == null)
        {
            throw new InvalidOperationException();
        }

        ErrorHandler.ThrowOnFailure(solution.RemoveVirtualProject(this.nestedHierarchy, 0));
    }
    finally
    {
        this.StopObservingNestedProjectFile();

        // If we haven't already released the RDT cookie, do so now.
        // NOTE(review): when itemid is VSITEMID_NIL this calls UnlockRDTEntry a second time;
        // that is harmless because UnlockRDTEntry resets DocCookie — confirm intended.
        if (itemid == VSConstants.VSITEMID_NIL)
        {
            this.UnlockRDTEntry();
        }

        this.Dispose(true);
    }
}

// Ensures this node has a stable project instance GUID, reconciling the GUID stored in the
// nested hierarchy with the one stored in the parent project file's item metadata.
private void InitializeInstanceGuid()
{
    // Already initialized; nothing to do.
    if (this.projectInstanceGuid != Guid.Empty)
    {
        return;
    }

    Guid instanceGuid = Guid.Empty;

    Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method");

    // This method should be called from the open children method, then we can safely use the IsNewProject property.
    if (this.ProjectMgr.IsNewProject)
    {
        instanceGuid = Guid.NewGuid();
        this.ItemNode.SetMetadata(ProjectFileConstants.InstanceGuid, instanceGuid.ToString("B"));
        ErrorHandler.ThrowOnFailure(this.nestedHierarchy.SetGuidProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ProjectIDGuid, ref instanceGuid));
    }
    else
    {
        // Get a guid from the nested hierarchy.
        Guid nestedHiererachyInstanceGuid;
        ErrorHandler.ThrowOnFailure(this.nestedHierarchy.GetGuidProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ProjectIDGuid, out nestedHiererachyInstanceGuid));

        // Get instance guid from the project file. If it does not exist then we create one.
        string instanceGuidAsString = this.ItemNode.GetMetadata(ProjectFileConstants.InstanceGuid);

        // 1. nestedHiererachyInstanceGuid is empty and instanceGuidAsString is empty: create a new one.
        //    NOTE(review): in this case the new guid is not written back to the item metadata or the
        //    nested hierarchy (unlike the IsNewProject path) — confirm that is intended.
        // 2. nestedHiererachyInstanceGuid is empty and instanceGuidAsString is not empty: use
        //    instanceGuidAsString and update the nested project object by calling SetGuidProperty.
        // 3. nestedHiererachyInstanceGuid is not empty and instanceGuidAsString is empty: use
        //    nestedHiererachyInstanceGuid and update the outer project element.
        // 4. nestedHiererachyInstanceGuid is not empty and instanceGuidAsString is not empty: use
        //    nestedHiererachyInstanceGuid and update the outer project element if they differ.
        if (nestedHiererachyInstanceGuid == Guid.Empty && String.IsNullOrEmpty(instanceGuidAsString))
        {
            instanceGuid = Guid.NewGuid();
        }
        else if (nestedHiererachyInstanceGuid == Guid.Empty && !String.IsNullOrEmpty(instanceGuidAsString))
        {
            instanceGuid = new Guid(instanceGuidAsString);

            ErrorHandler.ThrowOnFailure(this.nestedHierarchy.SetGuidProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ProjectIDGuid, ref instanceGuid));
        }
        else if (nestedHiererachyInstanceGuid != Guid.Empty)
        {
            instanceGuid = nestedHiererachyInstanceGuid;

            // If the instanceGuidAsString is empty then creating a guid out of it would throw an exception.
            if (String.IsNullOrEmpty(instanceGuidAsString) || nestedHiererachyInstanceGuid != new Guid(instanceGuidAsString))
            {
                this.ItemNode.SetMetadata(ProjectFileConstants.InstanceGuid, instanceGuid.ToString("B"));
            }
        }
    }

    this.projectInstanceGuid = instanceGuid;
}

// Pushes the RDT document cookie down to the nested hierarchy via VSHPROPID_ItemDocCookie.
private void SetDocCookieOnNestedHier(uint itemDocCookie)
{
    // The property expects a boxed int, so narrow the uint cookie before boxing.
    object docCookie = (int)itemDocCookie;
    try
    {
        ErrorHandler.ThrowOnFailure(this.nestedHierarchy.SetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_ItemDocCookie, docCookie));
    }
    catch (NotImplementedException)
    {
        // We swallow this exception on purpose: some nested project types do not implement this property.
    }
}

// Adopts the nested hierarchy's icon image list for this node's image handler.
private void InitImageHandler()
{
    Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method");

    if (null == imageHandler)
    {
        imageHandler = new ImageHandler();
    }

    object imageListAsPointer = null;
    ErrorHandler.ThrowOnFailure(this.nestedHierarchy.GetProperty(VSConstants.VSITEMID_ROOT, (int)__VSHPROPID.VSHPROPID_IconImgList, out imageListAsPointer));
    if (imageListAsPointer != null)
    {
        this.imageHandler.ImageList = Utilities.GetImageList(imageListAsPointer);
    }
}

/// <summary>
/// Delegates GetProperty calls to the inner nested hierarchy.
/// </summary>
/// <param name="propID">The property to delegate.</param>
/// <returns>The return of the GetProperty from nested, or null when the project is closed or the property is unsupported.</returns>
private object DelegateGetPropertyToNested(int propID)
{
    if (!this.ProjectMgr.IsClosed)
    {
        Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method");

        object returnValue;

        // Do not throw since some project types will return E_FAIL if they do not support a property.
        int result = this.nestedHierarchy.GetProperty(VSConstants.VSITEMID_ROOT, propID, out returnValue);
        if (ErrorHandler.Succeeded(result))
        {
            return returnValue;
        }
    }

    return null;
}

/// <summary>
/// Delegates SetProperty calls to the inner nested hierarchy.
/// </summary>
/// <param name="propID">The property to delegate.</param>
/// <param name="value">The property to set.</param>
/// <returns>The return of the SetProperty from nested.</returns>
private int DelegateSetPropertyToNested(int propID, object value)
{
    if (this.ProjectMgr.IsClosed)
    {
        return VSConstants.E_FAIL;
    }

    Debug.Assert(this.nestedHierarchy != null, "The nested hierarchy object must be created before calling this method");

    // Do not throw since some project types will return E_FAIL if they do not support a property.
    return this.nestedHierarchy.SetProperty(VSConstants.VSITEMID_ROOT, propID, value);
}

/// <summary>
/// Starts observing changes on this file.
/// </summary>
private void ObserveNestedProjectFile()
{
    ProjectContainerNode parent = this.ProjectMgr as ProjectContainerNode;
    Debug.Assert(parent != null, "The parent project for nested projects should be subclassed from ProjectContainerNode");
    parent.NestedProjectNodeReloader.ObserveItem(this.GetMkDocument(), this.ID);
}

/// <summary>
/// Stops observing changes on this file.
/// </summary>
private void StopObservingNestedProjectFile()
{
    ProjectContainerNode parent = this.ProjectMgr as ProjectContainerNode;
    Debug.Assert(parent != null, "The parent project for nested projects should be subclassed from ProjectContainerNode");
    parent.NestedProjectNodeReloader.StopObservingItem(this.GetMkDocument());
}

/// <summary>
/// Ignores observing changes on this file depending on the boolean flag.
/// </summary>
/// <param name="ignoreFlag">Flag indicating whether or not to ignore changes (true to ignore, false to stop ignoring).</param>
private void IgnoreNestedProjectFile(bool ignoreFlag)
{
    ProjectContainerNode parent = this.ProjectMgr as ProjectContainerNode;
    Debug.Assert(parent != null, "The parent project for nested projects should be subclassed from ProjectContainerNode");
    parent.NestedProjectNodeReloader.IgnoreItemChanges(this.GetMkDocument(), ignoreFlag);
}

/// <summary>
/// We need to advise property notify sink on project properties so that
/// we know when the project file is renamed through a property.
/// </summary>
private void ConnectPropertyNotifySink()
{
    // Already advised; do not double-subscribe.
    if (this.projectPropertyNotifySinkCookie != (uint)ShellConstants.VSCOOKIE_NIL)
    {
        return;
    }

    IConnectionPoint connectionPoint = this.GetConnectionPointFromPropertySink();
    if (connectionPoint != null)
    {
        connectionPoint.Advise(this, out this.projectPropertyNotifySinkCookie);
    }
}

/// <summary>
/// Disconnects the property notify sink.
/// </summary>
private void DisconnectPropertyNotifySink()
{
    // Nothing to disconnect.
    if (this.projectPropertyNotifySinkCookie == (uint)ShellConstants.VSCOOKIE_NIL)
    {
        return;
    }

    IConnectionPoint connectionPoint = this.GetConnectionPointFromPropertySink();
    if (connectionPoint != null)
    {
        connectionPoint.Unadvise(this.projectPropertyNotifySinkCookie);
        this.projectPropertyNotifySinkCookie = (uint)ShellConstants.VSCOOKIE_NIL;
    }
}

/// <summary>
/// Gets a ConnectionPoint for the IPropertyNotifySink interface.
/// </summary>
/// <returns>The connection point, or null when the browse object does not support connection points.</returns>
private IConnectionPoint GetConnectionPointFromPropertySink()
{
    IConnectionPoint connectionPoint = null;
    object browseObject = this.GetProperty((int)__VSHPROPID.VSHPROPID_BrowseObject);
    IConnectionPointContainer connectionPointContainer = browseObject as IConnectionPointContainer;

    if (connectionPointContainer != null)
    {
        Guid guid = typeof(IPropertyNotifySink).GUID;
        connectionPointContainer.FindConnectionPoint(ref guid, out connectionPoint);
    }

    return connectionPoint;
}
#endregion
}
}
#region License // Copyright (c) 2007 James Newton-King // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. 
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
#if !(NET35 || NET20 || WINDOWS_PHONE || SILVERLIGHT || MONOTOUCH)
using System.Dynamic;
#endif
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using System.Security.Permissions;
using System.Xml.Serialization;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Utilities;
using Newtonsoft.Json.Linq;
using System.Runtime.CompilerServices;

namespace Newtonsoft.Json.Serialization
{
  /// <summary>
  /// Composite key pairing a resolver type with a contract type, used to index the
  /// shared contract cache so different resolver subclasses do not collide.
  /// </summary>
  internal struct ResolverContractKey : IEquatable<ResolverContractKey>
  {
    private readonly Type _resolverType;
    private readonly Type _contractType;

    public ResolverContractKey(Type resolverType, Type contractType)
    {
      _resolverType = resolverType;
      _contractType = contractType;
    }

    public override int GetHashCode()
    {
      // XOR of the two type hashes is adequate for this two-part key.
      return _resolverType.GetHashCode() ^ _contractType.GetHashCode();
    }

    public override bool Equals(object obj)
    {
      return (obj is ResolverContractKey) && Equals((ResolverContractKey)obj);
    }

    public bool Equals(ResolverContractKey other)
    {
      return _contractType == other._contractType && _resolverType == other._resolverType;
    }
  }

  /// <summary>
  /// Used by <see cref="JsonSerializer"/> to resolve a <see cref="JsonContract"/> for a given <see cref="Type"/>.
  /// </summary>
  public class DefaultContractResolver : IContractResolver
  {
    private static readonly IContractResolver _instance = new DefaultContractResolver(true);

    // Shared singleton used when callers do not supply their own resolver.
    internal static IContractResolver Instance
    {
      get { return _instance; }
    }

    // Converters that are always considered before falling back to plain contracts.
    private static readonly IList<JsonConverter> BuiltInConverters = new List<JsonConverter>
      {
#if !PocketPC && !SILVERLIGHT && !NET20
        new EntityKeyMemberConverter(),
#endif
#if !(NET35 || NET20 || WINDOWS_PHONE || MONODROID || MONOTOUCH)
        new ExpandoObjectConverter(),
#endif
        new BinaryConverter(),
        new KeyValuePairConverter(),
#if !(SILVERLIGHT || WINDOWS_PHONE || MONODROID || MONOTOUCH)
        new XmlNodeConverter(),
        new DataSetConverter(),
        new DataTableConverter(),
#endif
        new BsonObjectIdConverter()
      };

#if MONOTOUCH
    private readonly ThreadSafeStore<Type, JsonContract> _typeContractCache;
#else
    // NOTE(review): this static cache is read without the lock in ResolveContract and is only
    // published via reference assignment (copy-on-write under _typeContractCacheLock) — confirm
    // that this publication pattern is the intent.
    private static Dictionary<ResolverContractKey, JsonContract> _sharedContractCache;
#endif
    private static readonly object _typeContractCacheLock = new object();

    private Dictionary<ResolverContractKey, JsonContract> _instanceContractCache;
    private readonly bool _sharedCache;

    /// <summary>
    /// Gets a value indicating whether members are being get and set using dynamic code generation.
    /// This value is determined by the runtime permissions available.
    /// </summary>
    /// <value>
    /// <c>true</c> if using dynamic code generation; otherwise, <c>false</c>.
    /// </value>
    public bool DynamicCodeGeneration
    {
      get { return JsonTypeReflector.DynamicCodeGeneration; }
    }

    /// <summary>
    /// Gets or sets the default members search flags.
    /// </summary>
    /// <value>The default members search flags.</value>
    public BindingFlags DefaultMembersSearchFlags { get; set; }

    /// <summary>
    /// Gets or sets a value indicating whether compiler generated members should be serialized.
    /// </summary>
    /// <value>
    /// <c>true</c> if serialized compiler generated members; otherwise, <c>false</c>.
    /// </value>
    public bool SerializeCompilerGeneratedMembers { get; set; }

    /// <summary>
    /// Initializes a new instance of the <see cref="DefaultContractResolver"/> class.
    /// </summary>
    public DefaultContractResolver() : this(false)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="DefaultContractResolver"/> class.
    /// </summary>
    /// <param name="shareCache">
    /// If set to <c>true</c> the <see cref="DefaultContractResolver"/> will use a cache shared with other resolvers of the same type.
    /// Sharing the cache will significantly improve performance because expensive reflection will only happen once, but could cause
    /// unexpected behavior if different instances of the resolver are supposed to produce different results. When set to false it is
    /// highly recommended to reuse <see cref="DefaultContractResolver"/> instances with the <see cref="JsonSerializer"/>.
    /// </param>
    public DefaultContractResolver(bool shareCache)
    {
      DefaultMembersSearchFlags = BindingFlags.Public | BindingFlags.Instance;
      _sharedCache = shareCache;
#if MONOTOUCH
      _typeContractCache = new ThreadSafeStore<Type, JsonContract>(CreateContract);
#endif
    }

#if !MONOTOUCH
    // Returns the cache this resolver reads from: the type-wide shared cache or the per-instance one.
    private Dictionary<ResolverContractKey, JsonContract> GetCache()
    {
      if (_sharedCache)
        return _sharedContractCache;
      else
        return _instanceContractCache;
    }

    // Publishes a new cache dictionary (copy-on-write; see ResolveContract).
    private void UpdateCache(Dictionary<ResolverContractKey, JsonContract> cache)
    {
      if (_sharedCache)
        _sharedContractCache = cache;
      else
        _instanceContractCache = cache;
    }
#endif

    /// <summary>
    /// Resolves the contract for a given type.
    /// </summary>
    /// <param name="type">The type to resolve a contract for.</param>
    /// <returns>The contract for a given type.</returns>
    public virtual JsonContract ResolveContract(Type type)
    {
      if (type == null)
        throw new ArgumentNullException("type");

#if MONOTOUCH
      return _typeContractCache.Get(type);
#else
      JsonContract contract;
      // The key includes the resolver's concrete type so resolver subclasses sharing the
      // static cache do not see each other's contracts.
      ResolverContractKey key = new ResolverContractKey(GetType(), type);
      Dictionary<ResolverContractKey, JsonContract> cache = GetCache();
      if (cache == null || !cache.TryGetValue(key, out contract))
      {
        contract = CreateContract(type);

        // avoid the possibility of modifying the cache dictionary while another thread is accessing it
        lock (_typeContractCacheLock)
        {
          // Copy-on-write: clone the current cache, add the new entry, then publish the clone.
          cache = GetCache();
          Dictionary<ResolverContractKey, JsonContract> updatedCache =
            (cache != null)
              ? new Dictionary<ResolverContractKey, JsonContract>(cache)
              : new Dictionary<ResolverContractKey, JsonContract>();
          updatedCache[key] = contract;

          UpdateCache(updatedCache);
        }
      }

      return contract;
#endif
    }

    /// <summary>
    /// Gets the serializable members for the type.
    /// </summary>
    /// <param name="objectType">The type to get serializable members for.</param>
    /// <returns>The serializable members for the type.</returns>
    protected virtual List<MemberInfo> GetSerializableMembers(Type objectType)
    {
#if !PocketPC && !NET20
      DataContractAttribute dataContractAttribute = JsonTypeReflector.GetDataContractAttribute(objectType);
#endif

      // Members found with the configurable default search flags (public instance by default).
      List<MemberInfo> defaultMembers = ReflectionUtils.GetFieldsAndProperties(objectType, DefaultMembersSearchFlags)
        .Where(m => !ReflectionUtils.IsIndexedProperty(m)).ToList();
      // All members, public and non-public, so explicitly attributed members can be picked up too.
      List<MemberInfo> allMembers = ReflectionUtils.GetFieldsAndProperties(objectType, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static)
        .Where(m => !ReflectionUtils.IsIndexedProperty(m)).ToList();

      List<MemberInfo> serializableMembers = new List<MemberInfo>();
      foreach (MemberInfo member in allMembers)
      {
        // exclude members that are compiler generated if set
        if (SerializeCompilerGeneratedMembers || !member.IsDefined(typeof(CompilerGeneratedAttribute), true))
        {
          if (defaultMembers.Contains(member))
          {
            // add all members that are found by default member search
            serializableMembers.Add(member);
          }
          else
          {
            // add members that are explicitly marked with JsonProperty/DataMember attribute
            if (JsonTypeReflector.GetAttribute<JsonPropertyAttribute>(member) != null)
              serializableMembers.Add(member);
#if !PocketPC && !NET20
            else if (dataContractAttribute != null && JsonTypeReflector.GetAttribute<DataMemberAttribute>(member) != null)
              serializableMembers.Add(member);
#endif
          }
        }
      }

#if !PocketPC && !SILVERLIGHT && !NET20
      Type match;
      // don't include EntityKey on entities objects... this is a bit hacky
      if (objectType.AssignableToTypeName("System.Data.Objects.DataClasses.EntityObject", out match))
        serializableMembers = serializableMembers.Where(ShouldSerializeEntityMember).ToList();
#endif

      return serializableMembers;
    }

#if !PocketPC && !SILVERLIGHT && !NET20
    // Filters out Entity Framework EntityReference`1 properties (matched by full type name)
    // so entity back-references are not serialized.
    private bool ShouldSerializeEntityMember(MemberInfo memberInfo)
    {
      PropertyInfo propertyInfo = memberInfo as PropertyInfo;
      if (propertyInfo != null)
      {
        if (propertyInfo.PropertyType.IsGenericType && propertyInfo.PropertyType.GetGenericTypeDefinition().FullName == "System.Data.Objects.DataClasses.EntityReference`1")
          return false;
      }

      return true;
    }
#endif

    /// <summary>
    /// Creates a <see cref="JsonObjectContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonObjectContract"/> for the given type.</returns>
    protected virtual JsonObjectContract CreateObjectContract(Type objectType)
    {
      JsonObjectContract contract = new JsonObjectContract(objectType);
      InitializeContract(contract);

      contract.MemberSerialization = JsonTypeReflector.GetObjectMemberSerialization(objectType);
      contract.Properties.AddRange(CreateProperties(contract.UnderlyingType, contract.MemberSerialization));

      // check if a JsonConstructorAttribute has been defined and use that
      if (objectType.GetConstructors(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).Any(c => c.IsDefined(typeof(JsonConstructorAttribute), true)))
      {
        ConstructorInfo constructor = GetAttributeConstructor(objectType);
        if (constructor != null)
        {
          contract.OverrideConstructor = constructor;
          contract.ConstructorParameters.AddRange(CreateConstructorParameters(constructor, contract.Properties));
        }
      }
      else if (contract.DefaultCreator == null || contract.DefaultCreatorNonPublic)
      {
        // No usable default constructor: fall back to a single parameterized public constructor.
        ConstructorInfo constructor = GetParametrizedConstructor(objectType);
        if (constructor != null)
        {
          contract.ParametrizedConstructor = constructor;
          contract.ConstructorParameters.AddRange(CreateConstructorParameters(constructor, contract.Properties));
        }
      }
      return contract;
    }

    // Returns the single constructor marked with JsonConstructorAttribute, or null when none is marked.
    // NOTE(review): throws the base Exception type when more than one constructor is marked; a more
    // specific exception type would be friendlier to callers — confirm before changing, since callers
    // may already catch Exception.
    private ConstructorInfo GetAttributeConstructor(Type objectType)
    {
      IList<ConstructorInfo> markedConstructors = objectType.GetConstructors(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).Where(c => c.IsDefined(typeof(JsonConstructorAttribute), true)).ToList();

      if (markedConstructors.Count > 1)
        throw new Exception("Multiple constructors with the JsonConstructorAttribute.");
      else if (markedConstructors.Count == 1)
        return markedConstructors[0];

      return null;
    }

    // Returns the only public instance constructor when exactly one exists, otherwise null.
    private ConstructorInfo GetParametrizedConstructor(Type objectType)
    {
      IList<ConstructorInfo> constructors = objectType.GetConstructors(BindingFlags.Public | BindingFlags.Instance);

      if (constructors.Count == 1)
        return constructors[0];
      else
        return null;
    }

    /// <summary>
    /// Creates the constructor parameters.
    /// </summary>
    /// <param name="constructor">The constructor to create properties for.</param>
    /// <param name="memberProperties">The type's member properties.</param>
    /// <returns>Properties for the given <see cref="ConstructorInfo"/>.</returns>
    protected virtual IList<JsonProperty> CreateConstructorParameters(ConstructorInfo constructor, JsonPropertyCollection memberProperties)
    {
      var constructorParameters = constructor.GetParameters();

      JsonPropertyCollection parameterCollection = new JsonPropertyCollection(constructor.DeclaringType);
      foreach (ParameterInfo parameterInfo in constructorParameters)
      {
        JsonProperty matchingMemberProperty = memberProperties.GetClosestMatchProperty(parameterInfo.Name);
        // type must match as well as name
        if (matchingMemberProperty != null && matchingMemberProperty.PropertyType != parameterInfo.ParameterType)
          matchingMemberProperty = null;

        JsonProperty property = CreatePropertyFromConstructorParameter(matchingMemberProperty, parameterInfo);

        if (property != null)
        {
          parameterCollection.AddProperty(property);
        }
      }

      return parameterCollection;
    }
    /// <summary>
    /// Creates a <see cref="JsonProperty"/> for the given <see cref="ParameterInfo"/>.
    /// </summary>
    /// <param name="matchingMemberProperty">The matching member property.</param>
    /// <param name="parameterInfo">The constructor parameter.</param>
    /// <returns>A created <see cref="JsonProperty"/> for the given <see cref="ParameterInfo"/>.</returns>
    protected virtual JsonProperty CreatePropertyFromConstructorParameter(JsonProperty matchingMemberProperty, ParameterInfo parameterInfo)
    {
      JsonProperty property = new JsonProperty();
      property.PropertyType = parameterInfo.ParameterType;

      bool allowNonPublicAccess;
      bool hasExplicitAttribute;
      SetPropertySettingsFromAttributes(property, parameterInfo, parameterInfo.Name, parameterInfo.Member.DeclaringType, MemberSerialization.OptOut, out allowNonPublicAccess, out hasExplicitAttribute);

      // Constructor parameters are write-only from the serializer's point of view.
      property.Readable = false;
      property.Writable = true;

      // "inherit" values from matching member property if unset on parameter
      if (matchingMemberProperty != null)
      {
        property.PropertyName = (property.PropertyName != parameterInfo.Name) ? property.PropertyName : matchingMemberProperty.PropertyName;
        property.Converter = property.Converter ?? matchingMemberProperty.Converter;
        property.MemberConverter = property.MemberConverter ?? matchingMemberProperty.MemberConverter;
        property.DefaultValue = property.DefaultValue ?? matchingMemberProperty.DefaultValue;
        property.Required = (property.Required != Required.Default) ? property.Required : matchingMemberProperty.Required;
        property.IsReference = property.IsReference ?? matchingMemberProperty.IsReference;
        property.NullValueHandling = property.NullValueHandling ?? matchingMemberProperty.NullValueHandling;
        property.DefaultValueHandling = property.DefaultValueHandling ?? matchingMemberProperty.DefaultValueHandling;
        property.ReferenceLoopHandling = property.ReferenceLoopHandling ?? matchingMemberProperty.ReferenceLoopHandling;
        property.ObjectCreationHandling = property.ObjectCreationHandling ?? matchingMemberProperty.ObjectCreationHandling;
        property.TypeNameHandling = property.TypeNameHandling ?? matchingMemberProperty.TypeNameHandling;
      }

      return property;
    }

    /// <summary>
    /// Resolves the default <see cref="JsonConverter" /> for the contract.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>The converter declared on the type via attributes, or null.</returns>
    protected virtual JsonConverter ResolveContractConverter(Type objectType)
    {
      return JsonTypeReflector.GetJsonConverter(objectType, objectType);
    }

    // Builds a delegate that invokes the type's default constructor.
    private Func<object> GetDefaultCreator(Type createdType)
    {
      return JsonTypeReflector.ReflectionDelegateFactory.CreateDefaultConstructor<object>(createdType);
    }

#if !PocketPC && !NET20
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Portability", "CA1903:UseOnlyApiFromTargetedFramework", MessageId = "System.Runtime.Serialization.DataContractAttribute.#get_IsReference()")]
#endif
    // Populates settings shared by every contract kind: reference handling, converters,
    // default creator and serialization callbacks.
    private void InitializeContract(JsonContract contract)
    {
      JsonContainerAttribute containerAttribute = JsonTypeReflector.GetJsonContainerAttribute(contract.UnderlyingType);
      if (containerAttribute != null)
      {
        contract.IsReference = containerAttribute._isReference;
      }
#if !PocketPC && !NET20
      else
      {
        DataContractAttribute dataContractAttribute = JsonTypeReflector.GetDataContractAttribute(contract.UnderlyingType);
        // doesn't have a null value
        if (dataContractAttribute != null && dataContractAttribute.IsReference)
          contract.IsReference = true;
      }
#endif

      contract.Converter = ResolveContractConverter(contract.UnderlyingType);

      // then see whether object is compatible with any of the built in converters
      contract.InternalConverter = JsonSerializer.GetMatchingConverter(BuiltInConverters, contract.UnderlyingType);

      if (ReflectionUtils.HasDefaultConstructor(contract.CreatedType, true) || contract.CreatedType.IsValueType)
      {
        contract.DefaultCreator = GetDefaultCreator(contract.CreatedType);

        contract.DefaultCreatorNonPublic = (!contract.CreatedType.IsValueType && ReflectionUtils.GetDefaultConstructor(contract.CreatedType) == null);
      }

      ResolveCallbackMethods(contract, contract.UnderlyingType);
    }

    // Walks the inheritance chain root-first so callbacks declared on derived types
    // override those declared on base types.
    private void ResolveCallbackMethods(JsonContract contract, Type t)
    {
      if (t.BaseType != null)
        ResolveCallbackMethods(contract, t.BaseType);

      MethodInfo onSerializing;
      MethodInfo onSerialized;
      MethodInfo onDeserializing;
      MethodInfo onDeserialized;
      MethodInfo onError;

      GetCallbackMethodsForType(t, out onSerializing, out onSerialized, out onDeserializing, out onDeserialized, out onError);

      if (onSerializing != null)
        contract.OnSerializing = onSerializing;

      if (onSerialized != null)
        contract.OnSerialized = onSerialized;

      if (onDeserializing != null)
        contract.OnDeserializing = onDeserializing;

      if (onDeserialized != null)
        contract.OnDeserialized = onDeserialized;

      if (onError != null)
        contract.OnError = onError;
    }

    // Scans the methods declared directly on a single type for serialization callback attributes.
    private void GetCallbackMethodsForType(Type type, out MethodInfo onSerializing, out MethodInfo onSerialized, out MethodInfo onDeserializing, out MethodInfo onDeserialized, out MethodInfo onError)
    {
      onSerializing = null;
      onSerialized = null;
      onDeserializing = null;
      onDeserialized = null;
      onError = null;

      foreach (MethodInfo method in type.GetMethods(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly))
      {
        // compact framework errors when getting parameters for a generic method
        // lame, but generic methods should not be callbacks anyway
        if (method.ContainsGenericParameters)
          continue;

        Type prevAttributeType = null;
        ParameterInfo[] parameters = method.GetParameters();

        if (IsValidCallback(method, parameters, typeof(OnSerializingAttribute), onSerializing, ref prevAttributeType))
        {
          onSerializing = method;
        }
        if (IsValidCallback(method, parameters, typeof(OnSerializedAttribute), onSerialized, ref prevAttributeType))
        {
          onSerialized = method;
        }
        if (IsValidCallback(method, parameters, typeof(OnDeserializingAttribute), onDeserializing, ref prevAttributeType))
        {
          onDeserializing = method;
        }
        if (IsValidCallback(method, parameters, typeof(OnDeserializedAttribute), onDeserialized, ref prevAttributeType))
        {
          onDeserialized = method;
        }
        if (IsValidCallback(method, parameters, typeof(OnErrorAttribute), onError, ref prevAttributeType))
        {
          onError = method;
        }
      }
    }

    /// <summary>
    /// Creates a <see cref="JsonDictionaryContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonDictionaryContract"/> for the given type.</returns>
    protected virtual JsonDictionaryContract CreateDictionaryContract(Type objectType)
    {
      JsonDictionaryContract contract = new JsonDictionaryContract(objectType);
      InitializeContract(contract);

      contract.PropertyNameResolver = ResolvePropertyName;

      return contract;
    }

    /// <summary>
    /// Creates a <see cref="JsonArrayContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonArrayContract"/> for the given type.</returns>
    protected virtual JsonArrayContract CreateArrayContract(Type objectType)
    {
      JsonArrayContract contract = new JsonArrayContract(objectType);
      InitializeContract(contract);

      return contract;
    }

    /// <summary>
    /// Creates a <see cref="JsonPrimitiveContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonPrimitiveContract"/> for the given type.</returns>
    protected virtual JsonPrimitiveContract CreatePrimitiveContract(Type objectType)
    {
      JsonPrimitiveContract contract = new JsonPrimitiveContract(objectType);
      InitializeContract(contract);

      return contract;
    }

    /// <summary>
    /// Creates a <see cref="JsonLinqContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonLinqContract"/> for the given type.</returns>
    protected virtual JsonLinqContract CreateLinqContract(Type objectType)
    {
      JsonLinqContract contract = new JsonLinqContract(objectType);
      InitializeContract(contract);

      return contract;
    }

#if !SILVERLIGHT && !PocketPC
    /// <summary>
    /// Creates a <see cref="JsonISerializableContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonISerializableContract"/> for the given type.</returns>
    protected virtual JsonISerializableContract CreateISerializableContract(Type objectType)
    {
      JsonISerializableContract contract = new JsonISerializableContract(objectType);
      InitializeContract(contract);

      // Bind the ISerializable deserialization constructor (SerializationInfo, StreamingContext).
      ConstructorInfo constructorInfo = objectType.GetConstructor(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance, null, new [] {typeof (SerializationInfo), typeof (StreamingContext)}, null);
      if (constructorInfo != null)
      {
        MethodCall<object, object> methodCall = JsonTypeReflector.ReflectionDelegateFactory.CreateMethodCall<object>(constructorInfo);

        contract.ISerializableCreator = (args => methodCall(null, args));
      }

      return contract;
    }
#endif

#if !(NET35 || NET20 || WINDOWS_PHONE || SILVERLIGHT)
    /// <summary>
    /// Creates a <see cref="JsonDynamicContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonDynamicContract"/> for the given type.</returns>
    protected virtual JsonDynamicContract CreateDynamicContract(Type objectType)
    {
      JsonDynamicContract contract = new JsonDynamicContract(objectType);
      InitializeContract(contract);

      contract.PropertyNameResolver = ResolvePropertyName;
      contract.Properties.AddRange(CreateProperties(objectType, MemberSerialization.OptOut));

      return contract;
    }
#endif

    /// <summary>
    /// Creates a <see cref="JsonStringContract"/> for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonStringContract"/> for the given type.</returns>
    protected virtual JsonStringContract CreateStringContract(Type objectType)
    {
      JsonStringContract contract = new JsonStringContract(objectType);
      InitializeContract(contract);

      return contract;
    }

    /// <summary>
    /// Determines which contract type is created for the given type.
    /// </summary>
    /// <param name="objectType">Type of the object.</param>
    /// <returns>A <see cref="JsonContract"/> for the given type.</returns>
    protected virtual JsonContract CreateContract(Type objectType)
    {
      // Order matters: attribute checks win over structural checks, and more specific
      // structural checks (dictionary) come before more general ones (IEnumerable).
      Type t = ReflectionUtils.EnsureNotNullableType(objectType);

      if (JsonConvert.IsJsonPrimitiveType(t))
        return CreatePrimitiveContract(t);

      if (JsonTypeReflector.GetJsonObjectAttribute(t) != null)
        return CreateObjectContract(t);

      if (JsonTypeReflector.GetJsonArrayAttribute(t) != null)
        return CreateArrayContract(t);

      if (t == typeof(JToken) || t.IsSubclassOf(typeof(JToken)))
        return CreateLinqContract(t);

      if (CollectionUtils.IsDictionaryType(t))
        return CreateDictionaryContract(t);

      if (typeof(IEnumerable).IsAssignableFrom(t))
        return CreateArrayContract(t);

      if (CanConvertToString(t))
        return CreateStringContract(t);

#if !SILVERLIGHT && !PocketPC
      if (typeof(ISerializable).IsAssignableFrom(t))
        return CreateISerializableContract(t);
#endif

      // NOTE(review): this guard excludes MONOTOUCH but not SILVERLIGHT, while the
      // CreateDynamicContract definition above excludes SILVERLIGHT but not MONOTOUCH —
      // confirm these conditional-compilation symbol lists are intentionally different.
#if !(NET35 || NET20 || WINDOWS_PHONE || MONOTOUCH)
      if (typeof(IDynamicMetaObjectProvider).IsAssignableFrom(t))
        return CreateDynamicContract(t);
#endif

      return CreateObjectContract(t);
    }

    // True when the type's TypeConverter can convert it to a string
    // (additional rules continue past the end of this chunk).
    internal static bool CanConvertToString(Type type)
    {
#if !PocketPC
      TypeConverter converter = ConvertUtils.GetConverter(type);

      // use the objectType's TypeConverter if it has one and can convert to a string
      if (converter != null
#if !SILVERLIGHT
        && !(converter is ComponentConverter)
        && !(converter is ReferenceConverter)
#endif
        && converter.GetType() != typeof(TypeConverter))
      {
        if (converter.CanConvertTo(typeof(string)))
          return true;
      }
#endif

      if
(type == typeof(Type) || type.IsSubclassOf(typeof(Type))) return true; #if SILVERLIGHT || PocketPC if (type == typeof(Guid) || type == typeof(Uri) || type == typeof(TimeSpan)) return true; #endif return false; } private static bool IsValidCallback(MethodInfo method, ParameterInfo[] parameters, Type attributeType, MethodInfo currentCallback, ref Type prevAttributeType) { if (!method.IsDefined(attributeType, false)) return false; if (currentCallback != null) throw new Exception("Invalid attribute. Both '{0}' and '{1}' in type '{2}' have '{3}'.".FormatWith(CultureInfo.InvariantCulture, method, currentCallback, GetClrTypeFullName(method.DeclaringType), attributeType)); if (prevAttributeType != null) throw new Exception("Invalid Callback. Method '{3}' in type '{2}' has both '{0}' and '{1}'.".FormatWith(CultureInfo.InvariantCulture, prevAttributeType, attributeType, GetClrTypeFullName(method.DeclaringType), method)); if (method.IsVirtual) throw new Exception("Virtual Method '{0}' of type '{1}' cannot be marked with '{2}' attribute.".FormatWith(CultureInfo.InvariantCulture, method, GetClrTypeFullName(method.DeclaringType), attributeType)); if (method.ReturnType != typeof(void)) throw new Exception("Serialization Callback '{1}' in type '{0}' must return void.".FormatWith(CultureInfo.InvariantCulture, GetClrTypeFullName(method.DeclaringType), method)); if (attributeType == typeof(OnErrorAttribute)) { if (parameters == null || parameters.Length != 2 || parameters[0].ParameterType != typeof(StreamingContext) || parameters[1].ParameterType != typeof(ErrorContext)) throw new Exception("Serialization Error Callback '{1}' in type '{0}' must have two parameters of type '{2}' and '{3}'.".FormatWith(CultureInfo.InvariantCulture, GetClrTypeFullName(method.DeclaringType), method, typeof (StreamingContext), typeof(ErrorContext))); } else { if (parameters == null || parameters.Length != 1 || parameters[0].ParameterType != typeof(StreamingContext)) throw new Exception("Serialization 
Callback '{1}' in type '{0}' must have a single parameter of type '{2}'.".FormatWith(CultureInfo.InvariantCulture, GetClrTypeFullName(method.DeclaringType), method, typeof(StreamingContext))); } prevAttributeType = attributeType; return true; } internal static string GetClrTypeFullName(Type type) { if (type.IsGenericTypeDefinition || !type.ContainsGenericParameters) return type.FullName; return string.Format(CultureInfo.InvariantCulture, "{0}.{1}", new object[] { type.Namespace, type.Name }); } /// <summary> /// Creates properties for the given <see cref="JsonContract"/>. /// </summary> /// <param name="type">The type to create properties for.</param> /// /// <param name="memberSerialization">The member serialization mode for the type.</param> /// <returns>Properties for the given <see cref="JsonContract"/>.</returns> protected virtual IList<JsonProperty> CreateProperties(Type type, MemberSerialization memberSerialization) { List<MemberInfo> members = GetSerializableMembers(type); if (members == null) throw new JsonSerializationException("Null collection of seralizable members returned."); JsonPropertyCollection properties = new JsonPropertyCollection(type); foreach (MemberInfo member in members) { JsonProperty property = CreateProperty(member, memberSerialization); if (property != null) properties.AddProperty(property); } IList<JsonProperty> orderedProperties = properties.OrderBy(p => p.Order ?? -1).ToList(); return orderedProperties; } /// <summary> /// Creates the <see cref="IValueProvider"/> used by the serializer to get and set values from a member. 
/// </summary> /// <param name="member">The member.</param> /// <returns>The <see cref="IValueProvider"/> used by the serializer to get and set values from a member.</returns> protected virtual IValueProvider CreateMemberValueProvider(MemberInfo member) { #if !PocketPC && !SILVERLIGHT if (DynamicCodeGeneration) return new DynamicValueProvider(member); #endif return new ReflectionValueProvider(member); } /// <summary> /// Creates a <see cref="JsonProperty"/> for the given <see cref="MemberInfo"/>. /// </summary> /// <param name="memberSerialization">The member's parent <see cref="MemberSerialization"/>.</param> /// <param name="member">The member to create a <see cref="JsonProperty"/> for.</param> /// <returns>A created <see cref="JsonProperty"/> for the given <see cref="MemberInfo"/>.</returns> protected virtual JsonProperty CreateProperty(MemberInfo member, MemberSerialization memberSerialization) { JsonProperty property = new JsonProperty(); property.PropertyType = ReflectionUtils.GetMemberUnderlyingType(member); property.ValueProvider = CreateMemberValueProvider(member); bool allowNonPublicAccess; bool hasExplicitAttribute; SetPropertySettingsFromAttributes(property, member, member.Name, member.DeclaringType, memberSerialization, out allowNonPublicAccess, out hasExplicitAttribute); property.Readable = ReflectionUtils.CanReadMemberValue(member, allowNonPublicAccess); property.Writable = ReflectionUtils.CanSetMemberValue(member, allowNonPublicAccess, hasExplicitAttribute); property.ShouldSerialize = CreateShouldSerializeTest(member); SetIsSpecifiedActions(property, member, allowNonPublicAccess); return property; } private void SetPropertySettingsFromAttributes(JsonProperty property, ICustomAttributeProvider attributeProvider, string name, Type declaringType, MemberSerialization memberSerialization, out bool allowNonPublicAccess, out bool hasExplicitAttribute) { hasExplicitAttribute = false; #if !PocketPC && !NET20 DataContractAttribute dataContractAttribute = 
JsonTypeReflector.GetDataContractAttribute(declaringType); DataMemberAttribute dataMemberAttribute; if (dataContractAttribute != null && attributeProvider is MemberInfo) dataMemberAttribute = JsonTypeReflector.GetDataMemberAttribute((MemberInfo)attributeProvider); else dataMemberAttribute = null; #endif JsonPropertyAttribute propertyAttribute = JsonTypeReflector.GetAttribute<JsonPropertyAttribute>(attributeProvider); if (propertyAttribute != null) hasExplicitAttribute = true; bool hasIgnoreAttribute = (JsonTypeReflector.GetAttribute<JsonIgnoreAttribute>(attributeProvider) != null); string mappedName; if (propertyAttribute != null && propertyAttribute.PropertyName != null) mappedName = propertyAttribute.PropertyName; #if !PocketPC && !NET20 else if (dataMemberAttribute != null && dataMemberAttribute.Name != null) mappedName = dataMemberAttribute.Name; #endif else mappedName = name; property.PropertyName = ResolvePropertyName(mappedName); property.UnderlyingName = name; if (propertyAttribute != null) { property.Required = propertyAttribute.Required; property.Order = propertyAttribute._order; } #if !PocketPC && !NET20 else if (dataMemberAttribute != null) { property.Required = (dataMemberAttribute.IsRequired) ? Required.AllowNull : Required.Default; property.Order = (dataMemberAttribute.Order != -1) ? (int?) 
dataMemberAttribute.Order : null; } #endif else { property.Required = Required.Default; } property.Ignored = (hasIgnoreAttribute || (memberSerialization == MemberSerialization.OptIn && propertyAttribute == null #if !PocketPC && !NET20 && dataMemberAttribute == null #endif )); // resolve converter for property // the class type might have a converter but the property converter takes presidence property.Converter = JsonTypeReflector.GetJsonConverter(attributeProvider, property.PropertyType); property.MemberConverter = JsonTypeReflector.GetJsonConverter(attributeProvider, property.PropertyType); DefaultValueAttribute defaultValueAttribute = JsonTypeReflector.GetAttribute<DefaultValueAttribute>(attributeProvider); property.DefaultValue = (defaultValueAttribute != null) ? defaultValueAttribute.Value : null; property.NullValueHandling = (propertyAttribute != null) ? propertyAttribute._nullValueHandling : null; property.DefaultValueHandling = (propertyAttribute != null) ? propertyAttribute._defaultValueHandling : null; property.ReferenceLoopHandling = (propertyAttribute != null) ? propertyAttribute._referenceLoopHandling : null; property.ObjectCreationHandling = (propertyAttribute != null) ? propertyAttribute._objectCreationHandling : null; property.TypeNameHandling = (propertyAttribute != null) ? propertyAttribute._typeNameHandling : null; property.IsReference = (propertyAttribute != null) ? 
propertyAttribute._isReference : null; allowNonPublicAccess = false; if ((DefaultMembersSearchFlags & BindingFlags.NonPublic) == BindingFlags.NonPublic) allowNonPublicAccess = true; if (propertyAttribute != null) allowNonPublicAccess = true; #if !PocketPC && !NET20 if (dataMemberAttribute != null) { allowNonPublicAccess = true; hasExplicitAttribute = true; } #endif } private Predicate<object> CreateShouldSerializeTest(MemberInfo member) { MethodInfo shouldSerializeMethod = member.DeclaringType.GetMethod(JsonTypeReflector.ShouldSerializePrefix + member.Name, new Type[0]); if (shouldSerializeMethod == null || shouldSerializeMethod.ReturnType != typeof(bool)) return null; MethodCall<object, object> shouldSerializeCall = JsonTypeReflector.ReflectionDelegateFactory.CreateMethodCall<object>(shouldSerializeMethod); return o => (bool) shouldSerializeCall(o); } private void SetIsSpecifiedActions(JsonProperty property, MemberInfo member, bool allowNonPublicAccess) { MemberInfo specifiedMember = member.DeclaringType.GetProperty(member.Name + JsonTypeReflector.SpecifiedPostfix); if (specifiedMember == null) specifiedMember = member.DeclaringType.GetField(member.Name + JsonTypeReflector.SpecifiedPostfix); if (specifiedMember == null || ReflectionUtils.GetMemberUnderlyingType(specifiedMember) != typeof(bool)) { return; } Func<object, object> specifiedPropertyGet = JsonTypeReflector.ReflectionDelegateFactory.CreateGet<object>(specifiedMember); property.GetIsSpecified = o => (bool)specifiedPropertyGet(o); if (ReflectionUtils.CanSetMemberValue(specifiedMember, allowNonPublicAccess, false)) property.SetIsSpecified = JsonTypeReflector.ReflectionDelegateFactory.CreateSet<object>(specifiedMember); } /// <summary> /// Resolves the name of the property. /// </summary> /// <param name="propertyName">Name of the property.</param> /// <returns>Name of the property.</returns> protected internal virtual string ResolvePropertyName(string propertyName) { return propertyName; } } }
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;

/// <summary>
/// Back-office page listing outpatient ("Rawat Jalan") tariffs in a DataGrid,
/// with paging, sorting, filtering by polyclinic / service group, and
/// per-row edit/delete links gated by Session permission flags.
/// </summary>
public partial class Backoffice_Tarif_ListRawatJalan : System.Web.UI.Page
{
    // Running row number for the current grid page (page size * page index);
    // presumably consumed by the .aspx markup for row numbering — confirm in the view.
    public int NoKe = 0;
    // Session key under which the tariff DataSet is cached between postbacks.
    protected string dsReportSessionName = "dsTarifRawatJalan";

    /// <summary>
    /// First-load initialization: authentication/authorization checks via Session
    /// flags, toolbar setup, combo population and initial grid binding.
    /// </summary>
    protected void Page_Load(object sender, EventArgs e)
    {
        if (!Page.IsPostBack)
        {
            // Not logged in -> bounce to the login page.
            // (Response.Redirect ends the request, so execution stops here.)
            if (Session["SIMRS.UserId"] == null)
            {
                Response.Redirect(Request.ApplicationPath + "/Backoffice/login.aspx");
            }
            // NOTE(review): UserId is read but never used below.
            int UserId = (int)Session["SIMRS.UserId"];
            // Missing module permission -> unauthorized page.
            if (Session["TarifManagement"] == null)
            {
                Response.Redirect(Request.ApplicationPath + "/Backoffice/UnAuthorize.aspx");
            }
            // "Add" button is only shown when the AddTarif permission flag is set.
            if (Session["AddTarif"] != null)
            {
                btnNew.Visible = true;
                btnNew.Text = "<img alt=\"New\" src=\"" + Request.ApplicationPath + "/images/new_f2.gif\" align=\"middle\" border=\"0\" name=\"new\" value=\"new\">" + Resources.GetString("Referensi", "AddTarif");
            }
            else
                btnNew.Visible = false;
            btnSearch.Text = Resources.GetString("", "Search");
            ImageButtonFirst.ImageUrl = Request.ApplicationPath + "/images/navigator/nbFirst.gif";
            ImageButtonPrev.ImageUrl = Request.ApplicationPath + "/images/navigator/nbPrevpage.gif";
            ImageButtonNext.ImageUrl = Request.ApplicationPath + "/images/navigator/nbNextpage.gif";
            ImageButtonLast.ImageUrl = Request.ApplicationPath + "/images/navigator/nbLast.gif";
            GetListPoliklinik();
            GetListKelompokLayanan();
            UpdateDataView(true);
        }
    }

    /// <summary>
    /// Fills the service-group combo from the database, with a leading blank
    /// item, preselecting the entry matching the KelompokLayananId query-string
    /// value when present.
    /// </summary>
    public void GetListKelompokLayanan()
    {
        string KelompokLayananId = "";
        if (Request.QueryString["KelompokLayananId"] != null && Request.QueryString["KelompokLayananId"].ToString() != "")
            KelompokLayananId = Request.QueryString["KelompokLayananId"].ToString();
        SIMRS.DataAccess.RS_KelompokLayanan myObj = new SIMRS.DataAccess.RS_KelompokLayanan();
        DataTable dt = myObj.GetList();
        cmbKelompokLayanan.Items.Clear();
        int i = 0;
        // Leading empty item = "no filter".
        cmbKelompokLayanan.Items.Add("");
        cmbKelompokLayanan.Items[i].Text = "";
        cmbKelompokLayanan.Items[i].Value = "";
        i++;
        foreach (DataRow dr in dt.Rows)
        {
            cmbKelompokLayanan.Items.Add("");
            cmbKelompokLayanan.Items[i].Text = dr["Kode"].ToString() + ". " + dr["Nama"].ToString();
            cmbKelompokLayanan.Items[i].Value = dr["Id"].ToString();
            if (dr["Id"].ToString() == KelompokLayananId)
                cmbKelompokLayanan.SelectedIndex = i;
            i++;
        }
    }

    /// <summary>
    /// Fills the polyclinic combo with outpatient polyclinics only
    /// (JenisPoliklinikId == "1"), with a leading blank item, preselecting the
    /// entry matching the PoliklinikId query-string value when present.
    /// </summary>
    public void GetListPoliklinik()
    {
        string PoliklinikId = "";
        if (Request.QueryString["PoliklinikId"] != null && Request.QueryString["PoliklinikId"].ToString() != "")
            PoliklinikId = Request.QueryString["PoliklinikId"].ToString();
        SIMRS.DataAccess.RS_Poliklinik myObj = new SIMRS.DataAccess.RS_Poliklinik();
        DataTable dt = myObj.GetList();
        cmbPoliklinik.Items.Clear();
        int i = 0;
        // Leading empty item = "no filter".
        cmbPoliklinik.Items.Add("");
        cmbPoliklinik.Items[i].Text = "";
        cmbPoliklinik.Items[i].Value = "";
        i++;
        foreach (DataRow dr in dt.Rows)
        {
            // "1" presumably denotes outpatient polyclinics — confirm against the
            // JenisPoliklinik reference table.
            if (dr["JenisPoliklinikId"].ToString() == "1")
            {
                cmbPoliklinik.Items.Add("");
                cmbPoliklinik.Items[i].Text = "[" + dr["Kode"].ToString() + "] " + dr["Nama"].ToString() + " (" + dr["KelompokPoliklinikNama"].ToString() + ")";
                cmbPoliklinik.Items[i].Value = dr["Id"].ToString();
                if (dr["Id"].ToString() == PoliklinikId)
                    cmbPoliklinik.SelectedIndex = i;
                i++;
            }
        }
    }

    #region .Update View Data
    //////////////////////////////////////////////////////////////////////
    // PhysicalDataRead
    // ------------------------------------------------------------------
    /// <summary>
    /// This function is responsible for loading data from database.
    /// </summary>
    /// <returns>DataSet containing all outpatient tariffs.</returns>
    public DataSet PhysicalDataRead()
    {
        // Local variables
        DataSet oDS = new DataSet();
        // Get Data
        SIMRS.DataAccess.RS_Layanan myObj = new SIMRS.DataAccess.RS_Layanan();
        DataTable myData = myObj.SelectAllTarifRawatJalan();
        oDS.Tables.Add(myData);
        return oDS;
    }

    /// <summary>
    /// This function is responsible for binding data to the DataGrid:
    /// applies the stored sort, clamps the current page index to the available
    /// page count, binds, and refreshes the pager labels.
    /// </summary>
    /// <param name="dv">Filtered view of the cached tariff table.</param>
    private void BindData(DataView dv)
    {
        // Sets the sorting order (SortField/SortAscending live in grid Attributes)
        dv.Sort = DataGridList.Attributes["SortField"];
        if (DataGridList.Attributes["SortAscending"] == "no")
            dv.Sort += " DESC";
        if (dv.Count > 0)
        {
            DataGridList.ShowFooter = false;
            int intRowCount = dv.Count;
            // NOTE(review): "Saze" is a typo for "Size" (local name only).
            int intPageSaze = DataGridList.PageSize;
            // Ceiling division: add a page for any remainder rows.
            int intPageCount = intRowCount / intPageSaze;
            if (intRowCount - (intPageCount * intPageSaze) > 0)
                intPageCount = intPageCount + 1;
            // Clamp the page index when filtering shrank the result set.
            if (DataGridList.CurrentPageIndex >= intPageCount)
                DataGridList.CurrentPageIndex = intPageCount - 1;
        }
        else
        {
            DataGridList.ShowFooter = true;
            DataGridList.CurrentPageIndex = 0;
        }
        // Re-binds the grid
        NoKe = DataGridList.PageSize * DataGridList.CurrentPageIndex;
        DataGridList.DataSource = dv;
        DataGridList.DataBind();
        int CurrentPage = DataGridList.CurrentPageIndex + 1;
        lblCurrentPage.Text = CurrentPage.ToString();
        lblTotalPage.Text = DataGridList.PageCount.ToString();
        lblTotalRecord.Text = dv.Count.ToString();
    }

    /// <summary>
    /// This function is responsible for loading data from database and storing it in Session.
    /// </summary>
    /// <param name="strDataSessionName">Session key to cache the DataSet under.</param>
    public void DataFromSourceToMemory(String strDataSessionName)
    {
        // Gets rows from the data source
        DataSet oDS = PhysicalDataRead();
        // Stores it in the session cache
        Session[strDataSessionName] = oDS;
    }

    /// <summary>
    /// This function is responsible for updating the data view of the datagrid.
    /// Builds a RowFilter from the two combos and rebinds.
    /// </summary>
    /// <param name="requery">true = get data from database, false = get data from session</param>
    public void UpdateDataView(bool requery)
    {
        // Retrieves the data
        if ((Session[dsReportSessionName] == null) || (requery))
        {
            // Restore the page index passed back from the add/edit pages.
            if (Request.QueryString["CurrentPage"] != null && Request.QueryString["CurrentPage"].ToString() != "")
                DataGridList.CurrentPageIndex = int.Parse(Request.QueryString["CurrentPage"].ToString());
            DataFromSourceToMemory(dsReportSessionName);
        }
        DataSet ds = (DataSet)Session[dsReportSessionName];
        DataView dv = ds.Tables[0].DefaultView;
        // Filter values are ListItem.Value entries (numeric DB ids loaded above),
        // not free text, so the concatenated RowFilter is assumed safe — verify
        // that nothing else writes these combos.
        string filter = "";
        if (cmbPoliklinik.SelectedIndex > 0)
        {
            filter += filter != "" ? " AND " : "";
            filter += " PoliklinikId = " + cmbPoliklinik.SelectedItem.Value;
        }
        if (cmbKelompokLayanan.SelectedIndex > 0)
        {
            filter += filter != "" ? " AND " : "";
            filter += " KelompokLayananId = " + cmbKelompokLayanan.SelectedItem.Value;
        }
        dv.RowFilter = filter;
        BindData(dv);
    }

    /// <summary>
    /// Rebinds the grid from the cached DataSet without applying the combo
    /// filters (used by paging/sorting handlers).
    /// </summary>
    public void UpdateDataView()
    {
        // Retrieves the data
        if ((Session[dsReportSessionName] == null))
        {
            DataFromSourceToMemory(dsReportSessionName);
        }
        DataSet ds = (DataSet)Session[dsReportSessionName];
        BindData(ds.Tables[0].DefaultView);
    }
    #endregion

    #region .Event DataGridList
    //////////////////////////////////////////////////////////////////////
    //////////////////////////////////////////////////////////////////////
    //                           HANDLERs                                //
    //////////////////////////////////////////////////////////////////////
    //////////////////////////////////////////////////////////////////////
    /// <summary>
    /// This function is responsible for loading the content of the new
    /// page when you click on the pager to move to a new page.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void PageChanged(Object sender, DataGridPageChangedEventArgs e)
    {
        DataGridList.CurrentPageIndex = e.NewPageIndex;
        DataGridList.SelectedIndex = -1;
        UpdateDataView();
    }

    /// <summary>
    /// Helper that routes an explicit page index through PageChanged.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="nPageIndex">Zero-based target page index.</param>
    public void GoToPage(Object sender, int nPageIndex)
    {
        DataGridPageChangedEventArgs evPage;
        evPage = new DataGridPageChangedEventArgs(sender, nPageIndex);
        PageChanged(sender, evPage);
    }

    /// <summary>
    /// Pager navigation: first page.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void GoToFirst(Object sender, ImageClickEventArgs e)
    {
        GoToPage(sender, 0);
    }

    /// <summary>
    /// Pager navigation: previous page (clamped at the first page).
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void GoToPrev(Object sender, ImageClickEventArgs e)
    {
        if (DataGridList.CurrentPageIndex > 0)
        {
            GoToPage(sender, DataGridList.CurrentPageIndex - 1);
        }
        else
        {
            GoToPage(sender, 0);
        }
    }

    /// <summary>
    /// Pager navigation: next page (no-op on the last page).
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void GoToNext(Object sender, System.Web.UI.ImageClickEventArgs e)
    {
        if (DataGridList.CurrentPageIndex < (DataGridList.PageCount - 1))
        {
            GoToPage(sender, DataGridList.CurrentPageIndex + 1);
        }
    }

    /// <summary>
    /// Pager navigation: last page.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void GoToLast(Object sender, ImageClickEventArgs e)
    {
        GoToPage(sender, DataGridList.PageCount - 1);
    }

    /// <summary>
    /// This function is invoked when you click on a column's header to
    /// sort by that. It just saves the current sort field name and
    /// refreshes the grid. Clicking the already-sorted column toggles the order.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void SortByColumn(Object sender, DataGridSortCommandEventArgs e)
    {
        String strSortBy = DataGridList.Attributes["SortField"];
        String strSortAscending = DataGridList.Attributes["SortAscending"];
        // Sets the new sorting field
        DataGridList.Attributes["SortField"] = e.SortExpression;
        // Sets the order (defaults to ascending). If you click on the
        // sorted column, the order reverts.
        DataGridList.Attributes["SortAscending"] = "yes";
        if (e.SortExpression == strSortBy)
            DataGridList.Attributes["SortAscending"] = (strSortAscending == "yes" ? "no" : "yes");
        // Refreshes the view
        OnClearSelection(null, null);
        UpdateDataView();
    }

    /// <summary>
    /// The function gets invoked when a new item is being created in
    /// the datagrid. This applies to pager, header, footer, regular
    /// and alternating items. Decorates the sorted header with an arrow
    /// image and restyles the pager items.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void PageItemCreated(Object sender, DataGridItemEventArgs e)
    {
        // Get the newly created item
        ListItemType itemType = e.Item.ItemType;
        //////////////////////////////////////////////////////////
        // Is it the HEADER?
        if (itemType == ListItemType.Header)
        {
            for (int i = 0; i < DataGridList.Columns.Count; i++)
            {
                // draw to reflect sorting
                if (DataGridList.Attributes["SortField"] == DataGridList.Columns[i].SortExpression)
                {
                    //////////////////////////////////////////////
                    // Should be much easier this way:
                    // ------------------------------------------
                    // TableCell cell = e.Item.Cells[i];
                    // Label lblSorted = new Label();
                    // lblSorted.Font = "webdings";
                    // lblSorted.Text = strOrder;
                    // cell.Controls.Add(lblSorted);
                    //
                    // but it seems it doesn't work <g>
                    //////////////////////////////////////////////
                    // Add a non-clickable triangle to mean desc or asc.
                    // The </a> ensures that what follows is non-clickable
                    TableCell cell = e.Item.Cells[i];
                    LinkButton lb = (LinkButton)cell.Controls[0];
                    //lb.Text += "</a>&nbsp;<span style=font-family:webdings;>" + GetOrderSymbol() + "</span>";
                    lb.Text += "</a>&nbsp;<img src=" + Request.ApplicationPath + "/images/icons/" + GetOrderSymbol() + " >";
                }
            }
        }
        //////////////////////////////////////////////////////////
        // Is it the PAGER?
        if (itemType == ListItemType.Pager)
        {
            // There's just one control in the list...
            TableCell pager = (TableCell)e.Item.Controls[0];
            // Enumerates all the items in the pager...
            // (step 2: pager children alternate between number controls and
            // literal separators)
            for (int i = 0; i < pager.Controls.Count; i += 2)
            {
                // It can be either a Label or a Link button
                // (current page renders as a Label; the cast failure routes
                // link buttons through the catch — intentional control flow)
                try
                {
                    Label l = (Label)pager.Controls[i];
                    l.Text = "Hal " + l.Text;
                    l.CssClass = "CurrentPage";
                }
                catch
                {
                    LinkButton h = (LinkButton)pager.Controls[i];
                    h.Text = "[ " + h.Text + " ]";
                    h.CssClass = "HotLink";
                }
            }
        }
    }

    /// <summary>
    /// Verifies whether the current sort is ascending or descending and
    /// returns an appropriate arrow image file name.
    /// </summary>
    /// <returns>Arrow image file name for the current sort direction.</returns>
    private String GetOrderSymbol()
    {
        bool bDescending = (bool)(DataGridList.Attributes["SortAscending"] == "no");
        //return (bDescending ? " 6" : " 5");
        return (bDescending ? "downbr.gif" : "upbr.gif");
    }

    /// <summary>
    /// When clicked clears the current selection if any
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void OnClearSelection(Object sender, EventArgs e)
    {
        DataGridList.SelectedIndex = -1;
    }
    #endregion

    #region .Event Button
    /// <summary>
    /// When clicked, redirect to the add form for inserting a new record,
    /// carrying the current page and filter selections in the query string.
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void OnNewRecord(Object sender, EventArgs e)
    {
        string PoliklinikId = "";
        if (cmbPoliklinik.SelectedIndex > 0)
            PoliklinikId = cmbPoliklinik.SelectedItem.Value;
        string KelompokLayananId = "";
        if (cmbKelompokLayanan.SelectedIndex > 0)
            KelompokLayananId = cmbKelompokLayanan.SelectedItem.Value;
        string CurrentPage = DataGridList.CurrentPageIndex.ToString();
        Response.Redirect("AddRawatJalan.aspx?CurrentPage=" + CurrentPage + "&PoliklinikId=" + PoliklinikId + "&KelompokLayananId=" + KelompokLayananId);
    }

    /// <summary>
    /// When clicked, filter data by the selected polyclinic / service group.
    /// NOTE(review): duplicates the filter logic in UpdateDataView(bool).
    /// </summary>
    /// <param name="sender"></param>
    /// <param name="e"></param>
    public void OnSearch(Object sender, System.EventArgs e)
    {
        if ((Session[dsReportSessionName] == null))
        {
            DataFromSourceToMemory(dsReportSessionName);
        }
        DataSet ds = (DataSet)Session[dsReportSessionName];
        DataView dv = ds.Tables[0].DefaultView;
        string filter = "";
        if (cmbPoliklinik.SelectedIndex > 0)
        {
            filter += filter != "" ? " AND ":"";
            filter += " PoliklinikId = " + cmbPoliklinik.SelectedItem.Value;
        }
        if (cmbKelompokLayanan.SelectedIndex > 0)
        {
            filter += filter != "" ? " AND " : "";
            filter += " KelompokLayananId = " + cmbKelompokLayanan.SelectedItem.Value;
        }
        dv.RowFilter = filter;
        BindData(dv);
    }
    #endregion

    #region .Update Link Item Butom
    /// <summary>
    /// Builds the per-row edit/delete link HTML, honoring the EditTarif /
    /// DeleteTarif permission flags in Session.
    /// </summary>
    /// <param name="Id">Tariff row id.</param>
    /// <param name="Nama">Row display name (currently unused in the markup).</param>
    /// <param name="CurrentPage">Current page index to round-trip to the edit pages.</param>
    /// <returns>HTML anchor markup, or an empty string when no permission applies.</returns>
    public string GetLinkButton(string Id, string Nama, string CurrentPage)
    {
        string szResult = "";
        if (Session["EditTarif"] != null)
        {
            szResult += "<a class=\"toolbar\" href=\"EditRawatJalan.aspx?CurrentPage=" + CurrentPage + "&Id=" + Id + "\" ";
            szResult += ">" + Resources.GetString("", "Edit") + "</a>";
        }
        if (Session["DeleteTarif"] != null)
        {
            szResult += "<a class=\"toolbar\" href=\"DeleteRawatJalan.aspx?CurrentPage=" + CurrentPage + "&Id=" + Id + "\" ";
            szResult += ">" + Resources.GetString("", "Delete") + "</a>";
        }
        return szResult;
    }
    #endregion
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.Network { using Microsoft.Azure; using Microsoft.Azure.Management; using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using System.Collections; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; /// <summary> /// NetworkWatchersOperations operations. /// </summary> public partial interface INetworkWatchersOperations { /// <summary> /// Creates or updates a network watcher in the specified resource /// group. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkWatcherName'> /// The name of the network watcher. /// </param> /// <param name='parameters'> /// Parameters that define the network watcher resource. /// </param> /// <param name='customHeaders'> /// The headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the created or updated network watcher and the response headers.</returns>
        Task<AzureOperationResponse<NetworkWatcher>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, NetworkWatcher parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets the specified network watcher by resource group.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the requested network watcher and the response headers.</returns>
        Task<AzureOperationResponse<NetworkWatcher>> GetWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Deletes the specified network watcher resource.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the response headers (no response body).</returns>
        Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets all network watchers by resource group.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the network watchers in the resource group and the response headers.</returns>
        Task<AzureOperationResponse<IEnumerable<NetworkWatcher>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets all network watchers by subscription.</summary>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the network watchers in the subscription and the response headers.</returns>
        Task<AzureOperationResponse<IEnumerable<NetworkWatcher>>> ListAllWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets the current network topology by resource group.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the representation of topology.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the topology and the response headers.</returns>
        Task<AzureOperationResponse<Topology>> GetTopologyWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, TopologyParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Verify IP flow from the specified VM to a location given the currently configured NSG rules.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the IP flow to be verified.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the IP flow verification result and the response headers.</returns>
        Task<AzureOperationResponse<VerificationIPFlowResult>> VerifyIPFlowWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, VerificationIPFlowParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets the next hop from the specified VM.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the source and destination endpoint.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the next hop result and the response headers.</returns>
        Task<AzureOperationResponse<NextHopResult>> GetNextHopWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, NextHopParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets the configured and effective security group rules on the specified VM.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the VM to check security groups for.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the security group view result and the response headers.</returns>
        Task<AzureOperationResponse<SecurityGroupViewResult>> GetVMSecurityRulesWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, SecurityGroupViewParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Initiate troubleshooting on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define the resource to troubleshoot.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the troubleshooting result and the response headers.</returns>
        Task<AzureOperationResponse<TroubleshootingResult>> GetTroubleshootingWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, TroubleshootingParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Get the last completed troubleshooting result on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define the resource to query the troubleshooting result.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the last completed troubleshooting result and the response headers.</returns>
        Task<AzureOperationResponse<TroubleshootingResult>> GetTroubleshootingResultWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, QueryTroubleshootingParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Configures flow log on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define the configuration of flow log.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the applied flow log configuration and the response headers.</returns>
        Task<AzureOperationResponse<FlowLogInformation>> SetFlowLogConfigurationWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, FlowLogInformation parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Queries status of flow log on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define a resource to query flow log status.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the flow log status and the response headers.</returns>
        Task<AzureOperationResponse<FlowLogInformation>> GetFlowLogStatusWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, FlowLogStatusParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Verifies the possibility of establishing a direct TCP connection from a
        /// virtual machine to a given endpoint including another VM or an arbitrary
        /// remote server.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that determine how the connectivity check will be performed.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the connectivity information and the response headers.</returns>
        Task<AzureOperationResponse<ConnectivityInformation>> CheckConnectivityWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, ConnectivityParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the relative latency score for internet service providers from a
        /// specified location to Azure regions.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that determine Azure reachability report configuration.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the Azure reachability report and the response headers.</returns>
        Task<AzureOperationResponse<AzureReachabilityReport>> GetAzureReachabilityReportWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, AzureReachabilityReportParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Lists all available internet service providers for a specified Azure region.</summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that scope the list of available providers.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the available providers list and the response headers.</returns>
        Task<AzureOperationResponse<AvailableProvidersList>> ListAvailableProvidersWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, AvailableProvidersListParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Deletes the specified network watcher resource.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the response headers (no response body).</returns>
        Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Verify IP flow from the specified VM to a location given the currently configured NSG rules.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the IP flow to be verified.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the IP flow verification result and the response headers.</returns>
        Task<AzureOperationResponse<VerificationIPFlowResult>> BeginVerifyIPFlowWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, VerificationIPFlowParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets the next hop from the specified VM.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the source and destination endpoint.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the next hop result and the response headers.</returns>
        Task<AzureOperationResponse<NextHopResult>> BeginGetNextHopWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, NextHopParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Gets the configured and effective security group rules on the specified VM.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher.</param>
        /// <param name='parameters'>Parameters that define the VM to check security groups for.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the security group view result and the response headers.</returns>
        Task<AzureOperationResponse<SecurityGroupViewResult>> BeginGetVMSecurityRulesWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, SecurityGroupViewParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Initiate troubleshooting on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define the resource to troubleshoot.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the troubleshooting result and the response headers.</returns>
        Task<AzureOperationResponse<TroubleshootingResult>> BeginGetTroubleshootingWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, TroubleshootingParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Get the last completed troubleshooting result on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define the resource to query the troubleshooting result.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the last completed troubleshooting result and the response headers.</returns>
        Task<AzureOperationResponse<TroubleshootingResult>> BeginGetTroubleshootingResultWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, QueryTroubleshootingParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Configures flow log on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define the configuration of flow log.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the applied flow log configuration and the response headers.</returns>
        Task<AzureOperationResponse<FlowLogInformation>> BeginSetFlowLogConfigurationWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, FlowLogInformation parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Queries status of flow log on a specified resource.</summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that define a resource to query flow log status.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the flow log status and the response headers.</returns>
        Task<AzureOperationResponse<FlowLogInformation>> BeginGetFlowLogStatusWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, FlowLogStatusParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Verifies the possibility of establishing a direct TCP connection from a
        /// virtual machine to a given endpoint including another VM or an arbitrary
        /// remote server.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that determine how the connectivity check will be performed.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the connectivity information and the response headers.</returns>
        Task<AzureOperationResponse<ConnectivityInformation>> BeginCheckConnectivityWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, ConnectivityParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the relative latency score for internet service providers from a
        /// specified location to Azure regions.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that determine Azure reachability report configuration.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the Azure reachability report and the response headers.</returns>
        Task<AzureOperationResponse<AzureReachabilityReport>> BeginGetAzureReachabilityReportWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, AzureReachabilityReportParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>Lists all available internet service providers for a specified Azure region.</summary>
        /// <param name='resourceGroupName'>The name of the network watcher resource group.</param>
        /// <param name='networkWatcherName'>The name of the network watcher resource.</param>
        /// <param name='parameters'>Parameters that scope the list of available providers.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">Thrown when the operation returned an invalid status code.</exception>
        /// <exception cref="Microsoft.Rest.SerializationException">Thrown when unable to deserialize the response.</exception>
        /// <exception cref="Microsoft.Rest.ValidationException">Thrown when a required parameter is null.</exception>
        /// <returns>A response object containing the available providers list and the response headers.</returns>
        Task<AzureOperationResponse<AvailableProvidersList>> BeginListAvailableProvidersWithHttpMessagesAsync(string resourceGroupName, string networkWatcherName, AvailableProvidersListParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;

namespace Ceen.Database
{
    /// <summary>
    /// Helper class that can parse a limited subset of an SQL where statement
    /// </summary>
    public static class FilterParser
    {
        /// <summary>
        /// Exception for invalid filter strings
        /// </summary>
        public class ParserException : Exception
        {
            /// <summary>
            /// Creates a new parser exception
            /// </summary>
            /// <param name="message">The error message</param>
            public ParserException(string message) : base(message) { }
        }

        /// <summary>
        /// The UNIX timestamp epoch value.
        /// NOTE(review): constructed without an explicit DateTimeKind; presumably
        /// timestamps are handled as UTC by callers -- confirm before relying on Kind.
        /// </summary>
        private static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0);

        /// <summary>
        /// The regular expression to use for parsing an orderBy string
        /// </summary>
        private static readonly Regex _orderByTokenizer = new Regex(
            @"\s*(?<sortorder1>\+|\-)?((?<nonquoted>\w+)|((?<quoted>""[^""]*"")))(\s+(?<sortorder2>\w+))?\s*(?<comma>,?)"
        );

        /// <summary>
        /// A regular expression to tokenize a filter string,
        /// looking for quoted and unquoted identifiers
        /// and supporting a small number of arithmetic and compare operators.
        /// Note: the number group only matches values that contain a decimal point;
        /// plain integers fall through to the nonquoted group, which produces the
        /// same token text, so both forms tokenize correctly.
        /// </summary>
        private static readonly Regex _filterTokenizer = new Regex(
            @"(?<number>(\d+(\.\d*))|(\.\d+))|(?<nonquoted>\w+)|((?<quoted>""[^""]*""))|(?<special>\<=|\>=|==|!=|<>|\(|\)|<|=|>|\+|-|\*|/|\%|,)|(?<whitespace>\s+)"
        );

        /// <summary>
        /// Operator precedence table, based on:
        /// https://www.sqlite.org/lang_expr.html
        ///
        /// The operators are applied bottom-up, meaning we split on the lowest
        /// priority and then recursively evaluate the parts, leaving us with
        /// the correct &quot;higher-value-first&quot; bindings.
        ///
        /// But we need to make sure the parenthesis is always binding hardest
        /// as it changes precedence, thus it has the lowest priority value
        /// </summary>
        private static readonly Dictionary<string, int> _priorityTable = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
        {
            { ")", 50 },
            { "(", 1 },
            { ",", 2 },

            { "*", 40 },
            { "%", 40 },
            { "/", 40 },

            { "+", 35 },
            { "-", 35 },
            { "not", 35 },

            { "<", 30 },
            { "<=", 30 },
            { ">", 30 },
            { ">=", 30 },

            { "=", 25 },
            { "==", 25 },
            { "!=", 25 },
            { "<>", 25 },
            { "in", 25 },
            { "like", 25 },

            { "and", 20 },
            { "or", 15 }
        };

        /// <summary>
        /// Map of all supported arithmetic operators
        /// </summary>
        private static readonly HashSet<string> _arithmeticOps = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
        {
            "*", "%", "/", "+", "-"
        };

        /// <summary>
        /// Map of all supported compare operators
        /// </summary>
        private static readonly HashSet<string> _compareOps = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
        {
            "<", ">", "<=", ">=", "=", "==", "!=", "<>", "in", "like"
        };

        /// <summary>
        /// Map of all supported binary operators
        /// </summary>
        private static readonly HashSet<string> _binOps = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
        {
            "*", "%", "/", "+", "-",
            "<", ">", "<=", ">=", "=", "==", "!=", "<>", "in", "like",
            "and", "or"
        };

        /// <summary>
        /// Map of all supported unary operators
        /// </summary>
        private static readonly HashSet<string> _unOps = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
        {
            "+", "-", "not"
        };

        /// <summary>
        /// Structure for keeping track of semi-parsed tokens
        /// </summary>
        private class SemiParsed
        {
            /// <summary>
            /// The original token string
            /// </summary>
            public readonly string Token;
            /// <summary>
            /// The offset into the original string
            /// </summary>
            public readonly int Offset;
            /// <summary>
            /// The potentially parsed item; null until the token is consumed by the parser
            /// </summary>
            public QueryElement Parsed;
            /// <summary>
            /// The priority of the item; zero for non-operator tokens
            /// </summary>
            public readonly int Priority;

            /// <summary>
            /// Constructs a new semi-parsed instance.
            /// Operator tokens get a priority and stay unparsed; quoted strings
            /// become string values, known column names become property references,
            /// and anything else becomes a raw string value.
            /// </summary>
            /// <param name="map">The table mapping</param>
            /// <param name="value">The token being parsed</param>
            /// <param name="offset">The string offset</param>
            public SemiParsed(TableMapping map, string value, int offset)
            {
                Token = value;
                Offset = offset;

                if (!_priorityTable.TryGetValue(value, out Priority))
                {
                    if (Token.StartsWith("\"") && Token.EndsWith("\""))
                    {
                        // Quoted tokens are always literal string values
                        Parsed = new Value(Token.Substring(1, Token.Length - 2));
                    }
                    else
                    {
                        // Unquoted tokens are column references when the name matches,
                        // otherwise literal values (numbers, booleans, etc.)
                        var prop = map.AllColumns.FirstOrDefault(x => string.Equals(x.MemberName, Token, StringComparison.OrdinalIgnoreCase));
                        if (prop != null)
                            Parsed = new Property(prop.MemberName);
                        else
                            Parsed = new Value(Token);
                    }
                }
            }
        }

        /// <summary>
        /// Parses an order string into a linked chain of order elements
        /// </summary>
        /// <param name="map">The table map to use</param>
        /// <param name="order">The order string</param>
        /// <returns>The parsed order, or null if the string was empty</returns>
        public static QueryOrder ParseOrder(TableMapping map, string order)
        {
            // Build the chain back-to-front so the first element ends up outermost
            QueryOrder res = null;
            foreach (var n in ParseOrderList(map, order).Reverse())
                res = new QueryOrder(n, res);
            return res;
        }

        /// <summary>
        /// Parses an order string
        /// </summary>
        /// <param name="map">The table map to use</param>
        /// <param name="order">The order string</param>
        /// <returns>The parsed order elements</returns>
        public static IEnumerable<QueryOrder> ParseOrderList(TableMapping map, string order)
        {
            if (string.IsNullOrWhiteSpace(order))
                yield break;

            // Make a case-insensitive lookup table for the column names
            var propmap = map.AllColumns
                .ToDictionary(
                    x => x.MemberName,
                    StringComparer.OrdinalIgnoreCase
                );

            var prevcomma = true;
            var pos = 0;
            foreach (var m in _orderByTokenizer.Matches(order).Cast<Match>())
            {
                if (!prevcomma)
                    throw new ParserException($"Missing comma before: {m.Value} at {m.Index}");
                if (!m.Success)
                    throw new ParserException($"No match at {m.Index}");
                // Any gap between matches is unparsable input
                if (pos != m.Index)
                    throw new ParserException($"Failed to parse {order.Substring(pos, m.Index - pos)} at offset {pos}");

                pos += m.Length;

                // The direction may be given either as a +/- prefix or an ASC/DESC suffix, not both
                var dir = string.Empty;
                if (m.Groups["sortorder1"].Success)
                    dir = m.Groups["sortorder1"].Value;
                if (m.Groups["sortorder2"].Success)
                {
                    if (!string.IsNullOrWhiteSpace(dir))
                        throw new ParserException($"Cannot use both pre- and post-fix direction specifiers: {m.Value} at offset {m.Index}");
                    dir = m.Groups["sortorder2"].Value;
                }

                // Normalize to "+" (ascending, the default) or "-" (descending)
                if (string.IsNullOrWhiteSpace(dir) || string.Equals(dir, "ASC", StringComparison.OrdinalIgnoreCase))
                    dir = "+";
                if (string.Equals(dir, "DESC", StringComparison.OrdinalIgnoreCase))
                    dir = "-";
                if (dir != "-" && dir != "+")
                    throw new ParserException($"Unsupported direction specifier: {dir}");

                var column = m.Groups["quoted"].Success
                    ? m.Groups["quoted"].Value
                    : m.Groups["nonquoted"].Value;

                if (!propmap.ContainsKey(column))
                    throw new ParserException($"The property {column} does not exist on the type");

                yield return new QueryOrder(propmap[column].MemberName, dir == "-");
                prevcomma = m.Groups["comma"].Length > 0;
            }

            if (pos != order.Length)
                throw new ParserException($"Failed to parse {order.Substring(pos)} at offset {pos}");
            if (prevcomma)
                throw new ParserException($"Invalid trailing comma: {order}");
        }

        /// <summary>
        /// Parses a filter string and returns a query element for it
        /// </summary>
        /// <param name="map">The table map to use</param>
        /// <param name="filter">The filter to parse</param>
        /// <returns>The parsed query</returns>
        public static QueryElement ParseFilter(TableMapping map, string filter)
        {
            if (string.IsNullOrWhiteSpace(filter))
                return new Empty();

            var lst = Parse(map, Tokenize(map, filter).ToList());
            if (lst.Count == 0)
                return new Empty();
            else if (lst.Count > 1)
                throw new ParserException($"Found multiple expressions: {string.Join(", ", lst.Select(x => x.Offset))}");

            return CorrectValues(map, lst.First().Parsed ?? throw new ParserException("Unexpected null value"));
        }

        /// <summary>
        /// Visits the query elements and converts any string values to match the operands
        /// </summary>
        /// <param name="map">The table mapping</param>
        /// <param name="top">The element to explore</param>
        /// <returns>The top element</returns>
        private static QueryElement CorrectValues(TableMapping map, QueryElement top)
        {
            // Keep a reference to the element we return
            var entry = top;

            // Remove any parenthesis
            while (top is ParenthesisExpression pe)
                top = (QueryElement)pe.Expression;

            if (top is Compare cp)
                CorrectValues(map, null, (QueryElement)cp.LeftHandSide, (QueryElement)cp.RightHandSide);
            if (top is Arithmetic am)
                CorrectValues(map, null, (QueryElement)am.LeftHandSide, (QueryElement)am.RightHandSide);
            if (top is And andExp)
                foreach (var e in andExp.Items)
                    CorrectValues(map, typeof(bool), (QueryElement)e);
            if (top is Or orExp)
                foreach (var e in orExp.Items)
                    CorrectValues(map, typeof(bool), (QueryElement)e);
            if (top is UnaryOperator ue)
            {
                // "not" forces a boolean context; the sign operators keep the context open
                if (string.Equals(ue.Operator, "not", StringComparison.OrdinalIgnoreCase))
                    CorrectValues(map, typeof(bool), (QueryElement)ue.Expression);
                else
                    CorrectValues(map, (QueryElement)ue.Expression);
            }

            return entry;
        }

        /// <summary>
        /// Changes the string value of any <see name="Value" /> instances to match the property types
        /// </summary>
        /// <param name="map">The table mapping</param>
        /// <param name="targettype">The type to change the item to; may be null if no context is known</param>
        /// <param name="left">The left element</param>
        /// <param name="right">The right element</param>
        private static void CorrectValues(TableMapping map, Type targettype, QueryElement left, QueryElement right)
        {
            // A property on either side dictates the type of the opposite side
            if (left is Property lpr)
                CorrectValues(map, map.AllColumnsByMemberName[lpr.PropertyName].MemberType, right);
            else if (right is Property rpr)
                CorrectValues(map, map.AllColumnsByMemberName[rpr.PropertyName].MemberType, left);
            else if (left is Compare)
                CorrectValues(map, typeof(bool), right);
            // BUGFIX: this branch previously re-tested "left is Compare" (copy-paste),
            // so a compare expression on the right-hand side never coerced the left
            // side to a boolean
            else if (right is Compare)
                CorrectValues(map, typeof(bool), left);
            else
            {
                CorrectValues(map, targettype, left);
                CorrectValues(map, targettype, right);
            }
        }

        /// <summary>
        /// Changes the string value of any <see name="Value" /> instances to match the property types
        /// </summary>
        /// <param name="map">The table mapping</param>
        /// <param name="targettype">The type to change the item to; may be null if no context is known</param>
        /// <param name="el">The element to visit</param>
        private static void CorrectValues(TableMapping map, Type targettype, QueryElement el)
        {
            while (el is ParenthesisExpression p)
                el = (QueryElement)p.Expression;

            if (el is Arithmetic a)
                CorrectValues(map, targettype, (QueryElement)a.LeftHandSide, (QueryElement)a.RightHandSide);
            if (el is Compare c)
                CorrectValues(map, targettype, (QueryElement)c.LeftHandSide, (QueryElement)c.RightHandSide);
            if (el is And andExp)
                foreach (var n in andExp.Items)
                    CorrectValues(map, targettype, (QueryElement)n);
            if (el is Or orExp)
                foreach (var n in orExp.Items)
                    CorrectValues(map, targettype, (QueryElement)n);

            // Change the type if we get here
            if (el is Value v && targettype != null && v.Item is string vs)
            {
                v.Item = ConvertEl(vs, targettype);
            }
            else if (el is Value vx && targettype != null && vx.Item is Array en)
            {
                // Convert each element of an "in (...)" style sequence
                for (var i = 0; i < en.Length; i++)
                    if (en.GetValue(i) is string vsa)
                        en.SetValue(ConvertEl(vsa, targettype), i);
            }
        }

        /// <summary>
        /// Converts a string value to the target type
        /// </summary>
        /// <param name="vs">The input string</param>
        /// <param name="targettype">The desired type</param>
        /// <returns>The converted object</returns>
        /// <exception cref="ParserException">Thrown when the string cannot be converted</exception>
        private static object ConvertEl(string vs, Type targettype)
        {
            if (targettype.IsEnum)
            {
                object e = null;
                try { e = Enum.Parse(targettype, vs, true); }
                catch { }

                if (e == null)
                    throw new ParserException($"Cannot parse {vs} as a {targettype.Name}");

                // We return as a string, because enums are stored as strings
                return e.ToString();
            }
            else if (targettype == typeof(bool))
            {
                if (string.Equals("true", vs, StringComparison.OrdinalIgnoreCase))
                    return true;
                else if (string.Equals("false", vs, StringComparison.OrdinalIgnoreCase))
                    return false;
                else
                    throw new ParserException($"Cannot parse {vs} as a boolean");
            }
            else if (targettype.IsPrimitive)
            {
                try { return Convert.ChangeType(vs, targettype); }
                catch { throw new ParserException($"Cannot parse {vs} as a {targettype.Name}"); }
            }
            else if (targettype == typeof(TimeSpan))
            {
                // Timespans are expressed as a number of seconds
                if (!double.TryParse(vs, NumberStyles.Any, CultureInfo.InvariantCulture, out var lval))
                    throw new ParserException($"Cannot parse {vs} as a {targettype.Name}");
                return TimeSpan.FromSeconds(lval);
            }
            else if (targettype == typeof(DateTime))
            {
                // Dates are expressed as UNIX timestamps (seconds since the epoch)
                if (!double.TryParse(vs, NumberStyles.Any, CultureInfo.InvariantCulture, out var lval))
                    throw new ParserException($"Cannot parse {vs} as a {targettype.Name}");
                return EPOCH.AddSeconds(lval);
            }

            // Unknown target types keep the raw string
            return vs;
        }

        /// <summary>
        /// Parses a sequence of tokens into a list of parsed elements.
        /// Repeatedly picks the lowest-priority unparsed token and resolves it,
        /// recursing into the left/right sub-sequences for binary operators.
        /// </summary>
        /// <param name="map">The table map</param>
        /// <param name="semiparsed">The list of semi-parsed items</param>
        /// <returns>An updated condensed list of parsed items</returns>
        private static List<SemiParsed> Parse(TableMapping map, List<SemiParsed> semiparsed)
        {
            // Keep on parsing until everything is parsed
            while (semiparsed.Any(x => x.Parsed == null))
            {
                // Find the unparsed token with the lowest priority value
                var best_index = -1;
                for (int i = 0; i < semiparsed.Count; i++)
                {
                    var sp = semiparsed[i];
                    if (sp.Parsed == null && (best_index < 0 || sp.Priority < semiparsed[best_index].Priority))
                        best_index = i;
                }

                if (best_index < 0)
                    throw new ParserException("Unable to parse filter");

                var best = semiparsed[best_index];
                if (best.Token == "(")
                {
                    // Find the next matching brace
                    var count = 1;
                    var p = best_index + 1;
                    for (; p < semiparsed.Count; p++)
                    {
                        var t = semiparsed[p];
                        if (t.Parsed != null)
                            continue;
                        if (t.Token == "(")
                            count++;
                        else if (t.Token == ")")
                            count--;

                        if (count == 0)
                            break;
                    }

                    if (count != 0 || p >= semiparsed.Count)
                        throw new ParserException($"Unbalanced parenthesis starting at {best.Offset}");

                    // Parse the contents of the parenthesis in isolation
                    var subseq = semiparsed.GetRange(best_index + 1, p - best_index - 1);
                    semiparsed.RemoveRange(best_index + 1, p - best_index);

                    var parsed = Parse(map, subseq);
                    if (parsed.Count != 1)
                        throw new ParserException($"Unable to parse sub expression starting at {best.Offset}");

                    best.Parsed = new ParenthesisExpression(
                        parsed.First().Parsed
                        ?? throw new ParserException($"Failed to parse {parsed.First().Token} at {parsed.First().Offset}")
                    );
                }
                else if (_binOps.Contains(best.Token) || _unOps.Contains(best.Token))
                {
                    if (best_index == semiparsed.Count - 1)
                        throw new ParserException($"No right-hand operand for {best.Token} at {best.Offset}");

                    // Everything after the operator collapses into the right-hand operand
                    var right = Parse(map, semiparsed.GetRange(best_index + 1, semiparsed.Count - best_index - 1));
                    var right_hand = right.First();

                    // Handle unary operators
                    if (best_index == 0 || !_binOps.Contains(best.Token))
                    {
                        if (_unOps.Contains(best.Token))
                        {
                            right[0] = best;
                            // BUGFIX: pass the parsed element, not the SemiParsed wrapper,
                            // so the later (QueryElement)ue.Expression cast in CorrectValues succeeds
                            best.Parsed = new UnaryOperator(best.Token, right_hand.Parsed);
                            semiparsed = right;
                        }
                        else
                        {
                            throw new ParserException($"No left-hand operand for {best.Token} at {best.Offset}");
                        }
                    }
                    else
                    {
                        // Everything before the operator collapses into the left-hand operand
                        var left = Parse(map, semiparsed.GetRange(0, best_index));
                        var left_hand = left.Last();

                        var t = best.Token;
                        if (_arithmeticOps.Contains(t))
                            best.Parsed = new Arithmetic(left_hand.Parsed, t, right_hand.Parsed);
                        else if (_compareOps.Contains(t))
                        {
                            // Normalize the C-style aliases to their SQL forms
                            if (t == "==")
                                t = "=";
                            else if (t == "<>")
                                t = "!=";
                            best.Parsed = new Compare(left_hand.Parsed, t, right_hand.Parsed);
                        }
                        // BUGFIX: compare case-insensitively, matching the _binOps lookup above,
                        // so "AND"/"OR" are classified instead of hitting the throw below
                        else if (string.Equals(t, "and", StringComparison.OrdinalIgnoreCase))
                            best.Parsed = new And(left_hand.Parsed, right_hand.Parsed);
                        else if (string.Equals(t, "or", StringComparison.OrdinalIgnoreCase))
                            best.Parsed = new Or(left_hand.Parsed, right_hand.Parsed);
                        else
                            throw new ParserException($"Failed to classify operator {best.Token} at {best.Offset}");

                        // Remove the left-hand symbol
                        left.RemoveAt(left.Count - 1);
                        // Replace the right-hand symbol
                        right[0] = best;

                        // Build the new list of unfinished tasks
                        semiparsed = left.Concat(right).ToList();
                    }
                }
                else
                {
                    if (best.Token == ")")
                        throw new ParserException($"Dangling parenthesis at {best.Offset}");
                    else if (best.Token == ",")
                    {
                        // Most likely this is a sequence of elements, e.g. the list in "x in (a, b, c)"
                        if (semiparsed.Count > 1 && (semiparsed.Count % 2) == 1)
                        {
                            // Verify the strict value,value,... alternation
                            var good = true;
                            for (int i = 0; i < semiparsed.Count; i++)
                            {
                                if (i % 2 == 0)
                                    good &= semiparsed[i].Parsed is Value;
                                else
                                    good &= semiparsed[i].Token == ",";
                            }

                            if (good)
                            {
                                // Collapse the sequence into a single array-typed value
                                semiparsed[0].Parsed = new Value(
                                    semiparsed
                                        .Where(x => x != null)
                                        .Select(x => x.Parsed)
                                        .OfType<Value>()
                                        .Select(x => x.Item)
                                        .ToArray()
                                );
                                semiparsed.RemoveRange(1, semiparsed.Count - 1);
                                continue;
                            }
                        }

                        throw new ParserException($"Mismatched comma at {best.Offset}");
                    }
                    else
                        throw new ParserException($"Unable to process token {best.Token} at {best.Offset}");
                }
            }

            return semiparsed;
        }

        /// <summary>
        /// Divides a filter string into tokens
        /// </summary>
        /// <param name="map">The table mapping used to classify identifier tokens</param>
        /// <param name="filter">The filter string</param>
        /// <returns>A sequence of tokens</returns>
        private static IEnumerable<SemiParsed> Tokenize(TableMapping map, string filter)
        {
            if (string.IsNullOrWhiteSpace(filter))
                yield break;

            var pos = 0;
            foreach (var item in _filterTokenizer.Matches(filter).Cast<Match>())
            {
                if (!item.Success)
                    throw new ParserException($"Failed to parse {item.Value} at offset {item.Index}");
                // Any gap between matches is unparsable input
                if (pos != item.Index)
                    throw new ParserException($"Failed to parse {filter.Substring(pos, item.Index - pos)} at offset {pos}");

                // Ignore whitespace tokens
                if (!string.IsNullOrWhiteSpace(item.Value))
                    yield return new SemiParsed(map, item.Value, item.Index);

                pos += item.Length;
            }

            if (pos != filter.Length)
                throw new ParserException($"Failed to parse {filter.Substring(pos)} at offset {pos}");
        }
    }
}
// Copyright (c) 2015, Outercurve Foundation. // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // - Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // - Neither the name of the Outercurve Foundation nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
using System;
using System.IO;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using Microsoft.Win32;

using WebsitePanel.Providers;
using WebsitePanel.Providers.FTP;
using WebsitePanel.Providers.Utils;
using WebsitePanel.Server.Utils;

namespace WebsitePanel.Providers.FTP
{
    /// <summary>
    /// FTP hosting provider for FileZilla Server. Accounts are managed by
    /// editing the "FileZilla Server.xml" configuration file directly and
    /// asking the server process to reload it. Site-level operations are
    /// not supported by this provider.
    /// </summary>
    public class FileZilla : HostingServiceProviderBase, IFtpServer
    {
        #region Constants
        // Registry locations where the FileZilla Server installer records Install_Dir
        private const string FILEZILLA_REG = @"SOFTWARE\FileZilla Server";
        private const string FILEZILLA_REG_X64 = @"SOFTWARE\Wow6432Node\FileZilla Server";
        private const string FILEZILLA_SERVER_FILE = "FileZilla Server.xml";
        #endregion

        #region Properties
        /// <summary>
        /// Gets the FileZilla Server installation folder from the registry,
        /// probing the native key first and falling back to the Wow6432Node key.
        /// </summary>
        /// <exception cref="Exception">Thrown when neither registry key exists</exception>
        protected virtual string FileZillaFolder
        {
            get
            {
                using (RegistryKey fzKey = Registry.LocalMachine.OpenSubKey(FILEZILLA_REG)
                    ?? Registry.LocalMachine.OpenSubKey(FILEZILLA_REG_X64))
                {
                    if (fzKey == null)
                        throw new Exception("FileZilla registry key was not found: " + FILEZILLA_REG);

                    return (string)fzKey.GetValue("Install_Dir");
                }
            }
        }
        #endregion

        #region Sites
        // FileZilla Server has no notion of multiple FTP sites, so the
        // IFtpServer site operations are all no-ops for this provider.

        public virtual void ChangeSiteState(string siteId, ServerState state)
        {
            // not implemented
        }

        public virtual string CreateSite(FtpSite site)
        {
            // not implemented
            return null;
        }

        public virtual void DeleteSite(string siteId)
        {
            // not implemented
        }

        public virtual FtpSite GetSite(string siteId)
        {
            // not implemented
            return null;
        }

        public virtual FtpSite[] GetSites()
        {
            // not implemented
            return null;
        }

        public virtual bool SiteExists(string siteId)
        {
            // not implemented
            return false;
        }

        public virtual ServerState GetSiteState(string siteId)
        {
            // not implemented
            return ServerState.Started;
        }

        public virtual void UpdateSite(FtpSite site)
        {
            // not implemented
        }
        #endregion

        #region Accounts
        /// <summary>
        /// Checks whether a user node with the given name exists in the configuration.
        /// NOTE(review): accountName is embedded directly into the XPath; a name
        /// containing an apostrophe breaks the query -- confirm names are validated upstream.
        /// </summary>
        public virtual bool AccountExists(string accountName)
        {
            XmlDocument doc = GetFileZillaConfig();
            XmlNode nodeUser = doc.SelectSingleNode("/FileZillaServer/Users/User[@Name='" + accountName + "']");
            return (nodeUser != null);
        }

        /// <summary>
        /// Loads the full details of a single FTP account, or null when it does not exist.
        /// </summary>
        public virtual FtpAccount GetAccount(string accountName)
        {
            XmlDocument doc = GetFileZillaConfig();
            XmlNode nodeUser = doc.SelectSingleNode("/FileZillaServer/Users/User[@Name='" + accountName + "']");
            if (nodeUser == null)
                return null;

            return CreateAccountFromXmlNode(nodeUser, false);
        }

        /// <summary>
        /// Lists all FTP accounts. Only the account names are populated
        /// (details are excluded for speed).
        /// </summary>
        public virtual FtpAccount[] GetAccounts()
        {
            XmlDocument doc = GetFileZillaConfig();
            XmlNodeList nodeUsers = doc.SelectNodes("/FileZillaServer/Users/User");

            List<FtpAccount> accounts = new List<FtpAccount>();
            foreach (XmlNode nodeUser in nodeUsers)
                accounts.Add(CreateAccountFromXmlNode(nodeUser, true));

            return accounts.ToArray();
        }

        /// <summary>
        /// Creates a new FTP account with a single home-folder permission entry,
        /// saves the configuration file and asks the server to reload it.
        /// </summary>
        public virtual void CreateAccount(FtpAccount account)
        {
            Log.WriteInfo("GetFileZillaConfig");
            XmlDocument doc = GetFileZillaConfig();

            Log.WriteInfo("Find users nodes");
            // find users node, creating it when the config has none yet
            XmlNode fzServerNode = doc.SelectSingleNode("FileZillaServer");
            XmlNode fzAccountsNode = fzServerNode.SelectSingleNode("Users");
            if (fzAccountsNode == null)
            {
                fzAccountsNode = doc.CreateElement("Users");
                fzServerNode.AppendChild(fzAccountsNode);
            }

            XmlElement fzAccountNode = doc.CreateElement("User");
            fzAccountsNode.AppendChild(fzAccountNode);

            // set properties
            // Passwords are stored as an MD5 hex digest -- this is the on-disk
            // format FileZilla Server expects, not a security choice of ours
            fzAccountNode.SetAttribute("Name", account.Name);
            SetOption(fzAccountNode, "Pass", MD5(account.Password));
            SetOption(fzAccountNode, "Group", "");
            SetOption(fzAccountNode, "Bypass server userlimit", "0");
            SetOption(fzAccountNode, "User Limit", "0");
            SetOption(fzAccountNode, "IP Limit", "0");
            SetOption(fzAccountNode, "Enabled", BoolToString(account.Enabled));
            SetOption(fzAccountNode, "Comments", "");
            SetOption(fzAccountNode, "ForceSsl", "0");

            // IP filter (empty allow/deny lists)
            XmlElement nodeIPFilter = doc.CreateElement("IpFilter");
            fzAccountNode.AppendChild(nodeIPFilter);

            XmlElement nodeDisallowed = doc.CreateElement("Disallowed");
            nodeIPFilter.AppendChild(nodeDisallowed);

            XmlElement nodeAllowed = doc.CreateElement("Allowed");
            nodeIPFilter.AppendChild(nodeAllowed);

            // folder
            XmlElement nodePermissions = doc.CreateElement("Permissions");
            fzAccountNode.AppendChild(nodePermissions);

            XmlElement nodePermission = doc.CreateElement("Permission");
            nodePermissions.AppendChild(nodePermission);

            // folder settings; write permission implies delete and directory management
            nodePermission.SetAttribute("Dir", account.Folder);
            SetOption(nodePermission, "FileRead", BoolToString(account.CanRead));
            SetOption(nodePermission, "FileWrite", BoolToString(account.CanWrite));
            SetOption(nodePermission, "FileDelete", BoolToString(account.CanWrite));
            SetOption(nodePermission, "DirCreate", BoolToString(account.CanWrite));
            SetOption(nodePermission, "DirDelete", BoolToString(account.CanWrite));
            SetOption(nodePermission, "DirList", BoolToString(account.CanRead));
            SetOption(nodePermission, "DirSubdirs", BoolToString(account.CanRead));
            SetOption(nodePermission, "IsHome", "1");
            SetOption(nodePermission, "AutoCreate", "0");

            // speed limits (defaults: no limit type, 10 KB/s placeholder values)
            XmlElement nodeSpeedLimits = doc.CreateElement("SpeedLimits");
            fzAccountNode.AppendChild(nodeSpeedLimits);
            nodeSpeedLimits.SetAttribute("DlType", "0");
            nodeSpeedLimits.SetAttribute("DlLimit", "10");
            nodeSpeedLimits.SetAttribute("ServerDlLimitBypass", "0");
            nodeSpeedLimits.SetAttribute("UlType", "0");
            nodeSpeedLimits.SetAttribute("UlLimit", "10");
            nodeSpeedLimits.SetAttribute("ServerUlLimitBypass", "0");

            XmlElement nodeDownload = doc.CreateElement("Download");
            nodeSpeedLimits.AppendChild(nodeDownload);

            XmlElement nodeUpload = doc.CreateElement("Upload");
            nodeSpeedLimits.AppendChild(nodeUpload);

            // save document
            doc.Save(GetFileZillaConfigPath());

            // reload config
            ReloadFileZillaConfig();
        }

        /// <summary>
        /// Updates an existing account's password, enabled state and folder
        /// permissions. Missing accounts are silently ignored.
        /// </summary>
        public virtual void UpdateAccount(FtpAccount account)
        {
            XmlDocument doc = GetFileZillaConfig();
            XmlNode nodeUser = doc.SelectSingleNode("/FileZillaServer/Users/User[@Name='" + account.Name + "']");
            if (nodeUser == null)
                return;

            // update user; an empty password means "keep the current one"
            if (!String.IsNullOrEmpty(account.Password))
                SetOption(nodeUser, "Pass", MD5(account.Password));
            SetOption(nodeUser, "Enabled", BoolToString(account.Enabled));

            // update folder
            XmlNode nodePermission = nodeUser.SelectSingleNode("Permissions/Permission");
            if (nodePermission != null)
            {
                ((XmlElement)nodePermission).SetAttribute("Dir", account.Folder);
                SetOption(nodePermission, "FileRead", BoolToString(account.CanRead));
                SetOption(nodePermission, "FileWrite", BoolToString(account.CanWrite));
                SetOption(nodePermission, "FileDelete", BoolToString(account.CanWrite));
                SetOption(nodePermission, "DirCreate", BoolToString(account.CanWrite));
                SetOption(nodePermission, "DirDelete", BoolToString(account.CanWrite));
                SetOption(nodePermission, "DirList", BoolToString(account.CanRead));
                SetOption(nodePermission, "DirSubdirs", BoolToString(account.CanRead));
            }

            // save document
            doc.Save(GetFileZillaConfigPath());

            // reload config
            ReloadFileZillaConfig();
        }

        /// <summary>
        /// Removes an account node from the configuration.
        /// Missing accounts are silently ignored.
        /// </summary>
        public virtual void DeleteAccount(string accountName)
        {
            XmlDocument doc = GetFileZillaConfig();
            XmlNode nodeUser = doc.SelectSingleNode("/FileZillaServer/Users/User[@Name='" + accountName + "']");
            if (nodeUser == null)
                return;

            // delete account
            nodeUser.ParentNode.RemoveChild(nodeUser);

            // save document
            doc.Save(GetFileZillaConfigPath());

            // reload config
            ReloadFileZillaConfig();
        }
        #endregion

        /// <summary>
        /// Enables or disables the given FTP accounts. Failures are logged
        /// per item so one broken account does not stop the batch.
        /// </summary>
        public override void ChangeServiceItemsState(ServiceProviderItem[] items, bool enabled)
        {
            foreach (ServiceProviderItem item in items)
            {
                if (item is FtpAccount)
                {
                    try
                    {
                        // change FTP account state
                        FtpAccount account = GetAccount(item.Name);
                        account.Enabled = enabled;
                        UpdateAccount(account);
                    }
                    catch (Exception ex)
                    {
                        Log.WriteError(String.Format("Error switching '{0}' {1}", item.Name, item.GetType().Name), ex);
                    }
                }
            }
        }

        /// <summary>
        /// Deletes the given FTP accounts. Failures are logged per item
        /// so one broken account does not stop the batch.
        /// </summary>
        public override void DeleteServiceItems(ServiceProviderItem[] items)
        {
            foreach (ServiceProviderItem item in items)
            {
                if (item is FtpAccount)
                {
                    try
                    {
                        // delete FTP account
                        DeleteAccount(item.Name);
                    }
                    catch (Exception ex)
                    {
                        Log.WriteError(String.Format("Error deleting '{0}' {1}", item.Name, item.GetType().Name), ex);
                    }
                }
            }
        }

        #region Private Helpers
        /// <summary>
        /// Converts a boolean to the "1"/"0" form used by the FileZilla config file.
        /// </summary>
        private string BoolToString(bool val)
        {
            return val ? "1" : "0";
        }

        /// <summary>
        /// Sets (creating if necessary) a named &lt;Option&gt; child element.
        /// </summary>
        private void SetOption(XmlNode parentNode, string name, string val)
        {
            XmlNode option = parentNode.SelectSingleNode("Option[@Name='" + name + "']");
            if (option == null)
            {
                option = parentNode.OwnerDocument.CreateElement("Option");
                parentNode.AppendChild(option);
                ((XmlElement)option).SetAttribute("Name", name);
            }
            option.InnerText = val;
        }

        /// <summary>
        /// Builds an FtpAccount from a &lt;User&gt; node.
        /// </summary>
        /// <param name="nodeUser">The user node to read</param>
        /// <param name="excludeDetails">When true, only the name is populated</param>
        private FtpAccount CreateAccountFromXmlNode(XmlNode nodeUser, bool excludeDetails)
        {
            FtpAccount account = new FtpAccount();
            account.Name = nodeUser.Attributes["Name"].Value;

            if (!excludeDetails)
            {
                // NOTE(review): assumes the Pass/Enabled options always exist on
                // a user node; a hand-edited config without them would throw here
                account.Password = nodeUser.SelectSingleNode("Option[@Name='Pass']").InnerText;
                account.Enabled = (nodeUser.SelectSingleNode("Option[@Name='Enabled']").InnerText == "1");

                XmlNode nodeFolder = nodeUser.SelectSingleNode("Permissions/Permission");
                if (nodeFolder != null)
                {
                    account.Folder = nodeFolder.Attributes["Dir"].Value;
                    account.CanRead = (nodeFolder.SelectSingleNode("Option[@Name='FileRead']").InnerText == "1");
                    account.CanWrite = (nodeFolder.SelectSingleNode("Option[@Name='FileWrite']").InnerText == "1");
                }
            }

            return account;
        }

        /// <summary>
        /// Loads the FileZilla Server configuration file as an XmlDocument.
        /// </summary>
        /// <exception cref="Exception">Thrown when the config file is missing</exception>
        private XmlDocument GetFileZillaConfig()
        {
            string path = GetFileZillaConfigPath();
            if (!File.Exists(path))
                throw new Exception("FileZilla configuration file was not found: " + path);

            XmlDocument doc = new XmlDocument();
            doc.Load(path);
            return doc;
        }

        /// <summary>
        /// Returns the full path of the "FileZilla Server.xml" configuration file.
        /// </summary>
        private string GetFileZillaConfigPath()
        {
            return Path.Combine(FileZillaFolder, FILEZILLA_SERVER_FILE);
        }

        /// <summary>
        /// Computes the lowercase hexadecimal MD5 digest of a string, which is
        /// the password format FileZilla Server stores in its config file.
        /// </summary>
        private string MD5(string str)
        {
            byte[] bytes = Encoding.UTF8.GetBytes(str);

            // hash bytes; dispose the provider to release the CSP handle
            using (var md5 = new System.Security.Cryptography.MD5CryptoServiceProvider())
            {
                byte[] hashBytes = md5.ComputeHash(bytes);

                // Convert the hash bytes to a lowercase hex string (always 32 chars for MD5)
                StringBuilder hashString = new StringBuilder(hashBytes.Length * 2);
                for (int i = 0; i < hashBytes.Length; i++)
                    hashString.Append(hashBytes[i].ToString("x2"));

                return hashString.ToString();
            }
        }

        /// <summary>
        /// Asks the running FileZilla Server process to re-read its config file.
        /// </summary>
        private void ReloadFileZillaConfig()
        {
            FileUtils.ExecuteSystemCommand(
                Path.Combine(FileZillaFolder, "FileZilla Server.exe"),
                "/reload-config");
        }
        #endregion

        /// <summary>
        /// Checks whether FileZilla Server is installed by probing the same
        /// registry keys used by <see cref="FileZillaFolder"/> for Install_Dir.
        /// </summary>
        public override bool IsInstalled()
        {
            // Reuse the shared registry constants instead of duplicating the paths
            using (RegistryKey key = Registry.LocalMachine.OpenSubKey(FILEZILLA_REG)
                ?? Registry.LocalMachine.OpenSubKey(FILEZILLA_REG_X64))
            {
                if (key == null)
                    return false;

                return (string)key.GetValue("Install_Dir") != null;
            }
        }
    }
}
using NUnit.Framework;
using ImAVendor.Forms.PlatformConfiguration.iOS;
using Xamarin.Forms.PlatformConfiguration;
using Xamarin.Forms.PlatformConfiguration.AndroidSpecific;
using Xamarin.Forms.PlatformConfiguration.iOSSpecific;

namespace Xamarin.Forms.Core.UnitTests
{
    // Exercises the platform-specifics ("On<TPlatform>()") API: both the
    // built-in platform configuration namespaces and a mock third-party
    // ("ImAVendor") implementation defined later in this file.
    [TestFixture]
    public class PlatformSpecificsTests
    {
        // Vendor attached property defaults to true and can be toggled.
        [Test]
        public void VendorPlatformProperty()
        {
            var x = new MasterDetailPage();

            Assert.IsTrue(x.On<iOS>().GetVendorFoo());

            x.On<iOS>().SetVendorFoo(false);

            Assert.IsFalse(x.On<iOS>().GetVendorFoo());
        }

        // Setting the vendor property via the fluent API is readable back.
        [Test]
        public void ConsumeVendorSetting()
        {
            var x = new MasterDetailPage();

            x.On<iOS>().SetVendorFoo(false);

            Assert.IsFalse(x.On<iOS>().GetVendorFoo());
        }

        // Plain attached-property get/set round-trip on the Android config.
        [Test]
        public void Properties()
        {
            var x = new MasterDetailPage();
            x.On<Android>().SetSomeAndroidThing(42);

            Assert.IsTrue(x.On<Android>().GetSomeAndroidThing() == 42);
        }

        // Convenience methods set several properties at once; values must match
        // the constants used in UseTabletDefaults/UsePhabletDefaults below.
        [Test]
        public void ConvenienceConfiguration()
        {
            var x = new MasterDetailPage();

            x.On<Android>().UseTabletDefaults();

            Assert.IsTrue(x.On<Android>().GetSomeAndroidThing() == 10);
            Assert.IsTrue(x.On<Android>().GetSomeOtherAndroidThing() == 45);

            x.On<Android>().UsePhabletDefaults();

            Assert.IsTrue(x.On<Android>().GetSomeAndroidThing() == 8);
            Assert.IsTrue(x.On<Android>().GetSomeOtherAndroidThing() == 40);
        }

        // Built-in iOSSpecific NavigationPage configuration round-trip.
        [Test]
        public void NavigationPageiOSConfiguration()
        {
            var x = new NavigationPage();

            x.On<iOS>().SetIsNavigationBarTranslucent(true);

            Assert.IsTrue(x.On<iOS>().IsNavigationBarTranslucent());
        }
    }
}

namespace ImAVendor.Forms.PlatformConfiguration.iOS
{
    using Xamarin.Forms;
    using Xamarin.Forms.PlatformConfiguration;
    using FormsElement = Xamarin.Forms.MasterDetailPage;

    // Mock vendor extension: a "VendorFoo" attached bool property (default
    // true) on MasterDetailPage, with fluent On<iOS>() extension methods.
    public static class MasterDetailPage
    {
        public static readonly BindableProperty FooProperty =
            BindableProperty.Create("VendorFoo", typeof(bool), typeof(MasterDetailPage), true);

        public static void SetVendorFoo(BindableObject element, bool value)
        {
            element.SetValue(FooProperty, value);
        }

        public static bool GetVendorFoo(BindableObject element)
        {
            return (bool)element.GetValue(FooProperty);
        }

        // Fluent setter: returns the configuration so calls can be chained.
        public static IPlatformElementConfiguration<iOS, FormsElement> SetVendorFoo(this IPlatformElementConfiguration<iOS, FormsElement> config, bool value)
        {
            SetVendorFoo(config.Element, value);
            return config;
        }

        public static bool GetVendorFoo(this IPlatformElementConfiguration<iOS, FormsElement> mdp)
        {
            return GetVendorFoo(mdp.Element);
        }
    }
}

namespace ImAVendor.Forms.PlatformConfiguration.iOS
{
    using Xamarin.Forms;
    using Xamarin.Forms.PlatformConfiguration;
    using FormsElement = Xamarin.Forms.NavigationPage;

    // Mock vendor extension for NavigationPage: an attached translucency flag
    // that attaches a named platform effect the first time it changes.
    public static class NavigationPage
    {
        const string NavBarTranslucentEffectName = "XamControl.NavigationPageTranslucentEffect";

        // propertyChanging hook attaches the effect before the value changes.
        public static readonly BindableProperty IsNavigationBarTranslucentProperty =
            BindableProperty.CreateAttached("IsNavigationBarTranslucent", typeof(bool), typeof(NavigationPage), false,
                propertyChanging: IsNavigationBarTranslucentPropertyChanging);

        public static bool GetIsNavigationBarTranslucent(BindableObject element)
        {
            return (bool)element.GetValue(IsNavigationBarTranslucentProperty);
        }

        public static void SetIsNavigationBarTranslucent(BindableObject element, bool value)
        {
            element.SetValue(IsNavigationBarTranslucentProperty, value);
        }

        public static bool IsNavigationBarTranslucentVendor(this IPlatformElementConfiguration<iOS, FormsElement> config)
        {
            return GetIsNavigationBarTranslucent(config.Element);
        }

        public static IPlatformElementConfiguration<iOS, FormsElement> EnableTranslucentNavigationBarVendor(this IPlatformElementConfiguration<iOS, FormsElement> config, bool value)
        {
            SetIsNavigationBarTranslucent(config.Element, value);
            return config;
        }

        static void IsNavigationBarTranslucentPropertyChanging(BindableObject bindable, object oldValue, object newValue)
        {
            AttachEffect(bindable as FormsElement);
        }

        // Resolves and attaches the translucency effect once; no-op when the
        // element is null (bad cast above) or the effect is already attached.
        static void AttachEffect(FormsElement element)
        {
            IElementController controller = element;
            if (controller == null || controller.EffectIsAttached(NavBarTranslucentEffectName))
                return;

            element.Effects.Add(Effect.Resolve(NavBarTranslucentEffectName));
        }
    }
}

namespace Xamarin.Forms.PlatformConfiguration.AndroidSpecific
{
    using FormsElement = Xamarin.Forms.MasterDetailPage;

    // Android platform-specifics for MasterDetailPage used by the tests above:
    // two int attached properties (default 1) plus tablet/phablet presets.
    public static class MasterDetailPage
    {
        public static readonly BindableProperty SomeAndroidThingProperty =
            BindableProperty.Create("SomeAndroidThing", typeof(int), typeof(MasterDetailPage), 1);

        public static readonly BindableProperty SomeOtherAndroidThingProperty =
            BindableProperty.Create("SomeOtherAndroidThing", typeof(int), typeof(MasterDetailPage), 1);

        public static int GetSomeAndroidThing(BindableObject element)
        {
            return (int)element.GetValue(SomeAndroidThingProperty);
        }

        public static void SetSomeAndroidThing(BindableObject element, int value)
        {
            element.SetValue(SomeAndroidThingProperty, value);
        }

        public static int GetSomeOtherAndroidThing(BindableObject element)
        {
            return (int)element.GetValue(SomeOtherAndroidThingProperty);
        }

        public static void SetSomeOtherAndroidThing(BindableObject element, int value)
        {
            element.SetValue(SomeOtherAndroidThingProperty, value);
        }

        public static int GetSomeAndroidThing(this IPlatformElementConfiguration<Android, FormsElement> config)
        {
            return (int)config.Element.GetValue(SomeAndroidThingProperty);
        }

        public static IPlatformElementConfiguration<Android, FormsElement> SetSomeAndroidThing(this IPlatformElementConfiguration<Android, FormsElement> config, int value)
        {
            config.Element.SetValue(SomeAndroidThingProperty, value);
            return config;
        }

        public static int GetSomeOtherAndroidThing(this IPlatformElementConfiguration<Android, FormsElement> config)
        {
            return (int)config.Element.GetValue(SomeOtherAndroidThingProperty);
        }

        public static IPlatformElementConfiguration<Android, FormsElement> SetSomeOtherAndroidThing(this IPlatformElementConfiguration<Android, FormsElement> config, int value)
        {
            config.Element.SetValue(SomeOtherAndroidThingProperty, value);
            return config;
        }

        // Preset values asserted by ConvenienceConfiguration: tablet = 10/45.
        public static IPlatformElementConfiguration<Android, FormsElement> UseTabletDefaults(this IPlatformElementConfiguration<Android, FormsElement> config)
        {
            config.SetSomeAndroidThing(10);
            config.SetSomeOtherAndroidThing(45);
            return config;
        }

        // Preset values asserted by ConvenienceConfiguration: phablet = 8/40.
        public static IPlatformElementConfiguration<Android, FormsElement> UsePhabletDefaults(this IPlatformElementConfiguration<Android, FormsElement> config)
        {
            config.SetSomeAndroidThing(8);
            config.SetSomeOtherAndroidThing(40);
            return config;
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Diagnostics;
using System.Drawing;
using System.Text;
using System.Windows.Forms;

namespace OpenSim.GridLaunch.GUI.WinForm
{
    /// <summary>
    /// WinForms front-end for GridLaunch: one tab (with a console window) per
    /// launched application, plus a main log tab. All Program events may fire
    /// on worker threads, so every handler marshals itself onto the UI thread
    /// via Invoke before touching controls.
    /// </summary>
    public partial class ProcessPanel : Form, IGUI
    {
        public ProcessPanel()
        {
            Application.EnableVisualStyles();
            //Application.SetCompatibleTextRenderingDefault(false);
            InitializeComponent();

            // Subscribe to process-manager lifecycle and console events.
            Program.AppCreated += Program_AppCreated;
            Program.AppRemoved += Program_AppRemoved;
            Program.AppConsoleOutput += Program_AppConsoleOutput;
            Program.AppConsoleError += Program_AppConsoleError;
            log4netAppender.LogLine += log4netAppender_LogLine;
        }

        #region Module Start / Stop

        // Blocks running the message loop until the form closes.
        public void StartGUI()
        {
            Application.Run(this);
        }

        public void StopGUI()
        {
            this.Close();
        }

        #endregion

        #region Main log tab

        // log4net output goes to the main log window with its severity color.
        void log4netAppender_LogLine(Color color, string LogText)
        {
            ucLogWindow1.Write(color, LogText);
        }

        #endregion

        #region Form events

        private void btnShutdown_Click(object sender, EventArgs e)
        {
            Program.Shutdown();
        }

        #endregion

        #region GridLaunch Events

        public delegate void Program_AppCreatedDelegate(string App);

        /// <summary>
        /// Creates a tab page with a console window for a newly started app.
        /// </summary>
        /// <param name="App">Name of the application that was started.</param>
        public void Program_AppCreated(string App)
        {
            // Marshal onto the UI thread if raised from a worker thread.
            if (this.InvokeRequired)
            {
                this.Invoke(new Program_AppCreatedDelegate(Program_AppCreated), App);
                return;
            }

            Trace.WriteLine("Start: " + App);

            // Do we already have an app window for that app?
            if (AppWindow_Get(App) != null)
                return;

            // New log window inside a new tab page
            ucAppWindow aw = new ucAppWindow();
            TabPage tp = new TabPage(App);
            tp.Controls.Add(aw);
            tabControl1.TabPages.Add(tp);

            // Register in the internal list. AppWindow_Add also hooks up
            // aw.LineEntered; the extra explicit subscription that used to
            // follow here was a bug: it delivered every entered line twice
            // and leaked one handler when the window was removed (removal
            // only unsubscribes once).
            AppWindow_Add(App, aw);

            // Fill log window fully inside tab page
            aw.Dock = DockStyle.Fill;
        }

        public delegate void Program_AppRemovedDelegate(string App);

        /// <summary>
        /// Tears down the tab page and console window of a stopped app.
        /// </summary>
        /// <param name="App">Name of the application that was stopped.</param>
        public void Program_AppRemoved(string App)
        {
            if (this.InvokeRequired)
            {
                this.Invoke(new Program_AppRemovedDelegate(Program_AppRemoved), App);
                return;
            }

            Trace.WriteLine("Stop: " + App);

            // Get app window
            ucAppWindow aw = AppWindow_Get(App);
            if (aw == null)
                return;

            // Get its tab page
            TabPage tp = aw.Parent as TabPage;
            if (tp != null)
            {
                // Remove tab page from tab control, then the window from the tab
                tabControl1.TabPages.Remove(tp);
                tp.Controls.Remove(aw);
            }

            // Dispose of app window and tab page
            aw.Dispose();
            if (tp != null)
                tp.Dispose();

            // Remove from our internal list (also unhooks LineEntered)
            AppWindow_Remove(App);
        }

        public delegate void Program_AppConsoleOutputDelegate(string App, string LogText);

        // Routes an app's stdout line to its console window (black text).
        void Program_AppConsoleOutput(string App, string LogText)
        {
            if (this.InvokeRequired)
            {
                this.Invoke(new Program_AppConsoleOutputDelegate(Program_AppConsoleOutput), App, LogText);
                return;
            }

            ucAppWindow aw = AppWindow_Get(App);
            if (aw != null)
                aw.Write(System.Drawing.Color.Black, LogText);
        }

        public delegate void Program_AppConsoleErrorDelegate(string App, string LogText);

        // Routes an app's stderr line to its console window (red text).
        void Program_AppConsoleError(string App, string LogText)
        {
            if (this.InvokeRequired)
            {
                this.Invoke(new Program_AppConsoleErrorDelegate(Program_AppConsoleError), App, LogText);
                return;
            }

            ucAppWindow aw = AppWindow_Get(App);
            if (aw != null)
                aw.Write(System.Drawing.Color.Red, LogText);
        }

        #endregion

        #region App Window events

        // A line typed in an app window is forwarded to that app's stdin.
        private void AppWindow_LineEntered(ucAppWindow AppWindow, string LogText)
        {
            Program.WriteLine(AppWindow_Get(AppWindow), LogText);
        }

        #endregion

        // Populates the startup-component checklist from saved settings.
        private void ProcessPanel_Load(object sender, EventArgs e)
        {
            string[] arr = new string[Program.Settings.Components.Keys.Count];
            Program.Settings.Components.Keys.CopyTo(arr, 0);
            cblStartupComponents.Items.AddRange(arr);

            // Now correct all check states
            for (int i = 0; i < cblStartupComponents.Items.Count; i++)
            {
                string _name = cblStartupComponents.Items[i] as string;
                bool _checked = Program.Settings.Components[_name];
                cblStartupComponents.SetItemChecked(i, _checked);
            }
        }

        #region Internal App Window list and functions

        // Forward and reverse maps between app name and its console window.
        // Both are always updated together under the _appWindows lock.
        private Dictionary<string, ucAppWindow> _appWindows = new Dictionary<string, ucAppWindow>();
        private Dictionary<ucAppWindow, string> _appWindows_rev = new Dictionary<ucAppWindow, string>();

        // Registers a window under a name and hooks its LineEntered event.
        private void AppWindow_Add(string AppWindowName, ucAppWindow AppWindow)
        {
            lock (_appWindows)
            {
                _appWindows.Add(AppWindowName, AppWindow);
                _appWindows_rev.Add(AppWindow, AppWindowName);
                // Hook events
                AppWindow.LineEntered += AppWindow_LineEntered;
            }
        }

        // Unregisters by window reference; unhooks LineEntered.
        private void AppWindow_Remove(ucAppWindow AppWindow)
        {
            lock (_appWindows)
            {
                if (_appWindows_rev.ContainsKey(AppWindow))
                {
                    // Unhook events
                    AppWindow.LineEntered -= AppWindow_LineEntered;
                    // Delete from list
                    string name = _appWindows_rev[AppWindow];
                    _appWindows.Remove(name);
                    _appWindows_rev.Remove(AppWindow);
                }
            }
        }

        // Unregisters by app name; unhooks LineEntered.
        private void AppWindow_Remove(string AppWindowName)
        {
            lock (_appWindows)
            {
                if (_appWindows.ContainsKey(AppWindowName))
                {
                    ucAppWindow AppWindow = _appWindows[AppWindowName];
                    // Unhook events
                    AppWindow.LineEntered -= AppWindow_LineEntered;
                    // Delete from list
                    _appWindows.Remove(AppWindowName);
                    _appWindows_rev.Remove(AppWindow);
                }
            }
        }

        // Looks up the app name for a window; null when not registered.
        private string AppWindow_Get(ucAppWindow AppWindow)
        {
            lock (_appWindows)
            {
                if (_appWindows_rev.ContainsKey(AppWindow))
                    return _appWindows_rev[AppWindow];
            }
            return null;
        }

        // Looks up the window for an app name; null when not registered.
        private ucAppWindow AppWindow_Get(string AppWindowName)
        {
            lock (_appWindows)
            {
                if (_appWindows.ContainsKey(AppWindowName))
                    return _appWindows[AppWindowName];
            }
            return null;
        }

        #endregion

        // Persists the startup-component check states.
        private void btnSave_Click(object sender, EventArgs e)
        {
            Program.Settings.Components.Clear();
            for (int i = 0; i < cblStartupComponents.Items.Count; i++)
            {
                string _name = cblStartupComponents.Items[i] as string;
                bool _checked = cblStartupComponents.GetItemChecked(i);
                Program.Settings.Components.Add(_name, _checked);
            }
            // FIX: SaveConfig() used to be called inside the loop, rewriting
            // the config file once per component; save a single time after
            // the list has been rebuilt.
            Program.Settings.SaveConfig();
        }
    }
}
using System.Diagnostics;

namespace CocosSharp
{
    /// <summary>
    /// This is a special sprite container that represents a 9 point sprite region, where 8 of hte
    /// points are along the perimeter, and the 9th is the center area. This special sprite is capable of resizing
    /// itself to arbitrary scales.
    /// </summary>
    public class CCScale9Sprite : CCNode
    {
        #region Enums

        // Tags used when adding the nine child sprites to the batch node.
        private enum Positions
        {
            Centre = 0,
            Top,
            Left,
            Right,
            Bottom,
            TopRight,
            TopLeft,
            BottomRight,
            BottomLeft
        };

        #endregion Enums

        bool positionsAreDirty;     // set when ContentSize changes; consumed lazily in Visit()
        bool opacityModifyRGB;
        bool spriteFrameRotated;    // true when the source frame is stored rotated in its atlas
        bool spritesGenerated;      // true after the first UpdateWithBatchNode; gates color/opacity restore

        float insetBottom;
        float insetLeft;
        float insetRight;
        float insetTop;

        /** 
         * The end-cap insets. 
         * On a non-resizeable sprite, this property is set to CGRectZero; the sprite 
         * does not use end caps and the entire sprite is subject to stretching. 
         */
        CCRect capInsets;
        CCRect capInsetsInternal;   // effective insets; defaults to thirds when capInsets is zero
        CCRect spriteRect;          // source rect within the texture
        CCSize originalSize;
        CCSize preferredSize;

        CCSpriteBatchNode scale9Image;  // holds the nine slice sprites

        CCSprite top;
        CCSprite topLeft;
        CCSprite topRight;
        CCSprite bottom;
        CCSprite bottomLeft;
        CCSprite bottomRight;
        CCSprite centre;
        CCSprite left;
        CCSprite right;

        #region Properties

        // Resizing marks positions dirty; slices are re-laid-out on next Visit().
        public override CCSize ContentSize
        {
            get { return base.ContentSize; }
            set
            {
                base.ContentSize = value;
                positionsAreDirty = true;
            }
        }

        public CCSize PreferredSize
        {
            get { return preferredSize; }
            set
            {
                ContentSize = value;
                preferredSize = value;
            }
        }

        // Setting insets rebuilds all nine slices; ContentSize is preserved
        // across the rebuild (UpdateWithBatchNode resets it).
        public CCRect CapInsets
        {
            get { return capInsets; }
            set
            {
                CCSize contentSize = ContentSize;
                UpdateWithBatchNode(scale9Image, spriteRect, spriteFrameRotated, value);
                ContentSize = contentSize;
            }
        }

        public float InsetLeft
        {
            set
            {
                insetLeft = value;
                UpdateCapInset();
            }
            get { return insetLeft; }
        }

        public float InsetTop
        {
            set
            {
                insetTop = value;
                UpdateCapInset();
            }
            get { return insetTop; }
        }

        public float InsetRight
        {
            set
            {
                insetRight = value;
                UpdateCapInset();
            }
            get { return insetRight; }
        }

        public float InsetBottom
        {
            set
            {
                insetBottom = value;
                UpdateCapInset();
            }
            get { return insetBottom; }
        }

        // NOTE(review): the guard below checks scale9Image.Children but the
        // loop iterates this node's own Children — confirm whether the slice
        // sprites (children of scale9Image) were the intended target.
        public override CCColor3B Color
        {
            get { return RealColor; }
            set
            {
                base.Color = value;
                if (scale9Image != null && scale9Image.Children != null)
                {
                    foreach(CCNode child in Children)
                    {
                        var node = child;
                        if (node != null)
                        {
                            node.Color = value;
                        }
                    }
                }
            }
        }

        // Same guard/loop mismatch as Color — see NOTE above.
        public override byte Opacity
        {
            get { return RealOpacity; }
            set
            {
                base.Opacity = value;
                if (scale9Image != null && scale9Image.Children != null)
                {
                    foreach(CCNode child in Children)
                    {
                        var node = child;
                        if (node != null)
                        {
                            node.Opacity = value;
                        }
                    }
                }
            }
        }

        // Same guard/loop mismatch as Color — see NOTE above.
        public override bool IsColorModifiedByOpacity
        {
            get { return opacityModifyRGB; }
            set
            {
                opacityModifyRGB = value;
                if (scale9Image != null && scale9Image.Children != null)
                {
                    foreach(CCNode child in Children)
                    {
                        var node = child;
                        if (node != null)
                        {
                            node.IsColorModifiedByOpacity = value;
                        }
                    }
                }
            }
        }

        // Replaces the displayed frame and clears all insets.
        public CCSpriteFrame SpriteFrame
        {
            set
            {
                CCSpriteBatchNode batchnode = new CCSpriteBatchNode (value.Texture, 9);
                UpdateWithBatchNode (batchnode, value.TextureRectInPixels, value.IsRotated, CCRect.Zero);

                // Reset insets
                insetLeft = 0;
                insetTop = 0;
                insetRight = 0;
                insetBottom = 0;
            }
        }

        #endregion Properties

        #region Constructors

        public CCScale9Sprite(CCSpriteBatchNode batchnode, CCRect rect, bool rotated, CCRect capInsets)
        {
            InitCCScale9Sprite(batchnode, rect, rotated, capInsets);
        }

        public CCScale9Sprite(CCSpriteBatchNode batchnode, CCRect rect, CCRect capInsets) 
            : this(batchnode, rect, false, capInsets)
        {
        }

        public CCScale9Sprite() : this((CCSpriteBatchNode)null, CCRect.Zero, CCRect.Zero)
        {
        }

        public CCScale9Sprite(CCRect capInsets) : this((CCSpriteBatchNode)null, CCRect.Zero, capInsets)
        {
        }

        // File
        public CCScale9Sprite(string file, CCRect rect, CCRect capInsets) 
            : this(new CCSpriteBatchNode(file, 9), rect, capInsets)
        {
        }

        public CCScale9Sprite(string file, CCRect rect) : this(file, rect, CCRect.Zero)
        {
        }

        public CCScale9Sprite(string file) : this(file, CCRect.Zero)
        {
        }

        // Sprite frame
        public CCScale9Sprite(CCSpriteFrame spriteFrame, CCRect capInsets)
            : this(new CCSpriteBatchNode(spriteFrame.Texture, 9), spriteFrame.TextureRectInPixels, spriteFrame.IsRotated, capInsets)
        {
        }

        public CCScale9Sprite(CCSpriteFrame spriteFrame) : this(spriteFrame, CCRect.Zero)
        {
        }

        // Sprite frame name
        // A constructor with argument string already exists (filename), so create this factory method instead
        public static CCScale9Sprite SpriteWithFrameName(string spriteFrameName, CCRect capInsets)
        {
            CCScale9Sprite sprite = new CCScale9Sprite();
            sprite.InitWithSpriteFrameName(spriteFrameName, capInsets);
            return sprite;
        }

        public static CCScale9Sprite SpriteWithFrameName(string spriteFrameName)
        {
            return CCScale9Sprite.SpriteWithFrameName(spriteFrameName, CCRect.Zero);
        }

        // Common initialization; a null batchnode defers slice creation.
        void InitCCScale9Sprite(CCSpriteBatchNode batchnode, CCRect rect, bool rotated, CCRect capInsets)
        {
            if (batchnode != null)
            {
                UpdateWithBatchNode(batchnode, rect, rotated, capInsets);
            }

            AnchorPoint = new CCPoint(0.5f, 0.5f);
            positionsAreDirty = true;
        }

        // Init calls that are called externally for objects that are already instantiated
        internal void InitWithSpriteFrame(CCSpriteFrame spriteFrame)
        {
            InitCCScale9Sprite(new CCSpriteBatchNode(spriteFrame.Texture, 9), spriteFrame.TextureRectInPixels, spriteFrame.IsRotated, CCRect.Zero);
        }

        internal void InitWithSpriteFrameName(string spriteFrameName)
        {
            InitWithSpriteFrameName(spriteFrameName, CCRect.Zero);
        }

        internal void InitWithSpriteFrameName(string spriteFrameName, CCRect capInsets)
        {
            CCSpriteFrame spriteFrame = CCSpriteFrameCache.SharedSpriteFrameCache[spriteFrameName];
            InitCCScale9Sprite(new CCSpriteBatchNode(spriteFrame.Texture, 9), spriteFrame.TextureRectInPixels, spriteFrame.IsRotated, capInsets);
        }

        #endregion Constructors

        // Lazily re-lays-out the nine slices before drawing.
        public override void Visit()
        {
            if (positionsAreDirty)
            {
                UpdatePositions();
                positionsAreDirty = false;
            }
            base.Visit();
        }

        // NOTE(review): same guard/loop mismatch as the Color property above.
        public override void UpdateDisplayedColor(CCColor3B parentColor)
        {
            base.UpdateDisplayedColor(parentColor);

            if (scale9Image != null && scale9Image.Children != null)
            {
                foreach(CCNode child in Children)
                {
                    var node = child;
                    if (node != null)
                    {
                        node.UpdateDisplayedColor(parentColor);
                    }
                }
            }
        }

        /// <summary>
        /// Rebuilds the nine slice sprites from the given batch node and source
        /// rect. When capInsets is zero the source rect is split into thirds.
        /// Handles both normally-stored and rotated atlas frames; restores the
        /// previous color/opacity when rebuilding an existing sprite.
        /// </summary>
        public bool UpdateWithBatchNode(CCSpriteBatchNode batchnode, CCRect rect, bool rotated, CCRect capInsets)
        {
            var opacity = Opacity;
            var color = Color;

            // Release old sprites
            RemoveAllChildren(true);

            scale9Image = batchnode;
            scale9Image.RemoveAllChildren(true);

            this.capInsets = capInsets;
            spriteFrameRotated = rotated;

            // If there is no given rect
            if (rect.Equals(CCRect.Zero))
            {
                // Get the texture size as original
                CCSize textureSize = scale9Image.TextureAtlas.Texture.ContentSizeInPixels;
                rect = new CCRect(0, 0, textureSize.Width, textureSize.Height);
            }

            // Set the given rect's size as original size
            spriteRect = rect;
            originalSize = rect.Size;
            preferredSize = originalSize;
            capInsetsInternal = capInsets;

            float h = rect.Size.Height;
            float w = rect.Size.Width;

            // If there is no specified center region
            if (capInsetsInternal.Equals(CCRect.Zero))
            {
                capInsetsInternal = new CCRect(w / 3, h / 3, w / 3, h / 3);
            }

            // Widths/heights of the three columns and three rows of the grid.
            float left_w = capInsetsInternal.Origin.X;
            float center_w = capInsetsInternal.Size.Width;
            float right_w = rect.Size.Width - (left_w + center_w);

            float top_h = capInsetsInternal.Origin.Y;
            float center_h = capInsetsInternal.Size.Height;
            float bottom_h = rect.Size.Height - (top_h + center_h);

            // calculate rects
            // ... top row
            float x = 0.0f;
            float y = 0.0f;

            // top left
            CCRect lefttopbounds = new CCRect(x, y, left_w, top_h);

            // top center
            x += left_w;
            CCRect centertopbounds = new CCRect(x, y, center_w, top_h);

            // top right
            x += center_w;
            CCRect righttopbounds = new CCRect(x, y, right_w, top_h);

            // ... center row
            x = 0.0f;
            y = 0.0f;
            y += top_h;

            // center left
            CCRect leftcenterbounds = new CCRect(x, y, left_w, center_h);

            // center center
            x += left_w;
            CCRect centerbounds = new CCRect(x, y, center_w, center_h);

            // center right
            x += center_w;
            CCRect rightcenterbounds = new CCRect(x, y, right_w, center_h);

            // ... bottom row
            x = 0.0f;
            y = 0.0f;
            y += top_h;
            y += center_h;

            // bottom left
            CCRect leftbottombounds = new CCRect(x, y, left_w, bottom_h);

            // bottom center
            x += left_w;
            CCRect centerbottombounds = new CCRect(x, y, center_w, bottom_h);

            // bottom right
            x += center_w;
            CCRect rightbottombounds = new CCRect(x, y, right_w, bottom_h);

            if (!rotated)
            {
                // Shift all nine rects into the source rect's position in the texture.
                CCAffineTransform t = CCAffineTransform.Identity;
                t = CCAffineTransform.Translate(t, rect.Origin.X, rect.Origin.Y);

                centerbounds = CCAffineTransform.Transform(centerbounds, t);
                rightbottombounds = CCAffineTransform.Transform(rightbottombounds, t);
                leftbottombounds = CCAffineTransform.Transform(leftbottombounds, t);
                righttopbounds = CCAffineTransform.Transform(righttopbounds, t);
                lefttopbounds = CCAffineTransform.Transform(lefttopbounds, t);
                rightcenterbounds = CCAffineTransform.Transform(rightcenterbounds, t);
                leftcenterbounds = CCAffineTransform.Transform(leftcenterbounds, t);
                centerbottombounds = CCAffineTransform.Transform(centerbottombounds, t);
                centertopbounds = CCAffineTransform.Transform(centertopbounds, t);

                // Centre
                centre = new CCSprite(scale9Image.Texture, centerbounds);
                scale9Image.AddChild(centre, 0, (int)Positions.Centre);

                // Top
                top = new CCSprite(scale9Image.Texture, centertopbounds);
                scale9Image.AddChild(top, 1, (int)Positions.Top);

                // Bottom
                bottom = new CCSprite(scale9Image.Texture, centerbottombounds);
                scale9Image.AddChild(bottom, 1, (int)Positions.Bottom);

                // Left
                left = new CCSprite(scale9Image.Texture, leftcenterbounds);
                scale9Image.AddChild(left, 1, (int)Positions.Left);

                // Right
                right = new CCSprite(scale9Image.Texture, rightcenterbounds);
                scale9Image.AddChild(right, 1, (int)Positions.Right);

                // Top left
                topLeft = new CCSprite(scale9Image.Texture, lefttopbounds);
                scale9Image.AddChild(topLeft, 2, (int)Positions.TopLeft);

                // Top right
                topRight = new CCSprite(scale9Image.Texture, righttopbounds);
                scale9Image.AddChild(topRight, 2, (int)Positions.TopRight);

                // Bottom left
                bottomLeft = new CCSprite(scale9Image.Texture, leftbottombounds);
                scale9Image.AddChild(bottomLeft, 2, (int)Positions.BottomLeft);

                // Bottom right
                bottomRight = new CCSprite(scale9Image.Texture, rightbottombounds);
                scale9Image.AddChild(bottomRight, 2, (int)Positions.BottomRight);
            }
            else
            {
                // set up transformation of coordinates
                // to handle the case where the sprite is stored rotated
                // in the spritesheet
                CCAffineTransform t = CCAffineTransform.Identity;

                // Keep untransformed copies: the size comes from these, while the
                // origin is taken from the rotated coordinates computed below.
                CCRect rotatedcenterbounds = centerbounds;
                CCRect rotatedrightbottombounds = rightbottombounds;
                CCRect rotatedleftbottombounds = leftbottombounds;
                CCRect rotatedrighttopbounds = righttopbounds;
                CCRect rotatedlefttopbounds = lefttopbounds;
                CCRect rotatedrightcenterbounds = rightcenterbounds;
                CCRect rotatedleftcenterbounds = leftcenterbounds;
                CCRect rotatedcenterbottombounds = centerbottombounds;
                CCRect rotatedcentertopbounds = centertopbounds;

                // Rotate 90 degrees (1.57079633 rad) into the atlas' rotated frame.
                t = CCAffineTransform.Translate(t, rect.Size.Height + rect.Origin.X, rect.Origin.Y);
                t = CCAffineTransform.Rotate(t, 1.57079633f);

                centerbounds = CCAffineTransform.Transform(centerbounds, t);
                rightbottombounds = CCAffineTransform.Transform(rightbottombounds, t);
                leftbottombounds = CCAffineTransform.Transform(leftbottombounds, t);
                righttopbounds = CCAffineTransform.Transform(righttopbounds, t);
                lefttopbounds = CCAffineTransform.Transform(lefttopbounds, t);
                rightcenterbounds = CCAffineTransform.Transform(rightcenterbounds, t);
                leftcenterbounds = CCAffineTransform.Transform(leftcenterbounds, t);
                centerbottombounds = CCAffineTransform.Transform(centerbottombounds, t);
                centertopbounds = CCAffineTransform.Transform(centertopbounds, t);

                rotatedcenterbounds.Origin = centerbounds.Origin;
                rotatedrightbottombounds.Origin = rightbottombounds.Origin;
                rotatedleftbottombounds.Origin = leftbottombounds.Origin;
                rotatedrighttopbounds.Origin = righttopbounds.Origin;
                rotatedlefttopbounds.Origin = lefttopbounds.Origin;
                rotatedrightcenterbounds.Origin = rightcenterbounds.Origin;
                rotatedleftcenterbounds.Origin = leftcenterbounds.Origin;
                rotatedcenterbottombounds.Origin = centerbottombounds.Origin;
                rotatedcentertopbounds.Origin = centertopbounds.Origin;

                // Centre
                centre = new CCSprite(scale9Image.Texture, rotatedcenterbounds, true);
                scale9Image.AddChild(centre, 0, (int)Positions.Centre);

                // Top
                top = new CCSprite(scale9Image.Texture, rotatedcentertopbounds, true);
                scale9Image.AddChild(top, 1, (int)Positions.Top);

                // Bottom
                bottom = new CCSprite(scale9Image.Texture, rotatedcenterbottombounds, true);
                scale9Image.AddChild(bottom, 1, (int)Positions.Bottom);

                // Left
                left = new CCSprite(scale9Image.Texture, rotatedleftcenterbounds, true);
                scale9Image.AddChild(left, 1, (int)Positions.Left);

                // Right
                right = new CCSprite(scale9Image.Texture, rotatedrightcenterbounds, true);
                scale9Image.AddChild(right, 1, (int)Positions.Right);

                // Top left
                topLeft = new CCSprite(scale9Image.Texture, rotatedlefttopbounds, true);
                scale9Image.AddChild(topLeft, 2, (int)Positions.TopLeft);

                // Top right
                topRight = new CCSprite(scale9Image.Texture, rotatedrighttopbounds, true);
                scale9Image.AddChild(topRight, 2, (int)Positions.TopRight);

                // Bottom left
                bottomLeft = new CCSprite(scale9Image.Texture, rotatedleftbottombounds, true);
                scale9Image.AddChild(bottomLeft, 2, (int)Positions.BottomLeft);

                // Bottom right
                bottomRight = new CCSprite(scale9Image.Texture, rotatedrightbottombounds, true);
                scale9Image.AddChild(bottomRight, 2, (int)Positions.BottomRight);
            }

            ContentSize = rect.Size / CCSprite.DefaultTexelToContentSizeRatios;
            AddChild(scale9Image);

            if (spritesGenerated)
            {
                // Restore color and opacity
                Opacity = opacity;
                Color = color;
            }
            spritesGenerated = true;

            return true;
        }

        /// <summary>
        /// Lays out the nine slices for the current ContentSize: corners keep
        /// their natural size, edges stretch along one axis, the centre
        /// stretches along both. All slices use a zero anchor point.
        /// </summary>
        protected void UpdatePositions()
        {
            // Check that instances are non-NULL
            if (!((topLeft != null) && (topRight != null) && (bottomRight != null) && (bottomLeft != null) && (centre != null)))
            {
                // if any of the above sprites are NULL, return
                return;
            }

            CCSize size = ContentSize;

            float sizableWidth = size.Width - topLeft.ContentSize.Width - topRight.ContentSize.Width;
            float sizableHeight = size.Height - topLeft.ContentSize.Height - bottomRight.ContentSize.Height;

            float horizontalScale = sizableWidth / centre.ContentSize.Width;
            float verticalScale = sizableHeight / centre.ContentSize.Height;

            centre.ScaleX = horizontalScale;
            centre.ScaleY = verticalScale;

            float rescaledWidth = centre.ContentSize.Width * horizontalScale;
            float rescaledHeight = centre.ContentSize.Height * verticalScale;

            float leftWidth = bottomLeft.ContentSize.Width;
            float bottomHeight = bottomLeft.ContentSize.Height;

            bottomLeft.AnchorPoint = CCPoint.Zero;
            bottomRight.AnchorPoint = CCPoint.Zero;
            topLeft.AnchorPoint = CCPoint.Zero;
            topRight.AnchorPoint = CCPoint.Zero;
            left.AnchorPoint = CCPoint.Zero;
            right.AnchorPoint = CCPoint.Zero;
            top.AnchorPoint = CCPoint.Zero;
            bottom.AnchorPoint = CCPoint.Zero;
            centre.AnchorPoint = CCPoint.Zero;

            // Position corners
            bottomLeft.Position = CCPoint.Zero;
            bottomRight.Position = new CCPoint(leftWidth + rescaledWidth, 0);
            topLeft.Position = new CCPoint(0, bottomHeight + rescaledHeight);
            topRight.Position = new CCPoint(leftWidth + rescaledWidth, bottomHeight + rescaledHeight);

            // Scale and position borders
            left.Position = new CCPoint(0, bottomHeight);
            left.ScaleY = verticalScale;
            right.Position = new CCPoint(leftWidth + rescaledWidth, bottomHeight);
            right.ScaleY = verticalScale;
            bottom.Position = new CCPoint(leftWidth, 0);
            bottom.ScaleX = horizontalScale;
            top.Position = new CCPoint(leftWidth, bottomHeight + rescaledHeight);
            top.ScaleX = horizontalScale;

            // Position centre
            centre.Position = new CCPoint(leftWidth, bottomHeight);
        }

        // Converts the four edge insets into a cap-insets rect and applies it;
        // all-zero insets mean "use default thirds" (CCRect.Zero).
        protected void UpdateCapInset()
        {
            CCRect insets;
            if (insetLeft == 0 && insetTop == 0 && insetRight == 0 && insetBottom == 0)
            {
                insets = CCRect.Zero;
            }
            else
            {
                insets = new CCRect(insetLeft, insetTop,
                                    spriteRect.Size.Width - insetLeft - insetRight,
                                    spriteRect.Size.Height - insetTop - insetBottom);
            }
            CapInsets = insets;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Diagnostics.CodeAnalysis;
using Microsoft.Modeling;
using Microsoft.Protocols.TestTools;

namespace Microsoft.Protocol.TestSuites.Smb
{
    /// <summary>
    /// This is used by the model to verify RS2299, which is should/may template code.
    /// </summary>
    /// <param name="isRsImplemented">If RS2299 is implemented, it is true; else it is false.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void IsRs2299ImplementedResponseHandle(bool isRsImplemented);

    /// <summary>
    /// This is used by the model to verify RS4984, which is should/may template code.
    /// </summary>
    /// <param name="isRsImplemented">If RS4984 is implemented, it is true; else it is false.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void IsRs4984ImplementedResponseHandle(bool isRsImplemented);

    /// <summary>
    /// SessionSetup response additional handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a
    /// previously established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="securitySignatureValue">
    /// Indicate the security signature used in the session setup response header.
    /// </param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.
    /// </param>
    /// <param name="isGuestAccount">Indicate whether the account is a guest account or an admin account.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void SessionSetupResponseAdditionalHandle(
        int messageId,
        int sessionId,
        int securitySignatureValue,
        bool isSigned,
        bool isGuestAccount,
        MessageStatus messageStatus);

    /// <summary>
    /// Read response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a
    /// previously established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="treeId">The tree identifier.</param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.
    /// </param>
    /// <param name="isSendBufferSizeExceedMaxBufferSize">
    /// Indicate whether the bufferSize sent by the SUT exceeds the max buffer size or not.
    /// </param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void ReadResponseHandle(
        int messageId,
        int sessionId,
        int treeId,
        bool isSigned,
        bool isSendBufferSizeExceedMaxBufferSize,
        MessageStatus messageStatus);

    /// <summary>
    /// Write response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a
    /// previously established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="treeId">The tree identifier.</param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// <param name="isWrittenByteCountEqualCountField">
    /// Indicate whether the byte count written by the SUT is equal to the count field or not.
    /// </param>
    /// <param name="isWrittenByteCountEqualCountHighField">
    /// Indicate whether the byte count written by the SUT is equal to the count high field or not.
    /// </param>
    /// <param name="isWrittenByteCountLargerThanMaxBufferSize">
    /// Indicate whether the byte count written by the SUT is larger than the max buffer size or not.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void WriteResponseHandle(
        int messageId,
        int sessionId,
        int treeId,
        bool isSigned,
        MessageStatus messageStatus,
        bool isWrittenByteCountEqualCountField,
        bool isWrittenByteCountEqualCountHighField,
        bool isWrittenByteCountLargerThanMaxBufferSize);

    /// <summary>
    /// Error write response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// <param name="isRS5229Implemented">If RS5229 is implemented, it is true; else it is false.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void ErrorWriteResponseHandle(
        int messageId,
        MessageStatus messageStatus,
        bool isRS5229Implemented);

    /// <summary>
    /// SMB connection response handler.
    /// </summary>
    /// <param name="clientPlatform">Platform of the client.</param>
    /// <param name="sutPlatform">Platform of the SUT.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void SmbConnectionResponseHandler(
        Platform clientPlatform,
        Platform sutPlatform);

    /// <summary>
    /// Negotiate response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="isSignatureRequired">
    /// Indicate whether the NEGOTIATE_SECURITY_SIGNATURES_REQUIRED is set in the SecurityMode field
    /// of the Negotiation Response.
    /// </param>
    /// <param name="isSignatureEnabled">
    /// Indicate whether the NEGOTIATE_SECURITY_SIGNATURES_ENABLED is set in the
    /// SecurityMode field of the Negotiation Response.
    /// </param>
    /// <param name="dialectIndex">
    /// The index of the SMB dialect, it was selected from the DialectName field which was passed in the
    /// SMB_COM_NEGOTIATE client request.
    /// </param>
    /// <param name="serverCapabilities">Indicate the capabilities that the server supports.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void NegotiateResponseHandler(
        int messageId,
        bool isSignatureRequired,
        bool isSignatureEnabled,
        int dialectIndex,
        [Domain("serverCapabilities")] Microsoft.Modeling.Set<Capabilities> serverCapabilities,
        MessageStatus messageStatus);

    /// <summary>
    /// Non Extended Security for negotiate response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="isSignatureRequired">
    /// Indicate whether the NEGOTIATE_SECURITY_SIGNATURES_REQUIRED is set in the SecurityMode field
    /// of the Negotiation Response.
    /// </param>
    /// <param name="isSignatureEnabled">
    /// Indicate whether the NEGOTIATE_SECURITY_SIGNATURES_ENABLED is set in the
    /// SecurityMode field of the Negotiation Response.
    /// </param>
    /// <param name="dialectIndex">
    /// The index of the SMB dialect, it was selected from the DialectName field which was passed in the
    /// SMB_COM_NEGOTIATE client request.
    /// </param>
    /// <param name="serverCapabilities">Indicate the capabilities that the server supports.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void NonExtendedNegotiateResponseHandler(
        int messageId,
        bool isSignatureRequired,
        bool isSignatureEnabled,
        int dialectIndex,
        [Domain("serverCapabilitiesForNonextendedSecurity")] Microsoft.Modeling.Set<Capabilities> serverCapabilities,
        MessageStatus messageStatus);

    /// <summary>
    /// Non Extended Security for session setup response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a
    /// previously established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="securitySignatureValue">
    /// Indicate the security signature used in the session setup response header.
    /// </param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.</param>
    /// <param name="isGuestAccount">Indicate whether the account is a guest account or an admin account.</param>
    /// <param name="isRS2322Implemented">If RS2322 is implemented, it is true; else it is false.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void NonExtendedSessionSetupResponseHandler(
        int messageId,
        int sessionId,
        int securitySignatureValue,
        bool isSigned,
        bool isGuestAccount,
        bool isRS2322Implemented,
        MessageStatus messageStatus);

    /// <summary>
    /// Session setup response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a previously
    /// established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="securitySignatureValue">
    /// Indicate the security signature used in the session setup response header.
    /// </param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.</param>
    /// <param name="isGuestAccount">Indicate whether the account is a guest account or an admin account.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void SessionSetupResponseHandler(
        int messageId,
        int sessionId,
        int securitySignatureValue,
        bool isSigned,
        bool isGuestAccount,
        MessageStatus messageStatus);

    /// <summary>
    /// Tree connect response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a previously
    /// established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="treeId">
    /// This field identifies the subdirectory (or tree) (also referred as a share in this
    /// document) on the server that the client is accessing.
    /// </param>
    /// <param name="isSecuritySignatureZero">Indicate whether the securitySignature is 0.</param>
    /// <param name="shareType">The type of resource the client intends to access.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.</param>
    /// <param name="isInDfs">Indicate whether the share is managed by DFS.</param>
    /// <param name="isSupportExtSignature">
    /// One flag of the OptionalSupport field. If set, the server is using signing
    /// key protection as the client requested.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void TreeConnectResponseHandler(
        int messageId,
        int sessionId,
        int treeId,
        bool isSecuritySignatureZero,
        ShareType shareType,
        MessageStatus messageStatus,
        bool isSigned,
        bool isInDfs,
        bool isSupportExtSignature);

    /// <summary>
    /// Create response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="sessionId">
    /// Set this value to 0 to request a new session setup, or set this value to a previously
    /// established session identifier to reauthenticate to an existing session.
    /// </param>
    /// <param name="treeId">
    /// This field identifies the subdirectory (or tree) (also referred as a share in this
    /// document) on the server that the client is accessing.
    /// </param>
    /// <param name="fid">The file identifier.</param>
    /// <param name="isSigned">Indicate whether the SUT has message signing enabled or required.</param>
    /// <param name="createAction">The action taken by the create operation.</param>
    /// <param name="isFileIdZero">Indicate whether the fileId is 0.</param>
    /// <param name="isVolumeGuidZero">Indicate whether the volumeGUID is 0.</param>
    /// <param name="isDirectoryZero">Indicate whether the Directory field is zero or not.</param>
    /// <param name="isByteCountZero">Indicate whether the byte count is zero or not.</param>
    /// <param name="isNoStream">
    /// Indicate whether the NO_SUBSTREAMS bit in the FileStatusFlags field is set in the
    /// SMB_COM_NT_CREATE_ANDX server response.
    /// </param>
    /// <param name="messageStatus">Indicate the status code returned from the server, success or fail.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void CreateResponseHandler(
        int messageId,
        int sessionId,
        int treeId,
        int fid,
        bool isSigned,
        [Domain("ActionTaken")] Microsoft.Modeling.Set<CreateAction> createAction,
        bool isFileIdZero,
        bool isVolumeGuidZero,
        bool isDirectoryZero,
        bool isByteCountZero,
        bool isNoStream,
        MessageStatus messageStatus);

    /// <summary>
    /// Error response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void ErrorResponseHandler(
        int messageId,
        MessageStatus messageStatus);

    /// <summary>
    /// Error tree connect response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// <param name="isRS357Implemented">If RS357 is implemented, it is true; else it is false.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void ErrorTreeConnectResponseHandler(
        int messageId,
        MessageStatus messageStatus,
        bool isRS357Implemented);

    /// <summary>
    /// Error TRANS2_QUERY_FS_INFORMATION response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// <param name="isRS2073Implemented">If RS2073 is implemented, it is true; else it is false.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void ErrorTrans2QueryFileInfoResponseHandler(
        int messageId,
        MessageStatus messageStatus,
        bool isRS2073Implemented);

    /// <summary>
    /// Error TRANS2_QUERY_PATH_INFORMATION response handler.
    /// </summary>
    /// <param name="messageId">This is used to associate a response with a request.</param>
    /// <param name="messageStatus">
    /// Indicate that the status code returned from the SUT is success or failure.
    /// </param>
    /// <param name="isRS2076Implemented">If RS2076 is implemented, it is true; else it is false.</param>
    /// Disable warning CA1009 because according to Test Case design,
    /// the two parameters, System.Object and System.EventArgs, are unnecessary.
    [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
    public delegate void ErrorTrans2QueryPathInfoResponseHandler(
        int messageId,
        MessageStatus messageStatus,
        bool isRS2076Implemented);

    /// <summary>
    /// The interface of SMBAdapter.
    /// </summary>
    public partial interface ISmbAdapter : IAdapter
    {
        #region previous interface

        /// <summary>
        /// SMB connection response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event SmbConnectionResponseHandler SmbConnectionResponse;

        /// <summary>
        /// Negotiate response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event NegotiateResponseHandler NegotiateResponse;

        /// <summary>
        /// Non Extended Security for negotiate response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event NonExtendedNegotiateResponseHandler NonExtendedNegotiateResponse;

        /// <summary>
        /// Non Extended Security for session setup response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event NonExtendedSessionSetupResponseHandler NonExtendedSessionSetupResponse;

        /// <summary>
        /// Session setup response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event SessionSetupResponseHandler SessionSetupResponse;

        /// <summary>
        /// Tree connect response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event TreeConnectResponseHandler TreeConnectResponse;

        /// <summary>
        /// Create response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event CreateResponseHandler CreateResponse;

        /// <summary>
        /// Error response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event ErrorResponseHandler ErrorResponse;

        /// <summary>
        /// Error tree connect response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event ErrorTreeConnectResponseHandler ErrorTreeConnectResponse;

        /// <summary>
        /// ErrorTrans2QueryFileInfo response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event ErrorTrans2QueryFileInfoResponseHandler ErrorTrans2QueryFileInfoResponse;

        /// <summary>
        /// ErrorTrans2QueryPathInfo response handler.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event ErrorTrans2QueryPathInfoResponseHandler ErrorTrans2QueryPathInfoResponse;

        /// <summary>
        /// SMB connection request.
        /// </summary>
        void SmbConnectionRequest();

        /// <summary>
        /// Negotiate request.
        /// </summary>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="isSupportExtSecurity">This indicates whether the client supports extended security.</param>
        /// <param name="clientSignState">
        /// Indicate the sign state of the client: Required, Enabled, Disabled or Disabled Unless Required.
        /// </param>
        /// <param name="dialectName">The input array of dialects.</param>
        void NegotiateRequest(
            int messageId,
            bool isSupportExtSecurity,
            SignState clientSignState,
            Sequence<Dialect> dialectName);

        /// <summary>
        /// Session setup request.
        /// </summary>
        /// <param name="account">Indicate the account type to establish the session.</param>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="sessionId">
        /// Set this value to 0 to request a new session setup, or set this value to a previously established session
        /// identifier to request reauthentication to an existing session.
        /// </param>
        /// <param name="securitySignature">
        /// Delegate the security signature used in the session setup request header.
        /// </param>
        /// <param name="isRequireSign">
        /// Indicate whether the SUT has message signing enabled or required.
        /// </param>
        /// <param name="capabilities">A set of client capabilities.</param>
        /// <param name="isSendBufferSizeExceedMaxBufferSize">
        /// Indicate whether the bufferSize sent by the SUT exceeds the max buffer size or not.
        /// </param>
        /// <param name="isWriteBufferSizeExceedMaxBufferSize">
        /// Indicate whether the bufferSize written by the SUT exceeds the max buffer size or not.
        /// </param>
        /// <param name="flag2">Whether the Flag2 field of the SMB header is valid or not.</param>
        void SessionSetupRequest(
            AccountType account,
            int messageId,
            int sessionId,
            int securitySignature,
            bool isRequireSign,
            [Domain("clientCapabilities")] Microsoft.Modeling.Set<Capabilities> capabilities,
            bool isSendBufferSizeExceedMaxBufferSize,
            bool isWriteBufferSizeExceedMaxBufferSize,
            bool flag2);

        /// <summary>
        /// Non Extended Security for session setup request.
        /// </summary>
        /// <param name="account">Indicate the account type to establish the session.</param>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="sessionId">
        /// Set this value to 0 to request a new session setup, or set this value to a previously established session
        /// identifier to request reauthentication to an existing session.
        /// </param>
        /// <param name="securitySignature">
        /// Delegate the security signature used in the session setup request header.
        /// </param>
        /// <param name="isRequireSign">
        /// Indicate whether the SUT has message signing enabled or required.
        /// </param>
        /// <param name="capabilities">A set of client capabilities.</param>
        /// <param name="isSendBufferSizeExceedMaxBufferSize">
        /// Indicate whether the bufferSize sent by the SUT exceeds the max buffer size or not.
        /// </param>
        /// <param name="isWriteBufferSizeExceedMaxBufferSize">
        /// Indicate whether the bufferSize written by the SUT exceeds the max buffer size or not.
        /// </param>
        /// <param name="flag2">Whether the Flag2 field of the SMB header is valid or not.</param>
        void NonExtendedSessionSetupRequest(
            AccountType account,
            int messageId,
            int sessionId,
            int securitySignature,
            bool isRequireSign,
            [Domain("clientCapabilitiesForNonextendedSecurity")] Microsoft.Modeling.Set<Capabilities> capabilities,
            bool isSendBufferSizeExceedMaxBufferSize,
            bool isWriteBufferSizeExceedMaxBufferSize,
            bool flag2);

        /// <summary>
        /// Session setup request with SMB_FLAGS2_UNICODE not set in Flags2.
        /// </summary>
        /// <param name="account">Indicate the account type to establish the session.</param>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="sessionId">
        /// Set this value to 0 to request a new session setup, or set this value to a previously established session
        /// identifier to request re-authentication of an existing session.
        /// </param>
        /// <param name="securitySignature">
        /// Delegate the security signature used in the session setup request header.</param>
        /// <param name="isRequireSign">Indicate whether the message signing is required.</param>
        /// <param name="capabilities">A set of client capabilities.</param>
        /// <param name="isSendBufferSizeExceedMaxBufferSize">
        /// Indicate whether the maximum buffer size for sending can exceed the MaxBufferSize field.
        /// </param>
        /// <param name="isWriteBufferSizeExceedMaxBufferSize">
        /// Indicate whether the maximum buffer size for writing can exceed the MaxBufferSize field.
        /// </param>
        /// <param name="flag2">This value is ignored by the server and it is used for traditional test.</param>
        void SessionSetupNonUnicodeRequest(
            AccountType account,
            int messageId,
            int sessionId,
            int securitySignature,
            bool isRequireSign,
            [Domain("clientCapabilities")] Set<Capabilities> capabilities,
            bool isSendBufferSizeExceedMaxBufferSize,
            bool isWriteBufferSizeExceedMaxBufferSize,
            bool flag2);

        /// <summary>
        /// Close session request.
        /// </summary>
        /// <param name="sessionId">The session Id for which the session is being closed.</param>
        void SessionClose(int sessionId);

        /// <summary>
        /// Tree connect request.
        /// </summary>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="sessionId">Session id.</param>
        /// <param name="isTidDisconnectionSet">Indicate whether the client sets the tid disconnection.</param>
        /// <param name="isRequestExtSignature">Indicate whether the client requests extended signature.</param>
        /// <param name="isRequestExtResponse">
        /// Indicate whether the client requests extended information on Tree connection response.
        /// </param>
        /// <param name="share">The share method.</param>
        /// <param name="shareType">The type of resource the client intends to access.</param>
        /// <param name="isSigned">
        /// Indicate whether the SUT has message signing enabled or required.
        /// </param>
        void TreeConnectRequest(
            int messageId,
            int sessionId,
            bool isTidDisconnectionSet,
            bool isRequestExtSignature,
            bool isRequestExtResponse,
            string share,
            ShareType shareType,
            bool isSigned);

        /// <summary>
        /// Tree multiple connect request.
        /// </summary>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="sessionId">The current session ID for this connection.</param>
        /// <param name="isTidDisconnectionSet">Indicate whether the client sets the tid disconnection.</param>
        /// <param name="isRequestExtSignature">Indicate whether the client requests the extended signature.</param>
        /// <param name="isRequestExtResponse">
        /// Indicate whether the client requests the extended information on Tree connection response.
        /// </param>
        /// <param name="share">The share method.</param>
        /// <param name="shareType">The share type the client intends to access.</param>
        /// <param name="isSigned">Indicate whether the message is signed or not for this request.</param>
        void TreeMultipleConnectRequest(
            int messageId,
            int sessionId,
            bool isTidDisconnectionSet,
            bool isRequestExtSignature,
            bool isRequestExtResponse,
            string share,
            ShareType shareType,
            bool isSigned);

        /// <summary>
        /// Create request.
        /// </summary>
        /// <param name="messageId">This is used to associate a response with a request.</param>
        /// <param name="sessionId">Set this value to 0 to request a new session setup, or set this value to a
        /// previously established session identifier to reauthenticate to an existing session.
        /// </param>
        /// <param name="treeId">
        /// This field identifies the subdirectory (or tree) (also referred as a share in this document) on the
        /// server that the client is accessing.
        /// </param>
        /// <param name="desiredAccess">
        /// The client wants to have access to the SUT. This value must be specified in the ACCESS_MASK
        /// format.
        /// </param>
        /// <param name="createDisposition">The action to take if a file does or does not exist.</param>
        /// <param name="impersonationLevel">
        /// This field specifies the information given to the server about the client and how the server MUST
        /// represent, or impersonate, the client.
        /// </param>
        /// <param name="name">File name.</param>
        /// <param name="shareType">The type of resource the client intends to access.</param>
        /// <param name="isSigned">
        /// Indicate whether the SUT has message signing enabled or required.
        /// </param>
        /// <param name="isOpenByFileId">
        /// Indicate whether the FILE_OPEN_BY_FILE_ID is set in the CreateOptions field of the Create Request.
        /// </param>
        /// <param name="isDirectoryFile">
        /// Indicate whether the FILE_DIRECTORY_FILE and FILE_NON_DIRECTORY_FILE are set. If true, FILE_DIRECTORY_FILE
        /// is set; else FILE_NON_DIRECTORY_FILE is set.
        /// </param>
        /// <param name="isMaximumAllowedSet">Whether the maximum allowed value is set.</param>
        void CreateRequest(
            int messageId,
            int sessionId,
            int treeId,
            [Domain("DesiredAccess")] int desiredAccess,
            CreateDisposition createDisposition,
            [Domain("ImpersonationLevel")] int impersonationLevel,
            [Domain("FileDomain")] string name,
            ShareType shareType,
            bool isSigned,
            bool isOpenByFileId,
            bool isDirectoryFile,
            bool isMaximumAllowedSet);

        #endregion

        #region Added interface

        /// <summary>
        /// Read response handle.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event ReadResponseHandle ReadResponse;

        /// <summary>
        /// Write response handle.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event WriteResponseHandle WriteResponse;

        /// <summary>
        /// Error write response handle.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event ErrorWriteResponseHandle ErrorWriteResponse;

        /// <summary>
        /// IsRS2299Implemented response handle.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event IsRs2299ImplementedResponseHandle IsRs2299ImplementedResponse;

        /// <summary>
        /// IsRS4984Implemented response handle.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event IsRs4984ImplementedResponseHandle IsRs4984ImplementedResponse;

        /// <summary>
        /// Session setup response additional handle.
        /// </summary>
        /// Disable warning CA1009 because according to Test Case design,
        /// the two parameters, System.Object and System.EventArgs, are unnecessary.
        [SuppressMessage("Microsoft.Design", "CA1009:DeclareEventHandlersCorrectly")]
        event SessionSetupResponseAdditionalHandle SessionSetupResponseAdditional;

        /// <summary>
        /// IsRS2299Implemented request.
        /// </summary>
        void IsRs2299ImplementedRequest();

        /// <summary>
        /// IsRS4984Implemented request.
        /// </summary>
        void IsRs4984ImplementedRequest();

        /// <summary>
        /// Read request.
/// </summary> /// <param name="messageId">This is used to associate a response with a request.</param> /// <param name="sessionId">The session identifier.</param> /// <param name="treeId">The tree identifier.</param> /// <param name="fid">The file identifier.</param> /// <param name="shareType">The type of share.</param> /// <param name="isSigned"> /// Indicate whether the SUT has message signing enabled or required. /// </param> void ReadRequest( int messageId, int sessionId, int treeId, int fid, ShareType shareType, bool isSigned); /// <summary> /// Write request. /// </summary> /// <param name="messageId">This is used to associate a response with a request.</param> /// <param name="sessionId">The session identifier.</param> /// <param name="treeId">The tree identifier.</param> /// <param name="fid">The file identifier.</param> /// <param name="shareType">The type of share.</param> /// <param name="isSigned"> /// Indicate whether the SUT has message signing enabled or required. /// </param> /// <param name="synchronize">The synchronize method.</param> void WriteRequest( int messageId, int sessionId, int treeId, int fid, ShareType shareType, bool isSigned, int synchronize); /// <summary> /// Session setup request additional. /// </summary> /// <param name="account">Indicate the account type to establish the session.</param> /// <param name="messageId">This is used to associate a response with a request.</param> /// <param name="sessionId">The session identifier.</param> /// <param name="securitySignature"> /// Delegate the security signature used in session setup request header. /// </param> /// <param name="isRequireSign"> /// Indicate whether the server has message signing enabled or required. /// </param> /// <param name="capabilities">A set of client capabilities.</param> /// <param name="isSendBufferSizeExceedMaxBufferSize"> /// Indicate whether the bufferSize sent by the SUT exceeds the max buffer size or not. 
/// </param> /// <param name="isWriteBufferSizeExceedMaxBufferSize"> /// Indicate whether the bufferSize written by the SUT exceeds the max buffer size or not. /// </param> /// <param name="flag2">Whether the Flag2 field of the SMB header is valid or not.</param> /// <param name="isGssTokenValid">Whether the GSS token in valid or not.</param> /// <param name="isUserIdValid">Whether the user ID is valid or not.</param> void SessionSetupRequestAdditional( AccountType account, int messageId, int sessionId, int securitySignature, bool isRequireSign, [Domain("clientCapabilities")] Set<Capabilities> capabilities, bool isSendBufferSizeExceedMaxBufferSize, bool isWriteBufferSizeExceedMaxBufferSize, bool flag2, bool isGssTokenValid, bool isUserIdValid); /// <summary> /// FSCTL Bad command request. /// </summary> /// <param name="messageId">This is used to associate a response with a request.</param> /// <param name="sessionId">The current session ID for this connection.</param> /// <param name="treeId">The tree ID for the corrent share connection.</param> /// <param name="isSigned">Indicate whether the message is signed or not for this response.</param> /// <param name="fid">The file identifier.</param> /// <param name="command">This is used to tell the adapter to send an invalid kind of command.</param> void FsctlBadCommandRequest( int messageId, int sessionId, int treeId, bool isSigned, int fid, FsctlInvalidCommand command); #endregion } }
//------------------------------------------------------------------------------
// <copyright file="TraceListener.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

namespace System.Diagnostics {
    using System;
    using System.Text;
    using System.Security.Permissions;
    using System.Globalization;
    using System.Runtime.InteropServices;
    using System.Collections;

    /// <devdoc>
    ///    <para>Provides the <see langword='abstract '/>base class for the listeners who
    ///       monitor trace and debug output.</para>
    /// </devdoc>
    [HostProtection(Synchronization=true)]
    public abstract class TraceListener : MarshalByRefObject, IDisposable {
        // Per-listener output state: indentation bookkeeping, output options, optional filter.
        int indentLevel;
        int indentSize = 4;
        TraceOptions traceOptions = TraceOptions.None;
        bool needIndent = true;

        string listenerName;
        TraceFilter filter = null;
        //////StringDictionary attributes;       // attribute support removed in this trimmed port
        internal string initializeData;          // raw initializeData from config; consumed by derived listeners

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.Diagnostics.TraceListener'/> class.</para>
        /// </devdoc>
        protected TraceListener () {
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.Diagnostics.TraceListener'/> class using the specified name as the
        ///    listener.</para>
        /// </devdoc>
        protected TraceListener(string name) {
            this.listenerName = name;
        }

        //////public StringDictionary Attributes {
        //////    get {
        //////        if (attributes == null)
        //////            attributes = new StringDictionary();
        //////        return attributes;
        //////    }
        //////}

        /// <devdoc>
        /// <para> Gets or sets a name for this <see cref='System.Diagnostics.TraceListener'/>.
        ///    Never returns null; an unset name reads as the empty string.</para>
        /// </devdoc>
        public virtual string Name {
            get { return (listenerName == null) ? "" : listenerName; }
            set { listenerName = value; }
        }

        /// <devdoc>
        /// <para>Indicates whether the listener is safe for concurrent use without external
        ///    locking. The base implementation is not thread safe.</para>
        /// </devdoc>
        public virtual bool IsThreadSafe {
            get { return false; }
        }

        /// <devdoc>
        /// <para>Releases the resources used by the listener.</para>
        /// </devdoc>
        public void Dispose() {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <devdoc>
        /// <para>Releases the unmanaged resources used by the listener and optionally the
        ///    managed resources. The base implementation holds no resources and does nothing.</para>
        /// </devdoc>
        protected virtual void Dispose(bool disposing) {
            return;
        }

        /// <devdoc>
        /// <para>When overridden in a derived class, closes the output stream
        ///    so that it no longer receives tracing or debugging output.</para>
        /// </devdoc>
        public virtual void Close() {
            return;
        }

        /// <devdoc>
        /// <para>When overridden in a derived class, flushes the output buffer.</para>
        /// </devdoc>
        public virtual void Flush() {
            return;
        }

        /// <devdoc>
        /// <para>Gets or sets the indent level. Negative values are clamped to zero.</para>
        /// </devdoc>
        public int IndentLevel {
            get {
                return indentLevel;
            }

            set {
                indentLevel = (value < 0) ? 0 : value;
            }
        }

        /// <devdoc>
        /// <para>Gets or sets the number of spaces in an indent.</para>
        /// </devdoc>
        /// <exception cref="ArgumentOutOfRangeException">The value is negative.</exception>
        public int IndentSize {
            get {
                return indentSize;
            }

            set {
                if (value < 0)
                    // Pass the parameter name so callers get a meaningful exception even
                    // though the localized SR message was removed in this trimmed port.
                    throw new ArgumentOutOfRangeException("value");
                indentSize = value;
            }
        }

        /// <devdoc>
        /// <para>Gets or sets the filter consulted before any output is produced;
        ///    null means no filtering.</para>
        /// </devdoc>
        [ComVisible(false)]
        public TraceFilter Filter {
            get {
                return filter;
            }
            set {
                filter = value;
            }
        }

        /// <devdoc>
        /// <para>Gets or sets a value indicating whether an indent is needed.</para>
        /// </devdoc>
        protected bool NeedIndent {
            get {
                return needIndent;
            }

            set {
                needIndent = value;
            }
        }

        /// <devdoc>
        /// <para>Gets or sets the trace output options written by <see cref='TraceListener.WriteFooter'/>.</para>
        /// </devdoc>
        /// <exception cref="ArgumentOutOfRangeException">The value contains bits outside the defined TraceOptions flags.</exception>
        [ComVisible(false)]
        public TraceOptions TraceOutputOptions {
            get {
                return traceOptions;
            }
            set {
                // The defined flags occupy the low 6 bits; anything above is invalid.
                if (((int) value >> 6) != 0) {
                    throw new ArgumentOutOfRangeException("value");
                }
                traceOptions = value;
            }
        }

        //////internal void SetAttributes(Hashtable attribs) {
        //////    TraceUtils.VerifyAttributes(attribs, GetSupportedAttributes(), this);
        //////    attributes = new StringDictionary();
        //////    attributes.ReplaceHashtable(attribs);
        //////}

        /// <devdoc>
        /// <para>Emits or displays a message for an assertion that always fails.</para>
        /// </devdoc>
        public virtual void Fail(string message) {
            Fail(message, null);
        }

        /// <devdoc>
        /// <para>Emits or displays messages for an assertion that always fails.</para>
        /// </devdoc>
        public virtual void Fail(string message, string detailMessage) {
            StringBuilder failMessage = new StringBuilder();
            failMessage.Append("TraceListenerFail:"/*SR.GetString(SR.TraceListenerFail)*/);
            failMessage.Append(" ");
            failMessage.Append(message);
            if (detailMessage != null) {
                failMessage.Append(" ");
                failMessage.Append(detailMessage);
            }

            WriteLine(failMessage.ToString());
        }

        /// <devdoc>
        /// <para>Returns the custom attribute names supported by this listener;
        ///    the base implementation supports none and returns null.</para>
        /// </devdoc>
        protected internal virtual string[] GetSupportedAttributes() {
            return null;
        }

        /// <devdoc>
        /// <para>When overridden in a derived class, writes the specified
        ///    message to the listener you specify in the derived class.</para>
        /// </devdoc>
        public abstract void Write(string message);

        /// <devdoc>
        /// <para>Writes the name of the <paramref name="o"/> parameter to the listener you specify when you inherit from the <see cref='System.Diagnostics.TraceListener'/>
        ///    class.</para>
        /// </devdoc>
        public virtual void Write(object o) {
            if (Filter != null && !Filter.ShouldTrace(null, "", TraceEventType.Verbose, 0, null, null, o))
                return;

            if (o == null) return;
            Write(o.ToString());
        }

        /// <devdoc>
        /// <para>Writes a category name and a message to the listener you specify when you
        ///    inherit from the <see cref='System.Diagnostics.TraceListener'/>
        ///    class.</para>
        /// </devdoc>
        public virtual void Write(string message, string category) {
            if (Filter != null && !Filter.ShouldTrace(null, "", TraceEventType.Verbose, 0, message))
                return;

            if (category == null)
                Write(message);
            else
                Write(category + ": " + ((message == null) ? string.Empty : message));
        }

        /// <devdoc>
        /// <para>Writes a category name and the name of the <paramref name="o"/> parameter to the listener you
        ///    specify when you inherit from the <see cref='System.Diagnostics.TraceListener'/>
        ///    class.</para>
        /// </devdoc>
        public virtual void Write(object o, string category) {
            if (Filter != null && !Filter.ShouldTrace(null, "", TraceEventType.Verbose, 0, category, null, o))
                return;

            if (category == null)
                Write(o);
            else
                Write(o == null ? "" : o.ToString(), category);
        }

        /// <devdoc>
        /// <para>Writes the indent to the listener you specify when you
        ///    inherit from the <see cref='System.Diagnostics.TraceListener'/>
        ///    class, and resets the <see cref='TraceListener.NeedIndent'/> property to <see langword='false'/>.</para>
        /// </devdoc>
        protected virtual void WriteIndent() {
            NeedIndent = false;
            for (int i = 0; i < indentLevel; i++) {
                if (indentSize == 4)
                    Write("    ");  // fast path for the default indent width
                else {
                    for (int j = 0; j < indentSize; j++) {
                        Write(" ");
                    }
                }
            }
        }

        /// <devdoc>
        /// <para>When overridden in a derived class, writes a message to the listener you specify in
        ///    the derived class, followed by a line terminator. The default line terminator is a carriage return followed
        ///    by a line feed (\r\n).</para>
        /// </devdoc>
        public abstract void WriteLine(string message);

        /// <devdoc>
        /// <para>Writes the name of the <paramref name="o"/> parameter to the listener you specify when you inherit from the <see cref='System.Diagnostics.TraceListener'/> class, followed by a line terminator. The default line terminator is a
        ///    carriage return followed by a line feed
        ///    (\r\n).</para>
        /// </devdoc>
        public virtual void WriteLine(object o) {
            if (Filter != null && !Filter.ShouldTrace(null, "", TraceEventType.Verbose, 0, null, null, o))
                return;

            WriteLine(o == null ? "" : o.ToString());
        }

        /// <devdoc>
        /// <para>Writes a category name and a message to the listener you specify when you
        ///    inherit from the <see cref='System.Diagnostics.TraceListener'/> class,
        ///    followed by a line terminator. The default line terminator is a carriage return followed by a line feed (\r\n).</para>
        /// </devdoc>
        public virtual void WriteLine(string message, string category) {
            if (Filter != null && !Filter.ShouldTrace(null, "", TraceEventType.Verbose, 0, message))
                return;

            if (category == null)
                WriteLine(message);
            else
                WriteLine(category + ": " + ((message == null) ? string.Empty : message));
        }

        /// <devdoc>
        /// <para>Writes a category
        ///    name and the name of the <paramref name="o"/>parameter to the listener you
        ///    specify when you inherit from the <see cref='System.Diagnostics.TraceListener'/>
        ///    class, followed by a line terminator. The default line terminator is a carriage
        ///    return followed by a line feed (\r\n).</para>
        /// </devdoc>
        public virtual void WriteLine(object o, string category) {
            if (Filter != null && !Filter.ShouldTrace(null, "", TraceEventType.Verbose, 0, category, null, o))
                return;

            WriteLine(o == null ? "" : o.ToString(), category);
        }

        // New write methods used by TraceSource.

        /// <devdoc>
        /// <para>Writes trace information, a single data object, and event information
        ///    to the listener-specific output, subject to the filter.</para>
        /// </devdoc>
        [ComVisible(false)]
        public virtual void TraceData(TraceEventCache eventCache, String source, TraceEventType eventType, int id, object data) {
            if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, null, null, data))
                return;

            WriteHeader(source, eventType, id);
            string datastring = String.Empty;
            if (data != null)
                datastring = data.ToString();

            WriteLine(datastring);
            WriteFooter(eventCache);
        }

        /// <devdoc>
        /// <para>Writes trace information, an array of data objects (comma separated), and event
        ///    information to the listener-specific output, subject to the filter.</para>
        /// </devdoc>
        [ComVisible(false)]
        public virtual void TraceData(TraceEventCache eventCache, String source, TraceEventType eventType, int id, params object[] data) {
            if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, null, null, null, data))
                return;

            WriteHeader(source, eventType, id);

            StringBuilder sb = new StringBuilder();
            if (data != null) {
                for (int i = 0; i < data.Length; i++) {
                    if (i != 0)
                        sb.Append(", ");

                    if (data[i] != null)
                        sb.Append(data[i].ToString());
                }
            }
            WriteLine(sb.ToString());

            WriteFooter(eventCache);
        }

        /// <devdoc>
        /// <para>Writes trace and event information with an empty message.</para>
        /// </devdoc>
        [ComVisible(false)]
        public virtual void TraceEvent(TraceEventCache eventCache, String source, TraceEventType eventType, int id) {
            TraceEvent(eventCache, source, eventType, id, String.Empty);
        }

        // All other TraceEvent methods come through this one.

        /// <devdoc>
        /// <para>Writes trace information, a message, and event information to the
        ///    listener-specific output, subject to the filter.</para>
        /// </devdoc>
        [ComVisible(false)]
        public virtual void TraceEvent(TraceEventCache eventCache, String source, TraceEventType eventType, int id, string message) {
            if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, message))
                return;

            WriteHeader(source, eventType, id);
            WriteLine(message);

            WriteFooter(eventCache);
        }

        /// <devdoc>
        /// <para>Writes trace information, a formatted message, and event information to the
        ///    listener-specific output, subject to the filter. The format string is expanded with
        ///    the invariant culture; if <paramref name="args"/> is null the format string is written verbatim.</para>
        /// </devdoc>
        [ComVisible(false)]
        public virtual void TraceEvent(TraceEventCache eventCache, String source, TraceEventType eventType, int id, string format, params object[] args) {
            if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, format, args))
                return;

            WriteHeader(source, eventType, id);
            if (args != null)
                WriteLine(String.Format(CultureInfo.InvariantCulture, format, args));
            else
                WriteLine(format);

            WriteFooter(eventCache);
        }

        /// <devdoc>
        /// <para>Writes trace information, a message, a related activity identity, and event
        ///    information, routed through the message-based TraceEvent overload.</para>
        /// </devdoc>
        [ComVisible(false)]
        public virtual void TraceTransfer(TraceEventCache eventCache, String source, int id, string message, Guid relatedActivityId) {
            TraceEvent(eventCache, source, TraceEventType.Transfer, id, message + ", relatedActivityId=" + relatedActivityId.ToString());
        }

        // Writes the "source eventType: id : " prefix that precedes every TraceData/TraceEvent payload.
        private void WriteHeader(String source, TraceEventType eventType, int id) {
            Write(String.Format(CultureInfo.InvariantCulture, "{0} {1}: {2} : ", source, eventType.ToString(), id.ToString(CultureInfo.InvariantCulture)));
        }

        //////[ResourceExposure(ResourceScope.None)]
        //////[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)]
        // Writes the optional trailing lines (process id, thread id, timestamps, ...) selected
        // by TraceOutputOptions, indented one level deeper than the payload.
        private void WriteFooter(TraceEventCache eventCache) {
            if (eventCache == null)
                return;

            indentLevel++;

            if (IsEnabled(TraceOptions.ProcessId))
                WriteLine("ProcessId=" + eventCache.ProcessId);

            if (IsEnabled(TraceOptions.LogicalOperationStack)) {
                Write("LogicalOperationStack=");
                Stack operationStack = eventCache.LogicalOperationStack;
                bool first = true;
                foreach (Object obj in operationStack) {
                    if (!first)
                        Write(", ");
                    else
                        first = false;

                    Write(obj.ToString());
                }
                WriteLine(String.Empty);
            }

            if (IsEnabled(TraceOptions.ThreadId))
                WriteLine("ThreadId=" + eventCache.ThreadId);

            if (IsEnabled(TraceOptions.DateTime))
                // "o" (round-trip) is culture-invariant by definition; pass the culture
                // explicitly anyway for consistency with the other formatting calls.
                WriteLine("DateTime=" + eventCache.DateTime.ToString("o", CultureInfo.InvariantCulture));

            if (IsEnabled(TraceOptions.Timestamp))
                WriteLine("Timestamp=" + eventCache.Timestamp);

            //////if (IsEnabled(TraceOptions.Callstack))
            //////    WriteLine("Callstack=" + eventCache.Callstack);

            indentLevel--;
        }

        // True when any of the given option flags is selected in TraceOutputOptions.
        internal bool IsEnabled(TraceOptions opts) {
            return (opts & TraceOutputOptions) != 0;
        }
    }
}