context
stringlengths
2.52k
185k
gt
stringclasses
1 value
// dnlib: See LICENSE.txt for more info

using System;
using dnlib.PE;
#if THREAD_SAFE
using ThreadSafe = dnlib.Threading.Collections;
#else
using ThreadSafe = System.Collections.Generic;
#endif

namespace dnlib.DotNet.MD {
	/// <summary>
	/// Interface to access the .NET metadata
	/// </summary>
	/// <remarks>
	/// Rids ("row ids") are 1-based indexes into metadata tables; the methods below
	/// that return <c>uint</c> use 0 as the "not found / invalid" sentinel.
	/// </remarks>
	public interface IMetaData : IDisposable {
		/// <summary>
		/// <c>true</c> if the compressed (normal) metadata is used, <c>false</c> if the non-compressed
		/// (Edit N' Continue) metadata is used. This can be <c>false</c> even if the table stream
		/// is <c>#~</c> but that's very uncommon.
		/// </summary>
		bool IsCompressed { get; }

		/// <summary>
		/// Gets the .NET header
		/// </summary>
		ImageCor20Header ImageCor20Header { get; }

		/// <summary>
		/// Gets the major version number found in the MetaData header
		/// </summary>
		ushort MajorVersion { get; }

		/// <summary>
		/// Gets the minor version number found in the MetaData header
		/// </summary>
		ushort MinorVersion { get; }

		/// <summary>
		/// Gets the version string found in the MetaData header
		/// </summary>
		string VersionString { get; }

		/// <summary>
		/// Gets the <see cref="IPEImage"/>
		/// </summary>
		IPEImage PEImage { get; }

		/// <summary>
		/// Gets the metadata header
		/// </summary>
		MetaDataHeader MetaDataHeader { get; }

		/// <summary>
		/// Returns the #Strings stream or a default empty one if it's not present
		/// </summary>
		StringsStream StringsStream { get; }

		/// <summary>
		/// Returns the #US stream or a default empty one if it's not present
		/// </summary>
		USStream USStream { get; }

		/// <summary>
		/// Returns the #Blob stream or a default empty one if it's not present
		/// </summary>
		BlobStream BlobStream { get; }

		/// <summary>
		/// Returns the #GUID stream or a default empty one if it's not present
		/// </summary>
		GuidStream GuidStream { get; }

		/// <summary>
		/// Returns the #~ or #- tables stream
		/// </summary>
		TablesStream TablesStream { get; }

		/// <summary>
		/// Gets all streams
		/// </summary>
		ThreadSafe.IList<DotNetStream> AllStreams { get; }

		/// <summary>
		/// Gets a list of all the valid <c>TypeDef</c> rids. It's usually every rid in the
		/// <c>TypeDef</c> table, but could be less if a type has been deleted.
		/// </summary>
		RidList GetTypeDefRidList();

		/// <summary>
		/// Gets a list of all the valid <c>ExportedType</c> rids. It's usually every rid in the
		/// <c>ExportedType</c> table, but could be less if a type has been deleted.
		/// </summary>
		RidList GetExportedTypeRidList();

		/// <summary>
		/// Gets the <c>Field</c> rid list
		/// </summary>
		/// <param name="typeDefRid"><c>TypeDef</c> rid</param>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetFieldRidList(uint typeDefRid);

		/// <summary>
		/// Gets the <c>Method</c> rid list
		/// </summary>
		/// <param name="typeDefRid"><c>TypeDef</c> rid</param>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetMethodRidList(uint typeDefRid);

		/// <summary>
		/// Gets the <c>Param</c> rid list
		/// </summary>
		/// <param name="methodRid"><c>Method</c> rid</param>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetParamRidList(uint methodRid);

		/// <summary>
		/// Gets the <c>Event</c> rid list
		/// </summary>
		/// <param name="eventMapRid"><c>EventMap</c> rid</param>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetEventRidList(uint eventMapRid);

		/// <summary>
		/// Gets the <c>Property</c> rid list
		/// </summary>
		/// <param name="propertyMapRid"><c>PropertyMap</c> rid</param>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetPropertyRidList(uint propertyMapRid);

		/// <summary>
		/// Finds all <c>InterfaceImpl</c> rids owned by <paramref name="typeDefRid"/>
		/// </summary>
		/// <param name="typeDefRid">Owner <c>TypeDef</c> rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>InterfaceImpl</c> rids</returns>
		RidList GetInterfaceImplRidList(uint typeDefRid);

		/// <summary>
		/// Finds all <c>GenericParam</c> rids owned by <paramref name="rid"/> in table <paramref name="table"/>
		/// </summary>
		/// <param name="table">A <c>TypeOrMethodDef</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>GenericParam</c> rids</returns>
		RidList GetGenericParamRidList(Table table, uint rid);

		/// <summary>
		/// Finds all <c>GenericParamConstraint</c> rids owned by <paramref name="genericParamRid"/>
		/// </summary>
		/// <param name="genericParamRid">Owner <c>GenericParam</c> rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>GenericParamConstraint</c> rids</returns>
		RidList GetGenericParamConstraintRidList(uint genericParamRid);

		/// <summary>
		/// Finds all <c>CustomAttribute</c> rids owned by <paramref name="rid"/> in table <paramref name="table"/>
		/// </summary>
		/// <param name="table">A <c>HasCustomAttribute</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>CustomAttribute</c> rids</returns>
		RidList GetCustomAttributeRidList(Table table, uint rid);

		/// <summary>
		/// Finds all <c>DeclSecurity</c> rids owned by <paramref name="rid"/> in table <paramref name="table"/>
		/// </summary>
		/// <param name="table">A <c>HasDeclSecurity</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>DeclSecurity</c> rids</returns>
		RidList GetDeclSecurityRidList(Table table, uint rid);

		/// <summary>
		/// Finds all <c>MethodSemantics</c> rids owned by <paramref name="rid"/> in table <paramref name="table"/>
		/// </summary>
		/// <param name="table">A <c>HasSemantic</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>MethodSemantics</c> rids</returns>
		RidList GetMethodSemanticsRidList(Table table, uint rid);

		/// <summary>
		/// Finds all <c>MethodImpl</c> rids owned by <paramref name="typeDefRid"/>
		/// </summary>
		/// <param name="typeDefRid">Owner <c>TypeDef</c> rid</param>
		/// <returns>A <see cref="RidList"/> instance containing the valid <c>MethodImpl</c> rids</returns>
		RidList GetMethodImplRidList(uint typeDefRid);

		/// <summary>
		/// Finds a <c>ClassLayout</c> rid
		/// </summary>
		/// <param name="typeDefRid">Owner <c>TypeDef</c> rid</param>
		/// <returns>The <c>ClassLayout</c> rid or 0 if <paramref name="typeDefRid"/> is invalid
		/// or if it has no row in the <c>ClassLayout</c> table.</returns>
		uint GetClassLayoutRid(uint typeDefRid);

		/// <summary>
		/// Finds a <c>FieldLayout</c> rid
		/// </summary>
		/// <param name="fieldRid">Owner <c>Field</c> rid</param>
		/// <returns>The <c>FieldLayout</c> rid or 0 if <paramref name="fieldRid"/> is invalid
		/// or if it has no row in the <c>FieldLayout</c> table.</returns>
		uint GetFieldLayoutRid(uint fieldRid);

		/// <summary>
		/// Finds a <c>FieldMarshal</c> rid
		/// </summary>
		/// <param name="table">A <c>HasFieldMarshal</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>The <c>FieldMarshal</c> rid or 0 if <paramref name="rid"/> is invalid
		/// or if it has no row in the <c>FieldMarshal</c> table.</returns>
		uint GetFieldMarshalRid(Table table, uint rid);

		/// <summary>
		/// Finds a <c>FieldRVA</c> rid
		/// </summary>
		/// <param name="fieldRid">Owner <c>Field</c> rid</param>
		/// <returns>The <c>FieldRVA</c> rid or 0 if <paramref name="fieldRid"/> is invalid
		/// or if it has no row in the <c>FieldRVA</c> table.</returns>
		uint GetFieldRVARid(uint fieldRid);

		/// <summary>
		/// Finds an <c>ImplMap</c> rid
		/// </summary>
		/// <param name="table">A <c>MemberForwarded</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>The <c>ImplMap</c> rid or 0 if <paramref name="rid"/> is invalid
		/// or if it has no row in the <c>ImplMap</c> table.</returns>
		uint GetImplMapRid(Table table, uint rid);

		/// <summary>
		/// Finds a <c>NestedClass</c> rid
		/// </summary>
		/// <param name="typeDefRid">Owner <c>TypeDef</c> rid</param>
		/// <returns>The <c>NestedClass</c> rid or 0 if <paramref name="typeDefRid"/> is invalid
		/// or if it has no row in the <c>NestedClass</c> table.</returns>
		uint GetNestedClassRid(uint typeDefRid);

		/// <summary>
		/// Finds an <c>EventMap</c> rid
		/// </summary>
		/// <param name="typeDefRid">Owner <c>TypeDef</c> rid</param>
		/// <returns>The <c>EventMap</c> rid or 0 if <paramref name="typeDefRid"/> is invalid
		/// or if it has no row in the <c>EventMap</c> table.</returns>
		uint GetEventMapRid(uint typeDefRid);

		/// <summary>
		/// Finds a <c>PropertyMap</c> rid
		/// </summary>
		/// <param name="typeDefRid">Owner <c>TypeDef</c> rid</param>
		/// <returns>The <c>PropertyMap</c> rid or 0 if <paramref name="typeDefRid"/> is invalid
		/// or if it has no row in the <c>PropertyMap</c> table.</returns>
		uint GetPropertyMapRid(uint typeDefRid);

		/// <summary>
		/// Finds a <c>Constant</c> rid
		/// </summary>
		/// <param name="table">A <c>HasConstant</c> table</param>
		/// <param name="rid">Owner rid</param>
		/// <returns>The <c>Constant</c> rid or 0 if <paramref name="rid"/> is invalid
		/// or if it has no row in the <c>Constant</c> table.</returns>
		uint GetConstantRid(Table table, uint rid);

		/// <summary>
		/// Returns the owner <c>TypeDef</c> rid
		/// </summary>
		/// <param name="fieldRid">A <c>Field</c> rid</param>
		/// <returns>The owner <c>TypeDef</c> rid or 0 if <paramref name="fieldRid"/> is invalid
		/// or if it has no owner.</returns>
		uint GetOwnerTypeOfField(uint fieldRid);

		/// <summary>
		/// Returns the owner <c>TypeDef</c> rid
		/// </summary>
		/// <param name="methodRid">A <c>Method</c> rid</param>
		/// <returns>The owner <c>TypeDef</c> rid or 0 if <paramref name="methodRid"/> is invalid
		/// or if it has no owner.</returns>
		uint GetOwnerTypeOfMethod(uint methodRid);

		/// <summary>
		/// Returns the owner <c>TypeDef</c> rid
		/// </summary>
		/// <param name="eventRid">A <c>Event</c> rid</param>
		/// <returns>The owner <c>TypeDef</c> rid or 0 if <paramref name="eventRid"/> is invalid
		/// or if it has no owner.</returns>
		uint GetOwnerTypeOfEvent(uint eventRid);

		/// <summary>
		/// Returns the owner <c>TypeDef</c> rid
		/// </summary>
		/// <param name="propertyRid">A <c>Property</c> rid</param>
		/// <returns>The owner <c>TypeDef</c> rid or 0 if <paramref name="propertyRid"/> is invalid
		/// or if it has no owner.</returns>
		uint GetOwnerTypeOfProperty(uint propertyRid);

		/// <summary>
		/// Returns the owner <c>TypeOrMethodDef</c> rid
		/// </summary>
		/// <param name="gpRid">A <c>GenericParam</c> rid</param>
		/// <returns>The owner <c>TypeOrMethodDef</c> rid or 0 if <paramref name="gpRid"/> is
		/// invalid or if it has no owner.</returns>
		uint GetOwnerOfGenericParam(uint gpRid);

		/// <summary>
		/// Returns the owner <c>GenericParam</c> rid
		/// </summary>
		/// <param name="gpcRid">A <c>GenericParamConstraint</c> rid</param>
		/// <returns>The owner <c>GenericParam</c> rid or 0 if <paramref name="gpcRid"/> is
		/// invalid or if it has no owner.</returns>
		uint GetOwnerOfGenericParamConstraint(uint gpcRid);

		/// <summary>
		/// Returns the owner <c>Method</c> rid
		/// </summary>
		/// <param name="paramRid">A <c>Param</c> rid</param>
		/// <returns>The owner <c>Method</c> rid or 0 if <paramref name="paramRid"/> is invalid
		/// or if it has no owner.</returns>
		uint GetOwnerOfParam(uint paramRid);

		/// <summary>
		/// Gets a list of all nested classes owned by <paramref name="typeDefRid"/>
		/// </summary>
		/// <param name="typeDefRid">A <c>TypeDef</c> rid</param>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetNestedClassRidList(uint typeDefRid);

		/// <summary>
		/// Gets a list of all non-nested classes. A type is a non-nested type if
		/// <see cref="GetNestedClassRidList(uint)"/> returns an empty list.
		/// </summary>
		/// <returns>A new <see cref="RidList"/> instance</returns>
		RidList GetNonNestedClassRidList();
	}
}
using System;
using Assets.Common.StandardAssets.CrossPlatformInput.Scripts.PlatformSpecific;
using UnityEngine;

namespace Assets.Common.StandardAssets.CrossPlatformInput.Scripts
{
    /// <summary>
    /// Static facade that routes all input queries (axes, buttons, virtual mouse)
    /// through either hardware (standalone) input or touch (mobile) input.
    /// </summary>
    [AddComponentMenu("Scripts/Standard Assets/CrossPlatformInput/CrossPlatformInputManager")]
    public static class CrossPlatformInputManager
    {
        public enum ActiveInputMethod
        {
            Hardware,
            Touch
        }

        private static VirtualInput _activeInput;
        private static readonly VirtualInput STouchInput;
        private static readonly VirtualInput SHardwareInput;

        static CrossPlatformInputManager()
        {
            STouchInput = new MobileInput();
            SHardwareInput = new StandaloneInput();
#if MOBILE_INPUT
            // BUG FIX: this branch previously read "activeInput = s_TouchInput;",
            // referencing pre-rename identifiers that no longer exist, so the class
            // failed to compile whenever MOBILE_INPUT was defined.
            _activeInput = STouchInput;
#else
            _activeInput = SHardwareInput;
#endif
        }

        /// <summary>Switches the backing input implementation at runtime.</summary>
        public static void SwitchActiveInputMethod(ActiveInputMethod activeInputMethod)
        {
            switch (activeInputMethod)
            {
                case ActiveInputMethod.Hardware:
                    _activeInput = SHardwareInput;
                    break;
                case ActiveInputMethod.Touch:
                    _activeInput = STouchInput;
                    break;
                default:
                    throw new ArgumentOutOfRangeException("activeInputMethod", activeInputMethod, null);
            }
        }

        /// <summary>Returns true if a virtual axis with this name is registered.</summary>
        public static bool AxisExists(string name)
        {
            return _activeInput.AxisExists(name);
        }

        /// <summary>Returns true if a virtual button with this name is registered.</summary>
        public static bool ButtonExists(string name)
        {
            return _activeInput.ButtonExists(name);
        }

        public static void RegisterVirtualAxis(VirtualAxis axis)
        {
            _activeInput.RegisterVirtualAxis(axis);
        }

        public static void RegisterVirtualButton(VirtualButton button)
        {
            _activeInput.RegisterVirtualButton(button);
        }

        public static void UnRegisterVirtualAxis(string name)
        {
            if (name == null)
            {
                throw new ArgumentNullException("name");
            }
            _activeInput.UnRegisterVirtualAxis(name);
        }

        public static void UnRegisterVirtualButton(string name)
        {
            // Guard added for consistency with UnRegisterVirtualAxis, which
            // already rejects null names explicitly.
            if (name == null)
            {
                throw new ArgumentNullException("name");
            }
            _activeInput.UnRegisterVirtualButton(name);
        }

        // returns a reference to a named virtual axis if it exists otherwise null
        public static VirtualAxis VirtualAxisReference(string name)
        {
            return _activeInput.VirtualAxisReference(name);
        }

        // returns the platform appropriate axis for the given name
        public static float GetAxis(string name)
        {
            return GetAxis(name, false);
        }

        public static float GetAxisRaw(string name)
        {
            return GetAxis(name, true);
        }

        // private function handles both types of axis (raw and not raw)
        private static float GetAxis(string name, bool raw)
        {
            return _activeInput.GetAxis(name, raw);
        }

        // -- Button handling --
        public static bool GetButton(string name)
        {
            return _activeInput.GetButton(name);
        }

        public static bool GetButtonDown(string name)
        {
            return _activeInput.GetButtonDown(name);
        }

        public static bool GetButtonUp(string name)
        {
            return _activeInput.GetButtonUp(name);
        }

        public static void SetButtonDown(string name)
        {
            _activeInput.SetButtonDown(name);
        }

        public static void SetButtonUp(string name)
        {
            _activeInput.SetButtonUp(name);
        }

        public static void SetAxisPositive(string name)
        {
            _activeInput.SetAxisPositive(name);
        }

        public static void SetAxisNegative(string name)
        {
            _activeInput.SetAxisNegative(name);
        }

        public static void SetAxisZero(string name)
        {
            _activeInput.SetAxisZero(name);
        }

        public static void SetAxis(string name, float value)
        {
            _activeInput.SetAxis(name, value);
        }

        public static Vector3 MousePosition
        {
            get { return _activeInput.MousePosition(); }
        }

        public static void SetVirtualMousePositionX(float f)
        {
            _activeInput.SetVirtualMousePositionX(f);
        }

        public static void SetVirtualMousePositionY(float f)
        {
            _activeInput.SetVirtualMousePositionY(f);
        }

        public static void SetVirtualMousePositionZ(float f)
        {
            _activeInput.SetVirtualMousePositionZ(f);
        }

        // virtual axis and button classes - applies to mobile input
        // Can be mapped to touch joysticks, tilt, gyro, etc, depending on desired implementation.
        // Could also be implemented by other input devices - kinect, electronic sensors, etc
        public class VirtualAxis
        {
            public string Name { get; private set; }
            private float _mValue;
            public bool MatchWithInputManager { get; private set; }

            public VirtualAxis(string name)
                : this(name, true)
            {
            }

            public VirtualAxis(string name, bool matchToInputSettings)
            {
                this.Name = name;
                MatchWithInputManager = matchToInputSettings;
            }

            // removes an axes from the cross platform input system
            public void Remove()
            {
                UnRegisterVirtualAxis(Name);
            }

            // a controller gameobject (eg. a virtual thumbstick) should update this class
            public void Update(float value)
            {
                _mValue = value;
            }

            public float GetValue
            {
                get { return _mValue; }
            }

            // NOTE: raw value is identical to the smoothed value here; virtual
            // axes carry whatever the controller object last wrote.
            public float GetValueRaw
            {
                get { return _mValue; }
            }
        }

        // a controller gameobject (eg. a virtual GUI button) should call the
        // 'pressed' function of this class. Other objects can then read the
        // Get/Down/Up state of this button.
        public class VirtualButton
        {
            public string Name { get; private set; }
            public bool MatchWithInputManager { get; private set; }

            // Frame numbers of the last press/release; -5 means "never", and is
            // far enough in the past that the Down/Up one-frame checks are false.
            private int _mLastPressedFrame = -5;
            private int _mReleasedFrame = -5;
            private bool _mPressed;

            public VirtualButton(string name)
                : this(name, true)
            {
            }

            public VirtualButton(string name, bool matchToInputSettings)
            {
                this.Name = name;
                MatchWithInputManager = matchToInputSettings;
            }

            // A controller gameobject should call this function when the button is pressed down
            public void Pressed()
            {
                if (_mPressed)
                {
                    return;
                }
                _mPressed = true;
                _mLastPressedFrame = Time.frameCount;
            }

            // A controller gameobject should call this function when the button is released
            public void Released()
            {
                _mPressed = false;
                _mReleasedFrame = Time.frameCount;
            }

            // the controller gameobject should call Remove when the button is destroyed or disabled
            public void Remove()
            {
                UnRegisterVirtualButton(Name);
            }

            // these are the states of the button which can be read via the cross platform input system
            public bool GetButton
            {
                get { return _mPressed; }
            }

            // True only on the frame immediately after Pressed() was called.
            public bool GetButtonDown
            {
                get { return _mLastPressedFrame - Time.frameCount == -1; }
            }

            // True only on the frame immediately after Released() was called.
            public bool GetButtonUp
            {
                get { return _mReleasedFrame == Time.frameCount - 1; }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Orleans;
using Orleans.Runtime;
using Orleans.Runtime.Scheduler;
using UnitTests.GrainInterfaces;
using UnitTests.Grains;

namespace UnitTestGrains
{
    /// <summary>
    /// Test grain that exercises Orleans grain timers: a default timer started on
    /// activation plus named timers that can be started/stopped on demand.
    /// </summary>
    public class TimerGrain : Grain, ITimerGrain
    {
        private bool deactivating;
        int counter = 0;
        Dictionary<string, IDisposable> allTimers;
        IDisposable defaultTimer;
        private static readonly TimeSpan period = TimeSpan.FromMilliseconds(100);
        string DefaultTimerName = "DEFAULT TIMER";
        IGrainContext context;
        private ILogger logger;
        // FIX: SetCounter previously used lock(this); locking on a publicly
        // reachable instance invites external deadlocks, so use a private gate.
        private readonly object counterLock = new object();

        public TimerGrain(ILoggerFactory loggerFactory)
        {
            this.logger = loggerFactory.CreateLogger($"{this.GetType().Name}-{this.IdentityString}");
        }

        /// <summary>Captures the activation context and starts the default timer.</summary>
        public override Task OnActivateAsync()
        {
            ThrowIfDeactivating();
            context = RuntimeContext.CurrentGrainContext;
            defaultTimer = this.RegisterTimer(Tick, DefaultTimerName, period, period);
            allTimers = new Dictionary<string, IDisposable>();
            return Task.CompletedTask;
        }

        public Task StopDefaultTimer()
        {
            ThrowIfDeactivating();
            defaultTimer.Dispose();
            return Task.CompletedTask;
        }

        // Timer callback; 'data' is the timer name that was passed to RegisterTimer.
        private Task Tick(object data)
        {
            counter++;
            logger.Info(data.ToString() + " Tick # " + counter + " RuntimeContext = " + RuntimeContext.Current?.ToString());

            // make sure we run in the right activation context.
            if (!Equals(context, RuntimeContext.CurrentGrainContext))
                logger.Error((int)ErrorCode.Runtime_Error_100146, "grain not running in the right activation context");

            string name = (string)data;
            IDisposable timer;
            if (name == DefaultTimerName)
            {
                timer = defaultTimer;
            }
            else
            {
                timer = allTimers[(string)data];
            }
            if (timer == null)
                logger.Error((int)ErrorCode.Runtime_Error_100146, "Timer is null");
            if (timer != null && counter > 10000)
            {
                // do not let orphan timers ticking for long periods
                timer.Dispose();
            }
            return Task.CompletedTask;
        }

        public Task<TimeSpan> GetTimerPeriod()
        {
            return Task.FromResult(period);
        }

        public Task<int> GetCounter()
        {
            ThrowIfDeactivating();
            return Task.FromResult(counter);
        }

        public Task SetCounter(int value)
        {
            ThrowIfDeactivating();
            lock (counterLock)
            {
                counter = value;
            }
            return Task.CompletedTask;
        }

        public Task StartTimer(string timerName)
        {
            ThrowIfDeactivating();
            IDisposable timer = this.RegisterTimer(Tick, timerName, TimeSpan.Zero, period);
            allTimers.Add(timerName, timer);
            return Task.CompletedTask;
        }

        public Task StopTimer(string timerName)
        {
            ThrowIfDeactivating();
            IDisposable timer = allTimers[timerName];
            timer.Dispose();
            return Task.CompletedTask;
        }

        // Deliberately blocks the activation thread; used by tests to simulate
        // long-running synchronous work inside a grain.
        public Task LongWait(TimeSpan time)
        {
            ThrowIfDeactivating();
            Thread.Sleep(time);
            return Task.CompletedTask;
        }

        public Task Deactivate()
        {
            deactivating = true;
            DeactivateOnIdle();
            return Task.CompletedTask;
        }

        private void ThrowIfDeactivating()
        {
            if (deactivating) throw new InvalidOperationException("This activation is deactivating");
        }
    }

    /// <summary>
    /// Test grain that verifies timer callbacks run in the correct activation
    /// context and on the activation's TaskScheduler across awaits.
    /// </summary>
    public class TimerCallGrain : Grain, ITimerCallGrain
    {
        private int tickCount;
        private Exception tickException;
        private IDisposable timer;
        private string timerName;
        private IGrainContext context;
        private TaskScheduler activationTaskScheduler;
        private ILogger logger;

        public TimerCallGrain(ILoggerFactory loggerFactory)
        {
            this.logger = loggerFactory.CreateLogger($"{this.GetType().Name}-{this.IdentityString}");
        }

        public Task<int> GetTickCount()
        {
            return Task.FromResult(tickCount);
        }

        public Task<Exception> GetException()
        {
            return Task.FromResult(tickException);
        }

        public override Task OnActivateAsync()
        {
            context = RuntimeContext.CurrentGrainContext;
            activationTaskScheduler = TaskScheduler.Current;
            return Task.CompletedTask;
        }

        public Task StartTimer(string name, TimeSpan delay)
        {
            logger.Info("StartTimer Name={0} Delay={1}", name, delay);
            this.timerName = name;
            this.timer = base.RegisterTimer(TimerTick, name, delay, Constants.INFINITE_TIMESPAN); // One shot timer
            return Task.CompletedTask;
        }

        public Task StopTimer(string name)
        {
            logger.Info("StopTimer Name={0}", name);
            if (name != this.timerName)
            {
                throw new ArgumentException(string.Format("Wrong timer name: Expected={0} Actual={1}", this.timerName, name));
            }
            timer.Dispose();
            return Task.CompletedTask;
        }

        // Wrapper records any exception thrown by the tick so the test can
        // retrieve it via GetException(); timer exceptions are otherwise lost.
        private async Task TimerTick(object data)
        {
            try
            {
                await ProcessTimerTick(data);
            }
            catch (Exception exc)
            {
                this.tickException = exc;
                throw;
            }
        }

        private async Task ProcessTimerTick(object data)
        {
            string step = "TimerTick";
            LogStatus(step);
            // make sure we run in the right activation context.
            CheckRuntimeContext(step);

            string name = (string)data;
            if (name != this.timerName)
            {
                throw new ArgumentException(string.Format("Wrong timer name: Expected={0} Actual={1}", this.timerName, name));
            }

            ISimpleGrain grain = GrainFactory.GetGrain<ISimpleGrain>(0, SimpleGrain.SimpleGrainNamePrefix);

            LogStatus("Before grain call #1");
            await grain.SetA(tickCount);
            step = "After grain call #1";
            LogStatus(step);
            CheckRuntimeContext(step);

            LogStatus("Before Delay");
            await Task.Delay(TimeSpan.FromSeconds(1));
            step = "After Delay";
            LogStatus(step);
            CheckRuntimeContext(step);

            LogStatus("Before grain call #2");
            await grain.SetB(tickCount);
            step = "After grain call #2";
            LogStatus(step);
            CheckRuntimeContext(step);

            LogStatus("Before grain call #3");
            int res = await grain.GetAxB();
            step = "After grain call #3 - Result = " + res;
            LogStatus(step);
            CheckRuntimeContext(step);

            tickCount++;
        }

        // Throws if the current activation context or TaskScheduler is not the
        // one captured at activation time.
        private void CheckRuntimeContext(string what)
        {
            if (RuntimeContext.CurrentGrainContext == null
                || !RuntimeContext.CurrentGrainContext.Equals(context))
            {
                throw new InvalidOperationException(
                    string.Format("{0} in timer callback with unexpected activation context: Expected={1} Actual={2}",
                        what, context, RuntimeContext.CurrentGrainContext));
            }

            if (TaskScheduler.Current.Equals(activationTaskScheduler) && TaskScheduler.Current is ActivationTaskScheduler)
            {
                // Everything is as expected
            }
            else
            {
                throw new InvalidOperationException(
                    string.Format("{0} in timer callback with unexpected TaskScheduler.Current context: Expected={1} Actual={2}",
                        what, activationTaskScheduler, TaskScheduler.Current));
            }
        }

        private void LogStatus(string what)
        {
            logger.Info("{0} Tick # {1} - {2} - RuntimeContext.Current={3} TaskScheduler.Current={4} CurrentWorkerThread={5}",
                timerName, tickCount, what, RuntimeContext.Current, TaskScheduler.Current, Thread.CurrentThread.Name);
        }
    }

    /// <summary>
    /// Test grain that completes a TaskCompletionSource from a timer tick, plus a
    /// deliberately "stuck" timer whose callback never completes.
    /// </summary>
    public class TimerRequestGrain : Grain, ITimerRequestGrain
    {
        private TaskCompletionSource<int> completionSource;

        public Task<string> GetRuntimeInstanceId()
        {
            return Task.FromResult(this.RuntimeIdentity);
        }

        public async Task StartAndWaitTimerTick(TimeSpan dueTime)
        {
            this.completionSource = new TaskCompletionSource<int>();
            var timer = this.RegisterTimer(TimerTick, null, dueTime, TimeSpan.FromMilliseconds(-1));
            await this.completionSource.Task;
        }

        public Task StartStuckTimer(TimeSpan dueTime)
        {
            this.completionSource = new TaskCompletionSource<int>();
            var timer = this.RegisterTimer(StuckTimerTick, null, dueTime, TimeSpan.FromSeconds(1));
            return Task.CompletedTask;
        }

        private Task TimerTick(object state)
        {
            this.completionSource.SetResult(1);
            return Task.CompletedTask;
        }

        // Intentionally never completes: awaits a TCS that nothing resolves, so
        // the timer callback stays pending (used to test stuck-timer handling).
        private async Task StuckTimerTick(object state)
        {
            await completionSource.Task;
        }
    }
}
using UnityEngine;
using SimpleJSON;
using System.IO;
using System.Collections;
using System;

// Grid-based tile/level manager: loads levels from JSON resources, instantiates
// tile and block GameObjects, and drives queued placeable movement each frame.
public class TileSystem : MonoBehaviour
{
    public int mWidth = 0;
    public int mHeight = 0;

    // Prefabs for each terrain/special tile type, wired up in the Unity inspector.
    public GameObject PassableTile;
    public GameObject WallTile;
    public GameObject DoorTile;
    public GameObject HumanDoorTile;
    public GameObject EmptyTile;
    public GameObject BlockOnlyTile;
    public GameObject PressureTile;
    public GameObject OmenDoor;
    public GameObject PushUpTile;
    public GameObject PushLeftTile;
    public GameObject PushRightTile;
    public GameObject PushDownTile;
    public GameObject SimpleBlock;
    public GameObject RollingBlock;
    public GameObject AttachableBlock;
    public GameObject CommandBlock;

    public SharedDataScript sharedDdataObject;

    // World-space offset that centers the grid around the origin (set in LoadMap).
    public Vector3 mDisplayOffset = new Vector3(0.0f, 0.0f, 0.0f);
    public int mPlayerStartX;
    public int mPlayerStartY;

    // Navigation grid indexed [x][y]; rebuilt by LoadMap.
    public Tile[][] mNavGrid;

    // Pending PlaceableUpdate entries processed in Update().
    private ArrayList mPlaceableUpdates = new ArrayList();
    private ArrayList mTileGameObjs, mPlaceableGameObjs;
    private string[] mLevels;
    public int mLevelIndex = -1;
    private float speedFactor = 1f;

    public TileSystem()
    {
        mTileGameObjs = new ArrayList();
        mPlaceableGameObjs = new ArrayList();
    }

    // Returns the tile at (x, y), or null when out of bounds.
    public Tile GetTile(int x, int y)
    {
        if (x >= mWidth || x < 0 || y >= mHeight || y < 0)
        {
            return null;
        }
        return mNavGrid[x][y];
    }

    // True if the destination tile exists and accepts an incoming move from obj.
    public bool CanMove(ITilePlaceable obj, int dirX, int dirY)
    {
        Tile dest = GetTile(obj.GetX() + dirX, obj.GetY() + dirY);
        if (dest != null && dest.AllowIncomingMove(obj, dirX, dirY))
        {
            return true;
        }
        return false;
    }

    // Asks the destination tile to attempt the move; the tile decides the outcome.
    public void TryMove(ITilePlaceable obj, int dirX, int dirY)
    {
        Tile dest = GetTile(obj.GetX() + dirX, obj.GetY() + dirY);
        if (dest != null)
        {
            dest.TryIncomingMove(obj, dirX, dirY);
        }
    }

    // Reads Resources/Levels/levels (JSON) and fills mLevels with level names.
    private void LoadLevels()
    {
        TextAsset levelFile = Resources.Load<TextAsset>("Levels/levels");
        JSONNode jsonObj = JSON.Parse(levelFile.text);
        int levelCount = jsonObj["data"].AsArray.Count;
        if (levelCount > 0)
        {
            mLevelIndex = (mLevelIndex < 0 ? 0 : mLevelIndex);
            mLevels = new string[levelCount];
            for (int i = 0; i < levelCount; i++)
            {
                mLevels[i] = jsonObj["data"][i];
                Debug.Log(mLevels[i].ToString());
            }
        }
    }

    // Loads a single level JSON into mNavGrid. Level "8" branches into a
    // cat/human variant based on how many trivial endings the player has seen.
    public void LoadMap(string lvlNum)
    {
        if (lvlNum == "8")
        {
            // First level is guaranteed cat
            if (sharedDdataObject.trivialEndings > 1)
            {
                lvlNum = "8cat";
            }
            else
            {
                lvlNum = "8human";
            }
        }
        Debug.Log(String.Format("LoadMap: {0}, Trivial Endings: {1}", lvlNum, sharedDdataObject.trivialEndings));
        TextAsset levelFile = Resources.Load<TextAsset>("Levels/" + lvlNum);
        JSONNode jsonObj = JSON.Parse(levelFile.text);
        mWidth = jsonObj["data"].AsArray.Count;
        mHeight = jsonObj["data"][0].AsArray.Count;
        // Center the grid around the world origin.
        mDisplayOffset.x = (float)(-mWidth) / 2;
        mDisplayOffset.y = (float)(-mHeight) / 2;
        mNavGrid = new Tile[mWidth][];
        for (int x = 0; x < mWidth; ++x)
        {
            mNavGrid[x] = new Tile[mHeight];
            for (int y = 0; y < mHeight; ++y)
            {
                int tileCode = jsonObj["data"][x][y].AsInt;
                if (tileCode < 0)
                {
                    // Negative codes are special items placed on a passable tile.
                    mNavGrid[x][y] = new Tile(this, Tile.TerrainType.kPass, x, y, 0);
                    LoadSpecializedItem(mNavGrid[x][y], tileCode);
                }
                else
                {
                    int id = 0;
                    // Codes >= 40 pack an extra id into the ones digit; the
                    // remaining tens value is the terrain type.
                    if (tileCode >= 40)
                    {
                        id = tileCode % 10;
                        tileCode -= id;
                    }
                    mNavGrid[x][y] = new Tile(this, (Tile.TerrainType)tileCode, x, y, id);
                }
            }
        }
        // Optional "extraSpawns" entries add items at explicit coordinates.
        int numExtra = jsonObj["extraSpawns"].AsArray.Count;
        for (int extra = 0; extra < numExtra; ++extra)
        {
            JSONNode node = jsonObj["extraSpawns"][extra];
            int x = node["x"].AsInt;
            int y = node["y"].AsInt;
            LoadSpecializedItem(mNavGrid[x][y], node["id"].AsInt);
        }
    }

    public void GenerateTileMap()
    {
        // Instantiate the tile types onto the game world, or placing them - KTZ
        for (int x = 0; x < mWidth; ++x)
        {
            for (int y = 0; y < mHeight; ++y)
            {
                // NOTE(review): grid (x, y) maps to world (y, x) with the y axis
                // flipped below — presumably row/column to screen conversion;
                // confirm against Tile/SlideBlock usage.
                Vector3 tilePos = new Vector3(y, x, 0.0f);
                tilePos += mDisplayOffset;
                Quaternion tileRot = Quaternion.identity;
                tilePos += mNavGrid[x][y].mDisplayOffsets;
                tilePos.y *= -1;
                tilePos.y--;
                // randomize the position of the tiles first
                Vector3 startFrom = GetRandomVector3(15f, 30f);
                GameObject newTile = (GameObject)Instantiate(mNavGrid[x][y].mTileBaseObject, startFrom, tileRot);
                // Slide from the random start position to the final position; the
                // slide speed scales with the start distance and speedFactor.
                newTile.GetComponent<SlideBlock>().SetStartPosition(tilePos, startFrom.magnitude / 1.2f * speedFactor);
                newTile.transform.parent = gameObject.transform;
                mTileGameObjs.Add(newTile);
                mNavGrid[x][y].SetTileGameObject(newTile);
            }
        }
    }

    // Instantiates the Block GameObjects after the tile map itself exists.
    public void PostGenerateTileMap()
    {
        for (int x = 0; x < mWidth; ++x)
        {
            for (int y = 0; y < mHeight; ++y)
            {
                // Clone before iterating: SetBlockGameObject may mutate mPlaceables.
                ArrayList placeables = (ArrayList)mNavGrid[x][y].mPlaceables.Clone();
                foreach (System.Object block in placeables)
                {
                    Block tryBlock = block as Block;
                    if (tryBlock != null)
                    {
                        GameObject blockObj = (GameObject)Instantiate(tryBlock.mBlockBaseObject, Vector3.zero, Quaternion.identity);
                        tryBlock.SetBlockGameObject(blockObj);
                        mPlaceableGameObjs.Add(blockObj);
                        Debug.Log("block generated");
                    }
                }
            }
        }
    }

    public void SetSpeedFactor(float speedFactor)
    {
        this.speedFactor = speedFactor;
    }

    // tileCode is negative: -9 marks the player start; any other value maps to a
    // Block type via negation.
    private void LoadSpecializedItem(Tile tile, int tileCode)
    {
        if (tileCode == -9)
        {
            // Player position
            mPlayerStartX = tile.mX;
            mPlayerStartY = tile.mY;
        }
        else
        {
            new Block((Block.BlockType)(tileCode * -1), tile);
        }
    }

    // Seconds between successive steps of a queued placeable movement.
    const float kDelays = 0.15f;

    // One queued movement: a placeable, a direction, and the countdown until the
    // next step.
    private class PlaceableUpdate
    {
        public ITilePlaceable placeable;
        public int dirX;
        public int dirY;
        public float delayUntilNextMove;
    }

    public void RemoveFromUpdateList(ITilePlaceable placeable)
    {
        if (placeable.GetProperties().inUpdateSequenceFor > 0)
        {
            // Collect first, then remove — can't modify the list while iterating.
            ArrayList removeUpdates = new ArrayList();
            foreach (PlaceableUpdate rUpdate in mPlaceableUpdates)
            {
                if (rUpdate.placeable == placeable)
                {
                    removeUpdates.Add(rUpdate);
                }
            }
            foreach (PlaceableUpdate rUpdate in removeUpdates)
            {
                mPlaceableUpdates.Remove(rUpdate);
            }
        }
    }

    public void Delete(GameObject obj)
    {
        Destroy(obj);
    }

    // Queues a movement of (dirX, dirY) repeated for 'duration' steps.
    public void AddToUpdateList(ITilePlaceable placeable, int dirX, int dirY, int duration)
    {
        // Don't duplicate
        RemoveFromUpdateList(placeable);
        PlaceableUpdate update = new PlaceableUpdate();
        update.placeable = placeable;
        update.dirX = dirX;
        update.dirY = dirY;
        update.delayUntilNextMove = kDelays;
        update.placeable.GetProperties().inUpdateSequenceFor = duration;
        mPlaceableUpdates.Add(update);
    }

    // Use this for initialization
    void Start()
    {
        LoadLevels();
        // mLevelIndex = (sharedDdataObject.levelIndex > mLevelIndex? sharedDdataObject.levelIndex : mLevelIndex);
        mLevelIndex = sharedDdataObject.levelIndex;
        Debug.Log(sharedDdataObject.levelIndex);
        LoadCurrentLevel();
    }

    // Tears down all current tile/placeable GameObjects, then loads and builds
    // the level at mLevelIndex.
    public void LoadCurrentLevel()
    {
        mPlaceableUpdates.Clear();
        foreach (GameObject placeable in mPlaceableGameObjs)
        {
            Delete(placeable);
        }
        mPlaceableGameObjs.Clear();
        foreach (GameObject tile in mTileGameObjs)
        {
            Delete(tile);
        }
        mTileGameObjs.Clear();
        // clean up the current tiles before generating
        DelegateHost.Recreate();
        LoadMap(mLevels[mLevelIndex]);
        GenerateTileMap();
    }

    // Starts the outgoing transition: advances the saved level index, deletes
    // placeables and flings the current tiles away with random velocities.
    public void PreNextLevel()
    {
        sharedDdataObject.levelIndex = ++mLevelIndex;
        Debug.Log(mLevelIndex);
        foreach (GameObject placeable in mPlaceableGameObjs)
        {
            Delete(placeable);
        }
        mPlaceableGameObjs.Clear();
        UnityEngine.Random.seed = (int)Time.time;
        Debug.Log(UnityEngine.Random.seed.ToString());
        foreach (GameObject tile in mTileGameObjs)
        {
            Vector3 velocity = GetRandomVector3(7f * speedFactor * 1.5f, 10f * speedFactor * 1.5f);
            tile.GetComponent<SlideBlock>().SetVelocity(velocity);
        }
    }

    // Returns a vector along one random axis with magnitude in [min, max] and
    // random sign.
    public Vector3 GetRandomVector3(float min, float max)
    {
        float axisDeterminator = UnityEngine.Random.value;
        float randomizedSpeed = UnityEngine.Random.Range(min, max) * (UnityEngine.Random.value > 0.5f ? 1 : -1);
        if (axisDeterminator < 1.0f / 3)
        {
            return new Vector3(randomizedSpeed, 0, 0);
        }
        else if (axisDeterminator > 1.0f / 3 * 2)
        {
            return new Vector3(0, randomizedSpeed, 0);
        }
        else
        {
            return new Vector3(0, 0, randomizedSpeed);
        }
    }

    // NOTE(review): unlike PreNextLevel, this does not write the new index back
    // to sharedDdataObject.levelIndex — confirm whether that is intentional.
    public void NextLevel()
    {
        ++mLevelIndex;
        LoadCurrentLevel();
    }

    private float mLastUpdate = 0.0f;

    // Update is called once per frame
    void Update()
    {
        float now = Time.unscaledTime;
        float delta = now - mLastUpdate;
        mLastUpdate = now;
        // Iterate a clone: entries may be removed from mPlaceableUpdates below.
        ArrayList updates = (ArrayList)mPlaceableUpdates.Clone();
        foreach (PlaceableUpdate update in updates)
        {
            update.delayUntilNextMove -= delta;
            if (update.delayUntilNextMove < 0.0f)
            {
                update.delayUntilNextMove += kDelays;
                PlaceableProperties props = update.placeable.GetProperties();
                bool remove = false;
                bool move = false;
                if (update.placeable.CanMove(update.dirX, update.dirY))
                {
                    move = true;
                    props.inUpdateSequenceFor--;
                    if (props.inUpdateSequenceFor <= 0)
                    {
                        remove = true;
                    }
                }
                else
                {
                    // Blocked: abandon the rest of the sequence.
                    remove = true;
                }
                if (remove)
                {
                    update.placeable.GetProperties().inUpdateSequenceFor = -1;
                    mPlaceableUpdates.Remove(update);
                }
                if (move)
                {
                    update.placeable.TryMove(update.dirX, update.dirY);
                }
            }
        }
    }
}
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gaxgrpc = Google.Api.Gax.Grpc; using lro = Google.LongRunning; using wkt = Google.Protobuf.WellKnownTypes; using grpccore = Grpc.Core; using moq = Moq; using st = System.Threading; using stt = System.Threading.Tasks; using xunit = Xunit; namespace Google.Cloud.Metastore.V1Alpha.Tests { /// <summary>Generated unit tests.</summary> public sealed class GeneratedDataprocMetastoreClientTest { [xunit::FactAttribute] public void GetServiceRequestObject() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetServiceRequest request = new GetServiceRequest { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), }; Service expectedResponse = new Service { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), Labels = { { "key8a0b6e3c", "value60c16320" }, }, HiveMetastoreConfig = new HiveMetastoreConfig(), NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"), EndpointUri = "endpoint_uri59c03c94", Port = -78310000, State = Service.Types.State.Deleting, StateMessage = 
"state_message46cf28c0", ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer, MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(), Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(), ReleaseChannel = Service.Types.ReleaseChannel.Stable, }; mockGrpcClient.Setup(x => x.GetService(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Service response = client.GetService(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetServiceRequestObjectAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetServiceRequest request = new GetServiceRequest { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), }; Service expectedResponse = new Service { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), Labels = { { "key8a0b6e3c", "value60c16320" }, }, HiveMetastoreConfig = new HiveMetastoreConfig(), NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"), EndpointUri = "endpoint_uri59c03c94", Port = -78310000, State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0", ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer, MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(), Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(), ReleaseChannel = 
Service.Types.ReleaseChannel.Stable, }; mockGrpcClient.Setup(x => x.GetServiceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Service>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Service responseCallSettings = await client.GetServiceAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Service responseCancellationToken = await client.GetServiceAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetService() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetServiceRequest request = new GetServiceRequest { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), }; Service expectedResponse = new Service { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), Labels = { { "key8a0b6e3c", "value60c16320" }, }, HiveMetastoreConfig = new HiveMetastoreConfig(), NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"), EndpointUri = "endpoint_uri59c03c94", Port = -78310000, State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0", ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer, MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(), Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(), 
ReleaseChannel = Service.Types.ReleaseChannel.Stable, }; mockGrpcClient.Setup(x => x.GetService(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Service response = client.GetService(request.Name); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetServiceAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetServiceRequest request = new GetServiceRequest { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), }; Service expectedResponse = new Service { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), Labels = { { "key8a0b6e3c", "value60c16320" }, }, HiveMetastoreConfig = new HiveMetastoreConfig(), NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"), EndpointUri = "endpoint_uri59c03c94", Port = -78310000, State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0", ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer, MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(), Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(), ReleaseChannel = Service.Types.ReleaseChannel.Stable, }; mockGrpcClient.Setup(x => x.GetServiceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Service>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new 
DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Service responseCallSettings = await client.GetServiceAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Service responseCancellationToken = await client.GetServiceAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetServiceResourceNames() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetServiceRequest request = new GetServiceRequest { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), }; Service expectedResponse = new Service { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), Labels = { { "key8a0b6e3c", "value60c16320" }, }, HiveMetastoreConfig = new HiveMetastoreConfig(), NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"), EndpointUri = "endpoint_uri59c03c94", Port = -78310000, State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0", ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer, MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(), Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(), ReleaseChannel = Service.Types.ReleaseChannel.Stable, }; mockGrpcClient.Setup(x => x.GetService(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, 
null); Service response = client.GetService(request.ServiceName); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetServiceResourceNamesAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetServiceRequest request = new GetServiceRequest { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), }; Service expectedResponse = new Service { ServiceName = ServiceName.FromProjectLocationService("[PROJECT]", "[LOCATION]", "[SERVICE]"), CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), Labels = { { "key8a0b6e3c", "value60c16320" }, }, HiveMetastoreConfig = new HiveMetastoreConfig(), NetworkAsNetworkName = NetworkName.FromProjectNetwork("[PROJECT]", "[NETWORK]"), EndpointUri = "endpoint_uri59c03c94", Port = -78310000, State = Service.Types.State.Deleting, StateMessage = "state_message46cf28c0", ArtifactGcsUri = "artifact_gcs_uri4d2b3985", Tier = Service.Types.Tier.Developer, MetadataIntegration = new MetadataIntegration(), MaintenanceWindow = new MaintenanceWindow(), Uid = "uida2d37198", MetadataManagementActivity = new MetadataManagementActivity(), ReleaseChannel = Service.Types.ReleaseChannel.Stable, }; mockGrpcClient.Setup(x => x.GetServiceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Service>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Service responseCallSettings = await client.GetServiceAsync(request.ServiceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); 
Service responseCancellationToken = await client.GetServiceAsync(request.ServiceName, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetMetadataImportRequestObject() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetMetadataImportRequest request = new GetMetadataImportRequest { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), }; MetadataImport expectedResponse = new MetadataImport { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), Description = "description2cf9da67", CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), State = MetadataImport.Types.State.Updating, DatabaseDump = new MetadataImport.Types.DatabaseDump(), }; mockGrpcClient.Setup(x => x.GetMetadataImport(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); MetadataImport response = client.GetMetadataImport(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetMetadataImportRequestObjectAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetMetadataImportRequest request = new GetMetadataImportRequest { MetadataImportName = 
MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), }; MetadataImport expectedResponse = new MetadataImport { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), Description = "description2cf9da67", CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), State = MetadataImport.Types.State.Updating, DatabaseDump = new MetadataImport.Types.DatabaseDump(), }; mockGrpcClient.Setup(x => x.GetMetadataImportAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MetadataImport>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); MetadataImport responseCallSettings = await client.GetMetadataImportAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); MetadataImport responseCancellationToken = await client.GetMetadataImportAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetMetadataImport() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetMetadataImportRequest request = new GetMetadataImportRequest { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), }; MetadataImport expectedResponse = new MetadataImport { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", 
"[METADATA_IMPORT]"), Description = "description2cf9da67", CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), State = MetadataImport.Types.State.Updating, DatabaseDump = new MetadataImport.Types.DatabaseDump(), }; mockGrpcClient.Setup(x => x.GetMetadataImport(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); MetadataImport response = client.GetMetadataImport(request.Name); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetMetadataImportAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetMetadataImportRequest request = new GetMetadataImportRequest { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), }; MetadataImport expectedResponse = new MetadataImport { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), Description = "description2cf9da67", CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), State = MetadataImport.Types.State.Updating, DatabaseDump = new MetadataImport.Types.DatabaseDump(), }; mockGrpcClient.Setup(x => x.GetMetadataImportAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MetadataImport>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); MetadataImport responseCallSettings = await client.GetMetadataImportAsync(request.Name, 
gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); MetadataImport responseCancellationToken = await client.GetMetadataImportAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetMetadataImportResourceNames() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetMetadataImportRequest request = new GetMetadataImportRequest { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), }; MetadataImport expectedResponse = new MetadataImport { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), Description = "description2cf9da67", CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), State = MetadataImport.Types.State.Updating, DatabaseDump = new MetadataImport.Types.DatabaseDump(), }; mockGrpcClient.Setup(x => x.GetMetadataImport(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); MetadataImport response = client.GetMetadataImport(request.MetadataImportName); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetMetadataImportResourceNamesAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new 
moq::Mock<lro::Operations.OperationsClient>().Object); GetMetadataImportRequest request = new GetMetadataImportRequest { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), }; MetadataImport expectedResponse = new MetadataImport { MetadataImportName = MetadataImportName.FromProjectLocationServiceMetadataImport("[PROJECT]", "[LOCATION]", "[SERVICE]", "[METADATA_IMPORT]"), Description = "description2cf9da67", CreateTime = new wkt::Timestamp(), UpdateTime = new wkt::Timestamp(), State = MetadataImport.Types.State.Updating, DatabaseDump = new MetadataImport.Types.DatabaseDump(), }; mockGrpcClient.Setup(x => x.GetMetadataImportAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MetadataImport>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); MetadataImport responseCallSettings = await client.GetMetadataImportAsync(request.MetadataImportName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); MetadataImport responseCancellationToken = await client.GetMetadataImportAsync(request.MetadataImportName, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetBackupRequestObject() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetBackupRequest request = new GetBackupRequest { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), }; Backup expectedResponse = new Backup { 
BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(), State = Backup.Types.State.Active, ServiceRevision = new Service(), Description = "description2cf9da67", }; mockGrpcClient.Setup(x => x.GetBackup(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Backup response = client.GetBackup(request); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetBackupRequestObjectAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetBackupRequest request = new GetBackupRequest { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), }; Backup expectedResponse = new Backup { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(), State = Backup.Types.State.Active, ServiceRevision = new Service(), Description = "description2cf9da67", }; mockGrpcClient.Setup(x => x.GetBackupAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Backup>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Backup responseCallSettings = await client.GetBackupAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Backup responseCancellationToken = await 
client.GetBackupAsync(request, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetBackup() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetBackupRequest request = new GetBackupRequest { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), }; Backup expectedResponse = new Backup { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(), State = Backup.Types.State.Active, ServiceRevision = new Service(), Description = "description2cf9da67", }; mockGrpcClient.Setup(x => x.GetBackup(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Backup response = client.GetBackup(request.Name); xunit::Assert.Same(expectedResponse, response); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetBackupAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetBackupRequest request = new GetBackupRequest { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), }; Backup expectedResponse = new Backup { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), CreateTime = new wkt::Timestamp(), EndTime 
= new wkt::Timestamp(), State = Backup.Types.State.Active, ServiceRevision = new Service(), Description = "description2cf9da67", }; mockGrpcClient.Setup(x => x.GetBackupAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Backup>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Backup responseCallSettings = await client.GetBackupAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Backup responseCancellationToken = await client.GetBackupAsync(request.Name, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void GetBackupResourceNames() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetBackupRequest request = new GetBackupRequest { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), }; Backup expectedResponse = new Backup { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(), State = Backup.Types.State.Active, ServiceRevision = new Service(), Description = "description2cf9da67", }; mockGrpcClient.Setup(x => x.GetBackup(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Backup response = client.GetBackup(request.BackupName); xunit::Assert.Same(expectedResponse, response); 
mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task GetBackupResourceNamesAsync() { moq::Mock<DataprocMetastore.DataprocMetastoreClient> mockGrpcClient = new moq::Mock<DataprocMetastore.DataprocMetastoreClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); GetBackupRequest request = new GetBackupRequest { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), }; Backup expectedResponse = new Backup { BackupName = BackupName.FromProjectLocationServiceBackup("[PROJECT]", "[LOCATION]", "[SERVICE]", "[BACKUP]"), CreateTime = new wkt::Timestamp(), EndTime = new wkt::Timestamp(), State = Backup.Types.State.Active, ServiceRevision = new Service(), Description = "description2cf9da67", }; mockGrpcClient.Setup(x => x.GetBackupAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Backup>(stt::Task.FromResult(expectedResponse), null, null, null, null)); DataprocMetastoreClient client = new DataprocMetastoreClientImpl(mockGrpcClient.Object, null); Backup responseCallSettings = await client.GetBackupAsync(request.BackupName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Backup responseCancellationToken = await client.GetBackupAsync(request.BackupName, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } } }
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

using System;
using System.Collections.Generic;
using System.Text;
using OpenSim.Framework;
using OMV = OpenMetaverse;

namespace OpenSim.Region.PhysicsModule.BulletS
{

// Linkset implementation that represents the whole linkset to the physics
// engine as a single compound collision shape attached to the root prim.
public sealed class BSLinksetCompound : BSLinkset
{
#pragma warning disable 414
    // Prefix used for entries written to the physics detail log.
    private static string LogHeader = "[BULLETSIM LINKSET COMPOUND]";
#pragma warning restore 414

    public BSLinksetCompound(BSScene scene, BSPrimLinkable parent)
        : base(scene, parent)
    {
        LinksetImpl = LinksetImplementation.Compound;
    }

    // ================================================================
    // Changing the physical property of the linkset only needs to change the root
    // body, because the children are folded into the root's compound shape.

    // Set friction on the root body (no-op when the root has no physical body).
    public override void SetPhysicalFriction(float friction)
    {
        if (LinksetRoot.PhysBody.HasPhysicalBody)
            m_physicsScene.PE.SetFriction(LinksetRoot.PhysBody, friction);
    }

    // Set restitution (bounciness) on the root body.
    public override void SetPhysicalRestitution(float restitution)
    {
        if (LinksetRoot.PhysBody.HasPhysicalBody)
            m_physicsScene.PE.SetRestitution(LinksetRoot.PhysBody, restitution);
    }

    // Set the gravity vector applied to the root body.
    public override void SetPhysicalGravity(OMV.Vector3 gravity)
    {
        if (LinksetRoot.PhysBody.HasPhysicalBody)
            m_physicsScene.PE.SetGravity(LinksetRoot.PhysBody, gravity);
    }

    // Recompute the root body's local inertia from its shape and the total
    // linkset mass, scale it by 'inertiaFactor', and push the resulting mass
    // properties into the physics engine.
    public override void ComputeAndSetLocalInertia(OMV.Vector3 inertiaFactor, float linksetMass)
    {
        OMV.Vector3 inertia = m_physicsScene.PE.CalculateLocalInertia(LinksetRoot.PhysShape.physShapeInfo, linksetMass);
        LinksetRoot.Inertia = inertia * inertiaFactor;
        m_physicsScene.PE.SetMassProps(LinksetRoot.PhysBody, linksetMass, LinksetRoot.Inertia);
        m_physicsScene.PE.UpdateInertiaTensor(LinksetRoot.PhysBody);
    }

    // Replace the collision flags on the root body.
    public override void SetPhysicalCollisionFlags(CollisionFlags collFlags)
    {
        if (LinksetRoot.PhysBody.HasPhysicalBody)
            m_physicsScene.PE.SetCollisionFlags(LinksetRoot.PhysBody, collFlags);
    }

    // OR the given flags into the root body's collision flags.
    public override void AddToPhysicalCollisionFlags(CollisionFlags collFlags)
    {
        if (LinksetRoot.PhysBody.HasPhysicalBody)
            m_physicsScene.PE.AddToCollisionFlags(LinksetRoot.PhysBody, collFlags);
    }

    // Clear the given flags from the root body's collision flags.
    public override void RemoveFromPhysicalCollisionFlags(CollisionFlags collFlags)
    {
        if (LinksetRoot.PhysBody.HasPhysicalBody)
            m_physicsScene.PE.RemoveFromCollisionFlags(LinksetRoot.PhysBody, collFlags);
    }

    // ================================================================
    // When physical properties are changed the linkset needs to recalculate
    //     its internal properties.
    public override void Refresh(BSPrimLinkable requestor)
    {
        // Something changed so do the rebuilding thing
        ScheduleRebuild(requestor);
        base.Refresh(requestor);
    }

    // Schedule a refresh to happen after all the other taint processing.
    private void ScheduleRebuild(BSPrimLinkable requestor)
    {
        // When rebuilding, it is possible to set properties that would normally require a rebuild.
        //    If already rebuilding, don't request another rebuild.
        //    If a linkset with just a root prim (simple non-linked prim) don't bother rebuilding.
        lock (m_linksetActivityLock)
        {
            if (!RebuildScheduled && !Rebuilding && HasAnyChildren)
            {
                InternalScheduleRebuild(requestor);
            }
        }
    }

    // Must be called with m_linksetActivityLock or race conditions will haunt you.
    private void InternalScheduleRebuild(BSPrimLinkable requestor)
    {
        DetailLog("{0},BSLinksetCompound.InternalScheduleRebuild,,rebuilding={1},hasChildren={2}", requestor.LocalID, Rebuilding, HasAnyChildren);
        RebuildScheduled = true;
        // Queue the actual rebuild to run after the current taint processing.
        m_physicsScene.PostTaintObject("BSLinksetCompound.ScheduleRebuild", LinksetRoot.LocalID, delegate()
        {
            if (HasAnyChildren)
            {
                if (this.AllPartsComplete)
                {
                    RecomputeLinksetCompound();
                }
                else
                {
                    // Not all children are complete (e.g. still waiting for
                    // assets), so try the rebuild again later.
                    DetailLog("{0},BSLinksetCompound.InternalScheduleRebuild,,rescheduling because not all children complete", requestor.LocalID);
                    InternalScheduleRebuild(requestor);
                }
            }
            RebuildScheduled = false;
        });
    }

    // The object is going dynamic (physical). Do any setup necessary for a dynamic linkset.
    // Only the state of the passed object can be modified. The rest of the linkset
    //     has not yet been fully constructed.
    // Return 'true' if any properties updated on the passed object.
    // Called at taint-time!
public override bool MakeDynamic(BSPrimLinkable child) { bool ret = false; DetailLog("{0},BSLinksetCompound.MakeDynamic,call,IsRoot={1}", child.LocalID, IsRoot(child)); if (IsRoot(child)) { // The root is going dynamic. Rebuild the linkset so parts and mass get computed properly. Refresh(LinksetRoot); } return ret; } // The object is going static (non-physical). We do not do anything for static linksets. // Return 'true' if any properties updated on the passed object. // Called at taint-time! public override bool MakeStatic(BSPrimLinkable child) { bool ret = false; DetailLog("{0},BSLinksetCompound.MakeStatic,call,IsRoot={1}", child.LocalID, IsRoot(child)); child.ClearDisplacement(); if (IsRoot(child)) { // Schedule a rebuild to verify that the root shape is set to the real shape. Refresh(LinksetRoot); } return ret; } // 'physicalUpdate' is true if these changes came directly from the physics engine. Don't need to rebuild then. // Called at taint-time. public override void UpdateProperties(UpdatedProperties whichUpdated, BSPrimLinkable updated) { if (!LinksetRoot.IsPhysicallyActive) { // No reason to do this physical stuff for static linksets. DetailLog("{0},BSLinksetCompound.UpdateProperties,notPhysical", LinksetRoot.LocalID); return; } // The user moving a child around requires the rebuilding of the linkset compound shape // One problem is this happens when a border is crossed -- the simulator implementation // stores the position into the group which causes the move of the object // but it also means all the child positions get updated. // What would cause an unnecessary rebuild so we make sure the linkset is in a // region before bothering to do a rebuild. if (!IsRoot(updated) && m_physicsScene.TerrainManager.IsWithinKnownTerrain(LinksetRoot.RawPosition)) { // If a child of the linkset is updating only the position or rotation, that can be done // without rebuilding the linkset. // If a handle for the child can be fetch, we update the child here. 
If a rebuild was // scheduled by someone else, the rebuild will just replace this setting. bool updatedChild = false; // Anything other than updating position or orientation usually means a physical update // and that is caused by us updating the object. if ((whichUpdated & ~(UpdatedProperties.Position | UpdatedProperties.Orientation)) == 0) { // Find the physical instance of the child if (!RebuildScheduled // if rebuilding, let the rebuild do it && !LinksetRoot.IsIncomplete // if waiting for assets or whatever, don't change && LinksetRoot.PhysShape.HasPhysicalShape // there must be a physical shape assigned && m_physicsScene.PE.IsCompound(LinksetRoot.PhysShape.physShapeInfo)) { // It is possible that the linkset is still under construction and the child is not yet // inserted into the compound shape. A rebuild of the linkset in a pre-step action will // build the whole thing with the new position or rotation. // The index must be checked because Bullet references the child array but does no validity // checking of the child index passed. 
int numLinksetChildren = m_physicsScene.PE.GetNumberOfCompoundChildren(LinksetRoot.PhysShape.physShapeInfo); if (updated.LinksetChildIndex < numLinksetChildren) { BulletShape linksetChildShape = m_physicsScene.PE.GetChildShapeFromCompoundShapeIndex(LinksetRoot.PhysShape.physShapeInfo, updated.LinksetChildIndex); if (linksetChildShape.HasPhysicalShape) { // Found the child shape within the compound shape m_physicsScene.PE.UpdateChildTransform(LinksetRoot.PhysShape.physShapeInfo, updated.LinksetChildIndex, updated.RawPosition - LinksetRoot.RawPosition, updated.RawOrientation * OMV.Quaternion.Inverse(LinksetRoot.RawOrientation), true /* shouldRecalculateLocalAabb */); updatedChild = true; DetailLog("{0},BSLinksetCompound.UpdateProperties,changeChildPosRot,whichUpdated={1},pos={2},rot={3}", updated.LocalID, whichUpdated, updated.RawPosition, updated.RawOrientation); } else // DEBUG DEBUG { // DEBUG DEBUG DetailLog("{0},BSLinksetCompound.UpdateProperties,couldNotUpdateChild,noChildShape,shape={1}", updated.LocalID, linksetChildShape); } // DEBUG DEBUG } else // DEBUG DEBUG { // DEBUG DEBUG // the child is not yet in the compound shape. This is non-fatal. DetailLog("{0},BSLinksetCompound.UpdateProperties,couldNotUpdateChild,childNotInCompoundShape,numChildren={1},index={2}", updated.LocalID, numLinksetChildren, updated.LinksetChildIndex); } // DEBUG DEBUG } else // DEBUG DEBUG { // DEBUG DEBUG DetailLog("{0},BSLinksetCompound.UpdateProperties,couldNotUpdateChild,noBodyOrNotCompound", updated.LocalID); } // DEBUG DEBUG if (!updatedChild) { // If couldn't do the individual child, the linkset needs a rebuild to incorporate the new child info. // Note: there are several ways through this code that will not update the child if // the linkset is being rebuilt. In this case, scheduling a rebuild is a NOOP since // there will already be a rebuild scheduled. 
DetailLog("{0},BSLinksetCompound.UpdateProperties,couldNotUpdateChild.schedulingRebuild,whichUpdated={1}", updated.LocalID, whichUpdated); Refresh(updated); } } } } // Routine called when rebuilding the body of some member of the linkset. // If one of the bodies is being changed, the linkset needs rebuilding. // For instance, a linkset is built and then a mesh asset is read in and the mesh is recreated. // Returns 'true' of something was actually removed and would need restoring // Called at taint-time!! public override bool RemoveDependencies(BSPrimLinkable child) { bool ret = false; DetailLog("{0},BSLinksetCompound.RemoveDependencies,refreshIfChild,rID={1},rBody={2},isRoot={3}", child.LocalID, LinksetRoot.LocalID, LinksetRoot.PhysBody, IsRoot(child)); Refresh(child); return ret; } // ================================================================ // Add a new child to the linkset. // Called while LinkActivity is locked. protected override void AddChildToLinkset(BSPrimLinkable child) { if (!HasChild(child)) { m_children.Add(child, new BSLinkInfo(child)); DetailLog("{0},BSLinksetCompound.AddChildToLinkset,call,child={1}", LinksetRoot.LocalID, child.LocalID); // Rebuild the compound shape with the new child shape included Refresh(child); } return; } // Remove the specified child from the linkset. // Safe to call even if the child is not really in the linkset. protected override void RemoveChildFromLinkset(BSPrimLinkable child, bool inTaintTime) { child.ClearDisplacement(); if (m_children.Remove(child)) { DetailLog("{0},BSLinksetCompound.RemoveChildFromLinkset,call,rID={1},rBody={2},cID={3},cBody={4}", child.LocalID, LinksetRoot.LocalID, LinksetRoot.PhysBody.AddrString, child.LocalID, child.PhysBody.AddrString); // Cause the child's body to be rebuilt and thus restored to normal operation child.ForceBodyShapeRebuild(inTaintTime); if (!HasAnyChildren) { // The linkset is now empty. The root needs rebuilding. 
LinksetRoot.ForceBodyShapeRebuild(inTaintTime); } else { // Rebuild the compound shape with the child removed Refresh(LinksetRoot); } } return; } // Called before the simulation step to make sure the compound based linkset // is all initialized. // Constraint linksets are rebuilt every time. // Note that this works for rebuilding just the root after a linkset is taken apart. // Called at taint time!! private bool UseBulletSimRootOffsetHack = false; // Attempt to have Bullet track the coords of root compound shape private void RecomputeLinksetCompound() { try { Rebuilding = true; // No matter what is being done, force the root prim's PhysBody and PhysShape to get set // to what they should be as if the root was not in a linkset. // Not that bad since we only get into this routine if there are children in the linkset and // something has been updated/changed. // Have to do the rebuild before checking for physical because this might be a linkset // being destructed and going non-physical. LinksetRoot.ForceBodyShapeRebuild(true); // There is no reason to build all this physical stuff for a non-physical or empty linkset. if (!LinksetRoot.IsPhysicallyActive || !HasAnyChildren) { DetailLog("{0},BSLinksetCompound.RecomputeLinksetCompound,notPhysicalOrNoChildren", LinksetRoot.LocalID); return; // Note the 'finally' clause at the botton which will get executed. } // Get a new compound shape to build the linkset shape in. BSShape linksetShape = BSShapeCompound.GetReference(m_physicsScene); // Compute a displacement for each component so it is relative to the center-of-mass. 
// Bullet presumes an object's origin (relative <0,0,0>) is its center-of-mass OMV.Vector3 centerOfMassW = ComputeLinksetCenterOfMass(); OMV.Quaternion invRootOrientation = OMV.Quaternion.Normalize(OMV.Quaternion.Inverse(LinksetRoot.RawOrientation)); OMV.Vector3 origRootPosition = LinksetRoot.RawPosition; // 'centerDisplacementV' is the vehicle relative distance from the simulator root position to the center-of-mass OMV.Vector3 centerDisplacementV = (centerOfMassW - LinksetRoot.RawPosition) * invRootOrientation; if (UseBulletSimRootOffsetHack || !BSParam.LinksetOffsetCenterOfMass) { // Zero everything if center-of-mass displacement is not being done. centerDisplacementV = OMV.Vector3.Zero; LinksetRoot.ClearDisplacement(); } else { // The actual center-of-mass could have been set by the user. centerDisplacementV = LinksetRoot.SetEffectiveCenterOfMassDisplacement(centerDisplacementV); } DetailLog("{0},BSLinksetCompound.RecomputeLinksetCompound,COM,rootPos={1},com={2},comDisp={3}", LinksetRoot.LocalID, origRootPosition, centerOfMassW, centerDisplacementV); // Add the shapes of all the components of the linkset int memberIndex = 1; ForEachMember((cPrim) => { if (IsRoot(cPrim)) { // Root shape is always index zero. cPrim.LinksetChildIndex = 0; } else { cPrim.LinksetChildIndex = memberIndex; memberIndex++; } // Get a reference to the shape of the child for adding of that shape to the linkset compound shape BSShape childShape = cPrim.PhysShape.GetReference(m_physicsScene, cPrim); // Offset the child shape from the center-of-mass and rotate it to root relative. 
OMV.Vector3 offsetPos = (cPrim.RawPosition - origRootPosition) * invRootOrientation - centerDisplacementV; OMV.Quaternion offsetRot = OMV.Quaternion.Normalize(cPrim.RawOrientation) * invRootOrientation; // Add the child shape to the compound shape being built if (childShape.physShapeInfo.HasPhysicalShape) { m_physicsScene.PE.AddChildShapeToCompoundShape(linksetShape.physShapeInfo, childShape.physShapeInfo, offsetPos, offsetRot); DetailLog("{0},BSLinksetCompound.RecomputeLinksetCompound,addChild,indx={1},cShape={2},offPos={3},offRot={4}", LinksetRoot.LocalID, cPrim.LinksetChildIndex, childShape, offsetPos, offsetRot); // Since we are borrowing the shape of the child, disable the origional child body if (!IsRoot(cPrim)) { m_physicsScene.PE.AddToCollisionFlags(cPrim.PhysBody, CollisionFlags.CF_NO_CONTACT_RESPONSE); m_physicsScene.PE.ForceActivationState(cPrim.PhysBody, ActivationState.DISABLE_SIMULATION); // We don't want collisions from the old linkset children. m_physicsScene.PE.RemoveFromCollisionFlags(cPrim.PhysBody, CollisionFlags.BS_SUBSCRIBE_COLLISION_EVENTS); cPrim.PhysBody.collisionType = CollisionType.LinksetChild; } } else { // The linkset must be in an intermediate state where all the children have not yet // been constructed. This sometimes happens on startup when everything is getting // built and some shapes have to wait for assets to be read in. // Just skip this linkset for the moment and cause the shape to be rebuilt next tick. // One problem might be that the shape is broken somehow and it never becomes completely // available. This might cause the rebuild to happen over and over. InternalScheduleRebuild(LinksetRoot); DetailLog("{0},BSLinksetCompound.RecomputeLinksetCompound,addChildWithNoShape,indx={1},cShape={2},offPos={3},offRot={4}", LinksetRoot.LocalID, cPrim.LinksetChildIndex, childShape, offsetPos, offsetRot); // Output an annoying warning. 
It should only happen once but if it keeps coming out, // the user knows there is something wrong and will report it. m_physicsScene.Logger.WarnFormat("{0} Linkset rebuild warning. If this happens more than one or two times, please report in Mantis 7191", LogHeader); m_physicsScene.Logger.WarnFormat("{0} pName={1}, childIdx={2}, shape={3}", LogHeader, LinksetRoot.Name, cPrim.LinksetChildIndex, childShape); // This causes the loop to bail on building the rest of this linkset. // The rebuild operation will fix it up next tick or declare the object unbuildable. return true; } return false; // 'false' says to move onto the next child in the list }); // Replace the root shape with the built compound shape. // Object removed and added to world to get collision cache rebuilt for new shape. LinksetRoot.PhysShape.Dereference(m_physicsScene); LinksetRoot.PhysShape = linksetShape; m_physicsScene.PE.RemoveObjectFromWorld(m_physicsScene.World, LinksetRoot.PhysBody); m_physicsScene.PE.SetCollisionShape(m_physicsScene.World, LinksetRoot.PhysBody, linksetShape.physShapeInfo); m_physicsScene.PE.AddObjectToWorld(m_physicsScene.World, LinksetRoot.PhysBody); DetailLog("{0},BSLinksetCompound.RecomputeLinksetCompound,addBody,body={1},shape={2}", LinksetRoot.LocalID, LinksetRoot.PhysBody, linksetShape); m_physicsScene.PE.ResetBroadphasePool(m_physicsScene.World); // DEBUG DEBUG // With all of the linkset packed into the root prim, it has the mass of everyone. LinksetMass = ComputeLinksetMass(); LinksetRoot.UpdatePhysicalMassProperties(LinksetMass, true); if (UseBulletSimRootOffsetHack) { // Enable the physical position updator to return the position and rotation of the root shape. // This enables a feature in the C++ code to return the world coordinates of the first shape in the // compound shape. This aleviates the need to offset the returned physical position by the // center-of-mass offset. // TODO: either debug this feature or remove it. 
m_physicsScene.PE.AddToCollisionFlags(LinksetRoot.PhysBody, CollisionFlags.BS_RETURN_ROOT_COMPOUND_SHAPE); } } finally { Rebuilding = false; } // See that the Aabb surrounds the new shape m_physicsScene.PE.RecalculateCompoundShapeLocalAabb(LinksetRoot.PhysShape.physShapeInfo); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Reflection;
using Microsoft.CSharp.RuntimeBinder.Syntax;

namespace Microsoft.CSharp.RuntimeBinder.Semantics
{
    // Base class for the binder's internal representation of a type. Each CType
    // wraps a TypeKind discriminator plus a lazily-computed System.Type
    // (AssociatedSystemType) and exposes the many classification predicates the
    // semantic analysis relies on.
    internal abstract class CType
    {
        private TypeKind _typeKind;
        private Name _pName;
        private bool _fHasErrors;  // Whether any constituents have errors. This is immutable.

        // True when the reflected type carries the WindowsRuntime type attribute.
        public bool IsWindowsRuntimeType()
        {
            return (AssociatedSystemType.Attributes & TypeAttributes.WindowsRuntime) == TypeAttributes.WindowsRuntime;
        }

        // True when the associated System.Type is one of the well-known collection
        // (or collection-change notification) interfaces listed below.
        public bool IsCollectionType()
        {
            if ((AssociatedSystemType.IsGenericType &&
                 (AssociatedSystemType.GetGenericTypeDefinition() == typeof(IList<>) ||
                  AssociatedSystemType.GetGenericTypeDefinition() == typeof(ICollection<>) ||
                  AssociatedSystemType.GetGenericTypeDefinition() == typeof(IEnumerable<>) ||
                  AssociatedSystemType.GetGenericTypeDefinition() == typeof(IReadOnlyList<>) ||
                  AssociatedSystemType.GetGenericTypeDefinition() == typeof(IReadOnlyCollection<>) ||
                  AssociatedSystemType.GetGenericTypeDefinition() == typeof(IDictionary<,>) ||
                  AssociatedSystemType.GetGenericTypeDefinition() == typeof(IReadOnlyDictionary<,>))) ||
                AssociatedSystemType == typeof(System.Collections.IList) ||
                AssociatedSystemType == typeof(System.Collections.ICollection) ||
                AssociatedSystemType == typeof(System.Collections.IEnumerable) ||
                AssociatedSystemType == typeof(System.Collections.Specialized.INotifyCollectionChanged) ||
                AssociatedSystemType == typeof(System.ComponentModel.INotifyPropertyChanged))
            {
                return true;
            }
            return false;
        }

        private Type _associatedSystemType;  // lazily-computed reflection type; null until first use

        // The System.Type this CType corresponds to, computed on first access.
        public Type AssociatedSystemType
        {
            get
            {
                if (_associatedSystemType == null)
                {
                    _associatedSystemType = CalculateAssociatedSystemType(this);
                }

                return _associatedSystemType;
            }
        }

        // Maps a CType onto the corresponding System.Type by recursing on the
        // element/underlying/referent type for constructed kinds. Only the kinds
        // listed here produce a result; an aggregate may yield null (see the
        // assert at the bottom and CalculateAssociatedSystemTypeForAggregate).
        private static Type CalculateAssociatedSystemType(CType src)
        {
            Type result = null;

            switch (src.GetTypeKind())
            {
                case TypeKind.TK_ArrayType:
                    ArrayType a = (ArrayType)src;
                    Type elementType = a.GetElementType().AssociatedSystemType;
                    // SZ arrays (vector, zero lower bound) use the parameterless MakeArrayType.
                    result = a.IsSZArray ? elementType.MakeArrayType() : elementType.MakeArrayType(a.rank);
                    break;

                case TypeKind.TK_NullableType:
                    NullableType n = (NullableType)src;
                    Type underlyingType = n.GetUnderlyingType().AssociatedSystemType;
                    result = typeof(Nullable<>).MakeGenericType(underlyingType);
                    break;

                case TypeKind.TK_PointerType:
                    PointerType p = (PointerType)src;
                    Type referentType = p.GetReferentType().AssociatedSystemType;
                    result = referentType.MakePointerType();
                    break;

                case TypeKind.TK_ParameterModifierType:
                    ParameterModifierType r = (ParameterModifierType)src;
                    Type parameterType = r.GetParameterType().AssociatedSystemType;
                    result = parameterType.MakeByRefType();
                    break;

                case TypeKind.TK_AggregateType:
                    result = CalculateAssociatedSystemTypeForAggregate((AggregateType)src);
                    break;

                case TypeKind.TK_TypeParameterType:
                    // Resolve a type parameter by index against its owning method or type.
                    TypeParameterType t = (TypeParameterType)src;
                    if (t.IsMethodTypeParameter())
                    {
                        MethodInfo meth = ((MethodSymbol)t.GetOwningSymbol()).AssociatedMemberInfo as MethodInfo;
                        result = meth.GetGenericArguments()[t.GetIndexInOwnParameters()];
                    }
                    else
                    {
                        Type parentType = ((AggregateSymbol)t.GetOwningSymbol()).AssociatedSystemType;
                        result = parentType.GetGenericArguments()[t.GetIndexInOwnParameters()];
                    }
                    break;
            }

            Debug.Assert(result != null || src.GetTypeKind() == TypeKind.TK_AggregateType);
            return result;
        }

        // Instantiates the aggregate's open generic System.Type with the reflected
        // type arguments. Returns null if any argument is an unnamed placeholder
        // type parameter; returns the uninstantiated type if the constraints reject
        // the substitution.
        private static Type CalculateAssociatedSystemTypeForAggregate(AggregateType aggtype)
        {
            AggregateSymbol agg = aggtype.GetOwningAggregate();
            TypeArray typeArgs = aggtype.GetTypeArgsAll();

            List<Type> list = new List<Type>();

            // Get each type arg.
            for (int i = 0; i < typeArgs.Count; i++)
            {
                // Unnamed type parameter types are just placeholders.
                if (typeArgs[i] is TypeParameterType typeParamArg && typeParamArg.GetTypeParameterSymbol().name == null)
                {
                    return null;
                }
                list.Add(typeArgs[i].AssociatedSystemType);
            }

            Type[] systemTypeArgs = list.ToArray();
            Type uninstantiatedType = agg.AssociatedSystemType;
            if (uninstantiatedType.IsGenericType)
            {
                try
                {
                    return uninstantiatedType.MakeGenericType(systemTypeArgs);
                }
                catch (ArgumentException)
                {
                    // If the constraints don't work, just return the type without substituting it.
                    return uninstantiatedType;
                }
            }
            return uninstantiatedType;
        }

        public TypeKind GetTypeKind() { return _typeKind; }
        public void SetTypeKind(TypeKind kind) { _typeKind = kind; }

        public Name GetName() { return _pName; }
        public void SetName(Name pName) { _pName = pName; }

        // This call switches on the kind and dispatches accordingly. This should really only be
        // used when dereferencing TypeArrays. We should consider refactoring our code to not
        // need this type of thing - strongly typed handling of TypeArrays would be much better.
        public CType GetBaseOrParameterOrElementType()
        {
            switch (GetTypeKind())
            {
                case TypeKind.TK_ArrayType:
                    return ((ArrayType)this).GetElementType();

                case TypeKind.TK_PointerType:
                    return ((PointerType)this).GetReferentType();

                case TypeKind.TK_ParameterModifierType:
                    return ((ParameterModifierType)this).GetParameterType();

                case TypeKind.TK_NullableType:
                    return ((NullableType)this).GetUnderlyingType();

                default:
                    return null;
            }
        }

        // Propagates the error flag from the constituent type; only valid for
        // constructed (non-aggregate, non-error) types.
        public void InitFromParent()
        {
            Debug.Assert(!(this is AggregateType));
            Debug.Assert(!(this is ErrorType));
            _fHasErrors = GetBaseOrParameterOrElementType().HasErrors();
        }

        public bool HasErrors() { return _fHasErrors; }
        public void SetErrors(bool fHasErrors) { _fHasErrors = fHasErrors; }

        ////////////////////////////////////////////////////////////////////////////////
        // Given a symbol, determine its fundamental type. This is the type that
        // indicates how the item is stored and what instructions are used to reference
        // it. The fundamental types are:
        //   one of the integral/float types (includes enums with that underlying type)
        //   reference type
        //   struct/value type
        public FUNDTYPE fundType()
        {
            switch (GetTypeKind())
            {
                case TypeKind.TK_AggregateType:
                    {
                        AggregateSymbol sym = ((AggregateType)this).getAggregate();

                        // Treat enums like their underlying types.
                        if (sym.IsEnum())
                        {
                            sym = sym.GetUnderlyingType().getAggregate();
                        }

                        if (sym.IsStruct())
                        {
                            // Struct type could be predefined (int, long, etc.) or some other struct.
                            if (sym.IsPredefined())
                                return PredefinedTypeFacts.GetFundType(sym.GetPredefType());
                            return FUNDTYPE.FT_STRUCT;
                        }
                        return FUNDTYPE.FT_REF; // Interfaces, classes, delegates are reference types.
                    }

                case TypeKind.TK_TypeParameterType:
                    return FUNDTYPE.FT_VAR;

                case TypeKind.TK_ArrayType:
                case TypeKind.TK_NullType:
                    return FUNDTYPE.FT_REF;

                case TypeKind.TK_PointerType:
                    return FUNDTYPE.FT_PTR;

                case TypeKind.TK_NullableType:
                    return FUNDTYPE.FT_STRUCT;

                default:
                    return FUNDTYPE.FT_NONE;
            }
        }

        // Classifies how a constant of this type is stored (long, decimal,
        // string, etc.) based on the fundamental type.
        public ConstValKind constValKind()
        {
            if (isPointerLike())
            {
                return ConstValKind.IntPtr;
            }

            switch (fundType())
            {
                case FUNDTYPE.FT_I8:
                case FUNDTYPE.FT_U8:
                    return ConstValKind.Long;
                case FUNDTYPE.FT_STRUCT:
                    // Here we can either have a decimal type, or an enum
                    // whose fundamental type is decimal.
                    Debug.Assert((getAggregate().IsEnum() && getAggregate().GetUnderlyingType().getPredefType() == PredefinedType.PT_DECIMAL)
                        || (isPredefined() && getPredefType() == PredefinedType.PT_DATETIME)
                        || (isPredefined() && getPredefType() == PredefinedType.PT_DECIMAL));

                    if (isPredefined() && getPredefType() == PredefinedType.PT_DATETIME)
                    {
                        return ConstValKind.Long;
                    }
                    return ConstValKind.Decimal;

                case FUNDTYPE.FT_REF:
                    if (isPredefined() && getPredefType() == PredefinedType.PT_STRING)
                    {
                        return ConstValKind.String;
                    }
                    else
                    {
                        return ConstValKind.IntPtr;
                    }
                case FUNDTYPE.FT_R4:
                    return ConstValKind.Float;
                case FUNDTYPE.FT_R8:
                    return ConstValKind.Double;
                case FUNDTYPE.FT_I1:
                    // FT_I1 here means the boolean fundamental type.
                    return ConstValKind.Boolean;
                default:
                    return ConstValKind.Int;
            }
        }

        // For enums, the underlying integral type; otherwise the type itself.
        public CType underlyingType()
        {
            if (this is AggregateType && getAggregate().IsEnum())
                return getAggregate().GetUnderlyingType();
            return this;
        }

        ////////////////////////////////////////////////////////////////////////////////
        // Strips off ArrayType, ParameterModifierType, PointerType, PinnedType and optionally NullableType
        // and returns the result.
        public CType GetNakedType(bool fStripNub)
        {
            for (CType type = this; ;)
            {
                switch (type.GetTypeKind())
                {
                    default:
                        return type;

                    case TypeKind.TK_NullableType:
                        if (!fStripNub)
                            return type;
                        type = type.GetBaseOrParameterOrElementType();
                        break;
                    case TypeKind.TK_ArrayType:
                    case TypeKind.TK_ParameterModifierType:
                    case TypeKind.TK_PointerType:
                        type = type.GetBaseOrParameterOrElementType();
                        break;
                }
            }
        }

        public AggregateSymbol GetNakedAgg()
        {
            return GetNakedAgg(false);
        }

        // The aggregate symbol of the naked type, or null if the naked type isn't an aggregate.
        private AggregateSymbol GetNakedAgg(bool fStripNub) =>
            (GetNakedType(fStripNub) as AggregateType)?.getAggregate();

        public AggregateSymbol getAggregate()
        {
            Debug.Assert(this is AggregateType);
            return ((AggregateType)this).GetOwningAggregate();
        }

        // Strip Nullable wrappers; overridden by NullableType.
        public virtual CType StripNubs() => this;

        public virtual CType StripNubs(out bool wasNullable)
        {
            wasNullable = false;
            return this;
        }

        public bool isDelegateType()
        {
            return this is AggregateType && getAggregate().IsDelegate();
        }

        ////////////////////////////////////////////////////////////////////////////////
        // A few types are considered "simple" types for purposes of conversions and so
        // on. They are the fundamental types the compiler knows about for operators and
        // conversions.
        public bool isSimpleType()
        {
            return (isPredefined() &&
                    PredefinedTypeFacts.IsSimpleType(getPredefType()));
        }
        public bool isSimpleOrEnum()
        {
            return isSimpleType() || isEnumType();
        }
        public bool isSimpleOrEnumOrString()
        {
            return isSimpleType() || isPredefType(PredefinedType.PT_STRING) || isEnumType();
        }

        private bool isPointerLike()
        {
            return this is PointerType || isPredefType(PredefinedType.PT_INTPTR) || isPredefType(PredefinedType.PT_UINTPTR);
        }

        ////////////////////////////////////////////////////////////////////////////////
        // A few types are considered "numeric" types. They are the fundamental number
        // types the compiler knows about for operators and conversions.
        public bool isNumericType()
        {
            return (isPredefined() &&
                    PredefinedTypeFacts.IsNumericType(getPredefType()));
        }

        public bool isStructOrEnum()
        {
            return this is AggregateType && (getAggregate().IsStruct() || getAggregate().IsEnum()) || this is NullableType;
        }

        public bool isStructType()
        {
            return this is AggregateType && getAggregate().IsStruct() || this is NullableType;
        }

        public bool isEnumType()
        {
            return this is AggregateType && getAggregate().IsEnum();
        }

        public bool isInterfaceType()
        {
            return this is AggregateType && getAggregate().IsInterface();
        }

        public bool isClassType()
        {
            return this is AggregateType && getAggregate().IsClass();
        }

        // Only valid for enum types: the predefined integral underlying type.
        public AggregateType underlyingEnumType()
        {
            Debug.Assert(isEnumType());
            return getAggregate().GetUnderlyingType();
        }

        // True for unsigned integral predefined types (byte, ushort..ulong, UIntPtr)
        // including enums over them, and for pointer types.
        public bool isUnsigned()
        {
            if (this is AggregateType sym)
            {
                if (sym.isEnumType())
                {
                    sym = sym.underlyingEnumType();
                }
                if (sym.isPredefined())
                {
                    PredefinedType pt = sym.getPredefType();
                    return pt == PredefinedType.PT_UINTPTR || pt == PredefinedType.PT_BYTE || (pt >= PredefinedType.PT_USHORT && pt <= PredefinedType.PT_ULONG);
                }
                else
                {
                    return false;
                }
            }
            else
            {
                return this is PointerType;
            }
        }

        public bool isUnsafe()
        {
            // Pointer types are the only unsafe types.
            // Note that generics may not be instantiated with pointer types
            return this is PointerType || this is ArrayType arr && arr.GetElementType().isUnsafe();
        }

        public bool isPredefType(PredefinedType pt)
        {
            if (this is AggregateType ats)
                return ats.getAggregate().IsPredefined() && ats.getAggregate().GetPredefType() == pt;
            return (this is VoidType && pt == PredefinedType.PT_VOID);
        }

        public bool isPredefined()
        {
            return this is AggregateType && getAggregate().IsPredefined();
        }

        public PredefinedType getPredefType()
        {
            //ASSERT(isPredefined());
            return getAggregate().GetPredefType();
        }

        public bool isStaticClass()
        {
            AggregateSymbol agg = GetNakedAgg(false);
            if (agg == null)
                return false;

            if (!agg.IsStatic())
                return false;

            return true;
        }

        // If this is Expression<T>, return T; otherwise return this type unchanged.
        public CType GetDelegateTypeOfPossibleExpression()
        {
            if (isPredefType(PredefinedType.PT_G_EXPRESSION))
            {
                return ((AggregateType)this).GetTypeArgsThis()[0];
            }

            return this;
        }

        // These check for AGGTYPESYMs, TYVARSYMs and others as appropriate.
        public bool IsValType()
        {
            switch (GetTypeKind())
            {
                case TypeKind.TK_TypeParameterType:
                    return ((TypeParameterType)this).IsValueType();
                case TypeKind.TK_AggregateType:
                    return ((AggregateType)this).getAggregate().IsValueType();
                case TypeKind.TK_NullableType:
                    return true;
                default:
                    return false;
            }
        }

        public bool IsNonNubValType()
        {
            switch (GetTypeKind())
            {
                case TypeKind.TK_TypeParameterType:
                    return ((TypeParameterType)this).IsNonNullableValueType();
                case TypeKind.TK_AggregateType:
                    return ((AggregateType)this).getAggregate().IsValueType();
                case TypeKind.TK_NullableType:
                    return false;
                default:
                    return false;
            }
        }

        public bool IsRefType()
        {
            switch (GetTypeKind())
            {
                case TypeKind.TK_ArrayType:
                case TypeKind.TK_NullType:
                    return true;
                case TypeKind.TK_TypeParameterType:
                    return ((TypeParameterType)this).IsReferenceType();
                case TypeKind.TK_AggregateType:
                    return ((AggregateType)this).getAggregate().IsRefType();
                default:
                    return false;
            }
        }

        // A few types can be the same pointer value and not actually
        // be equivalent or convertible (like ANONMETHSYMs)
        public bool IsNeverSameType()
        {
            return this is MethodGroupType ||
                this is ErrorType err && !err.HasParent;
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace AspNetIdentityDependencyInjectionSample.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// </summary>
    /// <remarks>
    /// NOTE(review): this file is part of the Web API HelpPage scaffolding; keep the code
    /// byte-stable so package updates diff cleanly. Descriptions are cached in
    /// <see cref="GeneratedModels"/> keyed by model name (case-insensitive).
    /// </remarks>
    public class ModelDescriptionGenerator
    {
        // Modify this to support more data annotation attributes.
        // Maps an annotation attribute type to a function that renders its human-readable text.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Modify this to add more default documentations.
        // Built-in documentation strings for simple CLR types; membership in this map
        // also decides which types get a SimpleTypeModelDescription.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so the documentation provider is looked up only on first use.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        /// <summary>
        /// Creates a generator bound to the given configuration, from which the
        /// (optional) XML documentation provider is resolved lazily.
        /// </summary>
        /// <param name="config">The Web API configuration; must not be null.</param>
        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        // Cache of all descriptions produced so far, keyed case-insensitively by model name.
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        // May be null when no IModelDocumentationProvider is registered.
        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        /// <summary>
        /// Returns the cached description for <paramref name="modelType"/> or creates one,
        /// dispatching on the type's shape (simple, enum, collection, dictionary,
        /// key/value pair, or complex). Nullable&lt;T&gt; is unwrapped first.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Two distinct types map to the same model name (resolve with [ModelName]).
        /// </exception>
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                // IEnumerable<T> with a single type argument -> collection.
                if (genericArguments.Length == 1)
                {
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }

                // Two type arguments: check dictionary before key/value pair.
                if (genericArguments.Length == 2)
                {
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        // Change this to provide different name for the member.
        // Resolution order: [JsonProperty] name, then [DataMember] name (only when the
        // declaring type carries [DataContract]), then the CLR member name.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        // Decides whether a member is shown on the help page, honoring the common
        // serializer/ApiExplorer opt-out attributes.
        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            // Enum members opt in via [EnumMember]; other members via [DataMember].
            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        // Built-in text first, then the documentation provider; may return null
        // when neither has anything to say about the type.
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }

            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        // Collects the recognized annotation attributes on a member, renders their text,
        // sorts them (Required first, then alphabetical), and stores them on the model.
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        // Describes a collection by describing its element type; returns null when the
        // element type cannot be described.
        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        // Describes a POCO: the description is cached *before* recursing into member
        // types so self-referencing models terminate.
        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };

            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            // Public instance fields are described too (no annotation pass for fields).
            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        // Describes a dictionary-shaped type via descriptions of its key and value types.
        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        // Describes an enum: one entry per public static field that passes
        // ShouldDisplayMember, with the raw constant value rendered as text.
        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        // Describes KeyValuePair<TKey, TValue> via its key and value type descriptions.
        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        // Describes a simple type from the DefaultTypeDocumentation table and caches it.
        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//

namespace Microsoft.Zelig.Runtime.TargetPlatform.ARMv5
{
    using System;
    using System.Runtime.InteropServices;

    using Microsoft.Zelig.TargetModel.ArmProcessor;

    using TS = Microsoft.Zelig.Runtime.TypeSystem;
    using EncDef = Microsoft.Zelig.TargetModel.ArmProcessor.EncodingDefinition_ARM;
    using EncDef_VFP = Microsoft.Zelig.TargetModel.ArmProcessor.EncodingDefinition_VFP_ARM;

    public abstract partial class ProcessorARMv5
    {
        // Per-thread saved processor state plus stack-walk / context-switch helpers
        // for the ARMv5 target. This is low-level, order-dependent code: the unwinder
        // decodes prologue opcodes (STMFD / SUB sp) directly from memory.
        public abstract new class Context : Processor.Context
        {
            //
            // WARNING: Don't assume the actual layout of the structure is sequential!!!
            // WARNING: The code generator rearranges the fields to minimize the cost of a context switch!!!
            //
            [TS.WellKnownType( "Microsoft_Zelig_ProcessorARMv5_RegistersOnStack" )]
            [StructLayout( LayoutKind.Sequential, Pack = 4 )]
            public struct RegistersOnStack
            {
                // ARM register indices for SP (r13), LR (r14) and PC (r15).
                public const uint StackRegister          = EncDef.c_register_sp;
                public const uint LinkRegister           = EncDef.c_register_lr;
                public const uint ProgramCounterRegister = EncDef.c_register_pc;

                //
                // State
                //

                [TS.AssumeReferenced] public uint    CPSR;
                [TS.AssumeReferenced] public UIntPtr SP;
                [TS.AssumeReferenced] public UIntPtr LR;
                [TS.AssumeReferenced] public UIntPtr R0;
                [TS.AssumeReferenced] public UIntPtr R1;
                [TS.AssumeReferenced] public UIntPtr R2;
                [TS.AssumeReferenced] public UIntPtr R3;
                [TS.AssumeReferenced] public UIntPtr R4;
                [TS.AssumeReferenced] public UIntPtr R5;
                [TS.AssumeReferenced] public UIntPtr R6;
                [TS.AssumeReferenced] public UIntPtr R7;
                [TS.AssumeReferenced] public UIntPtr R8;
                [TS.AssumeReferenced] public UIntPtr R9;
                [TS.AssumeReferenced] public UIntPtr R10;
                [TS.AssumeReferenced] public UIntPtr R11;
                [TS.AssumeReferenced] public UIntPtr R12;
                [TS.AssumeReferenced] public UIntPtr PC;

                //
                // Helper Methods
                //

                // Maps an ARM register index (0-15) to a pointer to the matching field;
                // returns null for out-of-range indices. The switch (rather than
                // pointer arithmetic) is required because the code generator may
                // rearrange the fields (see WARNING above).
                internal unsafe UIntPtr* GetRegisterPointer( uint idx )
                {
                    switch(idx)
                    {
                        case  0: fixed(UIntPtr* ptr = &this.R0 ) { return ptr; };
                        case  1: fixed(UIntPtr* ptr = &this.R1 ) { return ptr; };
                        case  2: fixed(UIntPtr* ptr = &this.R2 ) { return ptr; };
                        case  3: fixed(UIntPtr* ptr = &this.R3 ) { return ptr; };
                        case  4: fixed(UIntPtr* ptr = &this.R4 ) { return ptr; };
                        case  5: fixed(UIntPtr* ptr = &this.R5 ) { return ptr; };
                        case  6: fixed(UIntPtr* ptr = &this.R6 ) { return ptr; };
                        case  7: fixed(UIntPtr* ptr = &this.R7 ) { return ptr; };
                        case  8: fixed(UIntPtr* ptr = &this.R8 ) { return ptr; };
                        case  9: fixed(UIntPtr* ptr = &this.R9 ) { return ptr; };
                        case 10: fixed(UIntPtr* ptr = &this.R10) { return ptr; };
                        case 11: fixed(UIntPtr* ptr = &this.R11) { return ptr; };
                        case 12: fixed(UIntPtr* ptr = &this.R12) { return ptr; };
                        case 13: fixed(UIntPtr* ptr = &this.SP ) { return ptr; };
                        case 14: fixed(UIntPtr* ptr = &this.LR ) { return ptr; };
                        case 15: fixed(UIntPtr* ptr = &this.PC ) { return ptr; };
                    }

                    return null;
                }

                // Field-by-field copy of the full register set (including CPSR).
                public void Assign( ref RegistersOnStack other )
                {
                    this.R0   = other.R0;
                    this.R1   = other.R1;
                    this.R2   = other.R2;
                    this.R3   = other.R3;
                    this.R4   = other.R4;
                    this.R5   = other.R5;
                    this.R6   = other.R6;
                    this.R7   = other.R7;
                    this.R8   = other.R8;
                    this.R9   = other.R9;
                    this.R10  = other.R10;
                    this.R11  = other.R11;
                    this.R12  = other.R12;
                    this.SP   = other.SP;
                    this.LR   = other.LR;
                    this.PC   = other.PC;
                    this.CPSR = other.CPSR;
                }
            }

            //
            // State
            //

            // Masks/patterns used by Unwind() to recognize the prologue/epilogue opcodes:
            // STMFD sp!, {...} (register save), SUB sp, sp, #imm (stack adjustment) and
            // LDMFD sp!, {...} (epilogue restore).
            const uint c_STMFD_Mask   = 0xFFFF0000;
            const uint c_STMFD_Opcode = 0xE92D0000;
            const uint c_SUBSP_Mask   = 0xFFFFF000;
            const uint c_SUBSP_Opcode = 0xE24DD000;

            //--//

            const uint c_LDMFD_Mask   = 0xFFFF0000;
            const uint c_LDMFD_Opcode = 0xE8BD0000;

            //--//

            public RegistersOnStack Registers;

            //
            // Constructor Methods
            //

            public Context(ThreadImpl owner) : base(owner)
            {
            }

            //
            // Helper Methods
            //

            // Captures the current register state, then unwinds one frame so the
            // context reflects the caller of Populate().
            [NoInline]
            public override void Populate()
            {
                GetAllTheRegisters();

                //
                // Now we have all the registers at this method boundary, unwind one more time and we have the state at the caller's site.
                //
                Unwind();
            }

            // Copies the register state from another context.
            [NoInline]
            public override void Populate( Processor.Context context )
            {
                Context ctx = (Context)context;

                this.Registers.Assign( ref ctx.Registers );
            }

            // Prepares a fresh context that will start executing the delegate's code:
            // PC = delegate code pointer, SP = top of the supplied stack, R0 = target
            // object (the 'this' argument), CPSR = SYS mode.
            [NoInline]
            public unsafe override void PopulateFromDelegate( Delegate dlg, uint[] stack )
            {
                DelegateImpl dlgImpl   = (DelegateImpl)(object)dlg;
                ArrayImpl    stackImpl = (ArrayImpl   )(object)stack;
                ObjectImpl   objImpl   = (ObjectImpl  )(object)dlg.Target;

                this.Registers.CPSR = c_psr_mode_SYS;
                this.Registers.PC   = new UIntPtr( dlgImpl.InnerGetCodePointer().Target.ToPointer() );
                this.Registers.SP   = new UIntPtr( stackImpl.GetEndDataPointer() );
                this.Registers.R0   = objImpl.ToPointer();
            }

            // Installs this context's SP as the banked stack pointer of the given
            // processor mode, temporarily switching modes with interrupts disabled.
            [NoInline]
            public override void SetupForExceptionHandling( uint mode )
            {
                using(Runtime.SmartHandles.InterruptState.DisableAll())
                {
                    UIntPtr stack   = this.Registers.SP;
                    uint    oldMode = GetStatusRegister();

                    //
                    // Enter target mode, with interrupts disabled.
                    //
                    SetStatusRegister( c_psr_field_c, c_psr_I | c_psr_F | mode );

                    SetRegister( Context.RegistersOnStack.StackRegister, stack );

                    //
                    // Switch back to original mode.
                    //
                    SetStatusRegister( c_psr_field_c, oldMode );
                }
            }

            // Walks one stack frame by decoding the prologue of the method containing
            // the current PC (STMFD register save + SUB sp adjustment), restoring the
            // saved registers and the caller's PC/SP. Returns false at the bottom of
            // the call stack or when the prologue cannot be decoded.
            [NoInline]
            public unsafe override bool Unwind()
            {
                UIntPtr    pc = this.ProgramCounter;
                TS.CodeMap cm = TS.CodeMap.ResolveAddressToCodeMap( pc );

                this.InPrologue = false;
                this.InEpilogue = false;

                if(cm != null)
                {
                    for(int i = 0; i < cm.Ranges.Length; i++)
                    {
                        TS.CodeMap.Range rng = cm.Ranges[i];

                        if((rng.Flags & TS.CodeMap.Flags.EntryPoint) != 0)
                        {
                            if((rng.Flags & TS.CodeMap.Flags.BottomOfCallStack) != 0)
                            {
                                return false;
                            }

                            UIntPtr sp                 = this.StackPointer;
                            UIntPtr address            = rng.Start;
                            uint    regRestoreMap      = 0;
                            uint    stackAdjustment    = 0;
                            bool    fReturnAddressinLR = false;
                            bool    fDone              = false;

                            if(pc == address)
                            {
                                //
                                // We are at the beginning of a method, the return address is in LR for sure.
                                //
                                fReturnAddressinLR = true;

                                //
                                // The PC has not executed the next prologue instruction, stop processing.
                                //
                                fDone = true;

                                this.InPrologue = true;
                            }

                            if(fDone == false)
                            {
                                if((rng.Flags & TS.CodeMap.Flags.HasIntRegisterSave) != 0)
                                {
                                    uint* ptr          = (uint*)address.ToPointer();
                                    uint  opcode_STMFD = DebuggerAwareRead( ptr++ );

                                    address = new UIntPtr( ptr );

                                    if((opcode_STMFD & c_STMFD_Mask) == c_STMFD_Opcode)
                                    {
                                        // Low 16 bits of STMFD are the register list that was pushed.
                                        regRestoreMap = opcode_STMFD & 0xFFFF;
                                    }
                                    else
                                    {
                                        //CHECKS.ASSERT( false, "Expecting a STMFD opcode, got 0x{0:X8}", opcode_STMFD );
                                        return false;
                                    }
                                }
                                else
                                {
                                    //
                                    // No register push, the return address is in LR for sure.
                                    //
                                    fReturnAddressinLR = true;
                                }
                            }

                            if(pc == address)
                            {
                                //
                                // The PC has not executed the next prologue instruction, stop processing.
                                //
                                fDone = true;

                                this.InPrologue = true;
                            }

                            // NOTE(review): the guard below is an exact, idempotent duplicate of the
                            // guard above (copy/paste). Kept byte-identical here; one of the two can
                            // almost certainly be removed — confirm against upstream history.
                            if(pc == address)
                            {
                                //
                                // The PC has not executed the next prologue instruction, stop processing.
                                //
                                fDone = true;

                                this.InPrologue = true;
                            }

                            if(fDone == false)
                            {
                                if((rng.Flags & TS.CodeMap.Flags.HasStackAdjustment) != 0)
                                {
                                    uint* ptr          = (uint*)address.ToPointer();
                                    uint  opcode_SUBSP = DebuggerAwareRead( ptr );

                                    if((opcode_SUBSP & c_SUBSP_Mask) == c_SUBSP_Opcode)
                                    {
                                        stackAdjustment = s_Encoding.get_DataProcessing_ImmediateValue( opcode_SUBSP );
                                    }
                                    else
                                    {
                                        //CHECKS.ASSERT( false, "Expecting a STMFD opcode, got 0x{0:X8}", opcode_SUBSP );
                                        return false;
                                    }
                                }
                            }

                            //
                            // Deal with method epilogue: if we are on one of the return instructions, we need to restore less state.
                            //
                            uint opcode = *(uint*)pc.ToPointer();

                            if((opcode & c_LDMFD_Mask) == c_LDMFD_Opcode)
                            {
                                stackAdjustment = 0;
                            }

                            //--//

                            sp = AddressMath.Increment( sp, stackAdjustment );

                            if(fReturnAddressinLR)
                            {
                                this.Registers.PC = this.Registers.LR;
                            }
                            else
                            {
                                UIntPtr* src = (UIntPtr*)sp.ToPointer();

                                for(uint idx = 0; idx < 16; idx++)
                                {
                                    if((regRestoreMap & (1u << (int)idx)) != 0)
                                    {
                                        //
                                        // Prologue saves LR, but we need to restore it as PC.
                                        //
                                        uint regIdx = (idx == RegistersOnStack.LinkRegister) ? RegistersOnStack.ProgramCounterRegister : idx;

                                        UIntPtr* dst = this.Registers.GetRegisterPointer( regIdx );

                                        *dst = *src++;
                                    }
                                }

                                sp = new UIntPtr( src );
                            }

                            this.StackPointer = sp;

                            return true;
                        }
                    }
                }

                return false;
            }

            // Transfers execution to this context (never returns to the caller).
            [NoInline]
            public override void SwitchTo()
            {
                LongJump( ref this.Registers );
            }

            public override unsafe UIntPtr GetRegisterByIndex( uint idx )
            {
                return *(this.Registers.GetRegisterPointer( idx ));
            }

            public override unsafe void SetRegisterByIndex( uint    idx   ,
                                                            UIntPtr value )
            {
                *(this.Registers.GetRegisterPointer( idx )) = value;
            }

            //--//

            // Snapshots SP and PC (the rest is captured by the SaveFullProcessorContext
            // machinery), then unwinds to the caller's frame.
            [NoInline]
            [SaveFullProcessorContext]
            private unsafe void GetAllTheRegisters()
            {
                this.Registers.SP = GetRegister( RegistersOnStack.StackRegister          );
                this.Registers.PC = GetRegister( RegistersOnStack.ProgramCounterRegister );

                Unwind();
            }

            //--//

            [NoInline]
            [NoReturn()]
            [HardwareExceptionHandler(HardwareException.LongJump)]
            static void LongJump( ref RegistersOnStack registers )
            {
                //
                // WARNING!
                // WARNING! Keep this method empty!!!!
                // WARNING!
                //
                // We need a way to make a long jump as part of the exception handling code.
                //
                // The code responsible for emitting the prologue of the method will detect that
                // this method is decorated with the ContextSwitch flag and it will generate the proper code.
                //
                // WARNING!
                // WARNING! Keep this method empty!!!!
                // WARNING!
                //
            }

            //--//

            // Latched CP15 fault-status/address registers from the last hardware fault,
            // for post-mortem inspection in the debugger.
            static uint fault_DFSR;
            static uint fault_IFSR;
            static uint fault_FAR;

            [NoInline]
            [NoReturn()]
            [HardwareExceptionHandler(HardwareException.UndefinedInstruction)]
            [MemoryUsage(MemoryUsage.Bootstrap)]
            static void UndefinedInstruction()
            {
                fault_DFSR = MoveFromCoprocessor( 15, 0, 5, 0, 0 );
                fault_IFSR = MoveFromCoprocessor( 15, 0, 5, 0, 1 );
                fault_FAR  = MoveFromCoprocessor( 15, 0, 6, 0, 0 );

                Processor.Instance.Breakpoint();
            }

            [NoInline]
            [NoReturn()]
            [HardwareExceptionHandler(HardwareException.PrefetchAbort)]
            [MemoryUsage(MemoryUsage.Bootstrap)]
            static void PrefetchAbort()
            {
                fault_DFSR = MoveFromCoprocessor( 15, 0, 5, 0, 0 );
                fault_IFSR = MoveFromCoprocessor( 15, 0, 5, 0, 1 );
                fault_FAR  = MoveFromCoprocessor( 15, 0, 6, 0, 0 );

                Processor.Instance.Breakpoint();
            }

            [NoInline]
            [NoReturn()]
            [HardwareExceptionHandler(HardwareException.DataAbort)]
            [MemoryUsage(MemoryUsage.Bootstrap)]
            static void DataAbort()
            {
                fault_DFSR = MoveFromCoprocessor( 15, 0, 5, 0, 0 );
                fault_IFSR = MoveFromCoprocessor( 15, 0, 5, 0, 1 );
                fault_FAR  = MoveFromCoprocessor( 15, 0, 6, 0, 0 );

                Processor.Instance.Breakpoint();
            }

            //--//

            // IRQ entry point that may swap the running thread before returning.
            [Inline]
            public static void InterruptHandlerWithContextSwitch( ref RegistersOnStack registers )
            {
                Peripherals.Instance.ProcessInterrupt();

                ThreadManager tm = ThreadManager.Instance;

                //
                // We keep looping until the current and next threads are the same,
                // because when swapping out a dead thread, we might wake up a different thread.
                //
                while(tm.ShouldContextSwitch)
                {
                    ContextSwitch( tm, ref registers );
                }
            }

            [Inline]
            public static void InterruptHandlerWithoutContextSwitch()
            {
                Peripherals.Instance.ProcessInterrupt();
            }

            [Inline]
            public static void FastInterruptHandlerWithoutContextSwitch()
            {
                Peripherals.Instance.ProcessFastInterrupt();
            }

            [Inline]
            public static void GenericSoftwareInterruptHandler( ref RegistersOnStack registers )
            {
            }

            //--//

            // Saves the interrupted registers into the current thread's context and
            // loads the next thread's saved registers into 'registers'.
            private static void ContextSwitch(     ThreadManager    tm        ,
                                               ref RegistersOnStack registers )
            {
                ThreadImpl currentThread = tm.CurrentThread;
                ThreadImpl nextThread    = tm.NextThread;
                Context    ctx;

                if(currentThread != null)
                {
                    ctx = (Context)currentThread.SwappedOutContext;

                    ctx.Registers.Assign( ref registers );
                }

                ctx = (Context)nextThread.SwappedOutContext;

                registers.Assign( ref ctx.Registers );

                tm.CurrentThread = nextThread;

                ThreadImpl.CurrentThread = nextThread;
            }

            //
            // Access Methods
            //

            public override UIntPtr StackPointer
            {
                get
                {
                    return this.Registers.SP;
                }

                set
                {
                    this.Registers.SP = value;
                }
            }

            public override UIntPtr BaseStackPointer
            {
                get
                {
                    return (UIntPtr)0;
                }
            }

            public override UIntPtr ProgramCounter
            {
                get
                {
                    return this.Registers.PC;
                }

                set
                {
                    this.Registers.PC = value;
                }
            }

            public override uint ScratchedIntegerRegisters
            {
                get
                {
                    return MethodWrapperHelpers.ScratchedRegisters();
                }
            }
        }
    }
}
using System;
using Microsoft.Data.Entity;
using Microsoft.Data.Entity.Infrastructure;
using Microsoft.Data.Entity.Metadata;
using Microsoft.Data.Entity.Migrations;
using EverReader.Models;

namespace EverReader.Migrations
{
    // Auto-generated EF7 (beta8) migration model snapshot for the
    // "20151127133301_NotebookChanges" migration. Do not hand-edit the model
    // definitions below: regenerate with the EF migrations tooling instead.
    [DbContext(typeof(EverReaderContext))]
    [Migration("20151127133301_NotebookChanges")]
    partial class NotebookChanges
    {
        // Rebuilds the target model (entity shapes, keys, indexes and relationships)
        // as it existed when this migration was scaffolded.
        protected override void BuildTargetModel(ModelBuilder modelBuilder)
        {
            modelBuilder
                .Annotation("ProductVersion", "7.0.0-beta8-15964")
                .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);

            // --- Entity definitions ---

            modelBuilder.Entity("EverReader.Models.ApplicationUser", b =>
                {
                    b.Property<string>("Id");

                    b.Property<int>("AccessFailedCount");

                    b.Property<string>("ConcurrencyStamp")
                        .IsConcurrencyToken();

                    b.Property<string>("Email")
                        .Annotation("MaxLength", 256);

                    b.Property<bool>("EmailConfirmed");

                    b.Property<int?>("EvernoteCredentialsId");

                    b.Property<bool>("HasAuthorisedEvernote");

                    b.Property<bool>("LockoutEnabled");

                    b.Property<DateTimeOffset?>("LockoutEnd");

                    b.Property<string>("NormalizedEmail")
                        .Annotation("MaxLength", 256);

                    b.Property<string>("NormalizedUserName")
                        .Annotation("MaxLength", 256);

                    b.Property<string>("PasswordHash");

                    b.Property<string>("PhoneNumber");

                    b.Property<bool>("PhoneNumberConfirmed");

                    b.Property<string>("SecurityStamp");

                    b.Property<bool>("TwoFactorEnabled");

                    b.Property<string>("UserName")
                        .Annotation("MaxLength", 256);

                    b.HasKey("Id");

                    b.Index("NormalizedEmail")
                        .Annotation("Relational:Name", "EmailIndex");

                    b.Index("NormalizedUserName")
                        .Annotation("Relational:Name", "UserNameIndex");

                    b.Annotation("Relational:TableName", "AspNetUsers");
                });

            modelBuilder.Entity("EverReader.Models.Bookmark", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("BookmarkTitle");

                    b.Property<DateTime>("NoteCreated");

                    b.Property<string>("NoteGuid");

                    b.Property<int>("NoteLength");

                    b.Property<string>("NoteTitle");

                    b.Property<DateTime>("NoteUpdated");

                    b.Property<decimal>("PercentageRead");

                    b.Property<int>("Type");

                    b.Property<DateTime>("Updated");

                    b.Property<string>("UserId");

                    b.HasKey("Id");
                });

            modelBuilder.Entity("EverReader.Models.EFDbEvernoteCredentials", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("AuthToken");

                    b.Property<DateTime>("Expires");

                    b.Property<string>("NotebookUrl");

                    b.Property<string>("Shard");

                    b.Property<string>("UserId");

                    b.Property<string>("WebApiUrlPrefix");

                    b.HasKey("Id");
                });

            modelBuilder.Entity("EverReader.Models.NotebookData", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("Guid");

                    b.Property<string>("Name");

                    b.Property<string>("UserId");

                    b.HasKey("Id");
                });

            modelBuilder.Entity("EverReader.Models.PrivateBetaUser", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("Email");

                    b.HasKey("Id");
                });

            modelBuilder.Entity("EverReader.Models.TagData", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("Guid");

                    b.Property<string>("Name");

                    b.Property<string>("UserId");

                    b.HasKey("Id");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRole", b =>
                {
                    b.Property<string>("Id");

                    b.Property<string>("ConcurrencyStamp")
                        .IsConcurrencyToken();

                    b.Property<string>("Name")
                        .Annotation("MaxLength", 256);

                    b.Property<string>("NormalizedName")
                        .Annotation("MaxLength", 256);

                    b.HasKey("Id");

                    b.Index("NormalizedName")
                        .Annotation("Relational:Name", "RoleNameIndex");

                    b.Annotation("Relational:TableName", "AspNetRoles");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim<string>", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("ClaimType");

                    b.Property<string>("ClaimValue");

                    b.Property<string>("RoleId");

                    b.HasKey("Id");

                    b.Annotation("Relational:TableName", "AspNetRoleClaims");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim<string>", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("ClaimType");

                    b.Property<string>("ClaimValue");

                    b.Property<string>("UserId");

                    b.HasKey("Id");

                    b.Annotation("Relational:TableName", "AspNetUserClaims");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin<string>", b =>
                {
                    b.Property<string>("LoginProvider");

                    b.Property<string>("ProviderKey");

                    b.Property<string>("ProviderDisplayName");

                    b.Property<string>("UserId");

                    b.HasKey("LoginProvider", "ProviderKey");

                    b.Annotation("Relational:TableName", "AspNetUserLogins");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole<string>", b =>
                {
                    b.Property<string>("UserId");

                    b.Property<string>("RoleId");

                    b.HasKey("UserId", "RoleId");

                    b.Annotation("Relational:TableName", "AspNetUserRoles");
                });

            // --- Relationships ---

            modelBuilder.Entity("EverReader.Models.ApplicationUser", b =>
                {
                    b.HasOne("EverReader.Models.EFDbEvernoteCredentials")
                        .WithMany()
                        .ForeignKey("EvernoteCredentialsId");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim<string>", b =>
                {
                    b.HasOne("Microsoft.AspNet.Identity.EntityFramework.IdentityRole")
                        .WithMany()
                        .ForeignKey("RoleId");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim<string>", b =>
                {
                    b.HasOne("EverReader.Models.ApplicationUser")
                        .WithMany()
                        .ForeignKey("UserId");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin<string>", b =>
                {
                    b.HasOne("EverReader.Models.ApplicationUser")
                        .WithMany()
                        .ForeignKey("UserId");
                });

            modelBuilder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole<string>", b =>
                {
                    b.HasOne("Microsoft.AspNet.Identity.EntityFramework.IdentityRole")
                        .WithMany()
                        .ForeignKey("RoleId");

                    b.HasOne("EverReader.Models.ApplicationUser")
                        .WithMany()
                        .ForeignKey("UserId");
                });
        }
    }
}
/********************************************************************++
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * --********************************************************************/

/*
 * Contains definition for PSSenderInfo, PSPrincipal, PSIdentity which are
 * used to provide remote user information to different plugin snapins
 * like Exchange.
 */

using System.Security.Principal;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.Serialization;
using Microsoft.PowerShell;

#if CORECLR
// Use stub for SerializableAttribute, SerializationInfo and ISerializable related types.
using Microsoft.PowerShell.CoreClr.Stubs;
#endif

namespace System.Management.Automation.Remoting
{
    /// <summary>
    /// This class is used in the server side remoting scenarios. This class
    /// holds information about the incoming connection like:
    /// (a) Client's TimeZone
    /// (b) Connecting User information
    /// (c) Connection String used by the user to connect to the server.
    /// </summary>
    [Serializable]
    public sealed class PSSenderInfo : ISerializable
    {
        #region Private Data

        // Arguments the client supplied (New-PSSessionOption -ApplicationArguments);
        // set by the deserialization constructor or the internal property setter.
        private PSPrimitiveDictionary _applicationArguments;

        #endregion

        #region Serialization

        /// <summary>
        /// Serialization. Serializes this instance by round-tripping it through
        /// its PSObject (CliXml) representation.
        /// </summary>
        /// <param name="info">Serialization store to populate.</param>
        /// <param name="context">Streaming context.</param>
        public void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            PSObject psObject = PSObject.AsPSObject(this);
            psObject.GetObjectData(info, context);
        }

        /// <summary>
        /// Deserialization constructor. Deliberately best-effort: if the "CliXml"
        /// payload is missing or cannot be rehydrated, an empty instance is produced
        /// rather than throwing.
        /// </summary>
        /// <param name="info">Serialization store to read from; may be null.</param>
        /// <param name="context">Streaming context.</param>
        private PSSenderInfo(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
            {
                return;
            }

            string serializedData = null;

            try
            {
                serializedData = info.GetValue("CliXml", typeof(string)) as string;
            }
            catch (Exception)
            {
                // When a workflow is run locally, there won't be PSSenderInfo
                return;
            }

            if (serializedData == null)
            {
                return;
            }

            try
            {
                PSObject result = PSObject.AsPSObject(PSSerializer.Deserialize(serializedData));
                PSSenderInfo senderInfo = DeserializingTypeConverter.RehydratePSSenderInfo(result);

                UserInfo = senderInfo.UserInfo;
                ConnectionString = senderInfo.ConnectionString;
                _applicationArguments = senderInfo._applicationArguments;
#if !CORECLR // TimeZone Not In CoreCLR
                this.clientTimeZone = senderInfo.ClientTimeZone;
#endif
            }
            catch (Exception)
            {
                // Ignore conversion errors
                return;
            }
        }

        #endregion

        #region Public Constructors

        /// <summary>
        /// Constructs PSSenderInfo from the connecting user's PSPrincipal and the
        /// HTTP URL the user connected with.
        /// </summary>
        /// <param name="userPrincipal">
        /// Connecting User Information
        /// </param>
        /// <param name="httpUrl">
        /// httpUrl element (from WSMAN_SENDER_DETAILS struct).
        /// </param>
        [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings", MessageId = "1#")]
        public PSSenderInfo(PSPrincipal userPrincipal, string httpUrl)
        {
            UserInfo = userPrincipal;
            ConnectionString = httpUrl;
        }

        #endregion

        #region Properties

        /// <summary>
        /// Contains information related to the user connecting to the server
        /// </summary>
        public PSPrincipal UserInfo
        {
            get;
            // No public set because PSSenderInfo/PSPrincipal is used by PSSessionConfiguration's
            // and usually they don't cache this data internally..so did not want to give
            // cmdlets/scripts a chance to modify these.
        }

#if !CORECLR // TimeZone Not In CoreCLR
        /// <summary>
        /// Contains the TimeZone information from the client machine.
        /// </summary>
        public TimeZone ClientTimeZone
        {
            get { return clientTimeZone; }

            internal set { clientTimeZone = value; }
        }

        private TimeZone clientTimeZone;
#endif

        /// <summary>
        /// Connection string used by the client to connect to the server. This is
        /// directly taken from WSMAN_SENDER_DETAILS struct (from wsman.h)
        /// </summary>
        public string ConnectionString
        {
            get;
            // No public set because PSSenderInfo/PSPrincipal is used by PSSessionConfiguration's
            // and usually they don't cache this data internally..so did not want to give
            // cmdlets/scripts a chance to modify these.
        }

        /// <summary>
        /// Application arguments (i.e. specified in New-PSSessionOptions -ApplicationArguments)
        /// </summary>
        public PSPrimitiveDictionary ApplicationArguments
        {
            get { return _applicationArguments; }

            internal set { _applicationArguments = value; }
        }

        #endregion
    }

    /// <summary>
    /// Defines the basic functionality of a PSPrincipal object.
    /// </summary>
    public sealed class PSPrincipal : IPrincipal
    {
        #region Private Data

        #endregion

        /// <summary>
        /// Gets the identity of the current user principal.
        /// </summary>
        public PSIdentity Identity
        {
            get;
            // No public set because PSSenderInfo/PSPrincipal is used by PSSessionConfiguration's
            // and usually they don't cache this data internally..so did not want to give
            // cmdlets/scripts a chance to modify these.
        }

        /// <summary>
        /// Gets the WindowsIdentity (if possible) representation of the current Identity.
        /// PSPrincipal can represent any user for example a LiveID user, network user within
        /// a domain etc. This property tries to convert the Identity to WindowsIdentity
        /// using the user token supplied.
        /// </summary>
        public WindowsIdentity WindowsIdentity
        {
            get;
            // No public set because PSSenderInfo/PSPrincipal is used by PSSessionConfiguration's
            // and usually they don't cache this data internally..so did not want to give
            // cmdlets/scripts a chance to modify these.
        }

        /// <summary>
        /// Gets the identity of the current principal.
        /// </summary>
        IIdentity IPrincipal.Identity
        {
            get { return this.Identity; }
        }

        /// <summary>
        /// Determines if the current principal belongs to a specified role.
        /// If we were able to get a WindowsIdentity then this will perform the
        /// check using the WindowsIdentity otherwise this will return false.
        /// </summary>
        /// <param name="role">Role name to check membership of.</param>
        /// <returns>
        /// If we were able to get a WindowsIdentity then this will perform the
        /// check using the WindowsIdentity otherwise this will return false.
        /// </returns>
        public bool IsInRole(string role)
        {
            if (null != WindowsIdentity)
            {
                // Get Windows Principal for this identity
                WindowsPrincipal windowsPrincipal = new WindowsPrincipal(WindowsIdentity);
                return windowsPrincipal.IsInRole(role);
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// Internal overload of IsInRole() taking a WindowsBuiltInRole enum value
        /// </summary>
        internal bool IsInRole(WindowsBuiltInRole role)
        {
            if (null != WindowsIdentity)
            {
                // Get Windows Principal for this identity
                WindowsPrincipal windowsPrincipal = new WindowsPrincipal(WindowsIdentity);
                return windowsPrincipal.IsInRole(role);
            }
            else
            {
                return false;
            }
        }

        #region Constructor

        /// <summary>
        /// Constructs PSPrincipal using PSIdentity and a WindowsIdentity
        /// </summary>
        /// <param name="identity">
        /// An instance of PSIdentity
        /// </param>
        /// <param name="windowsIdentity">
        /// An instance of WindowsIdentity, if psIdentity represents a windows user. This can be
        /// null.
        /// </param>
        public PSPrincipal(PSIdentity identity, WindowsIdentity windowsIdentity)
        {
            Identity = identity;
            WindowsIdentity = windowsIdentity;
        }

        #endregion
    }

    /// <summary>
    /// Defines the basic functionality of a PSIdentity object.
    /// </summary>
    public sealed class PSIdentity : IIdentity
    {
        #region Private Data

        #endregion

        /// <summary>
        /// Gets the type of authentication used.
        /// For a WSMan service authenticated user this will be one of the following:
        ///  WSMAN_DEFAULT_AUTHENTICATION
        ///  WSMAN_NO_AUTHENTICATION
        ///  WSMAN_AUTH_DIGEST
        ///  WSMAN_AUTH_NEGOTIATE
        ///  WSMAN_AUTH_BASIC
        ///  WSMAN_AUTH_KERBEROS
        ///  WSMAN_AUTH_CLIENT_CERTIFICATE
        ///  WSMAN_AUTH_LIVEID
        /// </summary>
        public string AuthenticationType { get; }

        /// <summary>
        /// Gets a value that indicates whether the user has been authenticated.
        /// </summary>
        public bool IsAuthenticated { get; }

        /// <summary>
        /// Gets the name of the user.
        /// </summary>
        public string Name { get; }

        /// <summary>
        /// Gets the certificate details of the user if supported, null otherwise.
        /// </summary>
        public PSCertificateDetails CertificateDetails { get; }

        #region Public Constructor

        /// <summary>
        /// Constructor used to construct a PSIdentity object
        /// </summary>
        /// <param name="authType">
        /// Type of authentication used to authenticate this user.
        /// For a WSMan service authenticated user this will be one of the following:
        ///  WSMAN_DEFAULT_AUTHENTICATION
        ///  WSMAN_NO_AUTHENTICATION
        ///  WSMAN_AUTH_DIGEST
        ///  WSMAN_AUTH_NEGOTIATE
        ///  WSMAN_AUTH_BASIC
        ///  WSMAN_AUTH_KERBEROS
        ///  WSMAN_AUTH_CLIENT_CERTIFICATE
        ///  WSMAN_AUTH_LIVEID
        /// </param>
        /// <param name="isAuthenticated">
        /// true if this user is authenticated.
        /// </param>
        /// <param name="userName">
        /// Name of the user
        /// </param>
        /// <param name="cert">
        /// Certificate details if Certificate authentication is used.
        /// </param>
        public PSIdentity(string authType, bool isAuthenticated, string userName, PSCertificateDetails cert)
        {
            AuthenticationType = authType;
            IsAuthenticated = isAuthenticated;
            Name = userName;
            CertificateDetails = cert;
        }

        #endregion
    }

    /// <summary>
    /// Represents the certificate of a user.
    /// </summary>
    public sealed class PSCertificateDetails
    {
        #region Private Data

        #endregion

        /// <summary>
        /// Gets Subject of the certificate.
        /// </summary>
        public string Subject { get; }

        /// <summary>
        /// Gets the issuer name of the certificate.
        /// </summary>
        public string IssuerName { get; }

        /// <summary>
        /// Gets the issuer thumb print.
        /// </summary>
        public string IssuerThumbprint { get; }

        #region Constructor

        /// <summary>
        /// Constructor used to construct a PSCertificateDetails object
        /// </summary>
        /// <param name="subject">
        /// Subject of the certificate.
        /// </param>
        /// <param name="issuerName">
        /// Issuer name of the certificate.
        /// </param>
        /// <param name="issuerThumbprint">
        /// Issuer thumb print of the certificate.
        /// </param>
        public PSCertificateDetails(string subject, string issuerName, string issuerThumbprint)
        {
            Subject = subject;
            IssuerName = issuerName;
            IssuerThumbprint = issuerThumbprint;
        }

        #endregion
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace OCR.Web.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// Descriptions are cached by model name in <see cref="GeneratedModels"/> so each
    /// type is only described once; recursive/graph-shaped models therefore terminate.
    /// </summary>
    public class ModelDescriptionGenerator
    {
        // Modify this to support more data annotation attributes.
        // Maps an attribute type to a function producing its human-readable documentation.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Modify this to add more default documentations.
        // Friendly names shown on the help page for common simple types.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so construction never fails when no documentation provider is registered.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        /// <summary>
        /// Creates a generator bound to the given HTTP configuration, from which the
        /// (optional) XML documentation provider is resolved on first use.
        /// </summary>
        /// <param name="config">The Web API configuration; must not be null.</param>
        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        // Cache of model descriptions keyed (case-insensitively) by model name.
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        /// <summary>
        /// Returns the cached description for <paramref name="modelType"/> or builds one,
        /// dispatching on the kind of type: simple, enum, collection, dictionary,
        /// key/value pair, or complex.
        /// </summary>
        /// <param name="modelType">The type to describe; must not be null.</param>
        /// <exception cref="InvalidOperationException">
        /// Two distinct types map to the same model name (resolve with [ModelName]).
        /// </exception>
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            // Describe Nullable<T> as its underlying T.
            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                if (genericArguments.Length == 1)
                {
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }
                if (genericArguments.Length == 2)
                {
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        // Change this to provide different name for the member.
        // Honors [JsonProperty] first, then [DataMember] (only when the declaring
        // type carries [DataContract]), falling back to the CLR member name.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        // Default documentation table first, then the registered provider; may return null.
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }
            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        // Collects recognized data-annotation attributes on a member into the
        // property model, Required first, rest in alphabetical order.
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };

            // Register before descending into members so self-referential types terminate.
            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);

            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);

            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
// NOTE(review): this region contained an unresolved git merge conflict
// (<<<<<<< HEAD ... ======= ... >>>>>>> ed93941), which made the file
// uncompilable. Resolved by keeping the incoming branch, whose md() tag
// table is a strict superset of HEAD's (it adds the red/orange/yellow/
// green/blue/indigo/violet tags). Three broken rich-text tag strings
// present in BOTH branches are also fixed: "<size={36>" (stray '{'),
// "#EEEEE" (5-digit hex color), and "<color=#b695ea" (missing '>').
using System.IO; // Well, here we are! The other main file!
using System.Collections.Generic;
using System.Text.RegularExpressions;
using Buffer=System.Text.StringBuilder;
using DateTime=System.DateTime;
using Type=System.Type;
using YamlDotNet.Serialization;
using UnityEngine;

public static class yml {
    public static Dictionary<string,Message> messages = new Dictionary<string,Message>();
    static Deserializer deserializer = new Deserializer();

    /** `yml` : **`constructor`**
     *
     * Instantiates a `Deserializer`, registers tags, and reads data from
     * the specified files. While the usage of `static`s *and*
     * `constructor`s isn't kosher in `Unity`, in this case it's ok, as
     * this has nothing to do with the `MonoBehaviour` loading /
     * instantiation process.
     **/
    static yml() {
        string pre = "tag:yaml.org,2002:", ext = ".yml", dir =
#if UNITY_EDITOR
            Directory.GetCurrentDirectory()+"/Assets/Resources/";
#else
            Application.dataPath+"/Resources/";
#endif

        // mapping of all the tags to their types
        var tags = new Dictionary<string,Type> {
            { "regex", typeof(Regex) },
            { "date", typeof(DateTime) },
            { "message", typeof(Message) }};
        foreach (var tag in tags)
            deserializer.RegisterTagMapping(pre+tag.Key, tag.Value);

        var files = new[] { "messages" };
        foreach (var file in files)
            foreach (var kvp in deserializer.Deserialize<Dictionary<string,Message>>(
                    GetReader(Path.Combine(dir,file)+ext)))
                messages[kvp.Key] = kvp.Value;
    }

    /** `GetReader()` : **`StringReader`**
     *
     * Gets the `*.yml` file in the main directory only if it exists and
     * has the proper extension.
     *
     * - `throw` : **`Exception`**
     *   if the file does not exist
     **/
    static StringReader GetReader(string file) {
        if (!File.Exists(file))
            throw new System.Exception("404"); //$"YAML 404: {file}");
        var buffer = new Buffer();
        foreach (var line in File.ReadAllLines(file))
            buffer.AppendLine(line);
        return new StringReader(buffer.ToString());
    }

    /** `Deserialize()` : **`function`**
     *
     * Called without type arguments, this will simply deserialize into
     * the `data` object. This is used only by the `static` constructor to
     * get data out of the rest of the files (skipping the few files which
     * are specified above).
     *
     * - `file` : **`string`**
     *   filename to look for
     *
     * - `throw` : **`IOException`**
     **/
    //static void Deserialize(string file) {
    //    foreach (var kvp in deserializer.Deserialize<Dictionary<string,object>>(GetReader(file))) messages[kvp.Key] = kvp.Value; }

    /** `DeserializeMessage()` : **`Message`**
     *
     * Returns the `Message` stored under key `s`, if it exists.
     *
     * - `s` : **`string`**
     *   key to look for
     *
     * - `throw` : **`Exception`**
     *   There is no entry at `messages[s]`.
     **/
    public static Message DeserializeMessage(string s) {
        Message o;
        if (!messages.TryGetValue(s,out o))
            throw new System.Exception("badcast"); //$"Bad cast: {typeof(T)} as {s}");
        return o;
    }

    /** `md()` : **`string`**
     *
     * Adds support for `Markdown`, and can be called on any `string`.
     * Formats the `Markdown` syntax into Unity rich text. Currently
     * removes all `<p>` (and other structural) tags.
     *
     * - `s` : **`string`**
     *   `string` to be formatted.
     **/
    public static string md(this string s) {
        return new Buffer(Markdown.Transform(s))
            .Replace("<em>","<i>")
            .Replace("</em>","</i>")
            .Replace("<blockquote>","<i>")
            .Replace("</blockquote>","</i>")
            .Replace("<strong>","<b>")
            .Replace("</strong>","</b>")
            .Replace("<h1>","<size=48><color=#98C8FC>")
            .Replace("</h1>","</color></size>")
            // was "<size={36><color=#EEEEE>": stray '{' and 5-digit hex
            .Replace("<h2>","<size=36><color=#EEEEEE>")
            .Replace("</h2>","</color></size>")
            .Replace("<h3>","<size=24><color=#DDDDDD>")
            .Replace("</h3>","</color></size>")
            .Replace("<pre>").Replace("</pre>")
            .Replace("<code>").Replace("</code>")
            .Replace("<ul>").Replace("</ul>")
            .Replace("<li>").Replace("</li>")
            .Replace("<p>").Replace("</p>")
            /* custom tags */
            .Replace("<help>","<color=#9CDF91>")
            .Replace("</help>","</color>")
            .Replace("<cmd>", "<color=#BBBBBB>")
            .Replace("</cmd>","</color>")
            .Replace("<warn>","<color=#FA2363>")
            .Replace("</warn>","</color>")
            .Replace("<cost>","<color=#FFDBBB>")
            .Replace("</cost>","</color>")
            .Replace("<red>","<color=#f26a6a>")
            .Replace("</red>","</color>")
            .Replace("<orange>", "<color=#f6a72e>")
            .Replace("</orange>", "</color>")
            .Replace("<yellow>", "<color=#fff16b>")
            .Replace("</yellow>", "</color>")
            .Replace("<green>", "<color=#73d279>")
            .Replace("</green>", "</color>")
            .Replace("<blue>", "<color=#73aed2>")
            .Replace("</blue>", "</color>")
            // was "<color=#b695ea": missing closing '>'
            .Replace("<indigo>", "<color=#b695ea>")
            .Replace("</indigo>", "</color>")
            .Replace("<violet>", "<color=#d495ea>")
            .Replace("</violet>", "</color>")
            .ToString();
    }

    /** `Replace()` : **`string`**
     *
     * Adds an overload to the existing `Replace()` that takes a single
     * argument, for removing things instead of replacing them.
     *
     * - `s` : **`string`**
     *   `string` to be formatted.
     *
     * - `newValue` : **`string`**
     *   substring to remove.
     **/
    public static string Replace(
                    this string s, string newValue) {
        return s.Replace(newValue,"");
    }

    // StringBuilder counterpart: remove every occurrence of `s`.
    public static Buffer Replace(
                    this Buffer sb, string s) {
        return sb.Replace(s,"");
    }
}
using System;

namespace SharpVectors.Dom.Svg
{
    /// <summary>
    /// An implementation of the 3-by-3 affine matrix that represents a
    /// geometric transform. Points are treated as row vectors, so a point
    /// <c>(x, y)</c> maps to
    /// <c>(x*m11 + y*m21 + dx, x*m12 + y*m22 + dy)</c>.
    /// </summary>
    public class SvgTransformF : ICloneable
    {
        #region Private Fields

        private float m11;
        private float m12;
        private float m21;
        private float m22;
        private float dx;
        private float dy;

        #endregion

        #region Constructors and Destructor

        /// <overloads>
        /// Initializes a new instance of the <see cref="SvgTransformF"/> class.
        /// </overloads>
        /// <summary>
        /// Initializes a new instance of the <see cref="SvgTransformF"/> class
        /// as the identity transform.
        /// </summary>
        public SvgTransformF()
        {
            m11 = 1.0f;
            m12 = 0.0f;
            m21 = 0.0f;
            m22 = 1.0f;
            dx  = 0.0f;
            dy  = 0.0f;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SvgTransformF"/> class
        /// to the geometric transform that maps the specified rectangle onto
        /// the parallelogram defined by three points.
        /// </summary>
        /// <param name="rect">
        /// A <see cref="SvgRectF"/> structure that represents the rectangle
        /// to be transformed.
        /// </param>
        /// <param name="plgpts">
        /// An array of three <see cref="SvgPointF"/> structures that represents
        /// the points of a parallelogram to which the upper-left, upper-right,
        /// and lower-left corners of the rectangle are to be transformed. The
        /// lower-right corner of the parallelogram is implied by the first
        /// three corners.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="plgpts"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentException">
        /// If the length of the <paramref name="plgpts"/> array is not 3.
        /// </exception>
        /// <exception cref="ArgumentOutOfRangeException">
        /// If the width or height of <paramref name="rect"/> is zero.
        /// </exception>
        public SvgTransformF(SvgRectF rect, SvgPointF[] plgpts)
        {
            if (plgpts == null)
            {
                throw new ArgumentNullException("plgpts");
            }
            if (plgpts.Length != 3)
            {
                throw new ArgumentException("plgpts");
            }
            if ((rect.Width == 0) || (rect.Height == 0))
            {
                throw new ArgumentOutOfRangeException("rect");
            }

            MapRectToRect(rect, plgpts);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SvgTransformF"/> class
        /// with the specified elements, in the order
        /// m11, m12, m21, m22, dx, dy.
        /// </summary>
        /// <param name="elements">An array of six items defining the transform.</param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="elements"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentException">
        /// If the length of the <paramref name="elements"/> array is not 6.
        /// </exception>
        public SvgTransformF(float[] elements)
        {
            if (elements == null)
            {
                throw new ArgumentNullException("elements");
            }
            if (elements.Length != 6)
            {
                throw new ArgumentException("elements");
            }

            this.m11 = elements[0];
            this.m12 = elements[1];
            this.m21 = elements[2];
            this.m22 = elements[3];
            this.dx  = elements[4];
            this.dy  = elements[5];
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SvgTransformF"/> class
        /// with the specified elements.
        /// </summary>
        /// <param name="m11">The value in the first row, first column.</param>
        /// <param name="m12">The value in the first row, second column.</param>
        /// <param name="m21">The value in the second row, first column.</param>
        /// <param name="m22">The value in the second row, second column.</param>
        /// <param name="dx">The value in the third row, first column (x translation).</param>
        /// <param name="dy">The value in the third row, second column (y translation).</param>
        public SvgTransformF(float m11, float m12, float m21, float m22,
            float dx, float dy)
        {
            this.m11 = m11;
            this.m12 = m12;
            this.m21 = m21;
            this.m22 = m22;
            this.dx  = dx;
            this.dy  = dy;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SvgTransformF"/> class
        /// with parameters copied from the specified instance (copy constructor).
        /// </summary>
        /// <param name="source">
        /// The <see cref="SvgTransformF"/> instance from which the parameters
        /// are to be copied.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="source"/> is <see langword="null"/>.
        /// </exception>
        public SvgTransformF(SvgTransformF source)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            this.m11 = source.m11;
            this.m12 = source.m12;
            this.m21 = source.m21;
            this.m22 = source.m22;
            this.dx  = source.dx;
            this.dy  = source.dy;
        }

        #endregion

        #region Public Properties

        /// <summary>
        /// Gets an array of floating-point values that represents the elements
        /// of this <see cref="SvgTransformF"/>, in the order
        /// m11, m12, m21, m22, dx, dy.
        /// </summary>
        public float[] Elements
        {
            get
            {
                return new float[] { m11, m12, m21, m22, dx, dy };
            }
        }

        /// <summary>
        /// Gets a value indicating whether this <see cref="SvgTransformF"/> is
        /// the identity matrix.
        /// </summary>
        public bool IsIdentity
        {
            get
            {
                return (m11 == 1.0f && m12 == 0.0f &&
                        m21 == 0.0f && m22 == 1.0f &&
                        dx  == 0.0f && dy  == 0.0f);
            }
        }

        /// <summary>
        /// Gets a value indicating whether this <see cref="SvgTransformF"/> is
        /// invertible, i.e. whether its determinant is non-zero.
        /// </summary>
        public bool IsInvertible
        {
            get
            {
                // Fixed: the determinant is m11*m22 - m12*m21
                // (previously computed as m11*m22 - m21*m11).
                return (Determinant != 0.0f);
            }
        }

        /// <summary>
        /// Gets the <c>x</c> translation value (the <c>dx</c> value, or the
        /// element in the third row and first column) of this transform.
        /// </summary>
        public float OffsetX
        {
            get
            {
                return dx;
            }
        }

        /// <summary>
        /// Gets the <c>y</c> translation value (the <c>dy</c> value, or the
        /// element in the third row and second column) of this transform.
        /// </summary>
        public float OffsetY
        {
            get
            {
                return dy;
            }
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// Determines whether the specified object is a <see cref="SvgTransformF"/>
        /// identical to this <see cref="SvgTransformF"/>.
        /// </summary>
        /// <param name="obj">The object to test.</param>
        /// <returns>
        /// <see langword="true"/> if <paramref name="obj"/> is a
        /// <see cref="SvgTransformF"/> with the same six elements; otherwise,
        /// <see langword="false"/>.
        /// </returns>
        public override bool Equals(Object obj)
        {
            SvgTransformF other = (obj as SvgTransformF);
            if (other != null)
            {
                return (other.m11 == m11 && other.m12 == m12 &&
                        other.m21 == m21 && other.m22 == m22 &&
                        other.dx  == dx  && other.dy  == dy);
            }

            return false;
        }

        /// <summary>
        /// Returns a hash code consistent with <see cref="Equals(object)"/>.
        /// </summary>
        /// <returns>The hash code for this <see cref="SvgTransformF"/>.</returns>
        public override int GetHashCode()
        {
            // Weak but Equals-consistent: equal matrices produce equal sums.
            return (int)(m11 + m12 + m21 + m22 + dx + dy);
        }

        /// <summary>
        /// Inverts this <see cref="SvgTransformF"/>, if it is invertible;
        /// otherwise leaves it unchanged.
        /// </summary>
        public void Invert()
        {
            // Fixed determinant formula (was m11*m22 - m21*m11).
            float determinant = Determinant;
            if (determinant != 0.0f)
            {
                // Inverse of the 2x2 linear part...
                float nm11 = this.m22 / determinant;
                float nm12 = -(this.m12 / determinant);
                float nm21 = -(this.m21 / determinant);
                float nm22 = this.m11 / determinant;
                // ...and the inverse translation -(dx, dy) * Minv.
                // Fixed: the m12/m21 factors were swapped previously.
                float ndx = (this.m21 * this.dy - this.m22 * this.dx) / determinant;
                float ndy = (this.m12 * this.dx - this.m11 * this.dy) / determinant;

                this.m11 = nm11;
                this.m12 = nm12;
                this.m21 = nm21;
                this.m22 = nm22;
                this.dx  = ndx;
                this.dy  = ndy;
            }
        }

        /// <overloads>
        /// Multiplies this <see cref="SvgTransformF"/> by the specified
        /// <see cref="SvgTransformF"/> by appending or prepending it.
        /// </overloads>
        /// <summary>
        /// Multiplies this <see cref="SvgTransformF"/> by the specified
        /// <see cref="SvgTransformF"/> by prepending it.
        /// </summary>
        /// <param name="matrix">
        /// The <see cref="SvgTransformF"/> by which this transform is to be
        /// multiplied.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="matrix"/> is <see langword="null"/>.
        /// </exception>
        public void Multiply(SvgTransformF matrix)
        {
            if (matrix == null)
            {
                throw new ArgumentNullException("matrix");
            }

            Multiply(matrix, this);
        }

        /// <summary>
        /// Multiplies this <see cref="SvgTransformF"/> by the specified matrix,
        /// in the specified order.
        /// </summary>
        /// <param name="matrix">
        /// The <see cref="SvgTransformF"/> by which this transform is to be
        /// multiplied.
        /// </param>
        /// <param name="order">
        /// The <see cref="SvgTransformOrder"/> that represents the order of
        /// the multiplication.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="matrix"/> is <see langword="null"/>.
        /// </exception>
        public void Multiply(SvgTransformF matrix, SvgTransformOrder order)
        {
            if (matrix == null)
            {
                throw new ArgumentNullException("matrix");
            }

            if (order == SvgTransformOrder.Prepend)
            {
                Multiply(matrix, this);
            }
            else
            {
                Multiply(this, matrix);
            }
        }

        /// <summary>
        /// Resets this <see cref="SvgTransformF"/> to the identity matrix.
        /// </summary>
        public void Reset()
        {
            m11 = 1.0f;
            m12 = 0.0f;
            m21 = 0.0f;
            m22 = 1.0f;   // fixed: was 0.1f, which is not the identity
            dx  = 0.0f;
            dy  = 0.0f;
        }

        /// <overloads>
        /// Applies a clockwise rotation of the specified angle about the
        /// origin to this <see cref="SvgTransformF"/>.
        /// </overloads>
        /// <summary>
        /// Applies a clockwise rotation of the specified angle about the
        /// origin to this <see cref="SvgTransformF"/> by prepending it.
        /// </summary>
        /// <param name="angle">The angle (extent) of the rotation, in degrees.</param>
        public void Rotate(float angle)
        {
            double radians = (angle * (Math.PI / 180.0));
            float cos = (float)(Math.Cos(radians));
            float sin = (float)(Math.Sin(radians));

            float nm11 = cos * this.m11 + sin * this.m21;
            float nm12 = cos * this.m12 + sin * this.m22;
            float nm21 = cos * this.m21 - sin * this.m11;
            float nm22 = cos * this.m22 - sin * this.m12;

            this.m11 = nm11;
            this.m12 = nm12;
            this.m21 = nm21;
            this.m22 = nm22;
        }

        /// <summary>
        /// Applies a clockwise rotation of the specified angle about the
        /// origin to this <see cref="SvgTransformF"/>, in the specified order.
        /// </summary>
        /// <param name="angle">The angle (extent) of the rotation, in degrees.</param>
        /// <param name="order">
        /// A <see cref="SvgTransformOrder"/> that specifies the order (append
        /// or prepend) in which the rotation is applied.
        /// </param>
        public void Rotate(float angle, SvgTransformOrder order)
        {
            double radians = (angle * (Math.PI / 180.0));
            float cos = (float)(Math.Cos(radians));
            float sin = (float)(Math.Sin(radians));

            if (order == SvgTransformOrder.Prepend)
            {
                float nm11 = cos * this.m11 + sin * this.m21;
                float nm12 = cos * this.m12 + sin * this.m22;
                float nm21 = cos * this.m21 - sin * this.m11;
                float nm22 = cos * this.m22 - sin * this.m12;

                this.m11 = nm11;
                this.m12 = nm12;
                this.m21 = nm21;
                this.m22 = nm22;
            }
            else
            {
                float nm11 = this.m11 * cos - this.m12 * sin;
                float nm12 = this.m11 * sin + this.m12 * cos;
                float nm21 = this.m21 * cos - this.m22 * sin;
                float nm22 = this.m21 * sin + this.m22 * cos;
                float ndx  = this.dx * cos - this.dy * sin;
                float ndy  = this.dx * sin + this.dy * cos;

                this.m11 = nm11;
                this.m12 = nm12;
                this.m21 = nm21;
                this.m22 = nm22;
                this.dx  = ndx;
                this.dy  = ndy;
            }
        }

        /// <overloads>
        /// Applies a clockwise rotation about the specified point to this
        /// <see cref="SvgTransformF"/> by appending or prepending the rotation.
        /// </overloads>
        /// <summary>
        /// Applies a clockwise rotation about the specified point to this
        /// <see cref="SvgTransformF"/> by prepending the rotation.
        /// </summary>
        /// <param name="angle">The angle (extent) of the rotation, in degrees.</param>
        /// <param name="point">
        /// A <see cref="SvgPointF"/> that represents the center of the rotation.
        /// </param>
        public void RotateAt(float angle, SvgPointF point)
        {
            // Translate the pivot to the origin, rotate, translate back.
            Translate(point.X, point.Y);
            Rotate(angle);
            Translate(-point.X, -point.Y);
        }

        /// <summary>
        /// Applies a clockwise rotation about the specified point to this
        /// <see cref="SvgTransformF"/> in the specified order.
        /// </summary>
        /// <param name="angle">The angle (extent) of the rotation, in degrees.</param>
        /// <param name="point">
        /// A <see cref="SvgPointF"/> that represents the center of the rotation.
        /// </param>
        /// <param name="order">
        /// A <see cref="SvgTransformOrder"/> that specifies the order (append
        /// or prepend) in which the rotation is applied.
        /// </param>
        public void RotateAt(float angle, SvgPointF point, SvgTransformOrder order)
        {
            if (order == SvgTransformOrder.Prepend)
            {
                Translate(point.X, point.Y);
                Rotate(angle);
                Translate(-point.X, -point.Y);
            }
            else
            {
                Translate(-point.X, -point.Y);
                Rotate(angle, SvgTransformOrder.Append);
                Translate(point.X, point.Y);
            }
        }

        /// <overloads>
        /// Applies the specified scale vector to this <see cref="SvgTransformF"/>
        /// by appending or prepending it.
        /// </overloads>
        /// <summary>
        /// Applies the specified scale vector to this <see cref="SvgTransformF"/>
        /// by prepending it.
        /// </summary>
        /// <param name="scaleX">The scale factor in the x-axis direction.</param>
        /// <param name="scaleY">The scale factor in the y-axis direction.</param>
        public void Scale(float scaleX, float scaleY)
        {
            m11 *= scaleX;
            m12 *= scaleX;
            m21 *= scaleY;
            m22 *= scaleY;
        }

        /// <summary>
        /// Applies the specified scale vector to this <see cref="SvgTransformF"/>
        /// using the specified order.
        /// </summary>
        /// <param name="scaleX">The scale factor in the x-axis direction.</param>
        /// <param name="scaleY">The scale factor in the y-axis direction.</param>
        /// <param name="order">
        /// A <see cref="SvgTransformOrder"/> that specifies the order (append
        /// or prepend) in which the scale vector is applied.
        /// </param>
        public void Scale(float scaleX, float scaleY, SvgTransformOrder order)
        {
            if (order == SvgTransformOrder.Prepend)
            {
                m11 *= scaleX;
                m12 *= scaleX;
                m21 *= scaleY;
                m22 *= scaleY;
            }
            else
            {
                m11 *= scaleX;
                m12 *= scaleY;
                m21 *= scaleX;
                m22 *= scaleY;
                dx  *= scaleX;
                dy  *= scaleY;
            }
        }

        /// <overloads>
        /// Applies the specified shear vector to this <see cref="SvgTransformF"/>
        /// by appending or prepending it.
        /// </overloads>
        /// <summary>
        /// Applies the specified shear vector to this <see cref="SvgTransformF"/>
        /// by prepending it.
        /// </summary>
        /// <param name="shearX">The horizontal shear factor.</param>
        /// <param name="shearY">The vertical shear factor.</param>
        public void Shear(float shearX, float shearY)
        {
            float nm11 = this.m11 + this.m21 * shearY;
            float nm12 = this.m12 + this.m22 * shearY;
            float nm21 = this.m11 * shearX + this.m21;
            float nm22 = this.m12 * shearX + this.m22;

            this.m11 = nm11;
            this.m12 = nm12;
            this.m21 = nm21;
            this.m22 = nm22;
        }

        /// <summary>
        /// Applies the specified shear vector to this <see cref="SvgTransformF"/>
        /// in the specified order.
        /// </summary>
        /// <param name="shearX">The horizontal shear factor.</param>
        /// <param name="shearY">The vertical shear factor.</param>
        /// <param name="order">
        /// A <see cref="SvgTransformOrder"/> that specifies the order (append
        /// or prepend) in which the shear is applied.
        /// </param>
        public void Shear(float shearX, float shearY, SvgTransformOrder order)
        {
            if (order == SvgTransformOrder.Prepend)
            {
                float nm11 = this.m11 + this.m21 * shearY;
                float nm12 = this.m12 + this.m22 * shearY;
                float nm21 = this.m11 * shearX + this.m21;
                float nm22 = this.m12 * shearX + this.m22;

                this.m11 = nm11;
                this.m12 = nm12;
                this.m21 = nm21;
                this.m22 = nm22;
            }
            else
            {
                float nm11 = this.m11 + this.m12 * shearX;
                float nm12 = this.m11 * shearY + this.m12;
                float nm21 = this.m21 + this.m22 * shearX;
                float nm22 = this.m21 * shearY + this.m22;
                float ndx  = this.dx + this.dy * shearX;
                float ndy  = this.dx * shearY + this.dy;

                this.m11 = nm11;
                this.m12 = nm12;
                this.m21 = nm21;
                this.m22 = nm22;
                this.dx  = ndx;
                this.dy  = ndy;
            }
        }

        /// <overloads>
        /// Applies the specified translation vector to this
        /// <see cref="SvgTransformF"/> by appending or prepending it.
        /// </overloads>
        /// <summary>
        /// Applies the specified translation vector to this
        /// <see cref="SvgTransformF"/> by prepending it.
        /// </summary>
        /// <param name="offsetX">The <c>x</c> value by which to translate.</param>
        /// <param name="offsetY">The <c>y</c> value by which to translate.</param>
        public void Translate(float offsetX, float offsetY)
        {
            dx += offsetX * m11 + offsetY * m21;
            dy += offsetX * m12 + offsetY * m22;
        }

        /// <summary>
        /// Applies the specified translation vector to this
        /// <see cref="SvgTransformF"/> in the specified order.
        /// </summary>
        /// <param name="offsetX">The <c>x</c> value by which to translate.</param>
        /// <param name="offsetY">The <c>y</c> value by which to translate.</param>
        /// <param name="order">
        /// A <see cref="SvgTransformOrder"/> that specifies the order (append
        /// or prepend) in which the translation is applied.
        /// </param>
        public void Translate(float offsetX, float offsetY, SvgTransformOrder order)
        {
            if (order == SvgTransformOrder.Prepend)
            {
                dx += offsetX * m11 + offsetY * m21;
                dy += offsetX * m12 + offsetY * m22;
            }
            else
            {
                dx += offsetX;
                dy += offsetY;
            }
        }

        /// <summary>
        /// Applies the geometric transform represented by this
        /// <see cref="SvgTransformF"/> to a specified point.
        /// </summary>
        /// <param name="x">The input <c>x</c> value of the point.</param>
        /// <param name="y">The input <c>y</c> value of the point.</param>
        /// <param name="ox">The transformed <c>x</c> value of the point.</param>
        /// <param name="oy">The transformed <c>y</c> value of the point.</param>
        public void Transform(float x, float y, out float ox, out float oy)
        {
            ox = x * m11 + y * m21 + dx;
            oy = x * m12 + y * m22 + dy;
        }

        /// <summary>
        /// Applies the reverse (inverse) geometric transform represented by
        /// this <see cref="SvgTransformF"/> to a specified point. If the
        /// transform is not invertible, the point is returned unchanged.
        /// </summary>
        /// <param name="x">The input <c>x</c> value of the point.</param>
        /// <param name="y">The input <c>y</c> value of the point.</param>
        /// <param name="ox">The transformed <c>x</c> value of the point.</param>
        /// <param name="oy">The transformed <c>y</c> value of the point.</param>
        public void ReverseTransform(float x, float y, out float ox, out float oy)
        {
            // Fixed determinant formula (was m11*m22 - m21*m11).
            float determinant = Determinant;
            if (determinant != 0.0f)
            {
                float nm11 = this.m22 / determinant;
                float nm12 = -(this.m12 / determinant);
                float nm21 = -(this.m21 / determinant);
                float nm22 = this.m11 / determinant;

                // Fixed: undo the translation before applying the inverse of
                // the linear part, so this is the true inverse of Transform().
                // (Use TransformVectors for the translation-free mapping.)
                float tx = x - this.dx;
                float ty = y - this.dy;

                ox = tx * nm11 + ty * nm21;
                oy = tx * nm12 + ty * nm22;
            }
            else
            {
                ox = x;
                oy = y;
            }
        }

        /// <summary>
        /// Applies the geometric transform represented by this
        /// <see cref="SvgTransformF"/> to a specified array of points,
        /// in place.
        /// </summary>
        /// <param name="pts">
        /// An array of <see cref="SvgPointF"/> structures that represents the
        /// points to transform.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="pts"/> is <see langword="null"/>.
        /// </exception>
        public void TransformPoints(SvgPointF[] pts)
        {
            if (pts == null)
            {
                throw new ArgumentNullException("pts");
            }

            int nLength = pts.Length;

            for (int i = nLength - 1; i >= 0; --i)
            {
                float x = pts[i].X;
                float y = pts[i].Y;

                pts[i].ValueX = x * m11 + y * m21 + dx;
                pts[i].ValueY = x * m12 + y * m22 + dy;
            }
        }

        /// <summary>
        /// Multiplies each vector in an array by the matrix, in place. The
        /// translation elements of this matrix (third row) are ignored.
        /// </summary>
        /// <param name="pts">
        /// An array of <see cref="SvgPointF"/> structures that represents the
        /// points to transform.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <paramref name="pts"/> is <see langword="null"/>.
        /// </exception>
        public void TransformVectors(SvgPointF[] pts)
        {
            if (pts == null)
            {
                throw new ArgumentNullException("pts");
            }

            int nLength = pts.Length;

            for (int i = nLength - 1; i >= 0; --i)
            {
                float x = pts[i].X;
                float y = pts[i].Y;

                pts[i].ValueX = x * m11 + y * m21;
                pts[i].ValueY = x * m12 + y * m22;
            }
        }

        #endregion

        #region Private Properties and Methods

        /// <summary>
        /// Gets the determinant of the 2-by-2 linear part of this transform.
        /// </summary>
        private float Determinant
        {
            get
            {
                return (m11 * m22 - m12 * m21);
            }
        }

        /// <summary>
        /// Sets this transform to the product <c>a * b</c> (row-vector
        /// convention), so <c>a</c> is applied first.
        /// </summary>
        private void Multiply(SvgTransformF a, SvgTransformF b)
        {
            float nm11 = a.m11 * b.m11 + a.m12 * b.m21;
            float nm12 = a.m11 * b.m12 + a.m12 * b.m22;
            float nm21 = a.m21 * b.m11 + a.m22 * b.m21;
            float nm22 = a.m21 * b.m12 + a.m22 * b.m22;
            float ndx  = a.dx * b.m11 + a.dy * b.m21 + b.dx;
            float ndy  = a.dx * b.m12 + a.dy * b.m22 + b.dy;

            this.m11 = nm11;
            this.m12 = nm12;
            this.m21 = nm21;
            this.m22 = nm22;
            this.dx  = ndx;
            this.dy  = ndy;
        }

        /// <summary>
        /// Initializes this transform so that it maps the corners of
        /// <paramref name="rect"/> onto the parallelogram defined by
        /// <paramref name="plgpts"/>.
        /// </summary>
        private void MapRectToRect(SvgRectF rect, SvgPointF[] plgpts)
        {
            // Edge vectors of the target parallelogram.
            SvgPointF pt1 = new SvgPointF(plgpts[1].X - plgpts[0].X,
                plgpts[1].Y - plgpts[0].Y);
            SvgPointF pt2 = new SvgPointF(plgpts[2].X - plgpts[0].X,
                plgpts[2].Y - plgpts[0].Y);

            this.m11 = pt1.X / rect.Width;
            this.m12 = pt1.Y / rect.Width;
            this.m21 = pt2.X / rect.Height;
            this.m22 = pt2.Y / rect.Height;
            this.dx  = plgpts[0].X - rect.X / rect.Width * pt1.X
                - rect.Y / rect.Height * pt2.X;
            this.dy  = plgpts[0].Y - rect.X / rect.Width * pt1.Y
                - rect.Y / rect.Height * pt2.Y;
        }

        #endregion

        #region ICloneable Members

        /// <summary>
        /// Creates a new <see cref="SvgTransformF"/> that is a deep copy of
        /// the current instance.
        /// </summary>
        /// <returns>A new object that is a copy of this instance.</returns>
        public SvgTransformF Clone()
        {
            return new SvgTransformF(this.m11, this.m12,
                this.m21, this.m22, this.dx, this.dy);
        }

        /// <summary>
        /// Creates a new <see cref="SvgTransformF"/> that is a deep copy of
        /// the current instance.
        /// </summary>
        /// <returns>A new object that is a copy of this instance.</returns>
        object ICloneable.Clone()
        {
            return this.Clone();
        }

        #endregion
    }
}
using System;
using System.Collections;
using System.Data;
using System.Data.OleDb;
using System.Drawing;
using System.Drawing.Printing;
using System.Globalization;
using System.Reflection;
using System.Text;
using PCSComUtils.Common;
using PCSUtils.Utils;
using C1.Win.C1Preview;
using C1.C1Report;
using PCSUtils.Framework.ReportFrame;
using C1PrintPreviewDialog = PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog;

namespace PurchaseReportImportPartAsItem
{
    /// <summary>
    /// Dynamic report: purchase report for imported parts treated as items.
    /// Implements <see cref="IDynamicReport"/> so the PCS report viewer can
    /// host it; the entry point invoked by the framework is
    /// <see cref="ExecuteReport"/>.
    /// </summary>
    public class PurchaseReportImportPartAsItem : MarshalByRefObject, IDynamicReport
    {
        #region IDynamicReport Members

        private string mConnectionString;
        /// <summary>
        /// Connection string provided to the dynamic report, allowing it to
        /// access the PCS database.
        /// </summary>
        public string PCSConnectionString
        {
            get { return mConnectionString; }
            set { mConnectionString = value; }
        }

        private ReportBuilder mReportBuilder;
        /// <summary>
        /// Report-builder utility object; the dynamic report can use it to
        /// render, modify and lay out the report.
        /// </summary>
        public ReportBuilder PCSReportBuilder
        {
            get { return mReportBuilder; }
            set { mReportBuilder = value; }
        }

        private C1PrintPreviewControl mViewer;
        /// <summary>
        /// Report-viewer object; allows the dynamic report to manipulate the
        /// viewer and modify the report after it has been rendered.
        /// </summary>
        public C1PrintPreviewControl PCSReportViewer
        {
            get { return mViewer; }
            set { mViewer = value; }
        }

        private object mResult;
        /// <summary>
        /// Stores any additional result; usually the return DataTable shown
        /// on the report-viewer form's grid.
        /// </summary>
        public object Result
        {
            get { return mResult; }
            set { mResult = value; }
        }

        private bool mUseEngine;
        /// <summary>
        /// Notifies PCS whether the report-rendering process is run by this
        /// IDynamicReport or by the viewer form's own engine.
        /// </summary>
        public bool UseReportViewerRenderEngine
        {
            get { return mUseEngine; }
            set { mUseEngine = value; }
        }

        private string mReportFolder;
        /// <summary>
        /// The PCS report-definition folder path, where layout files live.
        /// </summary>
        public string ReportDefinitionFolder
        {
            get { return mReportFolder; }
            set { mReportFolder = value; }
        }

        private string mLayoutFile;
        /// <summary>
        /// The layout file PCS instructs this report to use (assigned by the
        /// report-viewer form from the report configuration entry).
        /// </summary>
        public string ReportLayoutFile
        {
            get { return mLayoutFile; }
            set { mLayoutFile = value; }
        }

        /// <summary>
        /// Invokes, by name, a method declared in this dynamic report.
        /// </summary>
        /// <param name="pstrMethod">Name of the method to call.</param>
        /// <param name="pobjParameters">Arguments for that method.</param>
        /// <returns>The invoked method's return value.</returns>
        public object Invoke(string pstrMethod, object[] pobjParameters)
        {
            return this.GetType().InvokeMember(pstrMethod,
                BindingFlags.InvokeMethod, null, this, pobjParameters);
        }

        #endregion

        /// <summary>
        /// Builds and previews the purchase report for the given month.
        /// </summary>
        /// <param name="pstrCCNID">CCN identifier (numeric string).</param>
        /// <param name="pstrYear">Report year.</param>
        /// <param name="pstrMonth">Report month (1-12).</param>
        /// <param name="pstrMakerID">Maker (supplier) party identifier.</param>
        /// <param name="pstrTarget">Target percentage (numeric string).</param>
        /// <param name="pstrProductID">
        /// Optional comma-separated product identifiers; empty for all.
        /// </param>
        /// <param name="pstrCurrencyID">Currency identifier.</param>
        /// <param name="pstrExRate">Exchange rate (numeric string).</param>
        /// <returns>The report data, also shown on the viewer grid.</returns>
        public DataTable ExecuteReport(string pstrCCNID, string pstrYear,
            string pstrMonth, string pstrMakerID, string pstrTarget,
            string pstrProductID, string pstrCurrencyID, string pstrExRate)
        {
            // First and last instant of the selected month.
            DateTime dtmStartOfMonth = new DateTime(Convert.ToInt32(pstrYear),
                Convert.ToInt32(pstrMonth), 1);
            DateTime dtmEndOfMonth = dtmStartOfMonth.AddMonths(1).AddDays(-1)
                .AddHours(23).AddMinutes(59).AddSeconds(59);

            DataTable dtbData = GetInvoice(pstrCCNID, pstrMakerID,
                dtmStartOfMonth, dtmEndOfMonth, pstrProductID, pstrTarget,
                pstrExRate);

            #region report

            C1Report rptReport = new C1Report();
            mLayoutFile = "PurchaseReportImportPartAsItem.xml";
            string strLayoutPath = mReportFolder + "\\" + mLayoutFile;
            rptReport.Load(strLayoutPath,
                rptReport.GetReportInfo(strLayoutPath)[0]);
            rptReport.Layout.PaperSize = PaperKind.A4;

            #region report parameter
            // Each field is set best-effort: the empty catches deliberately
            // ignore fields missing from the layout so an edited layout file
            // does not break the report.
            try { rptReport.Fields["fldCCN"].Text = GetCCN(pstrCCNID); }
            catch {}
            try { rptReport.Fields["fldSupplier"].Text = GetSupplier(pstrMakerID); }
            catch {}
            try { rptReport.Fields["fldMonth"].Text = dtmStartOfMonth.ToString("MMM-yyyy"); }
            catch {}
            try { rptReport.Fields["fldTarget"].Text = pstrTarget + "%"; }
            catch {}
            try
            {
                // Fixed: Split() always returns at least one element, so the
                // old "Length > 0" test made the single-product branch
                // unreachable. Multi-selection means the list contains a comma.
                if (pstrProductID.Length > 0 && pstrProductID.IndexOf(',') >= 0)
                    rptReport.Fields["fldPartNo"].Text = "Multi-Selection";
                else if (pstrProductID.Length > 0)
                    rptReport.Fields["fldPartNo"].Text = GetPartNo(pstrProductID);
            }
            catch {}
            try { rptReport.Fields["fldCurrency"].Text = GetCurrency(pstrCurrencyID); }
            catch {}
            try { rptReport.Fields["fldExRate"].Text = pstrExRate; }
            catch {}
            #endregion

            // Set the datasource object that provides data to the report.
            rptReport.DataSource.Recordset = dtbData;
            // Render the report.
            rptReport.Render();

            // Show the rendered document in the print-preview dialog.
            C1PrintPreviewDialog ppvViewer = new C1PrintPreviewDialog();
            ppvViewer.FormTitle = "Purchase Report Import Part As Items";
            ppvViewer.ReportViewer.Document = rptReport.Document;
            ppvViewer.Show();

            #endregion

            return dtbData;
        }

        /// <summary>
        /// Queries invoice data grouped by product for the given CCN, maker
        /// and date range.
        /// </summary>
        /// <returns>A DataTable with CIP, Quantity, Target, EXGO and part columns.</returns>
        private DataTable GetInvoice(string pstrCCNID, string pstrMakerID,
            DateTime pdtmStartOfMonth, DateTime pdtmEndOfMonth,
            string pstrProductID, string pstrTarget, string pstrExRate)
        {
            // Values embedded in the SQL text are validated as numeric first
            // to prevent SQL injection; the remaining values travel as
            // positional OleDb parameters.
            string strCCNID    = RequireNumeric(pstrCCNID, "pstrCCNID");
            string strMakerID  = RequireNumeric(pstrMakerID, "pstrMakerID");
            string strTarget   = RequireNumeric(pstrTarget, "pstrTarget");
            string strProducts = (pstrProductID.Length > 0)
                ? RequireNumericList(pstrProductID, "pstrProductID")
                : string.Empty;

            string strSql =
                "SELECT SUM(CIPAmount * PO_InvoiceMaster.ExchangeRate)/? AS CIP, " +
                " SUM(InvoiceQuantity) AS Quantity, " + strTarget + " AS Target," +
                " SUM(InvoiceQuantity * PO_PurchaseOrderDetail.UnitPrice * PO_PurchaseOrderMaster.ExchangeRate)/? AS EXGO," +
                " PO_InvoiceDetail.ProductID, ITM_Product.Code AS PartNo," +
                " ITM_Product.Description AS PartName, ITM_Product.Revision AS Model" +
                " FROM PO_InvoiceDetail JOIN PO_InvoiceMaster" +
                " ON PO_InvoiceDetail.InvoiceMasterID = PO_InvoiceMaster.InvoiceMasterID" +
                " JOIN PO_PurchaseOrderMaster" +
                " ON PO_InvoiceDetail.PurchaseOrderMasterID = PO_PurchaseOrderMaster.PurchaseOrderMasterID" +
                " JOIN PO_PurchaseOrderDetail" +
                " ON PO_InvoiceDetail.PurchaseOrderDetailID = PO_PurchaseOrderDetail.PurchaseOrderDetailID" +
                " JOIN ITM_Product" +
                " ON PO_InvoiceDetail.ProductID = ITM_Product.ProductID" +
                " WHERE PO_InvoiceMaster.CCNID = " + strCCNID +
                " AND PO_PurchaseOrderMaster.MakerID = " + strMakerID +
                " AND PO_InvoiceMaster.PostDate >= ?" +
                " AND PO_InvoiceMaster.PostDate <= ?";
            if (strProducts.Length > 0)
                strSql += " AND PO_InvoiceDetail.ProductID IN (" + strProducts + ")";
            strSql += " GROUP BY PO_InvoiceDetail.ProductID, ITM_Product.Code, ITM_Product.Description, ITM_Product.Revision" +
                " ORDER BY ITM_Product.Code, ITM_Product.Description, ITM_Product.Revision";

            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(strSql, oconPCS))
            {
                // OleDb parameters are positional; order must match the '?'s.
                ocmdPCS.Parameters.Add(new OleDbParameter("FirstNum", OleDbType.Decimal)).Value = pstrExRate;
                ocmdPCS.Parameters.Add(new OleDbParameter("NextNum", OleDbType.Decimal)).Value = pstrExRate;
                ocmdPCS.Parameters.Add(new OleDbParameter("StartOfMonth", OleDbType.Date)).Value = pdtmStartOfMonth;
                ocmdPCS.Parameters.Add(new OleDbParameter("EndOfMonth", OleDbType.Date)).Value = pdtmEndOfMonth;

                oconPCS.Open();
                DataTable dtbData = new DataTable();
                OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS);
                odadPCS.Fill(dtbData);
                return dtbData;
            }
        }

        /// <summary>Returns "Code (Name)" of the maker party.</summary>
        private string GetSupplier(string pstrMakerID)
        {
            return ExecuteScalarString(
                "SELECT Code + ' (' + Name + ')' FROM MST_Party WHERE PartyID = " +
                RequireNumeric(pstrMakerID, "pstrMakerID"));
        }

        /// <summary>Returns "Code (Description)" of the CCN.</summary>
        private string GetCCN(string pstrCCNID)
        {
            return ExecuteScalarString(
                "SELECT Code + ' (' + Description + ')' FROM MST_CCN WHERE CCNID = " +
                RequireNumeric(pstrCCNID, "pstrCCNID"));
        }

        /// <summary>Returns "Code (Description)" of the product.</summary>
        private string GetPartNo(string pstrProductID)
        {
            return ExecuteScalarString(
                "SELECT Code + ' (' + Description + ')' FROM ITM_Product WHERE ProductID = " +
                RequireNumeric(pstrProductID, "pstrProductID"));
        }

        /// <summary>Returns "Code (Name)" of the currency.</summary>
        private string GetCurrency(string pstrCurrencyID)
        {
            return ExecuteScalarString(
                "SELECT Code + ' (' + Name + ')' FROM MST_Currency WHERE CurrencyID = " +
                RequireNumeric(pstrCurrencyID, "pstrCurrencyID"));
        }

        /// <summary>
        /// Runs a scalar query against the PCS database and returns the
        /// result as a string. Shared by the lookup helpers above.
        /// </summary>
        private string ExecuteScalarString(string pstrSql)
        {
            using (OleDbConnection oconPCS = new OleDbConnection(mConnectionString))
            using (OleDbCommand ocmdPCS = new OleDbCommand(pstrSql, oconPCS))
            {
                oconPCS.Open();
                object objResult = ocmdPCS.ExecuteScalar();
                return objResult.ToString();
            }
        }

        /// <summary>
        /// Validates that <paramref name="pstrValue"/> parses as an invariant
        /// decimal number, so it is safe to embed in SQL text.
        /// </summary>
        /// <exception cref="ArgumentException">If the value is not numeric.</exception>
        private static string RequireNumeric(string pstrValue, string pstrName)
        {
            decimal decDummy;
            string strTrimmed = (pstrValue == null) ? null : pstrValue.Trim();
            if (strTrimmed == null || strTrimmed.Length == 0 ||
                !decimal.TryParse(strTrimmed, NumberStyles.Number,
                    CultureInfo.InvariantCulture, out decDummy))
            {
                throw new ArgumentException("Value must be numeric.", pstrName);
            }
            return strTrimmed;
        }

        /// <summary>
        /// Validates a comma-separated list of numeric identifiers and
        /// returns it re-joined, safe to embed in an IN (...) clause.
        /// </summary>
        /// <exception cref="ArgumentException">If any item is not numeric.</exception>
        private static string RequireNumericList(string pstrValues, string pstrName)
        {
            string[] astrItems = pstrValues.Split(',');
            for (int i = 0; i < astrItems.Length; ++i)
            {
                astrItems[i] = RequireNumeric(astrItems[i], pstrName);
            }
            return string.Join(",", astrItems);
        }
    }
}
//------------------------------------------------------------------------------
// Symbooglix
//
//
// Copyright 2014-2017 Daniel Liew
//
// This file is licensed under the MIT license.
// See LICENSE.txt for details.
//------------------------------------------------------------------------------
using Microsoft.Boogie;
using NUnit.Framework;
using System;
using System.Linq;

namespace TransformTests
{
    /// <summary>
    /// Tests for the GlobalDeadDeclElimination (GDDE) pass.  Each test loads a small
    /// Boogie program, counts top-level functions/axioms/global variables before and
    /// after running the pass, and checks that exactly the dead declarations were
    /// removed.  The expected liveness of each declaration is documented by the
    /// comments inside the embedded Boogie programs themselves.
    /// </summary>
    [TestFixture()]
    public class GlobalDDE
    {
        // --- Function liveness via direct uses -------------------------------

        [Test()]
        public void FuncUsedInImpl()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is live so it should not be removed
function foo(x:int) returns(bool);
procedure main()
{
    assert foo(5);
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            RunGDDE(prog);
            // foo is called from main's body so it must survive the pass.
            Assert.AreEqual(1, FunctionCount(prog));
        }

        [Test()]
        public void FuncUsedInRequires()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is live so it should not be removed
function foo(x:int) returns(bool);
procedure main()
requires foo(5) == true;
{
    return;
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, FunctionCount(prog));
        }

        [Test()]
        public void FuncUsedInEnsures()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is live so it should not be removed
function foo(x:int) returns(bool);
procedure main()
ensures foo(5) == true; // This doesn't make sense but we only care about uses
{
    return;
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, FunctionCount(prog));
        }

        [Test()]
        public void FuncNotUsed()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is dead so it should be removed
function foo(x:int) returns(bool);
procedure main()
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(0, FunctionCount(prog));
        }

        [Test()]
        public void RecursiveFuncUsed()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is alive and is recursive
function foo(x:int) returns(int)
{
    if x !=0 && x > 0 then foo(x-1) else 0
}
procedure main()
{
    assert foo(1) == 0;
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog)); // Implicit axiom on foo()
            RunGDDE(prog);
            // The recursive self-use must not keep-alive-count against removal logic.
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog)); // Implicit axiom on foo()
        }

        // --- Function liveness via other functions / axioms ------------------

        [Test()]
        public void InDirectFuncUseWithImplicitAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is not dead because it's used by bar
function foo(x:int) returns(bool);
// This gets converted to an axiom during parsing so it is considered to be used
function bar() returns (bool)
{
    foo(5)
}
// Baz is dead
function baz() returns (bool)
{
    true
}
procedure main()
{
    assert bar();
}
", "test.bpl");
            Assert.AreEqual(3, FunctionCount(prog));
            Assert.AreEqual(2, AxiomCount(prog)); // Implicit
            RunGDDE(prog);
            Assert.AreEqual(2, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog)); // Implicit
        }

        [Test()]
        public void InDirectFuncIsDeadUseWithImplicitAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is dead because it's used by bar which is dead
function foo(x:int) returns(bool);
// This gets converted to an axiom during parsing
function bar() returns (bool)
{
    foo(5)
}
// Baz is dead. This gets converted to an axiom during parsing
function baz() returns (bool)
{
    true
}
procedure main()
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(2, AxiomCount(prog)); // Implicit
            Assert.AreEqual(3, FunctionCount(prog));
            RunGDDE(prog);
            // Nothing is referenced from main, so the whole dependency chain dies.
            Assert.AreEqual(0, FunctionCount(prog));
            Assert.AreEqual(0, AxiomCount(prog));
        }

        [Test()]
        public void InDirectFuncUseWithoutImplicitAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is not dead because it's used by bar
function foo(x:int) returns(bool);
// No axiom is used here
function {:inline } bar() returns (bool)
{
    foo(5)
}
// Baz is dead
function baz() returns (bool)
{
    true
}
procedure main()
{
    assert bar();
}
", "test.bpl");
            Assert.AreEqual(3, FunctionCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(2, FunctionCount(prog));
        }

        [Test()]
        public void FuncNotInCodeButUsedInDeadAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo is dead so it should be removed
function foo(x:int) returns(bool);
// Axiom is dead so it should be removed
axiom foo(5) == true;
procedure main()
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(0, FunctionCount(prog));
            Assert.AreEqual(0, AxiomCount(prog));
        }

        [Test()]
        public void InDirectFuncIsDeadUseWithoutImplicitAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// foo is dead because it's used by bar
function foo(x:int) returns(bool);
// No axiom is used here, bar is dead
function {:inline } bar() returns (bool)
{
    foo(5)
}
procedure main()
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(2, FunctionCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(0, FunctionCount(prog));
        }

        [Test()]
        public void DoubleInDirectFuncIsDeadUseWithoutImplicitAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// baz is dead because bar() is dead
function baz(x:int) returns(bool);
// foo is dead because it's used by bar
function {:inline } foo(x:int) returns(bool)
{
    baz(2)
}
// No axiom is used here, bar is dead
function {:inline } bar() returns (bool)
{
    foo(5)
}
procedure main()
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(3, FunctionCount(prog));
            RunGDDE(prog);
            // Two levels of indirection must still be removed transitively.
            Assert.AreEqual(0, FunctionCount(prog));
        }

        // --- Axiom liveness --------------------------------------------------

        [Test()]
        public void DeadAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Axiom's that don't reference any globals variables
// or functions are considered to be dead.
// FIXME: This means ""axiom false;"" is considered to be dead
// and will be removed. This might not be desirable
axiom true;
procedure main()
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(0, AxiomCount(prog));
        }

        [Test()]
        public void FuncNotInCodeButUsedInLiveAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
// Foo looks dead but we can't remove it
// due to axiom
function foo(x:int) returns(bool);
const g:bool;
// Axiom is not dead due to global variable use
axiom (foo(5) == true) && g;
procedure main()
{
    assert g;
}
", "test.bpl");
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            Assert.AreEqual(1, GlobalVariableCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            Assert.AreEqual(1, GlobalVariableCount(prog));
        }

        // --- Global variable liveness ----------------------------------------

        [Test()]
        public void GlobalVariableNotUsed()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
var g:bool;
procedure main()
{
    return;
}
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(0, GlobalVariableCount(prog));
        }

        [Test()]
        public void GlobalVariableUsedInImpl()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
var g:bool;
procedure main()
{
    assert g;
}
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, GlobalVariableCount(prog));
        }

        [Test()]
        public void GlobalVariableSomeAliveSomeDead()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
var g:bool;
var d1:bool; // dead
var d2:bool; // dead
procedure main()
{
    assert g;
}
", "test.bpl");
            Assert.AreEqual(3, GlobalVariableCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, GlobalVariableCount(prog));
        }

        [Test()]
        public void GlobalVariableUsedInProcModSet()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
var g:bool;
procedure main();
modifies g;
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            RunGDDE(prog);
            // A modifies clause alone is a use.
            Assert.AreEqual(1, GlobalVariableCount(prog));
        }

        [Test()]
        public void GlobalVariableUsedInRequires()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
var g:bool;
procedure main();
requires g;
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, GlobalVariableCount(prog));
        }

        [Test()]
        public void GlobalVariableUsedInEnsures()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
var g:bool;
procedure main();
ensures g;
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, GlobalVariableCount(prog));
        }

        [Test()]
        public void DeadGlobalVariableUsedInAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
const g:bool;
axiom g == true;
procedure main();
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            // g is only mentioned by the axiom; both die together.
            Assert.AreEqual(0, GlobalVariableCount(prog));
            Assert.AreEqual(0, AxiomCount(prog));
        }

        [Test()]
        public void LiveGlobalVariableUsedInAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
const g:bool;
axiom g == true;
procedure main()
{
    assert g;
}
", "test.bpl");
            Assert.AreEqual(1, GlobalVariableCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(1, GlobalVariableCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
        }

        [Test()]
        public void LiveGlobalVariablesUsedInAxiom()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
const x:int;
// FIXME: Should GlobalDDE be changed to remove this variable?
const z:int; // This variable could be considered dead but the axiom will keep it alive
axiom x == z;
procedure main()
{
    var y:int;
    y := x;
}
", "test.bpl");
            Assert.AreEqual(2, GlobalVariableCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(2, GlobalVariableCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
        }

        // --- Transitive liveness through chains of axioms --------------------

        [Test()]
        public void TransitiveAxiomFunctionDependency()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
function f(int) returns (int);
function g(int) returns (int);
function h(int) returns (int);
axiom (forall x:int :: f(x) > g(x)); // Should not remove
axiom (forall x:int :: g(x) > h(x)); // Should not remove
procedure main(a:int)
requires h(a) > 0;
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(3, FunctionCount(prog));
            Assert.AreEqual(2, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(3, FunctionCount(prog));
            Assert.AreEqual(2, AxiomCount(prog));
        }

        [Test()]
        public void TransitiveAxiomGlobalDependency()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
const f:int;
const g:int;
const h:int;
axiom f > g; // Should not remove
axiom g > h; // Should not remove
procedure main(a:int)
requires h > 0;
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(3, GlobalVariableCount(prog));
            Assert.AreEqual(2, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(3, GlobalVariableCount(prog));
            Assert.AreEqual(2, AxiomCount(prog));
        }

        [Test()]
        public void TransitiveAxiomGlobalAndFunctionDependency()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
const e:int;
const f:int;
function g(int) returns (int);
const h:int;
axiom (e == f); // Should not remove
axiom (forall x:int :: f > g(x)); // Should not remove
axiom (forall x:int :: g(x) > h); // Should not remove
procedure main(a:int)
requires h > 0;
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(3, GlobalVariableCount(prog));
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(3, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(3, GlobalVariableCount(prog));
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(3, AxiomCount(prog));
        }

        [Test()]
        public void TransitiveAxiomTwoSets()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
const e:int;
const f:int;
function g(int) returns (int);
const h:int;
// One set the based on transitivity should not be removed
axiom (e == f);
axiom (forall x:int :: f > g(x));
axiom (forall x:int :: g(x) > h);
// Another set that based on transitivity should not be removed
function w(int) returns (int);
const x:int;
const y:int;
const z:int;
axiom x > y;
axiom y > z;
axiom (forall n:int :: w(n) > z);
// Should be removed
axiom false;
procedure main(a:int)
requires h > 0;
requires z > 0;
{
    assert true;
}
", "test.bpl");
            Assert.AreEqual(6, GlobalVariableCount(prog));
            Assert.AreEqual(2, FunctionCount(prog));
            Assert.AreEqual(7, AxiomCount(prog));
            RunGDDE(prog);
            // Only "axiom false;" (no global/function references) is removed.
            Assert.AreEqual(6, GlobalVariableCount(prog));
            Assert.AreEqual(2, FunctionCount(prog));
            Assert.AreEqual(6, AxiomCount(prog));
        }

        // --- Interpreted (builtin) functions ---------------------------------

        [Test()]
        public void NoAxiomDependencyOnInterpretedFunctions()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
function {:bvbuiltin ""bvadd""} BVADD32(bv32,bv32) returns (bv32);
// This function and axiom are dead
function ADD_ONE(bv32) returns (bv32);
axiom (forall x:bv32 :: ADD_ONE(x) == BVADD32(x, 1bv32));
procedure main()
{
    var x:bv32;
    var y:bv32;
    x := BVADD32(x, y);
    assert x == BVADD32(x, y);
}
", "test.bpl");
            Assert.AreEqual(0, GlobalVariableCount(prog));
            Assert.AreEqual(2, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            // The axiom's use of the builtin BVADD32 must not keep ADD_ONE alive.
            Assert.AreEqual(0, GlobalVariableCount(prog));
            Assert.AreEqual(1, FunctionCount(prog));
            Assert.AreEqual(0, AxiomCount(prog));
            var func = prog.TopLevelDeclarations.OfType<Function>().First();
            Assert.AreEqual("BVADD32", func.Name);
        }

        [Test()]
        public void NoAxiomDependencyOnInterpretedFunctions2()
        {
            var prog = SymbooglixLibTests.SymbooglixTest.LoadProgramFrom(@"
function {:bvbuiltin ""bvadd""} BVADD32(bv32,bv32) returns (bv32);
// This function and axiom are live due to being used in main()
function ADD_ONE(bv32) returns (bv32);
axiom (forall x:bv32 :: ADD_ONE(x) == BVADD32(x, 1bv32));
procedure main()
{
    var x:bv32;
    var y:bv32;
    x := BVADD32(x, y);
    assert x == BVADD32(x, y);
    assert BVADD32(x, 1bv32) == ADD_ONE(x);
}
", "test.bpl");
            Assert.AreEqual(0, GlobalVariableCount(prog));
            Assert.AreEqual(2, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
            RunGDDE(prog);
            Assert.AreEqual(0, GlobalVariableCount(prog));
            Assert.AreEqual(2, FunctionCount(prog));
            Assert.AreEqual(1, AxiomCount(prog));
        }

        // --- Helpers ----------------------------------------------------------

        /// <summary>Runs the GlobalDeadDeclElimination pass over prog via a PassManager.</summary>
        public void RunGDDE(Program prog)
        {
            var GDDE = new Symbooglix.Transform.GlobalDeadDeclEliminationPass();
            var PM = new Symbooglix.Transform.PassManager();
            PM.Add(GDDE);
            PM.Run(prog);
        }

        /// <summary>Number of top-level function declarations in prog.</summary>
        public int FunctionCount(Program prog)
        {
            return prog.TopLevelDeclarations.OfType<Function>().Count();
        }

        /// <summary>Number of top-level axioms in prog.</summary>
        public int AxiomCount(Program prog)
        {
            return prog.TopLevelDeclarations.OfType<Axiom>().Count();
        }

        /// <summary>
        /// Number of top-level variables in prog.
        /// NOTE(review): OfType&lt;Variable&gt; also matches constants - the tests rely
        /// on this, using it for both 'var' and 'const' declarations.
        /// </summary>
        public int GlobalVariableCount(Program prog)
        {
            return prog.TopLevelDeclarations.OfType<Variable>().Count();
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using Microsoft.PowerShell.Commands.GetCounter;
using Microsoft.Win32;

namespace Microsoft.Powershell.Commands.GetCounter.PdhNative
{
    /// <summary>
    /// Status/error codes returned by the native PDH (Performance Data Helper) API.
    /// 0x0/0x1 are success codes, 0x8000xxxx are informational/warning codes and
    /// 0xC000xxxx are errors (values mirror the native pdhmsg.h header).
    /// </summary>
    internal static class PdhResults
    {
        public const uint PDH_CSTATUS_VALID_DATA = 0x0;
        public const uint PDH_CSTATUS_NEW_DATA = 0x1;
        public const uint PDH_CSTATUS_NO_MACHINE = 0x800007D0;
        public const uint PDH_CSTATUS_NO_INSTANCE = 0x800007D1;
        public const uint PDH_MORE_DATA = 0x800007D2;
        public const uint PDH_CSTATUS_ITEM_NOT_VALIDATED = 0x800007D3;
        public const uint PDH_RETRY = 0x800007D4;
        public const uint PDH_NO_DATA = 0x800007D5;
        public const uint PDH_CALC_NEGATIVE_DENOMINATOR = 0x800007D6;
        public const uint PDH_CALC_NEGATIVE_TIMEBASE = 0x800007D7;
        public const uint PDH_CALC_NEGATIVE_VALUE = 0x800007D8;
        public const uint PDH_DIALOG_CANCELLED = 0x800007D9;
        public const uint PDH_END_OF_LOG_FILE = 0x800007DA;
        public const uint PDH_ASYNC_QUERY_TIMEOUT = 0x800007DB;
        public const uint PDH_CANNOT_SET_DEFAULT_REALTIME_DATASOURCE = 0x800007DC;
        public const uint PDH_UNABLE_MAP_NAME_FILES = 0x80000BD5;
        public const uint PDH_PLA_VALIDATION_WARNING = 0x80000BF3;
        public const uint PDH_CSTATUS_NO_OBJECT = 0xC0000BB8;
        public const uint PDH_CSTATUS_NO_COUNTER = 0xC0000BB9;
        public const uint PDH_CSTATUS_INVALID_DATA = 0xC0000BBA;
        public const uint PDH_MEMORY_ALLOCATION_FAILURE = 0xC0000BBB;
        public const uint PDH_INVALID_HANDLE = 0xC0000BBC;
        public const uint PDH_INVALID_ARGUMENT = 0xC0000BBD;
        public const uint PDH_FUNCTION_NOT_FOUND = 0xC0000BBE;
        public const uint PDH_CSTATUS_NO_COUNTERNAME = 0xC0000BBF;
        public const uint PDH_CSTATUS_BAD_COUNTERNAME = 0xC0000BC0;
        public const uint PDH_INVALID_BUFFER = 0xC0000BC1;
        public const uint PDH_INSUFFICIENT_BUFFER = 0xC0000BC2;
        public const uint PDH_CANNOT_CONNECT_MACHINE = 0xC0000BC3;
        public const uint PDH_INVALID_PATH = 0xC0000BC4;
        public const uint PDH_INVALID_INSTANCE = 0xC0000BC5;
        public const uint PDH_INVALID_DATA = 0xC0000BC6;
        public const uint PDH_NO_DIALOG_DATA = 0xC0000BC7;
        public const uint PDH_CANNOT_READ_NAME_STRINGS = 0xC0000BC8;
        public const uint PDH_LOG_FILE_CREATE_ERROR = 0xC0000BC9;
        public const uint PDH_LOG_FILE_OPEN_ERROR = 0xC0000BCA;
        public const uint PDH_LOG_TYPE_NOT_FOUND = 0xC0000BCB;
        public const uint PDH_NO_MORE_DATA = 0xC0000BCC;
        public const uint PDH_ENTRY_NOT_IN_LOG_FILE = 0xC0000BCD;
        public const uint PDH_DATA_SOURCE_IS_LOG_FILE = 0xC0000BCE;
        public const uint PDH_DATA_SOURCE_IS_REAL_TIME = 0xC0000BCF;
        public const uint PDH_UNABLE_READ_LOG_HEADER = 0xC0000BD0;
        public const uint PDH_FILE_NOT_FOUND = 0xC0000BD1;
        public const uint PDH_FILE_ALREADY_EXISTS = 0xC0000BD2;
        public const uint PDH_NOT_IMPLEMENTED = 0xC0000BD3;
        public const uint PDH_STRING_NOT_FOUND = 0xC0000BD4;
        public const uint PDH_UNKNOWN_LOG_FORMAT = 0xC0000BD6;
        public const uint PDH_UNKNOWN_LOGSVC_COMMAND = 0xC0000BD7;
        public const uint PDH_LOGSVC_QUERY_NOT_FOUND = 0xC0000BD8;
        public const uint PDH_LOGSVC_NOT_OPENED = 0xC0000BD9;
        public const uint PDH_WBEM_ERROR = 0xC0000BDA;
        public const uint PDH_ACCESS_DENIED = 0xC0000BDB;
        public const uint PDH_LOG_FILE_TOO_SMALL = 0xC0000BDC;
        public const uint PDH_INVALID_DATASOURCE = 0xC0000BDD;
        public const uint PDH_INVALID_SQLDB = 0xC0000BDE;
        public const uint PDH_NO_COUNTERS = 0xC0000BDF;
        public const uint PDH_SQL_ALLOC_FAILED = 0xC0000BE0;
        public const uint PDH_SQL_ALLOCCON_FAILED = 0xC0000BE1;
        public const uint PDH_SQL_EXEC_DIRECT_FAILED = 0xC0000BE2;
        public const uint PDH_SQL_FETCH_FAILED = 0xC0000BE3;
        public const uint PDH_SQL_ROWCOUNT_FAILED = 0xC0000BE4;
        public const uint PDH_SQL_MORE_RESULTS_FAILED = 0xC0000BE5;
        public const uint PDH_SQL_CONNECT_FAILED = 0xC0000BE6;
        public const uint PDH_SQL_BIND_FAILED = 0xC0000BE7;
        public const uint PDH_CANNOT_CONNECT_WMI_SERVER = 0xC0000BE8;
        public const uint PDH_PLA_COLLECTION_ALREADY_RUNNING = 0xC0000BE9;
        public const uint PDH_PLA_ERROR_SCHEDULE_OVERLAP = 0xC0000BEA;
        public const uint PDH_PLA_COLLECTION_NOT_FOUND = 0xC0000BEB;
        public const uint PDH_PLA_ERROR_SCHEDULE_ELAPSED = 0xC0000BEC;
        public const uint PDH_PLA_ERROR_NOSTART = 0xC0000BED;
        public const uint PDH_PLA_ERROR_ALREADY_EXISTS = 0xC0000BEE;
        public const uint PDH_PLA_ERROR_TYPE_MISMATCH = 0xC0000BEF;
        public const uint PDH_PLA_ERROR_FILEPATH = 0xC0000BF0;
        public const uint PDH_PLA_SERVICE_ERROR = 0xC0000BF1;
        public const uint PDH_PLA_VALIDATION_ERROR = 0xC0000BF2;
        public const uint PDH_PLA_ERROR_NAME_TOO_LONG = 0xC0000BF4;
        public const uint PDH_INVALID_SQL_LOG_FORMAT = 0xC0000BF5;
        public const uint PDH_COUNTER_ALREADY_IN_QUERY = 0xC0000BF6;
        public const uint PDH_BINARY_LOG_CORRUPT = 0xC0000BF7;
        public const uint PDH_LOG_SAMPLE_TOO_SMALL = 0xC0000BF8;
        public const uint PDH_OS_LATER_VERSION = 0xC0000BF9;
        public const uint PDH_OS_EARLIER_VERSION = 0xC0000BFA;
        public const uint PDH_INCORRECT_APPEND_TIME = 0xC0000BFB;
        public const uint PDH_UNMATCHED_APPEND_COUNTER = 0xC0000BFC;
        public const uint PDH_SQL_ALTER_DETAIL_FAILED = 0xC0000BFD;
        public const uint PDH_QUERY_PERF_DATA_TIMEOUT = 0xC0000BFE;
    }

    /// <summary>Counter detail levels (PERF_DETAIL_*) used when enumerating counter objects.</summary>
    internal static class PerfDetail
    {
        public const uint PERF_DETAIL_NOVICE = 100; // The uninformed can understand it
        public const uint PERF_DETAIL_ADVANCED = 200; // For the advanced user
        public const uint PERF_DETAIL_EXPERT = 300; // For the expert user
        public const uint PERF_DETAIL_WIZARD = 400; // For the system designer
    }

    /// <summary>Managed mirror of the Win32 SYSTEMTIME structure.</summary>
    [StructLayout(LayoutKind.Sequential, Pack = 2, Size = 16)]
    internal struct SYSTEMTIME
    {
        public UInt16 year;
        public UInt16 month;
        public UInt16 dayOfWeek;
        public UInt16 day;
        public UInt16 hour;
        public UInt16 minute;
        public UInt16 second;
        public UInt16 milliseconds;
    }

    /// <summary>PDH_FMT_* flags used with PdhGetFormattedCounterValue.</summary>
    internal static class PdhFormat
    {
        public const uint PDH_FMT_RAW = 0x00000010;
        public const uint PDH_FMT_ANSI = 0x00000020;
        public const uint PDH_FMT_UNICODE = 0x00000040;
        public const uint PDH_FMT_LONG = 0x00000100;
        public const uint PDH_FMT_DOUBLE = 0x00000200;
        public const uint PDH_FMT_LARGE = 0x00000400;
        public const uint PDH_FMT_NOSCALE = 0x00001000;
        public const uint PDH_FMT_1000 = 0x00002000;
        public const uint PDH_FMT_NODATA = 0x00004000;
        public const uint PDH_FMT_NOCAP100 = 0x00008000;
        public const uint PERF_DETAIL_COSTLY = 0x00010000;
        public const uint PERF_DETAIL_STANDARD = 0x0000FFFF;
    }

    /// <summary>Access flags for PdhOpenLog.</summary>
    internal static class PdhLogAccess
    {
        public const uint PDH_LOG_READ_ACCESS = 0x00010000;
        public const uint PDH_LOG_WRITE_ACCESS = 0x00020000;
        public const uint PDH_LOG_UPDATE_ACCESS = 0x00040000;
        public const uint PDH_LOG_ACCESS_MASK = 0x000F0000;
    }

    /// <summary>Creation/open disposition flags for PdhOpenLog.</summary>
    internal static class PdhLogOpenMode
    {
        public const uint PDH_LOG_CREATE_NEW = 0x00000001;
        public const uint PDH_LOG_CREATE_ALWAYS = 0x00000002;
        public const uint PDH_LOG_OPEN_ALWAYS = 0x00000003;
        public const uint PDH_LOG_OPEN_EXISTING = 0x00000004;
        public const uint PDH_LOG_CREATE_MASK = 0x0000000F;
    }

    /// <summary>Option flags for PdhOpenLog (combined with the access/open-mode flags).</summary>
    internal static class PdhLogOpenOption
    {
        public const uint PDH_LOG_OPT_USER_STRING = 0x01000000;
        public const uint PDH_LOG_OPT_CIRCULAR = 0x02000000;
        public const uint PDH_LOG_OPT_MAX_IS_BYTES = 0x04000000;
        public const uint PDH_LOG_OPT_APPEND = 0x08000000;
    }

    /// <summary>Performance log file formats understood by PDH.</summary>
    internal enum PdhLogFileType
    {
        PDH_LOG_TYPE_UNDEFINED = 0,
        PDH_LOG_TYPE_CSV = 1,
        PDH_LOG_TYPE_TSV = 2,
        PDH_LOG_TYPE_TRACE_KERNEL = 4,
        PDH_LOG_TYPE_TRACE_GENERIC = 5,
        PDH_LOG_TYPE_PERFMON = 6,
        PDH_LOG_TYPE_SQL = 7,
        PDH_LOG_TYPE_BINARY = 8
    }

    /// <summary>Flags for PdhExpandWildCardPathH.</summary>
    internal static class PdhWildCardFlag
    {
        public const uint PDH_NOEXPANDCOUNTERS = 1;
        public const uint PDH_NOEXPANDINSTANCES = 2;
        public const uint PDH_REFRESHCOUNTERS = 4;
    }

    /// <summary>Pairs a PDH counter handle with the instance name it was expanded from.</summary>
    internal struct CounterHandleNInstance
    {
        public IntPtr hCounter;
        public string InstanceName;
    }

    /// <summary>
    /// Wrapper around the native pdh.dll API used by the Get-Counter cmdlets:
    /// binds data sources (live or .blg/.csv logs), builds queries, and reads
    /// counter values.  Owns three native safe handles released in Dispose().
    /// </summary>
    internal class PdhHelper : IDisposable
    {
        [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
        private struct PDH_COUNTER_PATH_ELEMENTS
        {
            [MarshalAs(UnmanagedType.LPWStr)]
            public string MachineName;
[MarshalAs(UnmanagedType.LPWStr)] public string ObjectName; [MarshalAs(UnmanagedType.LPWStr)] public string InstanceName; [MarshalAs(UnmanagedType.LPWStr)] public string ParentInstance; public UInt32 InstanceIndex; [MarshalAs(UnmanagedType.LPWStr)] public string CounterName; } [StructLayout(LayoutKind.Sequential)] private struct PDH_FMT_COUNTERVALUE_LARGE { public uint CStatus; public Int64 largeValue; // [FieldOffset (4), MarshalAs(UnmanagedType.LPStr)] // public string AnsiStringValue; // [FieldOffset(4), MarshalAs(UnmanagedType.LPWStr)] // public string WideStringValue; } [StructLayout(LayoutKind.Sequential)] private struct PDH_FMT_COUNTERVALUE_DOUBLE { public uint CStatus; public double doubleValue; } [StructLayout(LayoutKind.Sequential)] private struct PDH_FMT_COUNTERVALUE_UNICODE { public uint CStatus; [MarshalAs(UnmanagedType.LPWStr)] public string WideStringValue; } [StructLayout(LayoutKind.Sequential)] private struct PDH_RAW_COUNTER { public uint CStatus; public System.Runtime.InteropServices.ComTypes.FILETIME TimeStamp; public Int64 FirstValue; public Int64 SecondValue; public uint MultiCount; } [StructLayout(LayoutKind.Sequential)] private struct PDH_TIME_INFO { public Int64 StartTime; public Int64 EndTime; public UInt32 SampleCount; } // // This is the structure returned by PdhGetCounterInfo(). // We only need dwType and lDefaultScale fields from this structure. // We access those fields directly. The struct is here for reference only. 
// [StructLayout(LayoutKind.Explicit, CharSet = CharSet.Unicode)] private struct PDH_COUNTER_INFO { [FieldOffset(0)] public UInt32 dwLength; [FieldOffset(4)] public UInt32 dwType; [FieldOffset(8)] public UInt32 CVersion; [FieldOffset(12)] public UInt32 CStatus; [FieldOffset(16)] public UInt32 lScale; [FieldOffset(20)] public UInt32 lDefaultScale; [FieldOffset(24)] public IntPtr dwUserData; [FieldOffset(32)] public IntPtr dwQueryUserData; [FieldOffset(40)] public string szFullPath; [FieldOffset(48)] public string szMachineName; [FieldOffset(56)] public string szObjectName; [FieldOffset(64)] public string szInstanceName; [FieldOffset(72)] public string szParentInstance; [FieldOffset(80)] public UInt32 dwInstanceIndex; [FieldOffset(88)] public string szCounterName; [FieldOffset(96)] public string szExplainText; [FieldOffset(104)] public IntPtr DataBuffer; } [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhBindInputDataSource(out PdhSafeDataSourceHandle phDataSource, string szLogFileNameList); [DllImport("pdh.dll")] private static extern uint PdhOpenQueryH(PdhSafeDataSourceHandle hDataSource, IntPtr dwUserData, out PdhSafeQueryHandle phQuery); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhAddCounter(PdhSafeQueryHandle queryHandle, string counterPath, IntPtr userData, out IntPtr counterHandle); // not on XP [DllImport("pdh.dll")] private static extern uint PdhCollectQueryDataWithTime(PdhSafeQueryHandle queryHandle, ref Int64 pllTimeStamp); [DllImport("pdh.dll")] private static extern uint PdhCollectQueryData(PdhSafeQueryHandle queryHandle); [DllImport("pdh.dll")] internal static extern uint PdhCloseQuery(IntPtr queryHandle); [DllImport("pdh.dll")] internal static extern uint PdhCloseLog(IntPtr logHandle, uint dwFlags); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhOpenLog(string szLogFileName, uint dwAccessFlags, ref PdhLogFileType lpdwLogType, PdhSafeQueryHandle hQuery, 
uint dwMaxSize, string szUserCaption, out PdhSafeLogHandle phLog ); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhGetFormattedCounterValue(IntPtr counterHandle, uint dwFormat, out IntPtr lpdwType, out PDH_FMT_COUNTERVALUE_DOUBLE pValue); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhGetRawCounterValue(IntPtr hCounter, out IntPtr lpdwType, out PDH_RAW_COUNTER pValue); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhEnumMachinesH(PdhSafeDataSourceHandle hDataSource, IntPtr mszMachineNameList, ref IntPtr pcchBufferLength); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhEnumObjectsH(PdhSafeDataSourceHandle hDataSource, string szMachineName, IntPtr mszObjectList, ref IntPtr pcchBufferLength, uint dwDetailLevel, bool bRefresh); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhEnumObjectItemsH(PdhSafeDataSourceHandle hDataSource, string szMachineName, string szObjectName, IntPtr mszCounterList, ref IntPtr pcchCounterListLength, IntPtr mszInstanceList, ref IntPtr pcchInstanceListLength, uint dwDetailLevel, uint dwFlags); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhMakeCounterPath(ref PDH_COUNTER_PATH_ELEMENTS pCounterPathElements, IntPtr szFullPathBuffer, ref IntPtr pcchBufferSize, UInt32 dwFlags); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhParseCounterPath(string szFullPathBuffer, IntPtr pCounterPathElements, // PDH_COUNTER_PATH_ELEMENTS ref IntPtr pdwBufferSize, uint dwFlags); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhExpandWildCardPathH(PdhSafeDataSourceHandle hDataSource, string szWildCardPath, IntPtr mszExpandedPathList, ref IntPtr pcchPathListLength, uint dwFlags); // not available on XP [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint 
PdhValidatePathEx(PdhSafeDataSourceHandle hDataSource, string szFullPathBuffer); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhValidatePath(string szFullPathBuffer); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhGetCounterInfo(IntPtr hCounter, [MarshalAs(UnmanagedType.U1)] bool bRetrieveExplainText, ref IntPtr pdwBufferSize, IntPtr lpBuffer); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhGetCounterTimeBase(IntPtr hCounter, out UInt64 pTimeBase); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhGetDataSourceTimeRangeH(PdhSafeDataSourceHandle hDataSource, ref IntPtr pdwNumEntries, ref PDH_TIME_INFO pInfo, ref IntPtr pdwBufferSize); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhSetQueryTimeRange(PdhSafeQueryHandle hQuery, ref PDH_TIME_INFO pInfo); [DllImport("pdh.dll", CharSet = CharSet.Unicode)] private static extern uint PdhLookupPerfNameByIndex(string szMachineName, UInt32 dwNameIndex, IntPtr szNameBuffer, ref int pcchNameBufferSize); private PdhSafeDataSourceHandle _hDataSource = null; private PdhSafeQueryHandle _hQuery = null; private bool _firstReading = true; private PdhSafeLogHandle _hOutputLog = null; // // Implement IDisposable::Dispose() to close native safe handles // public void Dispose() { if (_hDataSource != null && !_hDataSource.IsInvalid) { _hDataSource.Dispose(); } if (_hOutputLog != null && !_hOutputLog.IsInvalid) { _hOutputLog.Dispose(); } if (_hQuery != null && !_hQuery.IsInvalid) { _hQuery.Dispose(); } GC.SuppressFinalize(this); } // // m_ConsumerPathToHandleAndInstanceMap map is used for reading counter date (live or from files). // private readonly Dictionary<string, CounterHandleNInstance> _consumerPathToHandleAndInstanceMap = new(); /// <summary> /// A helper reading in a Unicode string with embedded NULLs and splitting it into a StringCollection. 
/// </summary> /// <param name="strNative"></param> /// <param name="strSize"></param> /// <param name="strColl"></param> private static void ReadPdhMultiString(ref IntPtr strNative, Int32 strSize, ref StringCollection strColl) { Debug.Assert(strSize >= 2); int offset = 0; string allSubstringsWithNulls = string.Empty; while (offset <= ((strSize * sizeof(char)) - 4)) { Int32 next4 = Marshal.ReadInt32(strNative, offset); if (next4 == 0) { break; } allSubstringsWithNulls += (char)next4; offset += 2; } allSubstringsWithNulls = allSubstringsWithNulls.TrimEnd('\0'); strColl.AddRange(allSubstringsWithNulls.Split('\0')); } private static uint GetCounterInfoPlus(IntPtr hCounter, out UInt32 counterType, out UInt32 defaultScale, out UInt64 timeBase) { counterType = 0; defaultScale = 0; timeBase = 0; Debug.Assert(hCounter != IntPtr.Zero); IntPtr pBufferSize = new(0); uint res = PdhGetCounterInfo(hCounter, false, ref pBufferSize, IntPtr.Zero); if (res != PdhResults.PDH_MORE_DATA) { return res; } Int32 bufSize = pBufferSize.ToInt32(); IntPtr bufCounterInfo = Marshal.AllocHGlobal(bufSize); try { res = PdhGetCounterInfo(hCounter, false, ref pBufferSize, bufCounterInfo); if (res == PdhResults.PDH_CSTATUS_VALID_DATA && bufCounterInfo != IntPtr.Zero) { PDH_COUNTER_INFO pdhCounterInfo = (PDH_COUNTER_INFO)Marshal.PtrToStructure(bufCounterInfo, typeof(PDH_COUNTER_INFO)); counterType = pdhCounterInfo.dwType; defaultScale = pdhCounterInfo.lDefaultScale; } } finally { Marshal.FreeHGlobal(bufCounterInfo); } res = PdhGetCounterTimeBase(hCounter, out timeBase); if (res != PdhResults.PDH_CSTATUS_VALID_DATA) { return res; } return res; } public uint ConnectToDataSource() { if (_hDataSource != null && !_hDataSource.IsInvalid) { _hDataSource.Dispose(); } uint res = PdhHelper.PdhBindInputDataSource(out _hDataSource, null); if (res != PdhResults.PDH_CSTATUS_VALID_DATA) { // Console.WriteLine("error in PdhBindInputDataSource: " + res); return res; } return PdhResults.PDH_CSTATUS_VALID_DATA; } /// 
/// <summary>
    /// Connects to a single named datasource, initializing m_hDataSource variable.
    /// </summary>
    /// <param name="dataSourceName">Log file name, or a double-NUL-terminated list of names.</param>
    /// <returns>A PDH status code.</returns>
    public uint ConnectToDataSource(string dataSourceName)
    {
        if (_hDataSource != null && !_hDataSource.IsInvalid)
        {
            _hDataSource.Dispose();
        }

        uint res = PdhHelper.PdhBindInputDataSource(out _hDataSource, dataSourceName);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            // Console.WriteLine("error in PdhBindInputDataSource: " + res);
        }

        return res;
    }

    // Binds to one or more .blg files. For multiple files the single-string overload
    // receives a double-NUL-terminated list of file names.
    public uint ConnectToDataSource(StringCollection blgFileNames)
    {
        if (blgFileNames.Count == 1)
        {
            return ConnectToDataSource(blgFileNames[0]);
        }

        string doubleNullTerminated = string.Empty;
        foreach (string fileName in blgFileNames)
        {
            doubleNullTerminated += fileName + '\0';
        }

        doubleNullTerminated += '\0';

        return ConnectToDataSource(doubleNullTerminated);
    }

    // Opens a PDH query against the currently bound data source
    // (call ConnectToDataSource first).
    public uint OpenQuery()
    {
        uint res = PdhOpenQueryH(_hDataSource, IntPtr.Zero, out _hQuery);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            // Console.WriteLine("error in PdhOpenQueryH: " + res);
        }

        return res;
    }

    // Opens an output log tied to the open query. Access flags combine write access
    // with the circular / overwrite options requested by the caller.
    public uint OpenLogForWriting(string logName, PdhLogFileType logFileType, bool bOverwrite, UInt32 maxSize, bool bCircular, string caption)
    {
        Debug.Assert(_hQuery != null);
        UInt32 accessFlags = PdhLogAccess.PDH_LOG_WRITE_ACCESS;
        accessFlags |= bCircular ? PdhLogOpenOption.PDH_LOG_OPT_CIRCULAR : 0;
        accessFlags |= bOverwrite ? PdhLogOpenMode.PDH_LOG_CREATE_ALWAYS : PdhLogOpenMode.PDH_LOG_CREATE_NEW;

        uint res = PdhOpenLog(logName, accessFlags, ref logFileType, _hQuery, maxSize, caption, out _hOutputLog);

        return res;
    }

    // Restricts the query to [startTime, endTime]. PDH expects UTC FILETIMEs, so
    // local-kind DateTimes are re-labeled as UTC without shifting the tick value.
    public uint SetQueryTimeRange(DateTime startTime, DateTime endTime)
    {
        Debug.Assert(_hQuery != null);
        Debug.Assert(endTime >= startTime);
        PDH_TIME_INFO pTimeInfo = new();

        if (startTime != DateTime.MinValue && startTime.Kind == DateTimeKind.Local)
        {
            startTime = new DateTime(startTime.Ticks, DateTimeKind.Utc);
        }

        pTimeInfo.StartTime = (startTime == DateTime.MinValue) ? 0 : startTime.ToFileTimeUtc();

        if (endTime != DateTime.MaxValue && endTime.Kind == DateTimeKind.Local)
        {
            endTime = new DateTime(endTime.Ticks, DateTimeKind.Utc);
        }

        pTimeInfo.EndTime = (endTime == DateTime.MaxValue) ? Int64.MaxValue : endTime.ToFileTimeUtc();
        pTimeInfo.SampleCount = 0;

        return PdhSetQueryTimeRange(_hQuery, ref pTimeInfo);
    }

    // Lists machine names present in the bound data source (two-call sizing pattern).
    public uint EnumBlgFilesMachines(ref StringCollection machineNames)
    {
        IntPtr MachineListTcharSizePtr = new(0);
        uint res = PdhHelper.PdhEnumMachinesH(_hDataSource, IntPtr.Zero, ref MachineListTcharSizePtr);
        if (res != PdhResults.PDH_MORE_DATA)
        {
            return res;
        }

        Int32 cChars = MachineListTcharSizePtr.ToInt32(); // should be ok on 64 bit
        IntPtr strMachineList = Marshal.AllocHGlobal(cChars * sizeof(char));
        try
        {
            res = PdhHelper.PdhEnumMachinesH(_hDataSource, (IntPtr)strMachineList, ref MachineListTcharSizePtr);
            if (res == PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                ReadPdhMultiString(ref strMachineList, MachineListTcharSizePtr.ToInt32(), ref machineNames);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(strMachineList);
        }

        return res;
    }

    // Lists counter-set (object) names on a machine (two-call sizing pattern).
    public uint EnumObjects(string machineName, ref StringCollection objectNames)
    {
        IntPtr pBufferSize = new(0);
        uint res = PdhEnumObjectsH(_hDataSource, machineName, IntPtr.Zero, ref pBufferSize, PerfDetail.PERF_DETAIL_WIZARD, false);
        if (res != PdhResults.PDH_MORE_DATA)
        {
            return res;
        }

        Int32 cChars = pBufferSize.ToInt32();
        IntPtr strObjectList = Marshal.AllocHGlobal(cChars * sizeof(char));
        try
        {
            res = PdhEnumObjectsH(_hDataSource, machineName, (IntPtr)strObjectList, ref pBufferSize, PerfDetail.PERF_DETAIL_WIZARD, false);
            if (res == PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                ReadPdhMultiString(ref strObjectList, pBufferSize.ToInt32(), ref objectNames);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(strObjectList);
        }

        return res;
    }

    // Lists counter and instance names for one counter set (object).
    public uint EnumObjectItems(string machineName, string objectName, ref StringCollection counterNames, ref StringCollection instanceNames)
    {
        IntPtr pCounterBufferSize = new(0);
        IntPtr pInstanceBufferSize = new(0);

        uint res
= PdhEnumObjectItemsH(_hDataSource, machineName, objectName, IntPtr.Zero, ref pCounterBufferSize, IntPtr.Zero, ref pInstanceBufferSize, PerfDetail.PERF_DETAIL_WIZARD, 0); if (res == PdhResults.PDH_CSTATUS_NO_INSTANCE) { instanceNames.Clear(); return PdhResults.PDH_CSTATUS_VALID_DATA; // masking the error } else if (res == PdhResults.PDH_CSTATUS_NO_OBJECT) { counterNames.Clear(); return PdhResults.PDH_CSTATUS_VALID_DATA; // masking the error } else if (res != PdhResults.PDH_MORE_DATA) { // Console.WriteLine("error in PdhEnumObjectItemsH 1st call: " + res); return res; } Int32 cChars = pCounterBufferSize.ToInt32(); IntPtr strCountersList = (cChars > 0) ? Marshal.AllocHGlobal((cChars) * sizeof(char)) : IntPtr.Zero; // re-set count to 0 if it is lte 2 if (cChars < 0) { pCounterBufferSize = new IntPtr(0); } cChars = pInstanceBufferSize.ToInt32(); IntPtr strInstancesList = (cChars > 0) ? Marshal.AllocHGlobal((cChars) * sizeof(char)) : IntPtr.Zero; // re-set count to 0 if it is lte 2 if (cChars < 0) { pInstanceBufferSize = new IntPtr(0); } try { res = PdhEnumObjectItemsH(_hDataSource, machineName, objectName, strCountersList, ref pCounterBufferSize, strInstancesList, ref pInstanceBufferSize, PerfDetail.PERF_DETAIL_WIZARD, 0); if (res != PdhResults.PDH_CSTATUS_VALID_DATA) { // Console.WriteLine("error in PdhEnumObjectItemsH 2nd call: " + res + "\n Counter buffer size is " // + pCounterBufferSize.ToInt32() + "\n Instance buffer size is " + pInstanceBufferSize.ToInt32()); } else { ReadPdhMultiString(ref strCountersList, pCounterBufferSize.ToInt32(), ref counterNames); if (strInstancesList != IntPtr.Zero) { ReadPdhMultiString(ref strInstancesList, pInstanceBufferSize.ToInt32(), ref instanceNames); } } } finally { if (strCountersList != IntPtr.Zero) { Marshal.FreeHGlobal(strCountersList); } if (strInstancesList != IntPtr.Zero) { Marshal.FreeHGlobal(strInstancesList); } } return res; } public uint GetValidPathsFromFiles(ref StringCollection validPaths) { 
Debug.Assert(_hDataSource != null && !_hDataSource.IsInvalid, "Call ConnectToDataSource before GetValidPathsFromFiles");
        StringCollection machineNames = new();
        uint res = this.EnumBlgFilesMachines(ref machineNames);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            return res;
        }

        foreach (string machine in machineNames)
        {
            StringCollection counterSets = new();
            res = this.EnumObjects(machine, ref counterSets);
            if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                return res;
            }

            foreach (string counterSet in counterSets)
            {
                // Console.WriteLine("Counter set " + counterSet);
                StringCollection counterSetCounters = new();
                StringCollection counterSetInstances = new();

                res = this.EnumObjectItems(machine, counterSet, ref counterSetCounters, ref counterSetInstances);
                if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
                {
                    return res;
                }

                res = this.GetValidPaths(machine, counterSet, ref counterSetCounters, ref counterSetInstances, ref validPaths);
                if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
                {
                    return res;
                }
            }
        }

        return res;
    }

    // Builds a path from the elements and validates it against the bound data source.
    // Returns true (and the composed path) only when PdhValidatePathEx succeeds.
    private bool IsPathValid(ref PDH_COUNTER_PATH_ELEMENTS pathElts, out string outPath)
    {
        bool ret = false;
        outPath = string.Empty;

        // Two-call sizing pattern for PdhMakeCounterPath.
        IntPtr pPathBufferSize = new(0);
        uint res = PdhMakeCounterPath(ref pathElts, IntPtr.Zero, ref pPathBufferSize, 0);
        if (res != PdhResults.PDH_MORE_DATA)
        {
            return false;
        }

        Int32 cChars = pPathBufferSize.ToInt32();
        IntPtr strPath = Marshal.AllocHGlobal(cChars * sizeof(char));
        try
        {
            res = PdhMakeCounterPath(ref pathElts, strPath, ref pPathBufferSize, 0);
            if (res == PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                outPath = Marshal.PtrToStringUni(strPath);
                ret = (PdhValidatePathEx(_hDataSource, outPath) == PdhResults.PDH_CSTATUS_VALID_DATA);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(strPath);
        }

        return ret;
    }

    // Validates a ready-made counter path against the bound data source.
    public bool IsPathValid(string path)
    {
        return (PdhValidatePathEx(_hDataSource, path) == PdhResults.PDH_CSTATUS_VALID_DATA);
    }

    // Composes a counter path from its elements; when bWildcardInstances is set the
    // instance part is forced to "*" (all instances).
    private static uint MakePath(PDH_COUNTER_PATH_ELEMENTS pathElts, out string outPath, bool bWildcardInstances)
    {
        outPath = string.Empty;
        IntPtr pPathBufferSize = new(0);

        if (bWildcardInstances)
        {
            pathElts.InstanceIndex = 0;
            pathElts.InstanceName = "*";
            pathElts.ParentInstance = null;
        }

        uint res = PdhMakeCounterPath(ref pathElts, IntPtr.Zero, ref pPathBufferSize, 0);
        if (res != PdhResults.PDH_MORE_DATA)
        {
            return res;
        }

        Int32 cChars = pPathBufferSize.ToInt32();
        IntPtr strPath = Marshal.AllocHGlobal(cChars * sizeof(char));
        try
        {
            res = PdhMakeCounterPath(ref pathElts, strPath, ref pPathBufferSize, 0);
            if (res == PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                outPath = Marshal.PtrToStringUni(strPath);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(strPath);
        }

        return res;
    }

    // Rewrites origPath so that its instance part is the "*" wildcard.
    private static uint MakeAllInstancePath(string origPath, out string unifiedPath)
    {
        unifiedPath = origPath;

        PDH_COUNTER_PATH_ELEMENTS elts = new();
        uint res = ParsePath(origPath, ref elts);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            return res;
        }

        return MakePath(elts, out unifiedPath, true);
    }

    // Parses a full counter path into PDH_COUNTER_PATH_ELEMENTS (two-call sizing pattern).
    private static uint ParsePath(string fullPath, ref PDH_COUNTER_PATH_ELEMENTS pCounterPathElements)
    {
        IntPtr bufSize = new(0);
        uint res = PdhParseCounterPath(fullPath, IntPtr.Zero, ref bufSize, 0);
        if (res != PdhResults.PDH_MORE_DATA && res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            // Console.WriteLine("error in PdhParseCounterPath: " + res);
            return res;
        }

        IntPtr structPtr = Marshal.AllocHGlobal(bufSize.ToInt32());
        try
        {
            res = PdhParseCounterPath(fullPath, structPtr, ref bufSize, 0);
            if (res == PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                //
                // Marshal.PtrToStructure will allocate managed memory for the object,
                // so the unmanaged ptr can be freed safely
                //
                pCounterPathElements = Marshal.PtrToStructure<PDH_COUNTER_PATH_ELEMENTS>(structPtr);
            }
        }
        finally
        {
            Marshal.FreeHGlobal(structPtr);
        }

        return res;
    }

    //
    // TranslateLocalCounterPath() helper translates counter paths from English into the current locale language.
    // NOTE: we can only translate counter set and counter names.
// Translated instance names come from providers
    // This function will leave them unchanged:
    // however, it works for common cases like "*" and "_total"
    // and many instance names are just numbers, anyway.
    //
    // Also - this only supports local paths, b/c connecting to remote registry
    // requires a different firewall exception.
    // This function checks and Asserts if the path is not valid.
    //
    /// <summary>
    /// Translates the counter-set and counter names of an English counter path into
    /// the current locale: the English ("009") Perflib name table supplies the name
    /// indices, and LookupPerfNameByIndex maps those indices to localized names.
    /// </summary>
    /// <param name="englishPath">Full English counter path (local machine only).</param>
    /// <param name="localizedPath">Receives the localized path; empty on failure.</param>
    /// <returns>A PDH status code; PDH_INVALID_PATH when a name cannot be resolved.</returns>
    public uint TranslateLocalCounterPath(string englishPath, out string localizedPath)
    {
        uint res = PdhResults.PDH_CSTATUS_VALID_DATA;
        localizedPath = string.Empty;
        PDH_COUNTER_PATH_ELEMENTS pathElts = new();
        res = ParsePath(englishPath, ref pathElts);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            return res;
        }

        // Check if the path is local and assert if not:
        string machineNameMassaged = pathElts.MachineName.ToLowerInvariant();
        machineNameMassaged = machineNameMassaged.TrimStart('\\');
        Debug.Assert(machineNameMassaged == System.Environment.MachineName.ToLowerInvariant());

        string lowerEngCtrName = pathElts.CounterName.ToLowerInvariant();
        string lowerEngObjectName = pathElts.ObjectName.ToLowerInvariant();

        // Get the registry index.
        // FIX: dispose the registry key, and fail gracefully with PDH_INVALID_PATH
        // (instead of throwing NullReferenceException) when the English Perflib key
        // or its "Counter" value is missing.
        string[] regCounters;
        using (RegistryKey rootKey = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Perflib\\009"))
        {
            if (rootKey == null)
            {
                return PdhResults.PDH_INVALID_PATH;
            }

            regCounters = (string[])rootKey.GetValue("Counter");
            if (regCounters == null)
            {
                return PdhResults.PDH_INVALID_PATH;
            }
        }

        // NOTE: 1-based enumeration because the name strings follow index strings in the array
        Int32 counterIndex = -1;
        Int32 objIndex = -1;
        for (int enumIndex = 1; enumIndex < regCounters.Length; enumIndex++)
        {
            string regString = regCounters[enumIndex];
            if (regString.ToLowerInvariant() == lowerEngCtrName)
            {
                try
                {
                    counterIndex = Convert.ToInt32(regCounters[enumIndex - 1], CultureInfo.InvariantCulture);
                }
                catch (Exception)
                {
                    return PdhResults.PDH_INVALID_PATH;
                }
            }
            else if (regString.ToLowerInvariant() == lowerEngObjectName)
            {
                try
                {
                    objIndex = Convert.ToInt32(regCounters[enumIndex - 1], CultureInfo.InvariantCulture);
                }
                catch (Exception)
                {
                    return PdhResults.PDH_INVALID_PATH;
                }
            }

            if (counterIndex != -1 && objIndex != -1)
            {
                break;
            }
        }

        if (counterIndex == -1 || objIndex == -1)
        {
            return PdhResults.PDH_INVALID_PATH;
        }

        // Now, call retrieve the localized names of the object and the counter by index:
        string objNameLocalized;
        res = LookupPerfNameByIndex(pathElts.MachineName, (uint)objIndex, out objNameLocalized);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            return res;
        }

        pathElts.ObjectName = objNameLocalized;

        string ctrNameLocalized;
        res = LookupPerfNameByIndex(pathElts.MachineName, (uint)counterIndex, out ctrNameLocalized);
        if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
        {
            return res;
        }

        pathElts.CounterName = ctrNameLocalized;

        // Assemble the path back by using the translated object and counter names:
        res = MakePath(pathElts, out localizedPath, false);

        return res;
    }

    // Looks up the localized counter/object name for a Perflib name index.
    public uint LookupPerfNameByIndex(string machineName, uint index, out string locName)
    {
        //
        // NOTE: to make PdhLookupPerfNameByIndex() work,
        // localizedPath needs to be pre-allocated on the first call.
        // This is different from most other PDH functions that tolerate NULL buffers and return required size.
// int strSize = 256; IntPtr localizedPathPtr = Marshal.AllocHGlobal(strSize * sizeof(char)); locName = string.Empty; uint res; try { res = PdhLookupPerfNameByIndex(machineName, index, localizedPathPtr, ref strSize); if (res == PdhResults.PDH_MORE_DATA) { Marshal.FreeHGlobal(localizedPathPtr); localizedPathPtr = Marshal.AllocHGlobal(strSize * sizeof(char)); res = PdhLookupPerfNameByIndex(machineName, index, localizedPathPtr, ref strSize); } if (res == PdhResults.PDH_CSTATUS_VALID_DATA) { locName = Marshal.PtrToStringUni(localizedPathPtr); } } finally { Marshal.FreeHGlobal(localizedPathPtr); } return res; } public uint GetValidPaths(string machineName, string objectName, ref StringCollection counters, ref StringCollection instances, ref StringCollection validPaths) { PDH_COUNTER_PATH_ELEMENTS pathElts = new(); pathElts.MachineName = machineName; pathElts.ObjectName = objectName; foreach (string counterName in counters) { pathElts.CounterName = counterName; if (instances.Count == 0) { string pathCandidate; if (IsPathValid(ref pathElts, out pathCandidate)) { validPaths.Add(pathCandidate); } } else { foreach (string instanceName in instances) { pathElts.InstanceName = instanceName; pathElts.InstanceIndex = 0; string pathCandidate; if (IsPathValid(ref pathElts, out pathCandidate)) { validPaths.Add(pathCandidate); } } } } return PdhResults.PDH_CSTATUS_VALID_DATA; } public uint AddCounters(ref StringCollection validPaths, bool bFlushOldCounters) { Debug.Assert(_hQuery != null && !_hQuery.IsInvalid); if (bFlushOldCounters) { _consumerPathToHandleAndInstanceMap.Clear(); } bool bAtLeastOneAdded = false; uint res = PdhResults.PDH_CSTATUS_VALID_DATA; foreach (string counterPath in validPaths) { IntPtr counterHandle; res = PdhAddCounter(_hQuery, counterPath, IntPtr.Zero, out counterHandle); if (res == PdhResults.PDH_CSTATUS_VALID_DATA) { CounterHandleNInstance chi = new(); chi.hCounter = counterHandle; chi.InstanceName = null; PDH_COUNTER_PATH_ELEMENTS pathElts = new(); res = 
ParsePath(counterPath, ref pathElts);
                if (res == PdhResults.PDH_CSTATUS_VALID_DATA && pathElts.InstanceName != null)
                {
                    chi.InstanceName = pathElts.InstanceName.ToLowerInvariant();
                }

                if (!_consumerPathToHandleAndInstanceMap.ContainsKey(counterPath.ToLowerInvariant()))
                {
                    _consumerPathToHandleAndInstanceMap.Add(counterPath.ToLowerInvariant(), chi);
                }

                bAtLeastOneAdded = true;
            }
        }

        return bAtLeastOneAdded ? PdhResults.PDH_CSTATUS_VALID_DATA : res;
    }

    public string GetCounterSetHelp(string szMachineName, string szObjectName)
    {
        // API not available to retrieve
        return string.Empty;
    }

    // Collects one sample for every counter added to the query and packages the
    // result as a PerformanceCounterSampleSet. Per-sample failures are recorded in
    // each sample's status; the overall return is an error only when every sample failed.
    public uint ReadNextSet(out PerformanceCounterSampleSet nextSet, bool bSkipReading)
    {
        Debug.Assert(_hQuery != null && !_hQuery.IsInvalid);

        uint res = PdhResults.PDH_CSTATUS_VALID_DATA;
        nextSet = null;

        Int64 batchTimeStampFT = 0;

        res = PdhCollectQueryDataWithTime(_hQuery, ref batchTimeStampFT);
        if (bSkipReading)
        {
            return res;
        }

        if (res != PdhResults.PDH_CSTATUS_VALID_DATA && res != PdhResults.PDH_NO_DATA)
        {
            return res;
        }

        //
        // NOTE: PDH returns the filetime as local time, therefore
        // we need to call FromFileTimUtc() to avoid .NET applying the timezone adjustment.
        // However, that would result in the DateTime object having Kind.Utc.
        // We have to copy it once more to correct that (Kind is a read-only property).
        //
        DateTime batchStamp = DateTime.Now;
        if (res != PdhResults.PDH_NO_DATA)
        {
            batchStamp = new DateTime(DateTime.FromFileTimeUtc(batchTimeStampFT).Ticks, DateTimeKind.Local);
        }

        PerformanceCounterSample[] samplesArr = new PerformanceCounterSample[_consumerPathToHandleAndInstanceMap.Count];
        uint sampleIndex = 0;
        uint numInvalidDataSamples = 0;
        uint lastErr = PdhResults.PDH_CSTATUS_VALID_DATA;

        foreach (string path in _consumerPathToHandleAndInstanceMap.Keys)
        {
            IntPtr counterTypePtr = new(0);
            UInt32 counterType = (UInt32)PerformanceCounterType.RawBase;
            UInt32 defaultScale = 0;
            UInt64 timeBase = 0;

            IntPtr hCounter = _consumerPathToHandleAndInstanceMap[path].hCounter;
            Debug.Assert(hCounter != IntPtr.Zero);

            // Best effort: a failure here leaves the default type/scale/timebase values.
            res = GetCounterInfoPlus(hCounter, out counterType, out defaultScale, out timeBase);
            if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                // Console.WriteLine ("GetCounterInfoPlus for " + path + " failed with " + res);
            }

            PDH_RAW_COUNTER rawValue;
            res = PdhGetRawCounterValue(hCounter, out counterTypePtr, out rawValue);
            if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                // Raw read failed: emit a placeholder sample carrying the error status.
                samplesArr[sampleIndex++] = new PerformanceCounterSample(path,
                    _consumerPathToHandleAndInstanceMap[path].InstanceName,
                    0,
                    (ulong)0,
                    (ulong)0,
                    0,
                    PerformanceCounterType.RawBase,
                    defaultScale,
                    timeBase,
                    batchStamp,
                    (UInt64)batchStamp.ToFileTime(),
                    (rawValue.CStatus == PdhResults.PDH_CSTATUS_VALID_DATA) ? res : rawValue.CStatus);

                numInvalidDataSamples++;
                lastErr = res;
                continue;
            }

            // Reassemble the sample FILETIME, then re-label as local time (see NOTE above).
            long dtFT = (((long)rawValue.TimeStamp.dwHighDateTime) << 32) + (uint)rawValue.TimeStamp.dwLowDateTime;

            DateTime dt = new(DateTime.FromFileTimeUtc(dtFT).Ticks, DateTimeKind.Local);

            PDH_FMT_COUNTERVALUE_DOUBLE fmtValueDouble;
            res = PdhGetFormattedCounterValue(hCounter, PdhFormat.PDH_FMT_DOUBLE | PdhFormat.PDH_FMT_NOCAP100, out counterTypePtr, out fmtValueDouble);
            if (res != PdhResults.PDH_CSTATUS_VALID_DATA)
            {
                // Formatting failed: emit a sample with raw values and the error status.
                samplesArr[sampleIndex++] = new PerformanceCounterSample(path,
                    _consumerPathToHandleAndInstanceMap[path].InstanceName,
                    0,
                    (ulong)rawValue.FirstValue,
                    (ulong)rawValue.SecondValue,
                    rawValue.MultiCount,
                    (PerformanceCounterType)counterType,
                    defaultScale,
                    timeBase,
                    dt,
                    (UInt64)dtFT,
                    (fmtValueDouble.CStatus == PdhResults.PDH_CSTATUS_VALID_DATA) ? res : rawValue.CStatus);

                numInvalidDataSamples++;
                lastErr = res;
                continue;
            }

            samplesArr[sampleIndex++] = new PerformanceCounterSample(path,
                _consumerPathToHandleAndInstanceMap[path].InstanceName,
                fmtValueDouble.doubleValue,
                (ulong)rawValue.FirstValue,
                (ulong)rawValue.SecondValue,
                rawValue.MultiCount,
                (PerformanceCounterType)counterTypePtr.ToInt32(),
                defaultScale,
                timeBase,
                dt,
                (UInt64)dtFT,
                fmtValueDouble.CStatus);
        }

        nextSet = new PerformanceCounterSampleSet(batchStamp, samplesArr, _firstReading);
        _firstReading = false;

        if (numInvalidDataSamples == samplesArr.Length)
        {
            res = lastErr;
        }
        else
        {
            //
            // Reset the error - any errors are saved per sample in PerformanceCounterSample.Status for kvetching later
            //
            res = PdhResults.PDH_CSTATUS_VALID_DATA;
        }

        return res;
    }

    // Expands a wildcard counter path into the matching concrete paths
    // (two-call sizing pattern).
    public uint ExpandWildCardPath(string path, out StringCollection expandedPaths)
    {
        expandedPaths = new StringCollection();
        IntPtr pcchPathListLength = new(0);

        uint res = PdhExpandWildCardPathH(_hDataSource, path, IntPtr.Zero, ref pcchPathListLength, PdhWildCardFlag.PDH_REFRESHCOUNTERS);
        if (res != PdhResults.PDH_MORE_DATA)
        {
            return res;
        }

        Int32 cChars = pcchPathListLength.ToInt32();
        IntPtr
strPathList = Marshal.AllocHGlobal(cChars * sizeof(char)); try { res = PdhExpandWildCardPathH(_hDataSource, path, strPathList, ref pcchPathListLength, PdhWildCardFlag.PDH_REFRESHCOUNTERS); if (res == PdhResults.PDH_CSTATUS_VALID_DATA) { ReadPdhMultiString(ref strPathList, pcchPathListLength.ToInt32(), ref expandedPaths); } } finally { Marshal.FreeHGlobal(strPathList); } return res; } } }
namespace ICSharpCode.NRefactory.Ast { using System; using System.Collections.Generic; using AgentRalph.Visitors; public partial class AddHandlerStatement { internal override bool ShallowMatch(INode right) { var r = (AddHandlerStatement) right; return false; } } public partial class AddressOfExpression { internal override bool ShallowMatch(INode right) { var r = (AddressOfExpression) right; return false; } } public partial class AnonymousMethodExpression { internal override bool ShallowMatch(INode right) { var r = (AnonymousMethodExpression) right; return false; } } public partial class ArrayCreateExpression { internal override bool ShallowMatch(INode right) { var r = (ArrayCreateExpression) right; return true; } } public partial class AssignmentExpression { internal override bool ShallowMatch(INode right) { var r = (AssignmentExpression) right; return this.Op == r.Op; } } public partial class Attribute { internal override bool ShallowMatch(INode right) { var r = (Attribute) right; return false; } } public abstract partial class AttributedNode { internal override bool ShallowMatch(INode right) { var r = (AttributedNode) right; return false; } } public partial class AttributeSection { internal override bool ShallowMatch(INode right) { var r = (AttributeSection) right; return false; } } public partial class BaseReferenceExpression { internal override bool ShallowMatch(INode right) { var r = (BaseReferenceExpression) right; return true; } } public partial class BinaryOperatorExpression { internal override bool ShallowMatch(INode right) { var r = (BinaryOperatorExpression) right; return this.Op == r.Op; } } public partial class BreakStatement { internal override bool ShallowMatch(INode right) { var r = (BreakStatement) right; return true; } } public partial class CaseLabel { internal override bool ShallowMatch(INode right) { var r = (CaseLabel) right; return this.Label == r.Label; } } public partial class CastExpression { internal override bool ShallowMatch(INode 
right) { var r = (CastExpression) right; return true; } } public partial class CatchClause { internal override bool ShallowMatch(INode right) { var r = (CatchClause) right; return true; } } public partial class CheckedExpression { internal override bool ShallowMatch(INode right) { var r = (CheckedExpression) right; return false; } } public partial class CheckedStatement { internal override bool ShallowMatch(INode right) { var r = (CheckedStatement) right; return true; } } public partial class ClassReferenceExpression { internal override bool ShallowMatch(INode right) { var r = (ClassReferenceExpression) right; return false; } } public partial class CollectionInitializerExpression { internal override bool ShallowMatch(INode right) { var r = (CollectionInitializerExpression) right; return true; } } internal sealed partial class NullCollectionInitializerExpression { internal override bool ShallowMatch(INode right) { var r = (NullCollectionInitializerExpression) right; return true; } } public partial class ConditionalExpression { internal override bool ShallowMatch(INode right) { var r = (ConditionalExpression) right; return true; } } public partial class ConstructorDeclaration { internal override bool ShallowMatch(INode right) { var r = (ConstructorDeclaration) right; return false; } } public partial class ConstructorInitializer : AbstractNode, INullable { internal override bool ShallowMatch(INode right) { var r = (ConstructorInitializer) right; return false; } } internal sealed partial class NullConstructorInitializer { internal override bool ShallowMatch(INode right) { var r = (NullConstructorInitializer) right; return false; } } public partial class ContinueStatement { internal override bool ShallowMatch(INode right) { var r = (ContinueStatement) right; return false; } } public partial class DeclareDeclaration { internal override bool ShallowMatch(INode right) { var r = (DeclareDeclaration) right; return false; } } public partial class DefaultValueExpression { 
internal override bool ShallowMatch(INode right) { var r = (DefaultValueExpression) right; return false; } } public partial class DelegateDeclaration { internal override bool ShallowMatch(INode right) { var r = (DelegateDeclaration) right; return this.Name == r.Name; } } public partial class DestructorDeclaration { internal override bool ShallowMatch(INode right) { var r = (DestructorDeclaration) right; return true; } } public partial class DirectionExpression { internal override bool ShallowMatch(INode right) { var r = (DirectionExpression) right; return true; } } public partial class DoLoopStatement { internal override bool ShallowMatch(INode right) { var r = (DoLoopStatement) right; return false; } } public partial class ElseIfSection { internal override bool ShallowMatch(INode right) { var r = (ElseIfSection) right; return false; } } public partial class EmptyStatement { internal override bool ShallowMatch(INode right) { var r = (EmptyStatement) right; return false; } } public partial class EndStatement { internal override bool ShallowMatch(INode right) { var r = (EndStatement) right; return false; } } public partial class EraseStatement { internal override bool ShallowMatch(INode right) { var r = (EraseStatement) right; return false; } } public partial class ErrorStatement { internal override bool ShallowMatch(INode right) { var r = (ErrorStatement) right; return false; } } public partial class EventAddRegion { internal override bool ShallowMatch(INode right) { var r = (EventAddRegion) right; return false; } } internal sealed partial class NullEventAddRegion { internal override bool ShallowMatch(INode right) { var r = (NullEventAddRegion) right; return false; } } public abstract partial class EventAddRemoveRegion : AttributedNode, INullable { internal override bool ShallowMatch(INode right) { var r = (EventAddRemoveRegion) right; return false; } } public partial class EventDeclaration { internal override bool ShallowMatch(INode right) { var r = 
(EventDeclaration) right; return false; } } public partial class EventRaiseRegion { internal override bool ShallowMatch(INode right) { var r = (EventRaiseRegion) right; return false; } } internal sealed partial class NullEventRaiseRegion { internal override bool ShallowMatch(INode right) { var r = (NullEventRaiseRegion) right; return false; } } public partial class EventRemoveRegion { internal override bool ShallowMatch(INode right) { var r = (EventRemoveRegion) right; return false; } } internal sealed partial class NullEventRemoveRegion { internal override bool ShallowMatch(INode right) { var r = (NullEventRemoveRegion) right; return false; } } public partial class ExitStatement { internal override bool ShallowMatch(INode right) { var r = (ExitStatement) right; return false; } } public partial class ExpressionRangeVariable : AbstractNode, INullable { internal override bool ShallowMatch(INode right) { var r = (ExpressionRangeVariable) right; return false; } } internal sealed partial class NullExpressionRangeVariable { internal override bool ShallowMatch(INode right) { var r = (NullExpressionRangeVariable) right; return false; } } public partial class ExpressionStatement { internal override bool ShallowMatch(INode right) { var r = (ExpressionStatement) right; return true; } } public partial class ExternAliasDirective { internal override bool ShallowMatch(INode right) { var r = (ExternAliasDirective) right; return false; } } public partial class FieldDeclaration { internal override bool ShallowMatch(INode right) { var r = (FieldDeclaration) right; return false; } } public partial class FixedStatement { internal override bool ShallowMatch(INode right) { var r = (FixedStatement) right; return false; } } public partial class ForeachStatement { internal override bool ShallowMatch(INode right) { var r = (ForeachStatement) right; return this.variableName == r.variableName; } } public partial class ForNextStatement { internal override bool ShallowMatch(INode right) { var r 
// ---------------------------------------------------------------------------
// Generated ShallowMatch overrides for the AST node types (NRefactory-style).
// Each override downcasts `right` to its own node type and compares ONLY the
// node-local, non-child properties (Label, Identifier, Op, Name, ...).
// Node types with no comparable local state return a constant:
//   true  -> two nodes of this type always shallow-match (children compared
//            elsewhere by the caller);
//   false -> nodes of this type are never treated as shallow-equal.
// NOTE(review): `r` is assigned but unused wherever a constant is returned --
// this looks machine-generated; presumably the unconditional cast doubles as a
// type check (it throws InvalidCastException on a mismatched node type) --
// TODO confirm the caller guarantees matching node types before calling.
// The first fragment below is the tail of ForNextStatement.ShallowMatch,
// whose start lies above this region.
= (ForNextStatement) right; return false; } } public partial class ForStatement { internal override bool ShallowMatch(INode right) { var r = (ForStatement) right; return true; } } public partial class GotoCaseStatement { internal override bool ShallowMatch(INode right) { var r = (GotoCaseStatement) right; return false; } } public partial class GotoStatement { internal override bool ShallowMatch(INode right) { var r = (GotoStatement) right; return this.Label == r.Label; } } public partial class IdentifierExpression { internal override bool ShallowMatch(INode right) { var r = (IdentifierExpression) right; return this.Identifier == r.Identifier; } } public partial class IfElseStatement { internal override bool ShallowMatch(INode right) { var r = (IfElseStatement) right; return true; } } public partial class IndexerDeclaration { internal override bool ShallowMatch(INode right) { var r = (IndexerDeclaration) right; return false; } } public partial class IndexerExpression { internal override bool ShallowMatch(INode right) { var r = (IndexerExpression) right; return true; } } public partial class InterfaceImplementation { internal override bool ShallowMatch(INode right) { var r = (InterfaceImplementation) right; return false; } } public partial class InvocationExpression { internal override bool ShallowMatch(INode right) { var r = (InvocationExpression) right; return true; } } public partial class LabelStatement { internal override bool ShallowMatch(INode right) { var r = (LabelStatement) right; return this.Label == r.Label; } } public partial class LambdaExpression { internal override bool ShallowMatch(INode right) { var r = (LambdaExpression) right; return false; } } public partial class LockStatement { internal override bool ShallowMatch(INode right) { var r = (LockStatement) right; return true; } } public abstract partial class MemberNode { internal override bool ShallowMatch(INode right) { var r = (MemberNode) right; return false; } } public partial class
MemberReferenceExpression { internal override bool ShallowMatch(INode right) { var r = (MemberReferenceExpression) right; return this.MemberName == r.MemberName; } }
// NOTE(review): MethodDeclaration.ShallowMatch iterates over this.HandlesClause
// and indexes r.HandlesClause[i] without first checking that both lists have
// the same Count. If r.HandlesClause is shorter this throws
// ArgumentOutOfRangeException; if it is longer, the extra handlers are
// silently ignored and the nodes may still be reported as matching.
public partial class MethodDeclaration { internal override bool ShallowMatch(INode right) { var r = (MethodDeclaration) right; for (int i = 0; i < this.HandlesClause.Count; i++) { if (this.HandlesClause[i] != r.HandlesClause[i]) return false; } return this.Name == r.Name && this.Modifier == r.Modifier; } } public partial class NamedArgumentExpression { internal override bool ShallowMatch(INode right) { var r = (NamedArgumentExpression) right; return false; } } public partial class NamespaceDeclaration { internal override bool ShallowMatch(INode right) { var r = (NamespaceDeclaration) right; return false; } } public partial class ObjectCreateExpression { internal override bool ShallowMatch(INode right) { var r = (ObjectCreateExpression) right; return true; } } public partial class OnErrorStatement { internal override bool ShallowMatch(INode right) { var r = (OnErrorStatement) right; return false; } } public partial class OperatorDeclaration { internal override bool ShallowMatch(INode right) { var r = (OperatorDeclaration) right; return false; } } public partial class OptionDeclaration { internal override bool ShallowMatch(INode right) { var r = (OptionDeclaration) right; return false; } } public partial class ParameterDeclarationExpression { internal override bool ShallowMatch(INode right) { var r = (ParameterDeclarationExpression) right; return false; } } public abstract partial class ParametrizedNode { internal override bool ShallowMatch(INode right) { var r = (ParametrizedNode) right; return false; } } public partial class ParenthesizedExpression { internal override bool ShallowMatch(INode right) { var r = (ParenthesizedExpression) right; return false; } } public partial class PointerReferenceExpression { internal override bool ShallowMatch(INode right) { var r = (PointerReferenceExpression) right;
return false; } } public partial class PropertyDeclaration { internal override bool ShallowMatch(INode right) { var r = (PropertyDeclaration) right; return false; } } public partial class PropertyGetRegion { internal override bool ShallowMatch(INode right) { var r = (PropertyGetRegion) right; return false; } } internal sealed partial class NullPropertyGetRegion { internal override bool ShallowMatch(INode right) { var r = (NullPropertyGetRegion) right; return false; } } public abstract partial class PropertyGetSetRegion : AttributedNode, INullable { internal override bool ShallowMatch(INode right) { var r = (PropertyGetSetRegion) right; return false; } } public partial class PropertySetRegion { internal override bool ShallowMatch(INode right) { var r = (PropertySetRegion) right; return false; } } internal sealed partial class NullPropertySetRegion { internal override bool ShallowMatch(INode right) { var r = (NullPropertySetRegion) right; return false; } }
// Query-expression clause nodes (C# LINQ and VB query syntax). The Null*
// variants are null-object singletons' types; note they return true while
// their non-null counterparts vary.
public partial class QueryExpression { internal override bool ShallowMatch(INode right) { var r = (QueryExpression) right; return true; } } internal sealed partial class NullQueryExpression { internal override bool ShallowMatch(INode right) { var r = (NullQueryExpression) right; return true; } } public partial class QueryExpressionAggregateClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionAggregateClause) right; return false; } } public abstract partial class QueryExpressionClause : AbstractNode, INullable { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionClause) right; return false; } } internal sealed partial class NullQueryExpressionClause { internal override bool ShallowMatch(INode right) { var r = (NullQueryExpressionClause) right; return true; } } public partial class QueryExpressionDistinctClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionDistinctClause) right; return true; } } public partial class
QueryExpressionFromClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionFromClause) right; return true; } } internal sealed partial class NullQueryExpressionFromClause { internal override bool ShallowMatch(INode right) { var r = (NullQueryExpressionFromClause) right; return true; } } public abstract partial class QueryExpressionFromOrJoinClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionFromOrJoinClause) right; return false; } } public partial class QueryExpressionGroupClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionGroupClause) right; return false; } } public partial class QueryExpressionGroupJoinVBClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionGroupJoinVBClause) right; return true; } } public partial class QueryExpressionGroupVBClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionGroupVBClause) right; return true; } } public partial class QueryExpressionJoinClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionJoinClause) right; return true; } } public partial class QueryExpressionJoinConditionVB { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionJoinConditionVB) right; return true; } } public partial class QueryExpressionJoinVBClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionJoinVBClause) right; return false; } } internal sealed partial class NullQueryExpressionJoinVBClause { internal override bool ShallowMatch(INode right) { var r = (NullQueryExpressionJoinVBClause) right; return true; } } public partial class QueryExpressionLetClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionLetClause) right; return true; } } public partial class QueryExpressionLetVBClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionLetVBClause) right; return true; } }
public partial class QueryExpressionOrderClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionOrderClause) right; return true; } } public partial class QueryExpressionOrdering { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionOrdering) right; return true; } } public partial class QueryExpressionPartitionVBClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionPartitionVBClause) right; return true; } } public partial class QueryExpressionSelectClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionSelectClause) right; return true; } } public partial class QueryExpressionSelectVBClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionSelectVBClause) right; return true; } } public partial class QueryExpressionWhereClause { internal override bool ShallowMatch(INode right) { var r = (QueryExpressionWhereClause) right; return true; } } public partial class RaiseEventStatement { internal override bool ShallowMatch(INode right) { var r = (RaiseEventStatement) right; return false; } } public partial class ReDimStatement { internal override bool ShallowMatch(INode right) { var r = (ReDimStatement) right; return false; } } public partial class RemoveHandlerStatement { internal override bool ShallowMatch(INode right) { var r = (RemoveHandlerStatement) right; return false; } } public partial class ResumeStatement { internal override bool ShallowMatch(INode right) { var r = (ResumeStatement) right; return false; } } public partial class ReturnStatement { internal override bool ShallowMatch(INode right) { var r = (ReturnStatement) right; return true; } } public partial class SizeOfExpression { internal override bool ShallowMatch(INode right) { var r = (SizeOfExpression) right; return true; } } public partial class StackAllocExpression { internal override bool ShallowMatch(INode right) { var r = (StackAllocExpression) right; return false;
} } public partial class StopStatement { internal override bool ShallowMatch(INode right) { var r = (StopStatement) right; return false; } } public partial class SwitchSection { internal override bool ShallowMatch(INode right) { var r = (SwitchSection) right; return false; } } public partial class SwitchStatement { internal override bool ShallowMatch(INode right) { var r = (SwitchStatement) right; return true; } } public partial class TemplateDefinition { internal override bool ShallowMatch(INode right) { var r = (TemplateDefinition) right; return false; } } public partial class ThisReferenceExpression { internal override bool ShallowMatch(INode right) { var r = (ThisReferenceExpression) right; return true; } } public partial class ThrowStatement { internal override bool ShallowMatch(INode right) { var r = (ThrowStatement) right; return true; } } public partial class TryCatchStatement { internal override bool ShallowMatch(INode right) { var r = (TryCatchStatement) right; return true; } } public partial class TypeDeclaration { internal override bool ShallowMatch(INode right) { var r = (TypeDeclaration) right; return false; } } public partial class TypeOfExpression { internal override bool ShallowMatch(INode right) { var r = (TypeOfExpression) right; return true; } } public partial class TypeOfIsExpression { internal override bool ShallowMatch(INode right) { var r = (TypeOfIsExpression) right; return false; } } public partial class TypeReferenceExpression { internal override bool ShallowMatch(INode right) { var r = (TypeReferenceExpression) right; return true; } } public partial class UnaryOperatorExpression { internal override bool ShallowMatch(INode right) { var r = (UnaryOperatorExpression) right; return this.Op == r.Op; } } public partial class UncheckedExpression { internal override bool ShallowMatch(INode right) { var r = (UncheckedExpression) right; return false; } } public partial class UncheckedStatement { internal override bool ShallowMatch(INode right) {
var r = (UncheckedStatement) right; return false; } } public partial class UnsafeStatement { internal override bool ShallowMatch(INode right) { var r = (UnsafeStatement) right; return false; } } public partial class Using { internal override bool ShallowMatch(INode right) { var r = (Using) right; return false; } } public partial class UsingDeclaration { internal override bool ShallowMatch(INode right) { var r = (UsingDeclaration) right; return false; } } public partial class UsingStatement { internal override bool ShallowMatch(INode right) { var r = (UsingStatement) right; return true; } } public partial class VariableDeclaration { internal override bool ShallowMatch(INode right) { var r = (VariableDeclaration) right; return this.Name == r.Name; } } public partial class WithStatement { internal override bool ShallowMatch(INode right) { var r = (WithStatement) right; return false; } } public partial class YieldStatement { internal override bool ShallowMatch(INode right) { var r = (YieldStatement) right; return true; } } }
// ---------------------------------------------------------------------------
// MSTest suite for NQuery's Expression<T> evaluator. Covers:
//  * result-type conversions (widening, boxing to object/ValueType/Enum,
//    narrowing with/without CAST),
//  * NULL handling: the default value produced for NULL per target type and
//    the NullValue substitution property,
//  * "dynamic" variants driving the same scenarios through
//    Expression<object>.TargetType instead of the generic type argument,
//  * nullable-int arithmetic and bound functions returning/taking int?.
// Base/Derived are a minimal hierarchy for cast tests; FunctionContainer
// exposes [FunctionBinding] methods registered via
// DataContext.Functions.AddFromContainer.
using System; using Microsoft.VisualStudio.TestTools.UnitTesting; using NQuery.Runtime; namespace NQuery.Tests { [TestClass] public class ExpressionTests { public class Base { } public class Derived : Base { } public static class FunctionContainer { [FunctionBinding("IntIdent")] public static int? IntIdent(int? value) { return value; } [FunctionBinding("IntNullIf")] public static int? IntNullIf(int value, int nullValue) { if (value == nullValue) return null; return value; } } [TestMethod] public void SimpleExpression() { Expression<string> expr = new Expression<string>("'Hello' + ' ' + 'World!'"); string result = expr.Evaluate(); Assert.AreEqual("Hello World!", result); } #region Static Tests [TestMethod] public void SimpleConversion() { Expression<double> expr = new Expression<double>("2 * 100"); double result = expr.Evaluate(); Assert.AreEqual(200.0d, result); } [TestMethod] public void ConversionToLessSpecificType1() { Derived derived = new Derived(); Expression<Base> expr = new Expression<Base>("Test"); expr.Parameters.Add("Test", typeof(Derived), derived); Base result = expr.Evaluate(); Assert.AreSame(derived, result); } [TestMethod] public void ConversionToLessSpecificType2() { Derived derived = new Derived(); Expression<Base> expr = new Expression<Base>("Test"); expr.Parameters.Add("Test", typeof(Base), derived); Base result = expr.Evaluate(); Assert.AreSame(derived, result); } [TestMethod] public void ConversionToLessSpecficWithBoxingToObject() { Expression<object> expr = new Expression<object>("2 + 2"); object result = expr.Evaluate(); Assert.AreEqual(4, result); } [TestMethod] public void ConversionToLessSpecficWithBoxingToValueType() { Expression<ValueType> expr = new Expression<ValueType>("2 + 2"); ValueType result = expr.Evaluate(); Assert.AreEqual(4, result); } [TestMethod] public void ConversionToLessSpecficWithBoxingToEnum() { Expression<Enum> expr = new Expression<Enum>("MyDayOfWeek"); expr.Parameters.Add("MyDayOfWeek", typeof(Enum),
DayOfWeek.Wednesday); Enum result = expr.Evaluate(); Assert.AreEqual(DayOfWeek.Wednesday, result); } [TestMethod] public void ConversionToMoreSpecificTypeWithoutCast() { Derived derived = new Derived(); Expression<Derived> expr = new Expression<Derived>("Test"); expr.Parameters.Add("Test", typeof(Base), derived); try { expr.Evaluate(); Assert.Fail("The expression should not be compiled"); } catch (CompilationException ex) { Assert.AreEqual(1, ex.CompilationErrors.Count); Assert.AreEqual("Cannot cast '@Test' from 'Base' to 'Derived'.", ex.CompilationErrors[0].Text); } } [TestMethod] public void ConversionToMoreSpecificTypeWithCast() { Derived derived = new Derived(); Expression<Derived> expr = new Expression<Derived>("CAST(Test AS 'NQuery.Tests.ExpressionTests+Derived')"); expr.Parameters.Add("Test", typeof(Base), derived); Derived result = expr.Evaluate(); Assert.AreSame(derived, result); } [TestMethod] public void ConversionToMoreSpecificTypeWithCastAndUnboxing() { Expression<int> expr = new Expression<int>("CAST(Test AS INT) + CAST(Test AS INT)"); expr.Parameters.Add("Test", typeof(object), 2); int result = expr.Evaluate(); Assert.AreEqual(4, result); } [TestMethod] public void Int32WithZeroForNull() { Expression<int> expr = new Expression<int>("NULL"); int result = expr.Evaluate(); Assert.AreEqual(0, result); } [TestMethod] public void Int32WithMinusOneForNull() { Expression<int> expr = new Expression<int>("NULL"); expr.NullValue = -1; int result = expr.Evaluate(); Assert.AreEqual(-1, result); } [TestMethod] public void NullableInt32WithNullForNull() { Expression<int?> expr = new Expression<int?>("NULL"); int? result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void NullableInt32WithMinusOneForNull() { Expression<int?> expr = new Expression<int?>("NULL"); expr.NullValue = -1; int?
result = expr.Evaluate(); Assert.AreEqual(-1, result); } [TestMethod] public void BooleanWithFalseForNull() { Expression<bool> expr = new Expression<bool>("NULL"); bool result = expr.Evaluate(); Assert.AreEqual(false, result); } [TestMethod] public void BooleanWithTrueForNull() { Expression<bool> expr = new Expression<bool>("NULL"); expr.NullValue = true; bool result = expr.Evaluate(); Assert.AreEqual(true, result); } [TestMethod] public void NullableBooleanWithNullForNull() { Expression<bool?> expr = new Expression<bool?>("NULL"); bool? result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void NullableBooleanWithFalseForNull() { Expression<bool?> expr = new Expression<bool?>("NULL"); expr.NullValue = false; bool? result = expr.Evaluate(); Assert.AreEqual(false, result); } [TestMethod] public void NullableBooleanWithTrueForNull() { Expression<bool?> expr = new Expression<bool?>("NULL"); expr.NullValue = true; bool? result = expr.Evaluate(); Assert.AreEqual(true, result); } #endregion #region Dynamic Tests [TestMethod] public void DynamicSimpleConversion() { Expression<object> expr = new Expression<object>("2 * 100"); expr.TargetType = typeof (double); double result = (double) expr.Evaluate(); Assert.AreEqual(200.0d, result); } [TestMethod] public void DynamicConversionToLessSpecificType1() { Derived derived = new Derived(); Expression<object> expr = new Expression<object>("Test"); expr.TargetType = typeof (Base); expr.Parameters.Add("Test", typeof(Derived), derived); Base result = (Base) expr.Evaluate(); Assert.AreSame(derived, result); } [TestMethod] public void DynamicConversionToLessSpecificType2() { Derived derived = new Derived(); Expression<object> expr = new Expression<object>("Test"); expr.TargetType = typeof (Base); expr.Parameters.Add("Test", typeof(Base), derived); Base result = (Base) expr.Evaluate(); Assert.AreSame(derived, result); } [TestMethod] public void DynamicConversionToMoreSpecificTypeWithoutCast() { Derived derived =
new Derived(); Expression<object> expr = new Expression<object>("Test"); expr.TargetType = typeof (Derived); expr.Parameters.Add("Test", typeof(Base), derived); try { expr.Evaluate(); Assert.Fail("The expression should not be compiled"); } catch (CompilationException ex) { Assert.AreEqual(1, ex.CompilationErrors.Count); Assert.AreEqual("Cannot cast '@Test' from 'Base' to 'Derived'.", ex.CompilationErrors[0].Text); } } [TestMethod] public void DynamicConversionToMoreSpecificTypeWithCast() { Derived derived = new Derived(); Expression<object> expr = new Expression<object>("CAST(Test AS 'NQuery.Tests.ExpressionTests+Derived')"); expr.TargetType = typeof(Derived); expr.Parameters.Add("Test", typeof(Base), derived); Derived result = (Derived) expr.Evaluate(); Assert.AreSame(derived, result); } [TestMethod] public void DynamicConversionToMoreSpecificTypeWithCastAndUnboxing() { Expression<object> expr = new Expression<object>("CAST(Test AS INT) + CAST(Test AS INT)"); expr.TargetType = typeof (int); expr.Parameters.Add("Test", typeof(object), 2); int result = (int) expr.Evaluate(); Assert.AreEqual(4, result); } [TestMethod] public void DynamicInt32WithNullForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(int); object result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void DynamicInt32WithZeroForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof (int); expr.NullValue = 0; int result = (int) expr.Evaluate(); Assert.AreEqual(0, result); } [TestMethod] public void DynamicInt32WithMinusOneForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof (int); expr.NullValue = -1; int result = (int) expr.Evaluate(); Assert.AreEqual(-1, result); } [TestMethod] public void DynamicNullableInt32WithNullForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof (int?); object result = expr.Evaluate();
Assert.AreEqual(null, result); } [TestMethod] public void DynamicNullableInt32WithMinusOneForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(int?); expr.NullValue = -1; int? result = (int?) expr.Evaluate(); Assert.AreEqual(-1, result); } [TestMethod] public void DynamicBooleanWithNullForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(bool); object result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void DynamicBooleanWithFalseForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(bool); expr.NullValue = false; bool result = (bool) expr.Evaluate(); Assert.AreEqual(false, result); } [TestMethod] public void DynamicBooleanWithTrueForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(bool); expr.NullValue = true; bool result = (bool) expr.Evaluate(); Assert.AreEqual(true, result); } [TestMethod] public void DynamicNullableBooleanWithNullForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(bool?); bool? result = (bool?) expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void DynamicNullableBooleanWithFalseForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(bool?); expr.NullValue = false; bool? result = (bool?) expr.Evaluate(); Assert.AreEqual(false, result); } [TestMethod] public void DynamicNullableBooleanWithTrueForNull() { Expression<object> expr = new Expression<object>("NULL"); expr.TargetType = typeof(bool?); expr.NullValue = true; bool? result = (bool?)
expr.Evaluate(); Assert.AreEqual(true, result); }
// NOTE(review): unlike the *WithoutCast tests above, this test has no
// Assert.Fail() after the statement that is expected to throw -- if setting
// TargetType does NOT raise ArgumentException, the test silently passes.
// Consider adding Assert.Fail(...) after the assignment inside the try block.
[TestMethod] public void DynamicTargetTypeDetectsConflictWithGenericType() { Expression<DateTime> expr = new Expression<DateTime>("NULL"); try { expr.TargetType = typeof(int); } catch (ArgumentException ex) { Assert.AreEqual("Cannot narrow down the target type to 'System.Int32' since the static expression type 'System.DateTime' would not be assignable from the new target type 'System.Int32' anymore.\r\nParameter name: value", ex.Message); } } #endregion [TestMethod] public void NullableIntsInExpression1() { Expression<int> expr = new Expression<int>(); expr.Text = "NullableInt1 + NullableInt2"; expr.Parameters.Add("NullableInt1", typeof (int?), 1); expr.Parameters.Add("NullableInt2", typeof (int?), 2); int result = expr.Evaluate(); Assert.AreEqual(3, result); } [TestMethod] public void NullableIntsInExpression2() { Expression<int?> expr = new Expression<int?>(); expr.Text = "NullableInt1 + NullableInt2"; expr.Parameters.Add("NullableInt1", typeof(int?), null); expr.Parameters.Add("NullableInt2", typeof(int?), 2); int? result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void CallingFunctionThatReturnsNullableInt1() { Expression<int?> expr = new Expression<int?>(); expr.DataContext.Functions.AddFromContainer(typeof(FunctionContainer)); expr.Text = "IntNullIf(4, 4)"; int? result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void CallingFunctionThatReturnsNullableInt2() { Expression<int?> expr = new Expression<int?>(); expr.DataContext.Functions.AddFromContainer(typeof(FunctionContainer)); expr.Text = "IntNullIf(4, 4) + 4"; int? result = expr.Evaluate(); Assert.AreEqual(null, result); } [TestMethod] public void CallingFunctionWithNullableInt() { Expression<int> expr = new Expression<int>(); expr.DataContext.Functions.AddFromContainer(typeof(FunctionContainer)); expr.Text = "IntIdent(42)"; int result = expr.Evaluate(); Assert.AreEqual(42, result); } } }
/* * MindTouch Core - open source enterprise collaborative networking * Copyright (c) 2006-2010 MindTouch Inc. * www.mindtouch.com oss@mindtouch.com * * For community documentation and downloads visit www.opengarden.org; * please review the licensing section. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * http://www.gnu.org/copyleft/gpl.html */ using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Text; using System.IO; using System.Xml.Xsl; using Autofac; using Autofac.Builder; using log4net; using MindTouch.Deki.Data; using MindTouch.Deki.Logic; using MindTouch.Deki.PubSub; using MindTouch.Deki.Script; using MindTouch.Deki.Script.Runtime.Library; using MindTouch.Deki.Search; using MindTouch.Deki.WikiManagement; using MindTouch.Dream; using MindTouch.Dream.Services.PubSub; using MindTouch.Tasking; using MindTouch.Web; using MindTouch.Xml; namespace MindTouch.Deki { using Yield = IEnumerator<IYield>; [DreamService("MindTouch Core Service", "Copyright (c) 2006-2010 MindTouch Inc.", Info = "http://developer.mindtouch.com/en/ref/MindTouch_API", SID = new[] { "sid://mindtouch.com/2006/11/dekiwiki", "http://services.mindtouch.com/deki/draft/2006/11/dekiwiki", "http://www.mindtouch.com/services/2006/11/dekiwiki" } )] [DreamServiceConfig("deki-db-server", "string?", 
"Database host name (default: localhost).")] [DreamServiceConfig("deki-db-port", "int?", "Database port (default: 3306).")] [DreamServiceConfig("deki-db-catalog", "string?", "Database table name (default: wikidb).")] [DreamServiceConfig("deki-db-user", "string?", "Database user name (default: wikiuser).")] [DreamServiceConfig("deki-db-password", "string", "Password for database user.")] [DreamServiceConfig("deki-db-options", "string", "Optional connection string parameters")] [DreamServiceConfig("deki-path", "string", "Application installation folder")] [DreamServiceConfig("deki-language", "string?", "Site language (default: \"en-US\").")] [DreamServiceConfig("deki-sitename", "string?", "Site name (default: \"MindTouch\").")] [DreamServiceConfig("admin-db-user", "string?", "Database administrator user name (default: \"root\").")] [DreamServiceConfig("admin-db-password", "string", "Database administrator password.")] [DreamServiceConfig("authtoken-salt", "string", "Private key used to generate unique auth tokens")] [DreamServiceConfig("deki-resources-path", "string?", "Path to resources folder (default: \"%deki-path%/resources\")")] [DreamServiceConfig("imagemagick-convert-path", "string", "Path to ImageMagick converter tool")] [DreamServiceConfig("imagemagick-identify-path", "string", "Path to ImageMagick identify tool")] [DreamServiceConfig("max-image-size", "int?", "Maximum supported image size in bytes or 0 for no limit (default: 0).")] [DreamServiceConfig("banned-words", "string?", "Comma separated list of banned words")] [DreamServiceBlueprint("setup/private-storage")] public partial class DekiWikiService : DekiExtService { //--- Constants --- public const string ANON_USERNAME = "Anonymous"; public const string PARAM_PAGEID = "pageid"; //Represents the ID (int) of a page public const string PARAM_REDIRECTS = "redirects"; internal const string PARAM_TITLE = "title"; //Represents the title of a page (full path) internal const string PARAM_FILEID = "fileid"; 
//Represents the ID (int) of a file attachment internal const string PARAM_FILENAME = "filename"; //Represents the filename of an attachment (without path info) internal const string PAGENOTFOUNDERROR = "Unable to find requested article"; internal const string AUTHREALM = "DekiWiki"; internal const string AUTHTOKEN_URIPARAM = "authtoken"; internal const string AUTHTOKEN_COOKIENAME = "authtoken"; internal const string AUTHTOKEN_HEADERNAME = "X-Authtoken"; internal const string IMPERSONATE_USER_QUERYNAME = "impersonateuserid"; internal const string WIKI_IDENTITY_HEADERNAME = "X-Deki-Site"; internal const string DATA_STATS_HEADERNAME = "X-Data-Stats"; private const string SID_FOR_LUCENE_INDEX = "http://services.mindtouch.com/deki/draft/2007/06/luceneindex"; private const string SID_FOR_VARNISH_SERVICE = "sid://mindtouch.com/2009/01/varnish"; public const string GRAVATAR_DEFAULT_PATH = "skins/common/images/default-avatar.png"; //--- Class Fields --- private static readonly ILog _log = LogUtils.CreateLog(); public static object SyncRoot = new object(); public static PlainTextResourceManager ResourceManager; public static DekiFont ScreenFont; private static XslCompiledTransform _extensionRenderXslt; //--- Fields --- public Dictionary<XUri, XDoc> RemoteExtensionLibraries = new Dictionary<XUri, XDoc>(); private InstanceManager _instanceManager; private NS[] _indexNamespaceWhitelist; private Plug _luceneIndex; private bool _isLocalLuceneService; private bool _isInitialized; private string _token; private LicenseData _license; private Plug _mailer; private Plug _pageSubscription; private Plug _packageUpdater; //--- Properties --- public InstanceManager Instancemanager { get { return _instanceManager; } } public override string AuthenticationRealm { get { return AUTHREALM; } } public string ImageMagickConvertPath { get { return Environment.ExpandEnvironmentVariables(Config["imagemagick-convert-path"].AsText ?? 
string.Empty); } } public string ImageMagickIdentifyPath { get { return Environment.ExpandEnvironmentVariables(Config["imagemagick-identify-path"].AsText ?? string.Empty); } } public string DekiPath { get { return Environment.ExpandEnvironmentVariables(Config["deki-path"].AsText); } } public string PrinceXmlPath { get { return Environment.ExpandEnvironmentVariables(Config["princexml-path"].AsText ?? string.Empty); } } public uint PrinceXmlTimeout { get { return Config["princexml-timeout"].AsUInt ?? 60000; } } public string PrinceXmlCssPath { get { return string.Format("{0}/skins/common/prince.css", DekiPath); } } public uint ImageMagickTimeout { get { return Config["imagemagick-timeout"].AsUInt ?? 30000; } } public string ResourcesPath { get { return Environment.ExpandEnvironmentVariables(Config["deki-resources-path"].AsText ?? Path.Combine(DekiPath, "resources")); } } public string MasterApiKey { get { return Config["api-key"].AsText ?? Config["apikey"].AsText; } } public NS[] IndexNamespaceWhitelist { get { return _indexNamespaceWhitelist; } } public Plug LuceneIndex { get { return _luceneIndex; } } public Plug Mailer { get { return _mailer; } } public Plug PageSubscription { get { return _pageSubscription; } } public Plug PackageUpdater { get { return _packageUpdater; } } internal LicenseData License { get { return _license; } set { _license = value; } } public override DreamFeatureStage[] Prologues { get { return new[] { new DreamFeatureStage("start-stats", this.PrologueStats, DreamAccess.Public), new DreamFeatureStage("set-deki-context", this.PrologueDekiContext, DreamAccess.Public) }; } } public override DreamFeatureStage[] Epilogues { get { return new[] { new DreamFeatureStage("end-stats", this.EpilogueStats, DreamAccess.Public), new DreamFeatureStage("identify-instance",this.EpilogueIdentify, DreamAccess.Public), }; } } public override ExceptionTranslator[] ExceptionTranslators { get { return new ExceptionTranslator[] { MapDekiDataException }; } } internal 
string Token { get { return _token; } } internal DekiScriptRuntime InternalScriptRuntime { get { return ScriptRuntime; } } //--- Methods --- internal Result<Plug> InternalCreateService(string path, string sid, XDoc config, Result<Plug> result) { return CreateService(path, sid, config, result); } protected override Yield Start(XDoc config, IContainer container, Result result) { yield return Coroutine.Invoke(base.Start, config, new Result()); // ensure imagemagick is setup correctly. if(string.IsNullOrEmpty(ImageMagickConvertPath)) { throw new NotImplementedException("Please set 'imagemagick-convert-path' in config to path of ImageMagick's 'convert'"); } if(!File.Exists(ImageMagickIdentifyPath)) { throw new FileNotFoundException("Cannot find ImagicMagick 'identify' binary: ", ImageMagickIdentifyPath); } if(string.IsNullOrEmpty(ImageMagickIdentifyPath)) { throw new NotImplementedException("Please set 'imagemagick-identify-path' in config to path of ImageMagick's 'identify'"); } if(!File.Exists(ImageMagickConvertPath)) { throw new FileNotFoundException("Cannot find ImagicMagick 'convert' binary: ", ImageMagickConvertPath); } // check for 'apikey' if(string.IsNullOrEmpty(MasterApiKey)) { throw new ArgumentNullException("apikey", "The global apikey is not defined. 
Please ensure that you have a global <apikey> defined in the MindTouch Core service settings xml file."); } // TODO (arnec): how to prevent injection of replacements that have license checking removed, // but still allow tests to do it (check that the replacement comes from a signed assembly // make sure we have an ISearchBL registered var builder = new ContainerBuilder(); builder.Register(c => DekiContext.Current).RequestScoped(); if(!container.IsRegistered<ISearchBL>()) { builder.Register(c => { var dekiContext = c.Resolve<DekiContext>(); var licenseState = LicenseBL.LicenseState; return new SearchBL( // Note (arnec): Need to figure out how to get the data session into the container DbUtils.CurrentSession, dekiContext.Instance.SearchCache, dekiContext.Instance.Id, dekiContext.Deki.Self.Uri, dekiContext.Deki.LuceneIndex, dekiContext.User, new DekiInstanceSettings(), new SearchQueryParser(), () => (licenseState == LicenseStateType.TRIAL || licenseState == LicenseStateType.COMMERCIAL) && LicenseBL.GetCapability("search-engine") == "adaptive" ); }).As<ISearchBL>().RequestScoped(); } builder.Build(container); // intialize instance manager _instanceManager = InstanceManager.New(this); // compute deki token ulong folded_productkey_md5 = 0; byte[] md5_bytes = StringUtil.ComputeHash(MasterApiKey, Encoding.UTF8); for(int i = 0; i < md5_bytes.Length; ++i) { folded_productkey_md5 ^= (ulong)md5_bytes[i] << (i & 7) * 8; } _token = folded_productkey_md5.ToString("X"); // setup resource manager lock(SyncRoot) { if(ResourceManager == null) { ResourceManager = new PlainTextResourceManager(ResourcesPath); ScreenFont = new DekiFont(Plug.New("resource://mindtouch.deki/MindTouch.Deki.Resources.Arial.mtdf").Get().AsBytes()); } } // initialize scripting engine XDoc scripting = Config["scripting"]; DekiScriptLibrary.InsertTextLimit = scripting["max-web-response-length"].AsLong ?? DekiScriptLibrary.InsertTextLimit; DekiScriptLibrary.MinCacheTtl = scripting["min-web-cache-ttl"].AsDouble ?? 
DekiScriptLibrary.MinCacheTtl; // set up deki pub sub (by default we override uri.publish with our own service, unless @must-use=true is specified) if(!(Config["uri.publish/@must-use"].AsBool ?? false)) { Result<Plug> pubsubResult; XDoc pubsubConfig = new XDoc("config") .Elem("uri.deki", Self.Uri.With("apikey", MasterApiKey)) .Start("downstream") .Elem("uri", PubSub.At("publish").Uri.WithoutLastSegment().At("subscribers")) .End() .Start("components") .Start("component") .Attr("type", typeof(IPubSubDispatcher).AssemblyQualifiedName) .Attr("implementation", typeof(DekiDispatcher).AssemblyQualifiedName) .End() .End() .Elem("authtoken", MasterApiKey); foreach(var cookie in Cookies.Fetch(PubSub.Uri)) { pubsubConfig.Add(cookie.AsSetCookieDocument); } yield return pubsubResult = CreateService( "pubsub", "sid://mindtouch.com/dream/2008/10/pubsub", pubsubConfig, new Result<Plug>()); PubSub = pubsubResult.Value; } // set up package updater service (unless is was passed in) if(config["packageupdater/@uri"].IsEmpty) { var packageConfig = config["packageupdater"]; packageConfig = packageConfig.IsEmpty ? 
new XDoc("config") : packageConfig.Clone(); if(packageConfig["package-path"].IsEmpty) { packageConfig.Elem("package-path", Path.Combine(Path.Combine(config["deki-path"].AsText, "packages"), "$1")); } yield return CreateService( "packageupdater", "sid://mindtouch.com/2010/04/packageupdater", new XDoc("config") .Elem("apikey", MasterApiKey) .AddNodes(packageConfig), new Result<Plug>() ); } // set up emailer service (unless it was passed in) XUri mailerUri; if(config["uri.mailer"].IsEmpty) { yield return CreateService( "mailer", "sid://mindtouch.com/2009/01/dream/email", new XDoc("config") .Elem("apikey", MasterApiKey) .AddAll(Config["smtp/*"]), new Result<Plug>() ); mailerUri = Self.Uri.At("mailer"); } else { mailerUri = config["uri.mailer"].AsUri; } _mailer = Plug.New(mailerUri); // set up the email subscription service (unless it was passed in) XUri pageSubscription; if(config["uri.page-subscription"].IsEmpty) { XDoc pagesubserviceConfig = new XDoc("config") .Elem("uri.deki", Self.Uri) .Elem("uri.emailer", mailerUri.At("message")) .Elem("resources-path", ResourcesPath) .Elem("apikey", MasterApiKey) .AddAll(Config["page-subscription/*"]); foreach(var cookie in Cookies.Fetch(mailerUri)) { pagesubserviceConfig.Add(cookie.AsSetCookieDocument); } yield return CreateService( "pagesubservice", "sid://mindtouch.com/deki/2008/11/changesubscription", pagesubserviceConfig, new Result<Plug>() ); pageSubscription = Self.Uri.At("pagesubservice"); config.Elem("uri.page-subscription", pageSubscription); } else { pageSubscription = config["uri.page-subscription"].AsUri; } _pageSubscription = Plug.New(pageSubscription); // set up package importer, if not provided XUri packageUpdater; if(config["uri.package"].IsEmpty) { yield return CreateService( "package", "sid://mindtouch.com/2009/07/package", new XDoc("config").Elem("uri.deki", Self.Uri), new Result<Plug>()); packageUpdater = Self.Uri.At("package"); config.Elem("uri.package", packageUpdater); } else { packageUpdater = 
config["uri.package"].AsUri; } _packageUpdater = Plug.New(packageUpdater); // set up lucene _luceneIndex = Plug.New(Config["indexer/@src"].AsUri); if(_luceneIndex == null) { // create the indexer service XDoc luceneIndexConfig = new XDoc("config").AddNodes(Config["indexer"]); if(MasterApiKey != null) { luceneIndexConfig.Start("apikey").Attr("hidden", true).Value(MasterApiKey).End(); } if(luceneIndexConfig["path.store"].IsEmpty) { luceneIndexConfig.Elem("path.store", Path.Combine(Path.Combine(config["deki-path"].AsText, "luceneindex"), "$1")); } yield return CreateService("luceneindex", SID_FOR_LUCENE_INDEX, luceneIndexConfig, new Result<Plug>()).Set(v => _luceneIndex = v); _isLocalLuceneService = true; } else { // push our host's pubsub service to lucene, to keep it up to date on our changes var pubsub = new XDoc("pubsub").Attr("href", PubSub); foreach(var cookie in PubSub.CookieJar.Fetch(PubSub.Uri)) { pubsub.Add(cookie.AsSetCookieDocument); } yield return _luceneIndex.At("subscriptions").PostAsync(pubsub); } _indexNamespaceWhitelist = new[] { NS.MAIN, NS.PROJECT, NS.USER, NS.TEMPLATE, NS.HELP, NS.MAIN_TALK, NS.PROJECT_TALK, NS.USER_TALK, NS.TEMPLATE_TALK, NS.HELP_TALK, NS.SPECIAL, NS.SPECIAL_TALK }; if(!string.IsNullOrEmpty(Config["indexer/namespace-whitelist"].AsText)) { List<NS> customWhitelist = new List<NS>(); foreach(string item in Config["indexer/namespace-whitelist"].AsText.Split(',')) { NS ns; if(SysUtil.TryParseEnum(item, out ns)) { customWhitelist.Add(ns); } } _indexNamespaceWhitelist = customWhitelist.ToArray(); } if(!Config["wikis/globalconfig/cache/varnish"].IsEmpty) { // create the varnish service // TODO (petee): getting the varnish config from wikis/globalconfig/cache is a hack // The frontend needs to get the max-age to send out the cache headers but we currently have no way // of getting the DekiWikiService config so we'll hack it so it comes back in GET:site/settings. 
XDoc varnishConfig = new XDoc("config") .Elem("uri.deki", Self.Uri.With("apikey", MasterApiKey)) .Elem("uri.varnish", Config["wikis/globalconfig/cache/varnish"].AsUri) .Elem("varnish-purge-delay", Config["wikis/globalconfig/cache/varnish-purge-delay"].AsInt ?? 10) .Elem("varnish-max-age", Config["wikis/globalconfig/cache/varnish-max-age"].AsInt ?? 300) .Start("apikey").Attr("hidden", true).Value(MasterApiKey).End(); yield return CreateService("varnish", SID_FOR_VARNISH_SERVICE, varnishConfig, new Result<Plug>()); } _isInitialized = true; result.Return(); } protected override Yield Stop(Result result) { _isInitialized = false; RemoteExtensionLibraries.Clear(); if(_instanceManager != null) { _instanceManager.Shutdown(); } if(_isLocalLuceneService) { _luceneIndex.DeleteAsync().Wait(); _isLocalLuceneService = false; } _indexNamespaceWhitelist = null; _token = null; _mailer = null; yield return Coroutine.Invoke(base.Stop, new Result()); result.Return(); } private T Resolve<T>(DreamContext context) { var instance = context.Container.Resolve<T>(); var public_key = instance.GetType().Assembly.GetName().GetPublicKey(); if(public_key.Length <= 0) { throw new DreamAbortException(DreamMessage.InternalError("missing assembly signature")); } if(!public_key.SequenceEqual(GetType().Assembly.GetName().GetPublicKey())) { throw new DreamAbortException(DreamMessage.InternalError("signature mismatch for DekiWikiService")); } if(!public_key.SequenceEqual(typeof(DreamService).Assembly.GetName().GetPublicKey())) { throw new DreamAbortException(DreamMessage.InternalError("signature mismatch for DreamService")); } return instance; } internal void CheckResponseCache(DreamContext context, bool longTimeout) { DekiContext deki = DekiContext.Current; if(UserBL.IsAnonymous(deki.User) && deki.Instance.CacheAnonymousOutput) { string key = string.Format("{0}.{1}", deki.User.ID, context.Uri); CheckResponseCache(key); context.CacheKeyAndTimeout = new Tuplet<object, TimeSpan>(key, longTimeout ? 
deki.Instance.CacheAnonymousOutputLong : deki.Instance.CacheAnonymousOutputShort); } } internal void EmptyResponseCacheInternal() { EmptyResponseCache(); } protected override DreamAccess DetermineAccess(DreamContext context, string key) { if(DekiContext.CurrentOrNull != null && DekiContext.Current.HasInstance) { //For features considered 'private' or 'internal', having a correct api-key or admin rights is required if(!string.IsNullOrEmpty(key) && (DekiContext.Current.Instance.ApiKey == key || MasterApiKey == key)) { return DreamAccess.Internal; } if(PermissionsBL.IsUserAllowed(DekiContext.Current.User, Permissions.ADMIN)) { return DreamAccess.Internal; } } return base.DetermineAccess(context, key); } protected override string TryGetServiceLicense(XUri sid) { string license; DateTime? expiration; TryGetServiceLicense(sid, out license, out expiration); return license; } internal bool TryGetServiceLicense(XUri sid, out string license, out DateTime? expiration) { license = null; expiration = null; if((sid != null) && (License.CurrentLicense != null)) { foreach(XDoc service in License.CurrentLicense["grants/service-license"]) { string text = service.AsText; // check if the licensed SID matches the requested SID XUri licensedSID = service["@sid"].AsUri; if(licensedSID == null) { // parse service-license contents for the SID Dictionary<string, string> values = HttpUtil.ParseNameValuePairs(text); // check if the licensed SID matches the requested SID string licensedSIDText; if(values.TryGetValue("sid", out licensedSIDText) && XUri.TryParse(licensedSIDText, out licensedSID) && sid.HasPrefix(licensedSID)) { // check if the licensed SID has an expiration date string licenseExpireText; DateTime licenseExpire; if(values.TryGetValue("expire", out licenseExpireText) && DateTime.TryParse(licenseExpireText, out licenseExpire)) { if(licenseExpire >= DateTime.UtcNow) { license = text; expiration = licenseExpire; return true; } } else { license = text; return true; } return false; } } 
else if(sid.HasPrefix(licensedSID)) { DateTime? expire = service["@date.expire"].AsDate; // check if the licensed SID has an expiration date if((expire == null) || (expire >= DateTime.UtcNow)) { license = text; expiration = expire; return true; } return false; } } } return false; } private UserBE SetContextAndAuthenticate(DreamMessage request, uint serviceid, bool autoCreateExternalUser, bool allowAnon, out bool altPassword) { UserBE user = AuthBL.Authenticate(DreamContext.Current, request, serviceid, autoCreateExternalUser, allowAnon, out altPassword); user = UserBL.UpdateUserTimestamp(user); DekiContext.Current.User = user; // check that a user token is set (it might not be set if a user logs-in directly using HTTP authentication) if(!UserBL.IsAnonymous(user) && (DekiContext.Current.AuthToken == null)) { DekiContext.Current.AuthToken = AuthBL.CreateAuthTokenForUser(user); } BanningBL.PerformBanCheck(); return user; } #region --- Prologues and Epilogues --- protected Yield PrologueDekiContext(DreamContext context, DreamMessage request, Result<DreamMessage> response) { // check if we need to skip this feature if(context.Feature.PathSegments.Length > 1 && context.Feature.PathSegments[context.Feature.ServiceUri.Segments.Length].StartsWith("@")) { response.Return(request); yield break; } // check if service has initialized if(!_isInitialized) { throw new DreamInternalErrorException("service not initialized"); } //Build the dekicontext out of current request details and info from this wiki instance's details DekiInstance instance = _instanceManager.GetWikiInstance(request); DekiContext dekiContext = new DekiContext(this, instance, request); DreamContext.Current.SetState(dekiContext); // check if instance has already been initialized if(instance != null) { if(instance.Status == DekiInstanceStatus.CREATED) { bool created; try { lock(instance) { created = (instance.Status == DekiInstanceStatus.CREATED); if(created) { // initialize instance instance.Startup(dekiContext); } 
} // BUGBUGBUG (steveb): we startup the services AFTER the lock, because of race conditions, but this needs to be fixed if(created) { instance.StartServices(); } } catch(Exception e) { created = false; instance.StatusDescription = "Initialization exception: " + e.GetCoroutineStackTrace(); instance.Log.Error("Error initializing instance", e); } if(created) { // Note (arnec) this has to happen down here, since yield cannot exist inside a try/catch // send instance settings to mailer yield return Coroutine.Invoke(ConfigureMailer, ConfigBL.GetInstanceSettingsAsDoc(false), new Result()).CatchAndLog(_log); // check whether we have an index XDoc lucenestate = null; yield return LuceneIndex.At("initstate").With("wikiid", instance.Id).Get(new Result<XDoc>()).Set(x => lucenestate = x); // Note (arnec): defaulting to true, to avoid accidental re-index on false positive if(!(lucenestate["@exists"].AsBool ?? true)) { _log.DebugFormat("instance '{0}' doesn't have an index yet, forcing a rebuild", instance.Id); yield return Self.At("site", "search", "rebuild").With("apikey", instance.ApiKey).Post(new Result<DreamMessage>()); } } } instance.CheckInstanceIsReady(); if(instance.Status == DekiInstanceStatus.ABANDONED) { //If instance was abandoned (failed to initialize), error out. 
throw new DreamInternalErrorException(string.Format("wiki '{0}' has failed to initialize or did not start up properly: {1}", instance.Id, instance.StatusDescription)); } } // intialize culture/language + user if(instance != null) { // if not already started, start the database session for the current request if(DbUtils.CurrentSession == null) { DbUtils.CurrentSession = instance.SessionFactory.CreateSession(); } if(context.Culture.IsNeutralCulture || context.Culture.Equals(System.Globalization.CultureInfo.InvariantCulture)) { try { context.Culture = new System.Globalization.CultureInfo(instance.SiteLanguage); } catch { // in case the site language is invalid, default to US English context.Culture = new System.Globalization.CultureInfo("en-US"); } } if(!StringUtil.EqualsInvariantIgnoreCase(context.Feature.Signature, "users/authenticate")) { bool allowAnon = StringUtil.EqualsInvariantIgnoreCase(context.Uri.GetParam("authenticate", "false"), "false"); bool altPassword; SetContextAndAuthenticate(request, 0, false, allowAnon, out altPassword); } // TODO (steveb): we should update the culture based on the user's preferences } // continue processing response.Return(request); yield break; } private Yield PrologueStats(DreamContext context, DreamMessage request, Result<DreamMessage> response) { // initialize stopwatch timer System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch(); sw.Start(); context.SetState("stats-stopwatch", sw); // continue processing response.Return(request); yield break; } private Yield EpilogueStats(DreamContext context, DreamMessage request, Result<DreamMessage> response) { // check if we need to skip this feature if(context.Feature.PathSegments.Length > 1 && context.Feature.PathSegments[context.Feature.ServiceUri.Segments.Length].StartsWith("@")) { response.Return(request); yield break; } // check if the epilogue was called without a deki instance (e.g. 
during initialization or with an invalid hostname) if(DekiContext.CurrentOrNull == null || !DekiContext.Current.HasInstance) { response.Return(request); yield break; } // compute execution time TimeSpan executionTime = TimeSpan.Zero; System.Diagnostics.Stopwatch elapsedTimeSw = context.GetState<System.Diagnostics.Stopwatch>("stats-stopwatch"); if(elapsedTimeSw != null) { elapsedTimeSw.Stop(); executionTime = TimeSpan.FromMilliseconds(elapsedTimeSw.ElapsedMilliseconds); } // increate instance hit counter DekiContext.Current.Instance.IncreasetHitCounter(request.IsSuccessful, executionTime); // if logging is enabled, grab the response text if the request was not successful string exception = string.Empty; XDoc activeConfig = DekiContext.Current.Instance.Config ?? Config; bool loggingEnabled = !String.IsNullOrEmpty(activeConfig["dblogging-conn-string"].AsText); if(loggingEnabled) { if(!request.IsSuccessful) { exception = request.AsText(); } } //Build overall request stats header StringBuilder statsHeaderSb = new StringBuilder(); statsHeaderSb.AppendFormat("{0}={1}; ", "request-time-ms", (int)executionTime.TotalMilliseconds); //Append data stats IDekiDataStats sessionStats = DbUtils.CurrentSession as IDekiDataStats; Dictionary<string, string> stats; if(sessionStats != null) { stats = sessionStats.GetStats(); if(stats != null) { foreach(KeyValuePair<string, string> kvp in stats) { statsHeaderSb.AppendFormat("{0}={1}; ", kvp.Key, kvp.Value); } } } //Append context stats stats = DekiContext.Current.Stats; if(stats.Count > 0) { foreach(KeyValuePair<string, string> kvp in stats) { statsHeaderSb.AppendFormat("{0}={1}; ", kvp.Key, kvp.Value); } } string requestStats = statsHeaderSb.ToString(); request.Headers.Add(DATA_STATS_HEADERNAME, requestStats); DekiContext.Current.Instance.Log.InfoFormat("Finished [{0}:{1}] [{2}] {3}", context.Verb, context.Uri.Path, request.Status.ToString(), requestStats); // check if there is a catalog to record per-request information 
if(loggingEnabled) { try { //Write request/response info to stats table after sending response back to client UserBE u = DekiContext.Current.User; string username = u == null ? string.Empty : u.Name; DreamMessage initialRequest = DekiContext.Current.Request; string hostheader = (initialRequest == null) ? string.Empty : initialRequest.Headers.Host ?? string.Empty; DbUtils.CurrentSession.RequestLog_Insert(context.Uri, context.Verb, hostheader, context.Request.Headers.DreamOrigin, DekiContext.Current.Instance.Id, context.Feature.Signature, request.Status, username, (uint)executionTime.TotalMilliseconds, exception); } catch(Exception x) { DekiContext.Current.Instance.Log.Error(string.Format("Failed to write request to db log. [Instance:{0}; Feature:{1}; Verb:{2}; Status:{3}; Duration:{4};]", DekiContext.Current.Instance.Id, context.Feature.Signature, context.Verb, (int)request.Status, executionTime), x); } } // continue processing response.Return(request); yield break; } private Yield EpilogueIdentify(DreamContext context, DreamMessage request, Result<DreamMessage> response) { // check if the epilogue was called without a deki instance (e.g. 
during initialization or with an invalid hostname) if(DekiContext.CurrentOrNull == null || !DekiContext.Current.HasInstance) { response.Return(request); yield break; } // end the database session for the current request if(null != DbUtils.CurrentSession) { DbUtils.CurrentSession.Dispose(); DbUtils.CurrentSession = null; } // attach our wikiid request.Headers.Add(WIKI_IDENTITY_HEADERNAME, "id=" + StringUtil.QuoteString(DekiContext.Current.Instance.Id)); response.Return(request); yield break; } #endregion private Yield ConfigureMailer(XDoc settings, Result result) { if(string.IsNullOrEmpty(settings["mail/smtp-servers"].AsText)) { yield return _mailer.At("configuration", DekiContext.Current.Instance.Id).DeleteAsync(); } else { string port = settings["mail/smtp-port"].AsText; yield return _mailer.At("configuration", DekiContext.Current.Instance.Id).PutAsync(new XDoc("smtp") .Elem("smtp-host", settings["mail/smtp-servers"].AsText) .Elem("use-ssl", StringUtil.EndsWithInvariantIgnoreCase(settings["mail/smtp-secure"].AsText ?? string.Empty, "ssl") || StringUtil.EndsWithInvariantIgnoreCase(settings["mail/smtp-secure"].AsText ?? string.Empty, "tls")) .Elem("smtp-port", string.IsNullOrEmpty(port) ? null : port) .Elem("smtp-auth-user", settings["mail/smtp-username"].AsText) .Elem("smtp-auth-password", settings["mail/smtp-password"].AsText)); } result.Return(); yield break; } private DreamMessage MapDekiDataException(DreamContext context, Exception exception) { return DekiExceptionMapper.Map(exception); } } }
#if !SILVERLIGHT
using System;
using System.Reflection;
using System.Reflection.Emit;

namespace EntityChange.Reflection
{
    /// <summary>
    /// Builds late-bound delegates for methods, constructors, properties and fields
    /// using lightweight <see cref="DynamicMethod"/> IL generation, avoiding the
    /// per-call overhead of reflection.
    /// </summary>
    internal static class DynamicMethodFactory
    {
        /// <summary>
        /// Creates a delegate that invokes <paramref name="methodInfo"/> on a boxed
        /// instance with a boxed argument array, returning the boxed result
        /// (<c>null</c> for <c>void</c> methods). By-ref parameter results are
        /// written back into the argument array after the call.
        /// </summary>
        /// <param name="methodInfo">The method to wrap.</param>
        /// <returns>A delegate of the form <c>(instance, args) => result</c>.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="methodInfo"/> is <c>null</c>.</exception>
        public static Func<object, object[], object> CreateMethod(MethodInfo methodInfo)
        {
            if (methodInfo == null)
                throw new ArgumentNullException(nameof(methodInfo));

            var dynamicMethod = CreateDynamicMethod(
                "Dynamic" + methodInfo.Name,
                typeof(object),
                new[] { typeof(object), typeof(object[]) },
                methodInfo.DeclaringType);

            var generator = dynamicMethod.GetILGenerator();

            // For by-ref parameters work with the element type; the ref-ness is
            // reintroduced below via Ldloca_S when pushing call arguments.
            var parameters = methodInfo.GetParameters();
            var paramTypes = new Type[parameters.Length];
            for (int i = 0; i < paramTypes.Length; i++)
            {
                var parameterInfo = parameters[i];
                if (parameterInfo.ParameterType.IsByRef)
                    paramTypes[i] = parameterInfo.ParameterType.GetElementType();
                else
                    paramTypes[i] = parameterInfo.ParameterType;
            }

            // Copy each boxed argument out of the object[] into a typed local.
            var locals = new LocalBuilder[paramTypes.Length];
            for (int i = 0; i < paramTypes.Length; i++)
                locals[i] = generator.DeclareLocal(paramTypes[i], true);

            for (int i = 0; i < paramTypes.Length; i++)
            {
                generator.Emit(OpCodes.Ldarg_1);
                generator.FastInt(i);
                generator.Emit(OpCodes.Ldelem_Ref);
                generator.UnboxIfNeeded(paramTypes[i]);
                generator.Emit(OpCodes.Stloc, locals[i]);
            }

            if (!methodInfo.IsStatic)
                generator.Emit(OpCodes.Ldarg_0);

            // Push the arguments; by-ref parameters get the local's address.
            for (int i = 0; i < paramTypes.Length; i++)
            {
                if (parameters[i].ParameterType.IsByRef)
                    generator.Emit(OpCodes.Ldloca_S, locals[i]);
                else
                    generator.Emit(OpCodes.Ldloc, locals[i]);
            }

            if (methodInfo.IsStatic)
                generator.EmitCall(OpCodes.Call, methodInfo, null);
            else
                generator.EmitCall(OpCodes.Callvirt, methodInfo, null);

            if (methodInfo.ReturnType == typeof(void))
                generator.Emit(OpCodes.Ldnull);
            else
                generator.BoxIfNeeded(methodInfo.ReturnType);

            // Write any by-ref results back into the caller's argument array.
            for (int i = 0; i < paramTypes.Length; i++)
            {
                if (!parameters[i].ParameterType.IsByRef)
                    continue;

                generator.Emit(OpCodes.Ldarg_1);
                generator.FastInt(i);
                generator.Emit(OpCodes.Ldloc, locals[i]);

                var localType = locals[i].LocalType;
                if (localType.GetTypeInfo().IsValueType)
                    generator.Emit(OpCodes.Box, localType);

                generator.Emit(OpCodes.Stelem_Ref);
            }

            generator.Emit(OpCodes.Ret);

            return dynamicMethod.CreateDelegate(typeof(Func<object, object[], object>)) as Func<object, object[], object>;
        }

        /// <summary>
        /// Creates a delegate that constructs an instance of <paramref name="type"/>
        /// via its parameterless constructor, or returns a boxed default value for
        /// value types (which have no explicit default constructor).
        /// </summary>
        /// <param name="type">The type to instantiate.</param>
        /// <exception cref="ArgumentNullException"><paramref name="type"/> is <c>null</c>.</exception>
        /// <exception cref="InvalidOperationException">A reference type has no parameterless constructor.</exception>
        public static Func<object> CreateConstructor(Type type)
        {
            if (type == null)
                throw new ArgumentNullException(nameof(type));

            var dynamicMethod = CreateDynamicMethod(
                "Create" + type.FullName,
                typeof(object),
                Type.EmptyTypes,
                type);

            dynamicMethod.InitLocals = true;
            var generator = dynamicMethod.GetILGenerator();

            var typeInfo = type.GetTypeInfo();
            if (typeInfo.IsValueType)
            {
                // Box a zero-initialized local (InitLocals guarantees the zeroing).
                generator.DeclareLocal(type);
                generator.Emit(OpCodes.Ldloc_0);
                generator.Emit(OpCodes.Box, type);
            }
            else
            {
                var constructorInfo = typeInfo.GetConstructor(Type.EmptyTypes);
                if (constructorInfo == null)
                    throw new InvalidOperationException($"Could not get constructor for {type}.");

                generator.Emit(OpCodes.Newobj, constructorInfo);
            }

            generator.Return();

            return dynamicMethod.CreateDelegate(typeof(Func<object>)) as Func<object>;
        }

        /// <summary>
        /// Creates a delegate that reads <paramref name="propertyInfo"/> from a boxed
        /// instance and returns the boxed value, or <c>null</c> when the property has
        /// no getter.
        /// </summary>
        /// <param name="propertyInfo">The property to read.</param>
        /// <exception cref="ArgumentNullException"><paramref name="propertyInfo"/> is <c>null</c>.</exception>
        public static Func<object, object> CreateGet(PropertyInfo propertyInfo)
        {
            if (propertyInfo == null)
                throw new ArgumentNullException(nameof(propertyInfo));

            if (!propertyInfo.CanRead)
                return null;

            var methodInfo = propertyInfo.GetGetMethod(true);
            if (methodInfo == null)
                return null;

            var dynamicMethod = CreateDynamicMethod(
                "Get" + propertyInfo.Name,
                typeof(object),
                new[] { typeof(object) },
                propertyInfo.DeclaringType);

            var generator = dynamicMethod.GetILGenerator();

            if (!methodInfo.IsStatic)
                generator.PushInstance(propertyInfo.DeclaringType);

            generator.CallMethod(methodInfo);
            generator.BoxIfNeeded(propertyInfo.PropertyType);
            generator.Return();

            return dynamicMethod.CreateDelegate(typeof(Func<object, object>)) as Func<object, object>;
        }

        /// <summary>
        /// Creates a delegate that reads <paramref name="fieldInfo"/> from a boxed
        /// instance and returns the boxed value. The instance argument is ignored for
        /// static fields.
        /// </summary>
        /// <param name="fieldInfo">The field to read.</param>
        /// <exception cref="ArgumentNullException"><paramref name="fieldInfo"/> is <c>null</c>.</exception>
        public static Func<object, object> CreateGet(FieldInfo fieldInfo)
        {
            if (fieldInfo == null)
                throw new ArgumentNullException(nameof(fieldInfo));

            var dynamicMethod = CreateDynamicMethod(
                "Get" + fieldInfo.Name,
                typeof(object),
                new[] { typeof(object) },
                fieldInfo.DeclaringType);

            var generator = dynamicMethod.GetILGenerator();

            if (fieldInfo.IsStatic)
            {
                // BUG FIX: Ldsfld already pushes the field value; the previous code
                // then emitted Ldfld unconditionally, treating that value as an
                // instance reference and producing invalid IL for static fields.
                generator.Emit(OpCodes.Ldsfld, fieldInfo);
            }
            else
            {
                generator.PushInstance(fieldInfo.DeclaringType);
                generator.Emit(OpCodes.Ldfld, fieldInfo);
            }

            generator.BoxIfNeeded(fieldInfo.FieldType);
            generator.Return();

            return dynamicMethod.CreateDelegate(typeof(Func<object, object>)) as Func<object, object>;
        }

        /// <summary>
        /// Creates a delegate that writes a boxed value to <paramref name="propertyInfo"/>
        /// on a boxed instance, or <c>null</c> when the property has no setter.
        /// </summary>
        /// <param name="propertyInfo">The property to write.</param>
        /// <exception cref="ArgumentNullException"><paramref name="propertyInfo"/> is <c>null</c>.</exception>
        public static Action<object, object> CreateSet(PropertyInfo propertyInfo)
        {
            if (propertyInfo == null)
                throw new ArgumentNullException(nameof(propertyInfo));

            if (!propertyInfo.CanWrite)
                return null;

            var methodInfo = propertyInfo.GetSetMethod(true);
            if (methodInfo == null)
                return null;

            var dynamicMethod = CreateDynamicMethod(
                "Set" + propertyInfo.Name,
                null,
                new[] { typeof(object), typeof(object) },
                propertyInfo.DeclaringType);

            var generator = dynamicMethod.GetILGenerator();

            if (!methodInfo.IsStatic)
                generator.PushInstance(propertyInfo.DeclaringType);

            generator.Emit(OpCodes.Ldarg_1);
            generator.UnboxIfNeeded(propertyInfo.PropertyType);
            generator.CallMethod(methodInfo);
            generator.Return();

            return dynamicMethod.CreateDelegate(typeof(Action<object, object>)) as Action<object, object>;
        }

        /// <summary>
        /// Creates a delegate that writes a boxed value to <paramref name="fieldInfo"/>
        /// on a boxed instance. The instance argument is ignored for static fields.
        /// </summary>
        /// <param name="fieldInfo">The field to write.</param>
        /// <exception cref="ArgumentNullException"><paramref name="fieldInfo"/> is <c>null</c>.</exception>
        public static Action<object, object> CreateSet(FieldInfo fieldInfo)
        {
            if (fieldInfo == null)
                throw new ArgumentNullException(nameof(fieldInfo));

            var dynamicMethod = CreateDynamicMethod(
                "Set" + fieldInfo.Name,
                null,
                new[] { typeof(object), typeof(object) },
                fieldInfo.DeclaringType);

            var generator = dynamicMethod.GetILGenerator();

            if (fieldInfo.IsStatic)
            {
                // BUG FIX: static fields must be stored with Stsfld and take no
                // instance on the stack; the previous code loaded the field value
                // (Ldsfld) as if it were an instance and then used Stfld.
                generator.Emit(OpCodes.Ldarg_1);
                generator.UnboxIfNeeded(fieldInfo.FieldType);
                generator.Emit(OpCodes.Stsfld, fieldInfo);
            }
            else
            {
                generator.PushInstance(fieldInfo.DeclaringType);
                generator.Emit(OpCodes.Ldarg_1);
                generator.UnboxIfNeeded(fieldInfo.FieldType);
                generator.Emit(OpCodes.Stfld, fieldInfo);
            }

            generator.Return();

            return dynamicMethod.CreateDelegate(typeof(Action<object, object>)) as Action<object, object>;
        }

        /// <summary>
        /// Creates a <see cref="DynamicMethod"/> logically owned by <paramref name="owner"/>.
        /// Interfaces cannot own dynamic methods, so for an interface owner the method is
        /// associated with the owner's module instead. Visibility checks are skipped in
        /// both cases so non-public members can be accessed.
        /// </summary>
        private static DynamicMethod CreateDynamicMethod(string name, Type returnType, Type[] parameterTypes, Type owner)
        {
            var typeInfo = owner.GetTypeInfo();

            return !typeInfo.IsInterface
                ? new DynamicMethod(name, returnType, parameterTypes, owner, true)
                : new DynamicMethod(name, returnType, parameterTypes, typeInfo.Assembly.ManifestModule, true);
        }
    }
}

#endif
using System;
using Xunit;
using Ensure.Core.Ensure;
using System.Collections.Generic;
using System.IO;

namespace Ensure.Core.Ensure.Tests
{
    /// <summary>
    /// Unit tests for the <c>Ensure</c> guard-clause helpers: sign checks, null/empty
    /// checks, file-existence checks and custom-condition checks.
    /// </summary>
    public class EnsureTests
    {
        // Arbitrary non-null values for IsNotNull positive cases.
        public static IEnumerable<object[]> GetObjects
        {
            get
            {
                return new[]
                {
                    new object[] { new List<int>() },
                    new object[] { 15 },
                    new object[] { new object() },
                    new object[] { new Dictionary<double, double>() }
                };
            }
        }

        // Collections with no elements, of various element types.
        public static IEnumerable<object[]> GetEmptyCollections
        {
            get
            {
                return new[]
                {
                    new object[] { new List<int>() },
                    new object[] { new List<float>() },
                    new object[] { new List<string>() },
                    new object[] { new object[] { } }
                };
            }
        }

        // Collections with at least one element, of various element types.
        public static IEnumerable<object[]> GetNotEmptyCollections
        {
            get
            {
                return new[]
                {
                    new object[] { new List<int>() { 12 } },
                    new object[] { new List<float>() { (float)23.5 } },
                    new object[] { new List<string>() { "12" } },
                    new object[] { new object[] { "12" } }
                };
            }
        }

        // (condition, value) pairs where the condition rejects the value.
        public static IEnumerable<object[]> GetCustomFailedChecks
        {
            get
            {
                return new[]
                {
                    new object[] { (Func<double, bool>)((v) => v > 0), -1 },
                    new object[] { (Func<double, bool>)((v) => v > 0), 0 },
                    new object[] { (Func<string, bool>)((v) => !string.IsNullOrEmpty(v)), (string)null },
                    new object[] { (Func<string, bool>)((v) => !string.IsNullOrEmpty(v)), "" }
                };
            }
        }

        // (condition, value) pairs where the condition accepts the value.
        public static IEnumerable<object[]> GetCustomPassedChecks
        {
            get
            {
                return new[]
                {
                    new object[] { (Func<double, bool>)((v) => v > 0), 10 },
                    new object[] { (Func<double, bool>)((v) => v > 0), 10.53 },
                    new object[] { (Func<string, bool>)((v) => !string.IsNullOrEmpty(v)), "12" },
                    new object[] { (Func<string, bool>)((v) => !string.IsNullOrEmpty(v)), " " }
                };
            }
        }

        // Smoke test that the test infrastructure itself runs.
        [Fact]
        public void ShouldAlwaysWork()
        {
            Assert.True(true);
        }

        [Theory]
        [InlineData(1)]
        [InlineData(2.23)]
        [InlineData(0.5)]
        [InlineData(1e-5)]
        [InlineData((float)15.45)]
        public void ShouldWorkWhenGoodValueIsPassedToIsPositive(double value)
        {
            var ex = Record.Exception(() => Ensure.IsPositive(nameof(value), value));
            Assert.Null(ex);
        }

        [Theory]
        [InlineData(0)]
        [InlineData(-1)]
        [InlineData(-0.5)]
        [InlineData(-1e-5)]
        [InlineData((float)-15.45)]
        public void ShouldThrowExceptionWhenWrongValueWasPassedToIsPositive(double testValue)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsPositive(nameof(testValue), testValue));
            Assert.True(ex.Message.Contains(nameof(testValue)) && ex.Message.Contains("positive"));
        }

        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(2.23)]
        [InlineData(0.5)]
        [InlineData(1e-5)]
        [InlineData((float)15.45)]
        public void ShouldWorkWhenGoodValueIsPassedToIsNonNegative(double testValue)
        {
            var ex = Record.Exception(() => Ensure.IsNonNegative(nameof(testValue), testValue));
            Assert.Null(ex);
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(-2.23)]
        [InlineData(-0.5)]
        [InlineData(-1e-5)]
        [InlineData((float)-15.45)]
        public void ShouldThrowExceptionWhenWrongValueWasPassedToIsNonNegative(double testValue)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsNonNegative(nameof(testValue), testValue));
            Assert.True(ex.Message.Contains(nameof(testValue)) && ex.Message.Contains("nonnegative"));
        }

        [Theory]
        [InlineData(-1)]
        [InlineData(-2.23)]
        [InlineData(-0.5)]
        [InlineData(-1e-5)]
        [InlineData((float)-15.45)]
        public void ShouldWorkWhenGoodValueIsPassedToIsNegative(double testValue)
        {
            var ex = Record.Exception(() => Ensure.IsNegative(nameof(testValue), testValue));
            Assert.Null(ex);
        }

        [Theory]
        [InlineData(1)]
        [InlineData(2.23)]
        [InlineData(0.5)]
        [InlineData(1e-5)]
        [InlineData((float)15.45)]
        public void ShouldThrowExceptionWhenWrongValueWasPassedToIsNegative(double testValue)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsNegative(nameof(testValue), testValue));
            Assert.True(ex.Message.Contains(nameof(testValue)) && ex.Message.Contains("negative"));
        }

        [Theory]
        [InlineData(0)]
        [InlineData(-1)]
        [InlineData(-2.23)]
        [InlineData(-0.5)]
        [InlineData(-1e-5)]
        [InlineData((float)-15.45)]
        public void ShouldWorkWhenGoodValueIsPassedToIsNonPositive(double testValue)
        {
            var ex = Record.Exception(() => Ensure.IsNonPositive(nameof(testValue), testValue));
            Assert.Null(ex);
        }

        [Theory]
        [InlineData(1)]
        [InlineData(2.23)]
        [InlineData(0.5)]
        [InlineData(1e-5)]
        [InlineData((float)15.45)]
        public void ShouldThrowExceptionWhenWrongValueWasPassedToIsNonPositive(double testValue)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsNonPositive(nameof(testValue), testValue));
            Assert.True(ex.Message.Contains(nameof(testValue)) && ex.Message.Contains("nonpositive"));
        }

        [Theory]
        [MemberData(nameof(GetObjects))]
        public void ShouldWorkWhenGoodValueIsPassedToIsNotNull(object testValue)
        {
            var ex = Record.Exception(() => Ensure.IsNotNull(nameof(testValue), testValue));
            Assert.Null(ex);
        }

        [Fact]
        public void ShouldThrowExceptionWhenNullValueIsPassedToIsNotNull()
        {
            object testValue = null;
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsNotNull(nameof(testValue), testValue));
            Assert.True(ex.Message.Contains(nameof(testValue)) && ex.Message.Contains("null"));
        }

        [Theory]
        [InlineData("12")]
        [InlineData(" ")]
        public void ShouldWorkWhenGoodValueIsPassedToIsNotNullOrEmpty(string testValue)
        {
            var ex = Record.Exception(() => Ensure.IsNotNullOrEmpty(nameof(testValue), testValue));
            Assert.Null(ex);
        }

        [Theory]
        [InlineData(null)]
        [InlineData("")]
        public void ShouldThrowExceptionWhenNullStringIsPassedToIsNotNullOrEmpty(string testValue)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsNotNullOrEmpty(nameof(testValue), testValue));
            Assert.True(ex.Message.Contains(nameof(testValue)) && (ex.Message.Contains("null") || ex.Message.Contains("empty")));
        }

        [Theory]
        [InlineData("notexistingpath/notexistingfile")]
        [InlineData("")]
        [InlineData("12")]
        [InlineData(null)]
        public void ShouldThrowExceptionWhenNotExistingFileWasPassedToFileExists(string wrongPath)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.FileExists(nameof(wrongPath), wrongPath));
            Assert.True(ex.Message.Contains(nameof(wrongPath)) && ex.Message.Contains("exist"));
        }

        [Theory]
        [MemberData(nameof(GetNotEmptyCollections))]
        public void ShouldWorkWhenNonEmptyCollectionIsGivenToIsNotEmpty<T>(IEnumerable<T> nonEmptyCollection)
        {
            var ex = Record.Exception(() => Ensure.IsNotEmpty(nameof(nonEmptyCollection), nonEmptyCollection));
            Assert.Null(ex);
        }

        [Theory]
        [MemberData(nameof(GetEmptyCollections))]
        public void ShouldThrowExceptionWhenEmptyCollectionIsGivenToIsNotEmpty<T>(IEnumerable<T> emptyCollection)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsNotEmpty<T>(nameof(emptyCollection), emptyCollection));
            Assert.True(ex.Message.Contains(nameof(emptyCollection)) && ex.Message.Contains("empty"));
        }

        [Theory]
        [MemberData(nameof(GetNotEmptyCollections))]
        public void ShouldThrowExceptionWhenNonEmptyCollectionIsGivenToIsEmpty<T>(IEnumerable<T> nonEmptyCollection)
        {
            var ex = Assert.Throws<EnsureException>(() => Ensure.IsEmpty(nameof(nonEmptyCollection), nonEmptyCollection));
            Assert.True(ex.Message.Contains(nameof(nonEmptyCollection)) && ex.Message.Contains("empty"));
        }

        [Theory]
        [MemberData(nameof(GetCustomPassedChecks))]
        public void ShouldWorkWhenCustomConditionIsSatisfied<T>(Func<T, bool> condition, T value)
        {
            var ex = Record.Exception(() => Ensure.SatisfiesCondition(nameof(value), value, condition, "exSatCheck"));
            Assert.Null(ex);
        }

        [Theory]
        [MemberData(nameof(GetCustomFailedChecks))]
        public void ShouldThrowExceptionWhenCustomConditionIsNotSatisfied<T>(Func<T, bool> condition, T value)
        {
            var ex1 = Assert.Throws<EnsureException>(() => Ensure.SatisfiesCondition(nameof(value), value, condition, "exSatCheck"));
            Assert.True(ex1.Message.Contains(nameof(value)) && ex1.Message.Contains("exSatCheck"));
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Linq;
using System.Windows;
using System.Windows.Media;
using Microsoft.CodeAnalysis.Editor;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Formatting;
using Microsoft.VisualStudio.Text.Outlining;
using Microsoft.VisualStudio.Text.Projection;
using Microsoft.VisualStudio.Utilities;
using Roslyn.Utilities;

namespace Microsoft.VisualStudio.LanguageServices.Implementation.DebuggerIntelliSense
{
    internal partial class DebuggerTextView : IWpfTextView, IDebuggerTextView
    {
        /// <summary>
        /// The actual debugger view of the watch or immediate window that we're wrapping.
        /// Nearly every member of this class simply forwards to it; the interesting
        /// state we own is <see cref="BufferGraph"/>.
        /// </summary>
        private readonly IWpfTextView _innerTextView;

        public DebuggerTextView(
            IWpfTextView innerTextView,
            IBufferGraph bufferGraph,
            bool isImmediateWindow)
        {
            _innerTextView = innerTextView;
            this.BufferGraph = bufferGraph;
            this.IsImmediateWindow = isImmediateWindow;
        }

        /// <summary>
        /// We basically replace the innerTextView's BufferGraph with our own custom projection graph
        /// that projects the immediate window contents into a context buffer:
        ///
        ///        (1)
        ///     (2)   (5)
        ///     (3)   (6)
        ///     (4)
        /// (1) Top level projection buffer - the subject buffer used by intellisense
        /// (2/3) Currently a double projection buffer combo that elides away the ? in the immediate window, and may add some
        /// boilerplate code to force an expression context.
        /// (4) innerTextView.TextBuffer, what the user actually sees in the watch/immediate windows
        /// (5) A read-only projection of (6)
        /// (6) The context buffer which is typically a source file
        ///
        /// </summary>
        public IBufferGraph BufferGraph { get; }

        public bool IsImmediateWindow { get; }

        // Straight pass-throughs to the wrapped view.
        public ITextCaret Caret => _innerTextView.Caret;

        public bool HasAggregateFocus => _innerTextView.HasAggregateFocus;

        public bool InLayout => _innerTextView.InLayout;

        public bool IsClosed => _innerTextView.IsClosed;

        public bool IsMouseOverViewOrAdornments => _innerTextView.IsMouseOverViewOrAdornments;

        public double LineHeight => _innerTextView.LineHeight;

        public double MaxTextRightCoordinate => _innerTextView.MaxTextRightCoordinate;

        public IEditorOptions Options => _innerTextView.Options;

        public PropertyCollection Properties => _innerTextView.Properties;

        public ITrackingSpan ProvisionalTextHighlight
        {
            get { return _innerTextView.ProvisionalTextHighlight; }
            set { _innerTextView.ProvisionalTextHighlight = value; }
        }

        public ITextViewRoleSet Roles => _innerTextView.Roles;

        public ITextSelection Selection => _innerTextView.Selection;

        public ITextViewLineCollection TextViewLines => _innerTextView.TextViewLines;

        public ITextViewModel TextViewModel => _innerTextView.TextViewModel;

        public IViewScroller ViewScroller => _innerTextView.ViewScroller;

        public double ViewportBottom => _innerTextView.ViewportBottom;

        public double ViewportHeight => _innerTextView.ViewportHeight;

        public double ViewportLeft
        {
            get { return _innerTextView.ViewportLeft; }
            set { _innerTextView.ViewportLeft = value; }
        }

        public double ViewportRight => _innerTextView.ViewportRight;

        public double ViewportTop => _innerTextView.ViewportTop;

        public double ViewportWidth => _innerTextView.ViewportWidth;

        public ITextSnapshot VisualSnapshot => _innerTextView.VisualSnapshot;

        // Not meaningful for the debugger view.
        public ITextDataModel TextDataModel
        {
            get { throw new NotSupportedException(); }
        }

        public ITextBuffer TextBuffer => _innerTextView.TextBuffer;

        public ITextSnapshot TextSnapshot => _innerTextView.TextSnapshot;

        public FrameworkElement VisualElement => _innerTextView.VisualElement;

        public Brush Background
        {
            get { return _innerTextView.Background; }
            set { _innerTextView.Background = value; }
        }

        IWpfTextViewLineCollection IWpfTextView.TextViewLines => _innerTextView.TextViewLines;

        public IFormattedLineSource FormattedLineSource => _innerTextView.FormattedLineSource;

        public ILineTransformSource LineTransformSource => _innerTextView.LineTransformSource;

        public double ZoomLevel
        {
            get { return _innerTextView.ZoomLevel; }
            set { _innerTextView.ZoomLevel = value; }
        }

        // Operations that must not be performed through this wrapper.
        public void Close()
        {
            throw new NotSupportedException();
        }

        public void DisplayTextLineContainingBufferPosition(SnapshotPoint bufferPosition, double verticalDistance, ViewRelativePosition relativeTo, double? viewportWidthOverride, double? viewportHeightOverride)
        {
            throw new NotSupportedException();
        }

        public void DisplayTextLineContainingBufferPosition(SnapshotPoint bufferPosition, double verticalDistance, ViewRelativePosition relativeTo)
        {
            throw new NotSupportedException();
        }

        public SnapshotSpan GetTextElementSpan(SnapshotPoint point)
        {
            throw new NotSupportedException();
        }

        public ITextViewLine GetTextViewLineContainingBufferPosition(SnapshotPoint bufferPosition)
        {
            throw new NotSupportedException();
        }

        public void QueueSpaceReservationStackRefresh()
        {
            throw new NotSupportedException();
        }

        public IAdornmentLayer GetAdornmentLayer(string name) => _innerTextView.GetAdornmentLayer(name);

        public ISpaceReservationManager GetSpaceReservationManager(string name) => _innerTextView.GetSpaceReservationManager(name);

        IWpfTextViewLine IWpfTextView.GetTextViewLineContainingBufferPosition(SnapshotPoint bufferPosition)
            => _innerTextView.GetTextViewLineContainingBufferPosition(bufferPosition);

        public void DisconnectFromIntellisenseControllers()
        {
            // The innerTextView of the immediate window never closes, but we want
            // our completion subscribers to unsubscribe from events when this
            // DebuggerTextView is no longer in use, so we raise our surrogate
            // Closed event for them here.
            if (this.IsImmediateWindow)
            {
                this.ClosedInternal?.Invoke(this, EventArgs.Empty);
            }
        }

        private event EventHandler ClosedInternal;

        /// <summary>
        /// In the immediate window the wrapped view never actually closes, so we
        /// track subscribers ourselves and raise the event from
        /// <see cref="DisconnectFromIntellisenseControllers"/> instead.
        /// </summary>
        public event EventHandler Closed
        {
            add
            {
                if (this.IsImmediateWindow)
                {
                    this.ClosedInternal += value;
                }
                else
                {
                    _innerTextView.Closed += value;
                }
            }

            remove
            {
                if (this.IsImmediateWindow)
                {
                    this.ClosedInternal -= value;
                }
                else
                {
                    _innerTextView.Closed -= value;
                }
            }
        }

        // The remaining events forward subscription directly to the wrapped view.
        public event EventHandler GotAggregateFocus
        {
            add { _innerTextView.GotAggregateFocus += value; }
            remove { _innerTextView.GotAggregateFocus -= value; }
        }

        public event EventHandler<TextViewLayoutChangedEventArgs> LayoutChanged
        {
            add { _innerTextView.LayoutChanged += value; }
            remove { _innerTextView.LayoutChanged -= value; }
        }

        public event EventHandler LostAggregateFocus
        {
            add { _innerTextView.LostAggregateFocus += value; }
            remove { _innerTextView.LostAggregateFocus -= value; }
        }

        public event EventHandler<MouseHoverEventArgs> MouseHover
        {
            add { _innerTextView.MouseHover += value; }
            remove { _innerTextView.MouseHover -= value; }
        }

        public event EventHandler ViewportHeightChanged
        {
            add { _innerTextView.ViewportHeightChanged += value; }
            remove { _innerTextView.ViewportHeightChanged -= value; }
        }

        public event EventHandler ViewportLeftChanged
        {
            add { _innerTextView.ViewportLeftChanged += value; }
            remove { _innerTextView.ViewportLeftChanged -= value; }
        }

        public event EventHandler ViewportWidthChanged
        {
            add { _innerTextView.ViewportWidthChanged += value; }
            remove { _innerTextView.ViewportWidthChanged -= value; }
        }

        public event EventHandler<BackgroundBrushChangedEventArgs> BackgroundBrushChanged
        {
            add { _innerTextView.BackgroundBrushChanged += value; }
            remove { _innerTextView.BackgroundBrushChanged -= value; }
        }

        public event EventHandler<ZoomLevelChangedEventArgs> ZoomLevelChanged
        {
            add { _innerTextView.ZoomLevelChanged += value; }
            remove { _innerTextView.ZoomLevelChanged -= value; }
        }
    }
}
using System;
using System.Data;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Configuration;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using Vevo;
using Vevo.DataAccessLib.Cart;
using Vevo.Domain;
using Vevo.Domain.Products;
using Vevo.Shared.Utilities;
using Vevo.Domain.Stores;

/// <summary>
/// Admin filter control made of a root-category drop-down and a dependent
/// (leaf) category drop-down. Implements <see cref="ICategoryFilterDrop"/> so it
/// can be plugged into admin search/filter pages.
/// </summary>
public partial class AdminAdvanced_Components_CategoryFilterDrop : AdminAdvancedBaseUserControl, ICategoryFilterDrop
{
    /// <summary>
    /// Enables or disables the leaf category drop-down. When
    /// <paramref name="requireRootSelection"/> is true the leaf list is disabled
    /// until a root category has been selected.
    /// </summary>
    private void UpdateCategoryDropEnabledState( bool requireRootSelection )
    {
        uxCategoryDrop.Enabled =
            !(requireRootSelection && String.IsNullOrEmpty( uxRootCategoryDrop.SelectedValue ));
    }

    /// <summary>
    /// Rebuilds the root-category drop-down from the repository and then
    /// repopulates the dependent leaf drop-down.
    /// </summary>
    private void RefreshDropDownList()
    {
        uxRootCategoryDrop.Items.Clear();
        InsertRootFirstLineIfNecessary();

        // Fixed: was "IList<Category> categoryList = categoryList = ..." — a
        // redundant self-assignment left over from an edit.
        IList<Category> categoryList = DataAccessContext.CategoryRepository.GetByParentID(
            DataAccessContext.CultureRepository.GetOne( CultureID ),
            "0",
            "CategoryID",
            BoolFilter.ShowAll );

        for (int i = 0; i < categoryList.Count; i++)
        {
            uxRootCategoryDrop.Items.Add( new ListItem(
                categoryList[i].Name + " (" + categoryList[i].CategoryID + ")",
                categoryList[i].CategoryID ) );
        }
        uxRootCategoryDrop.SelectedValue = SelectedRootValue;

        PopulateCategoryDropList();

        if (!IsDisplayRootCategoryDrop)
            uxRootCategoryFilterPanel.Visible = false;

        // The leaf list only needs a root selection when the root drop-down is shown.
        UpdateCategoryDropEnabledState( IsDisplayRootCategoryDrop );
    }

    /// <summary>
    /// Fills the leaf category drop-down with all leaf categories under the
    /// currently selected root, prefixing each name with its parent path.
    /// </summary>
    private void PopulateCategoryDropList()
    {
        uxCategoryDrop.Items.Clear();
        InsertFirstLineIfNecessary();
        IList<Category> categoryList = DataAccessContext.CategoryRepository.GetByRootIDLeafOnly(
            DataAccessContext.CultureRepository.GetOne( CultureID ),
            SelectedRootValue,
            "ParentCategoryID",
            BoolFilter.ShowAll );

        // The list is ordered by ParentCategoryID, so the (relatively expensive)
        // full parent path only has to be recomputed when the parent changes.
        string currentParentID = "";
        string tmpFullPath = "";
        for (int i = 0; i < categoryList.Count; i++)
        {
            if (currentParentID != categoryList[i].ParentCategoryID)
            {
                tmpFullPath = categoryList[i].CreateFullCategoryPathParentOnly();
                currentParentID = categoryList[i].ParentCategoryID;
            }
            uxCategoryDrop.Items.Add( new ListItem(
                tmpFullPath + categoryList[i].Name + " (" + categoryList[i].CategoryID + ")",
                categoryList[i].CategoryID ) );
        }
    }

    // Adds the "show all" entry to the leaf drop-down when enabled.
    private void InsertFirstLineIfNecessary()
    {
        if (FirstLineEnable)
        {
            uxCategoryDrop.Items.Add( new ListItem(
                Resources.SearchFilterMessages.FilterShowAll, "" ) );
        }
    }

    // Adds the "show all" entry to the root drop-down when enabled.
    private void InsertRootFirstLineIfNecessary()
    {
        if (RootFirstLineEnable)
        {
            uxRootCategoryDrop.Items.Add( new ListItem(
                Resources.SearchFilterMessages.FilterShowAll, "" ) );
        }
    }

    // Preselects the category passed in the query string, if any.
    private void LoadDefaultFromQuery()
    {
        if (!String.IsNullOrEmpty( MainContext.QueryString["CategoryID"] ))
            SelectedValue = MainContext.QueryString["CategoryID"];
    }

    protected void Page_Load( object sender, EventArgs e )
    {
        if (!MainContext.IsPostBack)
        {
            RefreshDropDownList();
            LoadDefaultFromQuery();
        }
    }

    protected void Page_PreRender( object sender, EventArgs e )
    {
        // Intentionally empty (was an empty "if (!MainContext.IsPostBack)" block);
        // kept because the event is wired up in the markup.
    }

    protected void uxCategoryDrop_SelectedIndexChanged( object sender, EventArgs e )
    {
        OnBubbleEvent( e );
    }

    protected void uxRootCategoryDrop_SelectedIndexChanged( object sender, EventArgs e )
    {
        PopulateCategoryDropList();
        UpdateCategoryDropEnabledState( true );
        OnBubbleEvent( e );
    }

    /// <summary>
    /// Culture used when loading category names; defaults to the admin's
    /// current culture. Setting it rebuilds both drop-downs.
    /// </summary>
    public string CultureID
    {
        get
        {
            if (ViewState["CultureID"] == null)
                return AdminConfig.CurrentCultureID;
            else
                return (string) ViewState["CultureID"];
        }
        set
        {
            ViewState["CultureID"] = value;
            RefreshDropDownList();
        }
    }

    public bool AutoPostBack
    {
        get { return uxCategoryDrop.AutoPostBack; }
        set { uxCategoryDrop.AutoPostBack = value; }
    }

    /// <summary>
    /// The currently selected leaf category ID ("" means "show all").
    /// </summary>
    public string SelectedValue
    {
        get { return uxCategoryDrop.SelectedValue; }
        set { uxCategoryDrop.SelectedValue = value; }
    }

    /// <summary>
    /// The selected root category ID. Without a multistore license the
    /// configured "RootCategory" of the regular store is used instead.
    /// </summary>
    public string SelectedRootValue
    {
        get
        {
            if (KeyUtilities.IsMultistoreLicense())
                return uxRootCategoryDrop.SelectedValue;
            else
                return DataAccessContext.Configurations.GetValue(
                    "RootCategory",
                    DataAccessContext.StoreRepository.GetOne( Store.RegularStoreID ) );
        }
        set
        {
            uxRootCategoryDrop.SelectedValue = value;
        }
    }

    /// <summary>
    /// Whether the leaf drop-down gets a leading "show all" entry (default true).
    /// </summary>
    public bool FirstLineEnable
    {
        get
        {
            if (ViewState["FirstLineEnable"] == null)
                return true;
            else
                return (bool) ViewState["FirstLineEnable"];
        }
        set
        {
            ViewState["FirstLineEnable"] = value;
            RefreshDropDownList();
        }
    }

    /// <summary>
    /// Whether the root drop-down gets a leading "show all" entry (default false).
    /// </summary>
    public bool RootFirstLineEnable
    {
        get
        {
            if (ViewState["RootFirstLineEnable"] == null)
                return false;
            else
                return (bool) ViewState["RootFirstLineEnable"];
        }
        set
        {
            ViewState["RootFirstLineEnable"] = value;
            RefreshDropDownList();
        }
    }

    public bool IsEnableRootCategoryDrop
    {
        get
        {
            if (ViewState["IsEnableRootCategoryDrop"] == null)
                return true;
            else
                return (bool) ViewState["IsEnableRootCategoryDrop"];
        }
        set
        {
            ViewState["IsEnableRootCategoryDrop"] = value;
            SetEnableRootCategoryDrop();
        }
    }

    private void SetEnableRootCategoryDrop()
    {
        uxRootCategoryDrop.Enabled = IsEnableRootCategoryDrop;
    }

    /// <summary>
    /// Whether the root drop-down panel is shown at all; always false without a
    /// multistore license.
    /// </summary>
    public bool IsDisplayRootCategoryDrop
    {
        get
        {
            if (ViewState["IsDisplayRootCategoryDrop"] == null || !KeyUtilities.IsMultistoreLicense())
                return false;
            else
                return (bool) ViewState["IsDisplayRootCategoryDrop"];
        }
        set
        {
            ViewState["IsDisplayRootCategoryDrop"] = value;
            RefreshDropDownList();
        }
    }

    /// <summary>
    /// Writes the current selection into <paramref name="urlQuery"/>, removing
    /// the parameter when nothing is selected.
    /// </summary>
    public void UpdateBrowseQuery( UrlQuery urlQuery )
    {
        if (!String.IsNullOrEmpty( SelectedValue ))
            urlQuery.AddQuery( "CategoryID", SelectedValue );
        else
            urlQuery.RemoveQuery( "CategoryID" );

        if (!String.IsNullOrEmpty( SelectedRootValue ))
            urlQuery.AddQuery( "RootCategoryID", SelectedRootValue );
        else
            urlQuery.RemoveQuery( "RootCategoryID" );
    }

    /// <summary>
    /// Selects the given root category and refreshes the leaf drop-down.
    /// </summary>
    public void RefreshCategoryDropList( string rootID )
    {
        SelectedRootValue = rootID;
        PopulateCategoryDropList();
    }
}
using System;
using System.ComponentModel;
using Android.Content;
using Android.Content.Res;
using Android.Graphics;
using Android.Graphics.Drawables;
using Android.Support.V4.Content;
using Android.Support.V7.Widget;
using Android.Util;
using GlobalResource = Android.Resource;
using Object = Java.Lang.Object;
using static System.String;

namespace Xamarin.Forms.Platform.Android.AppCompat
{
	/// <summary>
	/// Renders a Xamarin.Forms <see cref="Button"/> as an <see cref="AppCompatButton"/>,
	/// keeping text, color, font, image and enabled state in sync with the element.
	/// </summary>
	public class ButtonRenderer : ViewRenderer<Button, AppCompatButton>, global::Android.Views.View.IOnAttachStateChangeListener
	{
		TextColorSwitcher _textColorSwitcher;
		float _defaultFontSize;
		Typeface _defaultTypeface;
		bool _isDisposed;
		// Height of the (text-less) image drawable; -1 means "no image to center".
		int _imageHeight = -1;

		public ButtonRenderer()
		{
			AutoPackage = false;
		}

		global::Android.Widget.Button NativeButton => Control;

		void IOnAttachStateChangeListener.OnViewAttachedToWindow(global::Android.Views.View attachedView)
		{
			UpdateText();
		}

		void IOnAttachStateChangeListener.OnViewDetachedFromWindow(global::Android.Views.View detachedView)
		{
		}

		public override SizeRequest GetDesiredSize(int widthConstraint, int heightConstraint)
		{
			// Make sure the text is current before measuring.
			UpdateText();
			return base.GetDesiredSize(widthConstraint, heightConstraint);
		}

		protected override void OnLayout(bool changed, int l, int t, int r, int b)
		{
			if (_imageHeight > -1)
			{
				// We've got an image (and no text); it's already centered horizontally,
				// we just need to adjust the padding so it centers vertically
				var diff = (b - t - _imageHeight) / 2;
				diff = Math.Max(diff, 0);
				Control?.SetPadding(0, diff, 0, -diff);
			}

			base.OnLayout(changed, l, t, r, b);
		}

		protected override AppCompatButton CreateNativeControl()
		{
			return new AppCompatButton(Context);
		}

		protected override void Dispose(bool disposing)
		{
			if (_isDisposed)
				return;

			_isDisposed = true;

			if (disposing)
			{
				if (Control != null)
				{
					Control.SetOnClickListener(null);
					Control.RemoveOnAttachStateChangeListener(this);
					Control.Tag = null;
					_textColorSwitcher = null;
				}
			}

			base.Dispose(disposing);
		}

		protected override void OnElementChanged(ElementChangedEventArgs<Button> e)
		{
			base.OnElementChanged(e);

			// Fixed: removed a dead, empty "if (e.OldElement != null) { }" block.
			if (e.NewElement != null)
			{
				if (Control == null)
				{
					AppCompatButton button = CreateNativeControl();

					button.SetOnClickListener(ButtonClickListener.Instance.Value);
					// Tag lets the shared click listener find this renderer again.
					button.Tag = this;
					_textColorSwitcher = new TextColorSwitcher(button.TextColors);
					SetNativeControl(button);

					button.AddOnAttachStateChangeListener(this);
				}

				UpdateAll();
				UpdateBackgroundColor();
			}
		}

		protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs e)
		{
			if (e.PropertyName == Button.TextProperty.PropertyName)
				UpdateText();
			else if (e.PropertyName == Button.TextColorProperty.PropertyName)
				UpdateTextColor();
			else if (e.PropertyName == VisualElement.IsEnabledProperty.PropertyName)
				UpdateEnabled();
			else if (e.PropertyName == Button.FontProperty.PropertyName)
				UpdateFont();
			else if (e.PropertyName == Button.ImageProperty.PropertyName)
				UpdateBitmap();
			else if (e.PropertyName == VisualElement.IsVisibleProperty.PropertyName)
				UpdateText();

			base.OnElementPropertyChanged(sender, e);
		}

		protected override void UpdateBackgroundColor()
		{
			if (Element == null || Control == null)
				return;

			Color backgroundColor = Element.BackgroundColor;
			if (backgroundColor.IsDefault)
			{
				if (Control.SupportBackgroundTintList != null)
				{
					Context context = Context;
					int id = GlobalResource.Attribute.ButtonTint;
					unchecked
					{
						using (var value = new TypedValue())
						{
							try
							{
								// Prefer the theme's button tint; fall back to a
								// hard-coded light-gray enabled/disabled pair.
								Resources.Theme theme = context.Theme;
								if (theme != null && theme.ResolveAttribute(id, value, true))
#pragma warning disable 618
									Control.SupportBackgroundTintList = Resources.GetColorStateList(value.Data);
#pragma warning restore 618
								else
									Control.SupportBackgroundTintList = new ColorStateList(ColorExtensions.States,
										new[] { (int)0xffd7d6d6, 0x7fd7d6d6 });
							}
							catch (Exception ex)
							{
								Log.Warning("Xamarin.Forms.Platform.Android.ButtonRenderer",
									"Could not retrieve button background resource: {0}", ex);
								Control.SupportBackgroundTintList = new ColorStateList(ColorExtensions.States,
									new[] { (int)0xffd7d6d6, 0x7fd7d6d6 });
							}
						}
					}
				}
			}
			else
			{
				int intColor = backgroundColor.ToAndroid().ToArgb();
				// Disabled state gets the same color at half alpha.
				int disableColor = backgroundColor.MultiplyAlpha(0.5).ToAndroid().ToArgb();
				Control.SupportBackgroundTintList = new ColorStateList(ColorExtensions.States,
					new[] { intColor, disableColor });
			}
		}

		// Pushes every element property to the native control in one go.
		void UpdateAll()
		{
			UpdateFont();
			UpdateText();
			UpdateBitmap();
			UpdateTextColor();
			UpdateEnabled();
		}

		void UpdateBitmap()
		{
			var elementImage = Element.Image;
			var imageFile = elementImage?.File;
			_imageHeight = -1;

			if (elementImage == null || string.IsNullOrEmpty(imageFile))
			{
				Control.SetCompoundDrawablesWithIntrinsicBounds(null, null, null, null);
				return;
			}

			var image = Context.Resources.GetDrawable(imageFile);

			if (IsNullOrEmpty(Element.Text))
			{
				// No text, so no need for relative position; just center the image
				// There's no option for just plain-old centering, so we'll use Top
				// (which handles the horizontal centering) and some tricksy padding (in OnLayout)
				// to handle the vertical centering

				// Clear any previous padding and set the image as top/center
				Control.SetPadding(0, 0, 0, 0);
				Control.SetCompoundDrawablesWithIntrinsicBounds(null, image, null, null);

				// Keep track of the image height so we can use it in OnLayout
				_imageHeight = image.IntrinsicHeight;

				image?.Dispose();
				return;
			}

			var layout = Element.ContentLayout;

			Control.CompoundDrawablePadding = (int)layout.Spacing;

			switch (layout.Position)
			{
				case Button.ButtonContentLayout.ImagePosition.Top:
					Control.SetCompoundDrawablesWithIntrinsicBounds(null, image, null, null);
					break;
				case Button.ButtonContentLayout.ImagePosition.Bottom:
					Control.SetCompoundDrawablesWithIntrinsicBounds(null, null, null, image);
					break;
				case Button.ButtonContentLayout.ImagePosition.Right:
					Control.SetCompoundDrawablesWithIntrinsicBounds(null, null, image, null);
					break;
				default:
					// Defaults to image on the left
					Control.SetCompoundDrawablesWithIntrinsicBounds(image, null, null, null);
					break;
			}

			image?.Dispose();
		}

		void UpdateEnabled()
		{
			Control.Enabled = Element.IsEnabled;
		}

		void UpdateFont()
		{
			Button button = Element;
			Font font = button.Font;

			// Nothing to do if the element uses the default font and we never
			// overrode it.
			if (font == Font.Default && _defaultFontSize == 0f)
				return;

			// Capture the native defaults once so they can be restored later.
			if (_defaultFontSize == 0f)
			{
				_defaultTypeface = NativeButton.Typeface;
				_defaultFontSize = NativeButton.TextSize;
			}

			if (font == Font.Default)
			{
				NativeButton.Typeface = _defaultTypeface;
				NativeButton.SetTextSize(ComplexUnitType.Px, _defaultFontSize);
			}
			else
			{
				NativeButton.Typeface = font.ToTypeface();
				NativeButton.SetTextSize(ComplexUnitType.Sp, font.ToScaledPixel());
			}
		}

		void UpdateText()
		{
			var oldText = NativeButton.Text;
			NativeButton.Text = Element.Text;

			// If we went from or to having no text, we need to update the image position
			if (IsNullOrEmpty(oldText) != IsNullOrEmpty(NativeButton.Text))
			{
				UpdateBitmap();
			}
		}

		void UpdateTextColor()
		{
			_textColorSwitcher?.UpdateTextColor(Control, Element.TextColor);
		}

		// One shared listener instance for all buttons; the renderer is recovered
		// from the native view's Tag.
		class ButtonClickListener : Object, IOnClickListener
		{
			#region Statics

			public static readonly Lazy<ButtonClickListener> Instance = new Lazy<ButtonClickListener>(() => new ButtonClickListener());

			#endregion

			public void OnClick(global::Android.Views.View v)
			{
				var renderer = v.Tag as ButtonRenderer;
				((IButtonController)renderer?.Element)?.SendClicked();
			}
		}
	}
}
// Windows Forms designer file for the DelegateScheduler test form.
// Designer-generated: edit the layout through the Forms designer, not by hand.
namespace DelegateSchedulerTest
{
    partial class Form1
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if(disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            this.startButton = new System.Windows.Forms.Button();
            this.stopButton = new System.Windows.Forms.Button();
            this.addButton = new System.Windows.Forms.Button();
            this.logListBox = new System.Windows.Forms.ListBox();
            this.idTextBox = new System.Windows.Forms.TextBox();
            this.label1 = new System.Windows.Forms.Label();
            this.timeNumericUpDown = new System.Windows.Forms.NumericUpDown();
            this.label2 = new System.Windows.Forms.Label();
            this.clearButton = new System.Windows.Forms.Button();
            this.groupBox1 = new System.Windows.Forms.GroupBox();
            this.label3 = new System.Windows.Forms.Label();
            this.countNumericUpDown = new System.Windows.Forms.NumericUpDown();
            this.pollNumericUpDown = new System.Windows.Forms.NumericUpDown();
            this.label4 = new System.Windows.Forms.Label();
            this.statusStrip1 = new System.Windows.Forms.StatusStrip();
            this.toolStripStatusLabel1 = new System.Windows.Forms.ToolStripStatusLabel();
            this.delegateScheduler1 = new Sanford.Threading.DelegateScheduler(this.components);
            ((System.ComponentModel.ISupportInitialize)(this.timeNumericUpDown)).BeginInit();
            this.groupBox1.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.countNumericUpDown)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.pollNumericUpDown)).BeginInit();
            this.statusStrip1.SuspendLayout();
            this.SuspendLayout();
            // 
            // startButton
            // 
            this.startButton.Location = new System.Drawing.Point(10, 181);
            this.startButton.Name = "startButton";
            this.startButton.Size = new System.Drawing.Size(75, 23);
            this.startButton.TabIndex = 0;
            this.startButton.Text = "Start";
            this.startButton.UseVisualStyleBackColor = true;
            this.startButton.Click += new System.EventHandler(this.startButton_Click);
            // 
            // stopButton
            // 
            this.stopButton.Location = new System.Drawing.Point(10, 210);
            this.stopButton.Name = "stopButton";
            this.stopButton.Size = new System.Drawing.Size(75, 23);
            this.stopButton.TabIndex = 1;
            this.stopButton.Text = "Stop";
            this.stopButton.UseVisualStyleBackColor = true;
            this.stopButton.Click += new System.EventHandler(this.stopButton_Click);
            // 
            // addButton
            // 
            this.addButton.Location = new System.Drawing.Point(35, 113);
            this.addButton.Name = "addButton";
            this.addButton.Size = new System.Drawing.Size(75, 23);
            this.addButton.TabIndex = 4;
            this.addButton.Text = "Add";
            this.addButton.UseVisualStyleBackColor = true;
            this.addButton.Click += new System.EventHandler(this.addButton_Click);
            // 
            // logListBox
            // 
            this.logListBox.FormattingEnabled = true;
            this.logListBox.Location = new System.Drawing.Point(187, 12);
            this.logListBox.Name = "logListBox";
            this.logListBox.Size = new System.Drawing.Size(169, 238);
            this.logListBox.TabIndex = 4;
            // 
            // idTextBox
            // 
            this.idTextBox.Location = new System.Drawing.Point(54, 52);
            this.idTextBox.Name = "idTextBox";
            this.idTextBox.Size = new System.Drawing.Size(98, 20);
            this.idTextBox.TabIndex = 2;
            // 
            // label1
            // 
            this.label1.AutoSize = true;
            this.label1.Location = new System.Drawing.Point(30, 55);
            this.label1.Name = "label1";
            this.label1.Size = new System.Drawing.Size(18, 13);
            this.label1.TabIndex = 5;
            this.label1.Text = "ID";
            // 
            // timeNumericUpDown
            // 
            this.timeNumericUpDown.Location = new System.Drawing.Point(54, 26);
            this.timeNumericUpDown.Maximum = new decimal(new int[] {
            50000,
            0,
            0,
            0});
            this.timeNumericUpDown.Minimum = new decimal(new int[] {
            10,
            0,
            0,
            0});
            this.timeNumericUpDown.Name = "timeNumericUpDown";
            this.timeNumericUpDown.Size = new System.Drawing.Size(56, 20);
            this.timeNumericUpDown.TabIndex = 1;
            this.timeNumericUpDown.TextAlign = System.Windows.Forms.HorizontalAlignment.Right;
            this.timeNumericUpDown.Value = new decimal(new int[] {
            1000,
            0,
            0,
            0});
            // 
            // label2
            // 
            this.label2.AutoSize = true;
            this.label2.Location = new System.Drawing.Point(18, 29);
            this.label2.Name = "label2";
            this.label2.Size = new System.Drawing.Size(30, 13);
            this.label2.TabIndex = 7;
            this.label2.Text = "Time";
            // 
            // clearButton
            // 
            this.clearButton.Location = new System.Drawing.Point(10, 239);
            this.clearButton.Name = "clearButton";
            this.clearButton.Size = new System.Drawing.Size(75, 23);
            this.clearButton.TabIndex = 2;
            this.clearButton.Text = "Clear";
            this.clearButton.UseVisualStyleBackColor = true;
            this.clearButton.Click += new System.EventHandler(this.clearButton_Click);
            // 
            // groupBox1
            // 
            this.groupBox1.Controls.Add(this.label3);
            this.groupBox1.Controls.Add(this.countNumericUpDown);
            this.groupBox1.Controls.Add(this.label2);
            this.groupBox1.Controls.Add(this.addButton);
            this.groupBox1.Controls.Add(this.idTextBox);
            this.groupBox1.Controls.Add(this.timeNumericUpDown);
            this.groupBox1.Controls.Add(this.label1);
            this.groupBox1.Location = new System.Drawing.Point(10, 12);
            this.groupBox1.Name = "groupBox1";
            this.groupBox1.Size = new System.Drawing.Size(158, 156);
            this.groupBox1.TabIndex = 9;
            this.groupBox1.TabStop = false;
            this.groupBox1.Text = "Event";
            // 
            // label3
            // 
            this.label3.AutoSize = true;
            this.label3.Location = new System.Drawing.Point(13, 80);
            this.label3.Name = "label3";
            this.label3.Size = new System.Drawing.Size(35, 13);
            this.label3.TabIndex = 9;
            this.label3.Text = "Count";
            // 
            // countNumericUpDown
            // 
            this.countNumericUpDown.Location = new System.Drawing.Point(54, 78);
            // decimal(lo, mid, hi, flags): flags -2147483648 (0x80000000) sets the
            // sign bit, so this Minimum is -1.
            this.countNumericUpDown.Minimum = new decimal(new int[] {
            1,
            0,
            0,
            -2147483648});
            this.countNumericUpDown.Name = "countNumericUpDown";
            this.countNumericUpDown.Size = new System.Drawing.Size(56, 20);
            this.countNumericUpDown.TabIndex = 3;
            this.countNumericUpDown.TextAlign = System.Windows.Forms.HorizontalAlignment.Right;
            this.countNumericUpDown.Value = new decimal(new int[] {
            1,
            0,
            0,
            0});
            // 
            // pollNumericUpDown
            // 
            this.pollNumericUpDown.Location = new System.Drawing.Point(95, 210);
            this.pollNumericUpDown.Maximum = new decimal(new int[] {
            10000,
            0,
            0,
            0});
            this.pollNumericUpDown.Minimum = new decimal(new int[] {
            10,
            0,
            0,
            0});
            this.pollNumericUpDown.Name = "pollNumericUpDown";
            this.pollNumericUpDown.Size = new System.Drawing.Size(67, 20);
            this.pollNumericUpDown.TabIndex = 3;
            this.pollNumericUpDown.TextAlign = System.Windows.Forms.HorizontalAlignment.Right;
            this.pollNumericUpDown.Value = new decimal(new int[] {
            100,
            0,
            0,
            0});
            this.pollNumericUpDown.ValueChanged += new System.EventHandler(this.pollNumericUpDown_ValueChanged);
            // 
            // label4
            // 
            this.label4.AutoSize = true;
            this.label4.Location = new System.Drawing.Point(92, 194);
            this.label4.Name = "label4";
            this.label4.Size = new System.Drawing.Size(76, 13);
            this.label4.TabIndex = 11;
            this.label4.Text = "Polling Interval";
            // 
            // statusStrip1
            // 
            this.statusStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
            this.toolStripStatusLabel1});
            this.statusStrip1.Location = new System.Drawing.Point(0, 277);
            this.statusStrip1.Name = "statusStrip1";
            this.statusStrip1.Size = new System.Drawing.Size(376, 22);
            this.statusStrip1.TabIndex = 12;
            this.statusStrip1.Text = "statusStrip1";
            // 
            // toolStripStatusLabel1
            // 
            this.toolStripStatusLabel1.Name = "toolStripStatusLabel1";
            this.toolStripStatusLabel1.Size = new System.Drawing.Size(0, 17);
            // 
            // delegateScheduler1
            // 
            this.delegateScheduler1.PollingInterval = 10;
            // SynchronizingObject = this marshals scheduler callbacks onto the UI thread.
            this.delegateScheduler1.SynchronizingObject = this;
            this.delegateScheduler1.InvokeCompleted += new System.EventHandler<Sanford.Threading.InvokeCompletedEventArgs>(this.delegateScheduler1_InvokeCompleted);
            // 
            // Form1
            // 
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(376, 299);
            this.Controls.Add(this.statusStrip1);
            this.Controls.Add(this.label4);
            this.Controls.Add(this.pollNumericUpDown);
            this.Controls.Add(this.groupBox1);
            this.Controls.Add(this.clearButton);
            this.Controls.Add(this.logListBox);
            this.Controls.Add(this.stopButton);
            this.Controls.Add(this.startButton);
            this.Name = "Form1";
            this.Text = "DelegateScheduler Test";
            ((System.ComponentModel.ISupportInitialize)(this.timeNumericUpDown)).EndInit();
            this.groupBox1.ResumeLayout(false);
            this.groupBox1.PerformLayout();
            ((System.ComponentModel.ISupportInitialize)(this.countNumericUpDown)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.pollNumericUpDown)).EndInit();
            this.statusStrip1.ResumeLayout(false);
            this.statusStrip1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();

        }

        #endregion

        private System.Windows.Forms.Button startButton;
        private System.Windows.Forms.Button stopButton;
        private System.Windows.Forms.Button addButton;
        private System.Windows.Forms.ListBox logListBox;
        private System.Windows.Forms.TextBox idTextBox;
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.NumericUpDown timeNumericUpDown;
        private System.Windows.Forms.Label label2;
        private System.Windows.Forms.Button clearButton;
        private System.Windows.Forms.GroupBox groupBox1;
        private System.Windows.Forms.Label label3;
        private System.Windows.Forms.NumericUpDown countNumericUpDown;
        private System.Windows.Forms.NumericUpDown pollNumericUpDown;
        private System.Windows.Forms.Label label4;
        private Sanford.Threading.DelegateScheduler delegateScheduler1;
        private System.Windows.Forms.StatusStrip statusStrip1;
        private System.Windows.Forms.ToolStripStatusLabel toolStripStatusLabel1;
    }
}
/*
 * UltraCart Rest API V2
 *
 * UltraCart REST API Version 2
 *
 * OpenAPI spec version: 2.0.0
 * Contact: support@ultracart.com
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 */

// NOTE: swagger-codegen generated model; Equals/GetHashCode/ToString follow the
// generator's standard template. Regenerate rather than hand-edit where possible.

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter;

namespace com.ultracart.admin.v2.Model
{
    /// <summary>
    /// AffiliateLedgerQuery
    /// </summary>
    [DataContract]
    public partial class AffiliateLedgerQuery : IEquatable<AffiliateLedgerQuery>, IValidatableObject
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="AffiliateLedgerQuery" /> class.
        /// </summary>
        /// <param name="affiliateOid">Affiliate ID associated with the ledger.</param>
        /// <param name="itemId">Item id associated with the ledger entry.</param>
        /// <param name="orderId">Order ID associated with the ledger entries.</param>
        /// <param name="subId">Sub ID value passed on the click that generated the ledger.</param>
        /// <param name="transactionDtsBegin">Minimum transaction date/time to return.</param>
        /// <param name="transactionDtsEnd">Maximum transaction date/time to return.</param>
        public AffiliateLedgerQuery(int? affiliateOid = default(int?), string itemId = default(string), string orderId = default(string), string subId = default(string), string transactionDtsBegin = default(string), string transactionDtsEnd = default(string))
        {
            this.AffiliateOid = affiliateOid;
            this.ItemId = itemId;
            this.OrderId = orderId;
            this.SubId = subId;
            this.TransactionDtsBegin = transactionDtsBegin;
            this.TransactionDtsEnd = transactionDtsEnd;
        }

        /// <summary>
        /// Affiliate ID associated with the ledger
        /// </summary>
        /// <value>Affiliate ID associated with the ledger</value>
        [DataMember(Name="affiliate_oid", EmitDefaultValue=false)]
        public int? AffiliateOid { get; set; }

        /// <summary>
        /// Item id associated with the ledger entry
        /// </summary>
        /// <value>Item id associated with the ledger entry</value>
        [DataMember(Name="item_id", EmitDefaultValue=false)]
        public string ItemId { get; set; }

        /// <summary>
        /// Order ID associated with the ledger entries
        /// </summary>
        /// <value>Order ID associated with the ledger entries</value>
        [DataMember(Name="order_id", EmitDefaultValue=false)]
        public string OrderId { get; set; }

        /// <summary>
        /// Sub ID value passed on the click that generated the ledger
        /// </summary>
        /// <value>Sub ID value passed on the click that generated the ledger</value>
        [DataMember(Name="sub_id", EmitDefaultValue=false)]
        public string SubId { get; set; }

        /// <summary>
        /// Minimum transaction date/time to return
        /// </summary>
        /// <value>Minimum transaction date/time to return</value>
        [DataMember(Name="transaction_dts_begin", EmitDefaultValue=false)]
        public string TransactionDtsBegin { get; set; }

        /// <summary>
        /// Maximum transaction date/time to return
        /// </summary>
        /// <value>Maximum transaction date/time to return</value>
        [DataMember(Name="transaction_dts_end", EmitDefaultValue=false)]
        public string TransactionDtsEnd { get; set; }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class AffiliateLedgerQuery {\n");
            sb.Append("  AffiliateOid: ").Append(AffiliateOid).Append("\n");
            sb.Append("  ItemId: ").Append(ItemId).Append("\n");
            sb.Append("  OrderId: ").Append(OrderId).Append("\n");
            sb.Append("  SubId: ").Append(SubId).Append("\n");
            sb.Append("  TransactionDtsBegin: ").Append(TransactionDtsBegin).Append("\n");
            sb.Append("  TransactionDtsEnd: ").Append(TransactionDtsEnd).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public virtual string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="input">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object input)
        {
            return this.Equals(input as AffiliateLedgerQuery);
        }

        /// <summary>
        /// Returns true if AffiliateLedgerQuery instances are equal
        /// </summary>
        /// <param name="input">Instance of AffiliateLedgerQuery to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(AffiliateLedgerQuery input)
        {
            if (input == null)
                return false;

            // Generated per-property comparison: the "== input.X" leg handles the
            // both-null case; the second leg null-guards before calling Equals.
            return 
                (
                    this.AffiliateOid == input.AffiliateOid ||
                    (this.AffiliateOid != null &&
                    this.AffiliateOid.Equals(input.AffiliateOid))
                ) && 
                (
                    this.ItemId == input.ItemId ||
                    (this.ItemId != null &&
                    this.ItemId.Equals(input.ItemId))
                ) && 
                (
                    this.OrderId == input.OrderId ||
                    (this.OrderId != null &&
                    this.OrderId.Equals(input.OrderId))
                ) && 
                (
                    this.SubId == input.SubId ||
                    (this.SubId != null &&
                    this.SubId.Equals(input.SubId))
                ) && 
                (
                    this.TransactionDtsBegin == input.TransactionDtsBegin ||
                    (this.TransactionDtsBegin != null &&
                    this.TransactionDtsBegin.Equals(input.TransactionDtsBegin))
                ) && 
                (
                    this.TransactionDtsEnd == input.TransactionDtsEnd ||
                    (this.TransactionDtsEnd != null &&
                    this.TransactionDtsEnd.Equals(input.TransactionDtsEnd))
                );
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            unchecked // Overflow is fine, just wrap
            {
                // Standard generated prime-multiply hash (seed 41, factor 59)
                // over the non-null properties; consistent with Equals above.
                int hashCode = 41;
                if (this.AffiliateOid != null)
                    hashCode = hashCode * 59 + this.AffiliateOid.GetHashCode();
                if (this.ItemId != null)
                    hashCode = hashCode * 59 + this.ItemId.GetHashCode();
                if (this.OrderId != null)
                    hashCode = hashCode * 59 + this.OrderId.GetHashCode();
                if (this.SubId != null)
                    hashCode = hashCode * 59 + this.SubId.GetHashCode();
                if (this.TransactionDtsBegin != null)
                    hashCode = hashCode * 59 + this.TransactionDtsBegin.GetHashCode();
                if (this.TransactionDtsEnd != null)
                    hashCode = hashCode * 59 + this.TransactionDtsEnd.GetHashCode();
                return hashCode;
            }
        }

        /// <summary>
        /// To validate all properties of the instance
        /// </summary>
        /// <param name="validationContext">Validation context</param>
        /// <returns>Validation Result</returns>
        IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
        {
            // No property-level validation rules are defined for this model.
            yield break;
        }
    }

}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace SAEON.Observations.Data
{
    /// <summary>
    /// Controller class for ImportBatchSummary
    /// </summary>
    [System.ComponentModel.DataObject]
    public partial class ImportBatchSummaryController
    {
        // Preload our schema..
        ImportBatchSummary thisSchemaLoad = new ImportBatchSummary();

        // Cached identity of the current user; resolved lazily on first access
        // and passed to Save() for audit purposes.
        private string userName = String.Empty;
        protected string UserName
        {
            get
            {
                if (userName.Length == 0)
                {
                    // Prefer the web request's identity when running under ASP.NET;
                    // otherwise fall back to the current thread principal.
                    if (System.Web.HttpContext.Current != null)
                    {
                        userName = System.Web.HttpContext.Current.User.Identity.Name;
                    }
                    else
                    {
                        userName = System.Threading.Thread.CurrentPrincipal.Identity.Name;
                    }
                }
                return userName;
            }
        }

        /// <summary>
        /// Returns every ImportBatchSummary row; the default select method for
        /// Object Data Source binding.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Select, true)]
        public ImportBatchSummaryCollection FetchAll()
        {
            ImportBatchSummaryCollection coll = new ImportBatchSummaryCollection();
            Query qry = new Query(ImportBatchSummary.Schema);
            coll.LoadAndCloseReader(qry.ExecuteReader());
            return coll;
        }

        /// <summary>
        /// Returns the rows whose ID column matches <paramref name="Id"/>.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Select, false)]
        public ImportBatchSummaryCollection FetchByID(object Id)
        {
            ImportBatchSummaryCollection coll = new ImportBatchSummaryCollection().Where("ID", Id).Load();
            return coll;
        }

        /// <summary>
        /// Executes an arbitrary SubSonic query and loads the results.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Select, false)]
        public ImportBatchSummaryCollection FetchByQuery(Query qry)
        {
            ImportBatchSummaryCollection coll = new ImportBatchSummaryCollection();
            coll.LoadAndCloseReader(qry.ExecuteReader());
            return coll;
        }

        /// <summary>
        /// Deletes the row with the given Id; true when exactly one row was affected.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Delete, true)]
        public bool Delete(object Id)
        {
            return (ImportBatchSummary.Delete(Id) == 1);
        }

        /// <summary>
        /// Destroys the row with the given Id; true when exactly one row was affected.
        /// NOTE(review): SubSonic convention suggests Destroy is the permanent
        /// delete vs. Delete's logical delete — confirm against the schema.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Delete, false)]
        public bool Destroy(object Id)
        {
            return (ImportBatchSummary.Destroy(Id) == 1);
        }

        /// <summary>
        /// Inserts a record, can be used with the Object Data Source
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Insert, true)]
        public void Insert(Guid Id,Guid ImportBatchID,Guid SensorID,Guid InstrumentID,Guid StationID,Guid SiteID,Guid PhenomenonOfferingID,Guid PhenomenonUOMID,int Count,int ValueCount,int? NullCount,int VerifiedCount,int UnverifiedCount,double? Minimum,double? Maximum,double? Average,double? StandardDeviation,double? Variance,double? LatitudeNorth,double? LatitudeSouth,double? LongitudeWest,double? LongitudeEast,double? ElevationMinimum,double? ElevationMaximum,DateTime? StartDate,DateTime? EndDate)
        {
            ImportBatchSummary item = new ImportBatchSummary();

            item.Id = Id;
            item.ImportBatchID = ImportBatchID;
            item.SensorID = SensorID;
            item.InstrumentID = InstrumentID;
            item.StationID = StationID;
            item.SiteID = SiteID;
            item.PhenomenonOfferingID = PhenomenonOfferingID;
            item.PhenomenonUOMID = PhenomenonUOMID;
            item.Count = Count;
            item.ValueCount = ValueCount;
            item.NullCount = NullCount;
            item.VerifiedCount = VerifiedCount;
            item.UnverifiedCount = UnverifiedCount;
            item.Minimum = Minimum;
            item.Maximum = Maximum;
            item.Average = Average;
            item.StandardDeviation = StandardDeviation;
            item.Variance = Variance;
            item.LatitudeNorth = LatitudeNorth;
            item.LatitudeSouth = LatitudeSouth;
            item.LongitudeWest = LongitudeWest;
            item.LongitudeEast = LongitudeEast;
            item.ElevationMinimum = ElevationMinimum;
            item.ElevationMaximum = ElevationMaximum;
            item.StartDate = StartDate;
            item.EndDate = EndDate;

            item.Save(UserName);
        }

        /// <summary>
        /// Updates a record, can be used with the Object Data Source
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Update, true)]
        public void Update(Guid Id,Guid ImportBatchID,Guid SensorID,Guid InstrumentID,Guid StationID,Guid SiteID,Guid PhenomenonOfferingID,Guid PhenomenonUOMID,int Count,int ValueCount,int? NullCount,int VerifiedCount,int UnverifiedCount,double? Minimum,double? Maximum,double? Average,double? StandardDeviation,double? Variance,double? LatitudeNorth,double? LatitudeSouth,double? LongitudeWest,double? LongitudeEast,double? ElevationMinimum,double? ElevationMaximum,DateTime? StartDate,DateTime? EndDate)
        {
            ImportBatchSummary item = new ImportBatchSummary();

            // MarkOld + IsLoaded presumably make Save() issue an UPDATE rather
            // than an INSERT (SubSonic ActiveRecord convention) — confirm.
            item.MarkOld();
            item.IsLoaded = true;

            item.Id = Id;
            item.ImportBatchID = ImportBatchID;
            item.SensorID = SensorID;
            item.InstrumentID = InstrumentID;
            item.StationID = StationID;
            item.SiteID = SiteID;
            item.PhenomenonOfferingID = PhenomenonOfferingID;
            item.PhenomenonUOMID = PhenomenonUOMID;
            item.Count = Count;
            item.ValueCount = ValueCount;
            item.NullCount = NullCount;
            item.VerifiedCount = VerifiedCount;
            item.UnverifiedCount = UnverifiedCount;
            item.Minimum = Minimum;
            item.Maximum = Maximum;
            item.Average = Average;
            item.StandardDeviation = StandardDeviation;
            item.Variance = Variance;
            item.LatitudeNorth = LatitudeNorth;
            item.LatitudeSouth = LatitudeSouth;
            item.LongitudeWest = LongitudeWest;
            item.LongitudeEast = LongitudeEast;
            item.ElevationMinimum = ElevationMinimum;
            item.ElevationMaximum = ElevationMaximum;
            item.StartDate = StartDate;
            item.EndDate = EndDate;

            item.Save(UserName);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
**
** Purpose: part of ComEventHelpers APIs which allow binding
** managed delegates to COM's connection point based events.
**
**/

using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Reflection;

namespace System.Runtime.InteropServices
{
    // see code:ComEventsHelper#ComEventsArchitecture
    /// <summary>
    /// Holds the set of managed delegates bound to one COM event (one dispid).
    /// Instances form an explicit singly-linked list (one node per dispid)
    /// because multicast delegates can only chain delegates of the same type.
    /// </summary>
    internal class ComEventsMethod
    {
        // This delegate wrapper class handles dynamic invocation of delegates. The reason for the wrapper's
        // existence is that under certain circumstances we need to coerce arguments to types expected by the
        // delegates signature. Normally, reflection (Delegate.DynamicInvoke) handles types coercion
        // correctly but one known case is when the expected signature is 'ref Enum' - in this case
        // reflection by design does not do the coercion. Since we need to be compatible with COM interop
        // handling of this scenario - we are pre-processing delegate's signature by looking for 'ref enums'
        // and cache the types required for such coercion.
        internal class DelegateWrapper
        {
            private Delegate _d;

            // NOTE(review): these three fields are reserved for the 'ref enum'
            // coercion described above; nothing in this trimmed file reads them.
            // Confirm against the full ComEventsMethod (Invoke) before removing.
            private bool _once = false;
            private int _expectedParamsCount;
            private Type[] _cachedTargetTypes;

            public DelegateWrapper(Delegate d)
            {
                _d = d;
            }

            /// <summary>The wrapped (possibly multicast) delegate; replaced in place when combined or trimmed.</summary>
            public Delegate Delegate
            {
                get { return _d; }
                set { _d = value; }
            }
        }

        #region private fields

        /// <summary>
        /// Invoking ComEventsMethod means invoking a multi-cast delegate attached to it.
        /// Since multicast delegate's built-in chaining supports only chaining instances of the same type,
        /// we need to complement this design by using an explicit linked list data structure.
        /// One wrapper slot per distinct delegate type; null until the first AddDelegate.
        /// </summary>
        private DelegateWrapper[] _delegateWrappers;

        private int _dispid;
        private ComEventsMethod _next;   // next node in the per-dispid linked list

        #endregion

        #region ctor

        internal ComEventsMethod(int dispid)
        {
            _delegateWrappers = null;
            _dispid = dispid;
        }

        #endregion

        #region static internal methods

        /// <summary>Walks the list starting at <paramref name="methods"/> and returns the node for <paramref name="dispid"/>, or null.</summary>
        internal static ComEventsMethod Find(ComEventsMethod methods, int dispid)
        {
            while (methods != null && methods._dispid != dispid)
            {
                methods = methods._next;
            }

            return methods;
        }

        /// <summary>Prepends <paramref name="method"/> to the list and returns the new head.</summary>
        internal static ComEventsMethod Add(ComEventsMethod methods, ComEventsMethod method)
        {
            method._next = methods;
            return method;
        }

        /// <summary>Unlinks <paramref name="method"/> from the list and returns the (possibly new) head.</summary>
        internal static ComEventsMethod Remove(ComEventsMethod methods, ComEventsMethod method)
        {
            if (methods == method)
            {
                methods = methods._next;
            }
            else
            {
                ComEventsMethod current = methods;

                while (current != null && current._next != method)
                    current = current._next;

                if (current != null)
                    current._next = method._next;
            }

            return methods;
        }

        #endregion

        #region public properties / methods

        /// <summary>True when no delegates are attached to this dispid.</summary>
        internal bool Empty
        {
            get { return _delegateWrappers == null || _delegateWrappers.Length == 0; }
        }

        /// <summary>
        /// Attaches <paramref name="d"/>. A delegate whose type already has a
        /// wrapper slot is combined into that slot's multicast delegate;
        /// otherwise the wrapper array grows by one slot.
        /// </summary>
        internal void AddDelegate(Delegate d)
        {
            int count = 0;
            if (_delegateWrappers != null)
            {
                count = _delegateWrappers.Length;
            }

            for (int i = 0; i < count; i++)
            {
                if (_delegateWrappers[i].Delegate.GetType() == d.GetType())
                {
                    _delegateWrappers[i].Delegate = Delegate.Combine(_delegateWrappers[i].Delegate, d);
                    return;
                }
            }

            DelegateWrapper[] newDelegateWrappers = new DelegateWrapper[count + 1];
            if (count > 0)
            {
                _delegateWrappers.CopyTo(newDelegateWrappers, 0);
            }

            DelegateWrapper wrapper = new DelegateWrapper(d);
            newDelegateWrappers[count] = wrapper;

            _delegateWrappers = newDelegateWrappers;
        }

        /// <summary>
        /// Detaches <paramref name="d"/>. Removing the last delegate of a type
        /// compacts the wrapper array; removing the overall last delegate resets
        /// the array to null. A delegate that was never added is ignored.
        /// </summary>
        internal void RemoveDelegate(Delegate d)
        {
            // Fix: the original dereferenced _delegateWrappers unconditionally and
            // threw NullReferenceException when called before any AddDelegate.
            // AddDelegate already tolerates the null state, so treat "nothing
            // attached" as a no-op here as well.
            if (_delegateWrappers == null)
                return;

            int count = _delegateWrappers.Length;

            // Find the wrapper slot holding delegates of d's exact type.
            int removeIdx = -1;
            for (int i = 0; i < count; i++)
            {
                if (_delegateWrappers[i].Delegate.GetType() == d.GetType())
                {
                    removeIdx = i;
                    break;
                }
            }

            if (removeIdx < 0)
                return;

            Delegate newDelegate = Delegate.Remove(_delegateWrappers[removeIdx].Delegate, d);
            if (newDelegate != null)
            {
                // Other delegates of the same type remain combined in this slot.
                _delegateWrappers[removeIdx].Delegate = newDelegate;
                return;
            }

            // now remove the found entry from the _delegates array

            if (count == 1)
            {
                _delegateWrappers = null;
                return;
            }

            DelegateWrapper[] newDelegateWrappers = new DelegateWrapper[count - 1];
            int j = 0;
            while (j < removeIdx)
            {
                newDelegateWrappers[j] = _delegateWrappers[j];
                j++;
            }

            while (j < count - 1)
            {
                newDelegateWrappers[j] = _delegateWrappers[j + 1];
                j++;
            }

            _delegateWrappers = newDelegateWrappers;
        }

        #endregion
    }
}
#region Using
using System;
using System.Collections.Generic;
using System.Text;
using FlatRedBall;
using FlatRedBall.Math;
using FlatRedBall.Math.Geometry;
using FlatRedBall.Graphics.Model;
using FlatRedBall.ManagedSpriteGroups;
using FlatRedBall.Graphics;
using PolygonSaveList = FlatRedBall.Content.Polygon.PolygonSaveList;
#endregion

namespace CodeGenerator.Screens
{
    /// <summary>
    /// Base class for a game screen: owns the engine objects it loads (sprites,
    /// grids, shapes, models, layers) and removes them from the FlatRedBall
    /// managers when destroyed. Also manages a stack of popup Screens.
    /// </summary>
    public class Screen
    {
        #region Members

        protected Camera mCamera;
        protected Layer mLayer;

        // Popup Screens owned by this Screen; driven and destroyed in Activity().
        protected List<Screen> mPopups = new List<Screen>();

        private string mContentManagerName;

        // The following are objects which belong to the screen.
        // These are removed by the Screen when it is Destroyed
        protected SpriteList mSprites = new SpriteList();
        protected List<SpriteGrid> mSpriteGrids = new List<SpriteGrid>();
        protected PositionedObjectList<SpriteFrame> mSpriteFrames = new PositionedObjectList<SpriteFrame>();
        protected PositionedObjectList<Text> mTexts = new PositionedObjectList<Text>();
        protected PositionedObjectList<Polygon> mPolygons = new PositionedObjectList<Polygon>();
        protected PositionedObjectList<AxisAlignedRectangle> mAxisAlignedRectangles = new PositionedObjectList<AxisAlignedRectangle>();
        protected PositionedObjectList<Circle> mCircles = new PositionedObjectList<Circle>();
        protected PositionedObjectList<Line> mLines = new PositionedObjectList<Line>();
        protected PositionedModelList mPositionedModels = new PositionedModelList();
        protected List<Layer> mLayers = new List<Layer>();
        protected List<IDrawableBatch> mDrawableBatches = new List<IDrawableBatch>();
        // End of objects which belong to the Screen.

        // These variables control the flow from one Screen to the next.
        protected Scene mLastLoadedScene;
        private bool mIsActivityFinished;
        private string mNextScreen;
        private bool mManageSpriteGrids;

        // Loading a Screen can take a considerable amount
        // of time - certainly more than 1/60 of a frame. Since
        // the XNA Game class runs at a fixed frame rate by default
        // the long loading time can cause a buildup of frames. Once
        // the Screen class finishes loading the Game class will spin quickly
        // calling Update over and over.
        // To prevent this, Screens can tell the ScreenManager
        // to set the Game's IsFixedFrameRate to false when the
        // Screen loads and back to true after. This occurs only
        // if this field is set to true;
        protected bool mTurnOffFixedFrameRateDuringLoading = false;

        #endregion

        #region Properties

        public string ContentManagerName
        {
            get { return mContentManagerName; }
        }

        #region XML Docs
        /// <summary>
        /// Gets and sets whether the activity is finished for a particular screen.
        /// </summary>
        /// <remarks>
        /// If activity is finished, then the ScreenManager or parent
        /// screen (if the screen is a popup) knows to destroy the screen
        /// and loads the NextScreen class.</remarks>
        #endregion
        public bool IsActivityFinished
        {
            get { return mIsActivityFinished; }
            set { mIsActivityFinished = value; }
        }

        public Layer Layer
        {
            get { return mLayer; }
        }

        public bool ManageSpriteGrids
        {
            get { return mManageSpriteGrids; }
            set { mManageSpriteGrids = value; }
        }

        #region XML Docs
        /// <summary>
        /// The fully qualified path of the Screen-inheriting class that this screen is
        /// to link to.
        /// </summary>
        /// <remarks>
        /// This property is read by the ScreenManager when IsActivityFinished is
        /// set to true. Therefore, this must always be set to some value before
        /// or in the same frame as when IsActivityFinished is set to true.
        /// </remarks>
        #endregion
        public string NextScreen
        {
            get { return mNextScreen; }
            set { mNextScreen = value; }
        }

        public bool TurnOffFixedFrameRateDuringLoading
        {
            get { return mTurnOffFixedFrameRateDuringLoading; }
            set { mTurnOffFixedFrameRateDuringLoading = value; }
        }

        // When true (the default), Destroy() unloads this Screen's content
        // manager from FlatRedBallServices.
        protected bool UnloadsContentManagerWhenDestroyed
        {
            get;
            set;
        }

        #endregion

        #region Methods

        #region Constructor

        #region XML Docs
        /// <summary>
        /// Loads a screen, initializes internal fields, and stores the textures which
        /// belong to the screen.
        /// </summary>
        /// <!--Derived class should not change the argument list. The ScreenManager
        /// depends on the arguments for the constructor being the same for all derived
        /// classes.-->
        /// <remarks>
        /// When a screen is unloaded, it unloads all textures that it introduced
        /// to the TextureManager. It does not unload textures which were already
        /// cached in the TextureManager before the screen was loaded.
        /// Therefore, any texture that is used in the screen should
        /// exist in the .scn. This keeps the programmer
        /// from having to manually track textures and remove them when a
        /// screen unloads. The exception is objects which dynamically select
        /// their texture according to conditions and user inputs. These
        /// textures should be assigned to the screen with the AssignTextureToScreen
        /// method.
        /// </remarks>
        /// <param name="scnFileName">The .scnx file name relative to the .exe; empty or null to skip scene loading.</param>
        /// <param name="contentManagerName">Name of the content manager this Screen loads through (and unloads on Destroy).</param>
        #endregion
        public Screen(string scnFileName, string contentManagerName)
        {
            UnloadsContentManagerWhenDestroyed = true;
            mContentManagerName = contentManagerName;
            mManageSpriteGrids = true;
            IsActivityFinished = false;

            mLayer = ScreenManager.NextScreenLayer;

            #region load the .scnx

            // If no .scnx file is specified, don't call LoadScene.
            if (!string.IsNullOrEmpty(scnFileName))
            {
                LoadScene(scnFileName, mLayer);
            }

            #endregion
        }

        #endregion

        #region Public Methods

        #region XML Docs
        /// <summary>
        /// Manages contained SpriteFrames or SpriteGrids, and drives/destroys
        /// this Screen's popups.
        /// </summary>
        #endregion
        public virtual void Activity(bool firstTimeCalled)
        {
            if (mManageSpriteGrids)
            {
                foreach (SpriteGrid sg in mSpriteGrids)
                    sg.Manage();
            }

            // Iterate backwards so finished popups can be removed in place.
            for (int i = mPopups.Count - 1; i > -1; i--)
            {
                mPopups[i].Activity(false);

                if (mPopups[i].IsActivityFinished)
                {
                    string nextPopup = mPopups[i].NextScreen;

                    mPopups[i].Destroy();
                    mPopups.RemoveAt(i);

                    if (nextPopup != string.Empty && nextPopup != null)
                    {
                        LoadPopup(nextPopup, false);
                    }
                }
            }
        }

        #region XML Docs
        /// <summary>
        /// Method called by the ScreenManager which unloads all textures,
        /// Remove Sprites, destroys the GUI, Text objects, SpriteGrids,
        /// and SpriteFrames.
        /// </summary>
        #endregion
        public virtual void Destroy()
        {
            if (mLastLoadedScene != null)
            {
                mLastLoadedScene.Clear();
            }

            // All of the popups should be destroyed as well
            foreach (Screen s in mPopups)
                s.Destroy();

            SpriteManager.RemoveSpriteList(mSprites);

            // It's common for users to forget to add Particle Sprites
            // to the mSprites SpriteList. This will either create leftover
            // particles when the next screen loads or will throw an assert when
            // the ScreenManager checks if there are any leftover Sprites. To make
            // things easier we'll just clear the Particle Sprites here. If you don't
            // want this done (not likely), remove the following line, but only do so if
            // you really know what you're doing!
            SpriteManager.RemoveAllParticleSprites();

            // Destroy all SpriteGrids that belong to this Screen
            foreach (SpriteGrid sg in mSpriteGrids)
                sg.Destroy();

            // Destroy all SpriteFrames that belong to this Screen
            while (mSpriteFrames.Count != 0)
                SpriteManager.RemoveSpriteFrame(mSpriteFrames[0]);

            TextManager.RemoveText(mTexts);

            // Remove shapes one at a time; removal also detaches them from
            // their PositionedObjectLists, which shrinks these collections.
            while (mPolygons.Count != 0)
                ShapeManager.Remove(mPolygons[0]);

            while (mLines.Count != 0)
                ShapeManager.Remove(mLines[0]);

            while (mAxisAlignedRectangles.Count != 0)
                ShapeManager.Remove(mAxisAlignedRectangles[0]);

            while (mCircles.Count != 0)
                ShapeManager.Remove(mCircles[0]);

            while (mPositionedModels.Count != 0)
                ModelManager.RemoveModel(mPositionedModels[0]);

            if (UnloadsContentManagerWhenDestroyed)
            {
                FlatRedBallServices.Unload(mContentManagerName);
            }

            if (mLayer != null)
            {
                SpriteManager.RemoveLayer(mLayer);
            }

            for (int i = 0; i < mLayers.Count; i++)
            {
                SpriteManager.RemoveLayer(mLayers[i]);
            }
        }

        /// <summary>
        /// Loads a Scene from a .scnx file (or through the content pipeline),
        /// adds its objects to the managers on the given layer, and records them
        /// as belonging to this Screen so Destroy() can remove them.
        /// </summary>
        public Scene LoadScene(string fileName, Layer layer)
        {
            Scene scene = null;

            if (
#if XBOX360
                FlatRedBallServices.IgnoreExtensionsWhenLoadingContent == false &&
#endif
                FlatRedBall.IO.FileManager.GetExtension(fileName) == "scnx")
            {
#if XBOX360
                throw new NotImplementedException("Cannot currently load .scnx files on the 360. Use content manager");
#else
                scene = FlatRedBall.Content.SpriteEditorScene.FromFile(fileName).ToScene(mContentManagerName);
#endif
            }
            else
            {
                // There is either no extension on the fileName or the extensions are being ignored
                // by the engine.
                scene = FlatRedBallServices.Load<Scene>(fileName, mContentManagerName);
            }

            if (scene != null)
            {
                scene.AddToManagers(layer);

                mSprites.AddRange(scene.Sprites);
                mSpriteGrids.AddRange(scene.SpriteGrids);
                mSpriteFrames.AddRange(scene.SpriteFrames);
                mPositionedModels.AddRange(scene.PositionedModels);

                SpriteManager.SortTexturesSecondary();
            }

            mLastLoadedScene = scene;

            return scene;
        }

        /// <summary>
        /// Loads Polygons from a saved list, optionally adds them to the
        /// ShapeManager, sets their visibility, and claims ownership of them.
        /// </summary>
        public PositionedObjectList<Polygon> LoadPolygonList(string name, bool addToShapeManager, bool makeVisible)
        {
            PolygonSaveList psl = PolygonSaveList.FromFile(name);
            PositionedObjectList<Polygon> loadedPolygons = psl.ToPolygonList();

            if (addToShapeManager)
            {
                foreach (Polygon polygon in loadedPolygons)
                {
                    ShapeManager.AddPolygon(polygon);
                }
            }

            foreach (Polygon polygon in loadedPolygons)
            {
                polygon.Visible = makeVisible;
            }

            mPolygons.AddRange(loadedPolygons);

            return loadedPolygons;
        }

        /// <summary>Loads a popup Screen on a specific layer and tracks it for Activity/Destroy.</summary>
        public Screen LoadPopup(string popupToLoad, Layer layerToLoadPopupOn)
        {
            Screen loadedScreen = ScreenManager.LoadScreen(popupToLoad, layerToLoadPopupOn);
            mPopups.Add(loadedScreen);
            return loadedScreen;
        }

        /// <summary>Loads a popup Screen (optionally on a fresh layer) and tracks it for Activity/Destroy.</summary>
        public Screen LoadPopup(string popupToLoad, bool useNewLayer)
        {
            Screen loadedScreen = ScreenManager.LoadScreen(popupToLoad, useNewLayer);
            mPopups.Add(loadedScreen);
            return loadedScreen;
        }

        #region XML Docs
        /// <summary>Tells the screen that we are done and wish to move to the
        /// supplied screen</summary>
        /// <param>Fully Qualified Type of the screen to move to</param>
        #endregion
        public void MoveToScreen(string screenClass)
        {
            IsActivityFinished = true;
            NextScreen = screenClass;
        }

        /// <summary>
        /// Shifts this Screen's owned objects by the given offset.
        /// NOTE(review): mLines and mPositionedModels are not shifted here —
        /// confirm whether that is intentional.
        /// </summary>
        public void Shift(float x, float y, float z)
        {
            mSprites.Shift(x, y, z);

            foreach (SpriteGrid sg in mSpriteGrids)
                sg.Shift(x, y, z);

            mSpriteFrames.Shift(x, y, z);
            mTexts.Shift(x, y, z);
            mPolygons.Shift(x, y, z);
            mAxisAlignedRectangles.Shift(x, y, z);
            mCircles.Shift(x, y, z);
        }

        #endregion

        #region Protected Methods

        #endregion

        #endregion
    }
}
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//

namespace DiscUtils
{
    using System;
    using System.Collections.Generic;
    using System.Globalization;
    using System.IO;
    using System.Reflection;
    using DiscUtils.Partitions;

    /// <summary>
    /// Base class representing virtual hard disks.
    /// </summary>
    public abstract class VirtualDisk :
#if !NETCORE
        MarshalByRefObject,
#endif
        IDisposable
    {
        // Lazily-built lookup tables: file extension -> factory, type identifier
        // -> factory, URI scheme -> transport Type. See ExtensionMap / TypeMap /
        // DiskTransports below for population.
        private static Dictionary<string, VirtualDiskFactory> s_extensionMap;
        private static Dictionary<string, VirtualDiskFactory> s_typeMap;
        private static Dictionary<string, Type> s_diskTransports;

        // Transport that owns the underlying connection for disks opened via a
        // URI; assigned in CreateDisk/OpenDisk when the disk is handed out.
        private VirtualDiskTransport _transport;

        /// <summary>
        /// Finalizes an instance of the VirtualDisk class.
        /// </summary>
        ~VirtualDisk()
        {
            Dispose(false);
        }

        /// <summary>
        /// Gets the set of disk formats supported as an array of file extensions.
        /// </summary>
        public static ICollection<string> SupportedDiskFormats
        {
            get { return ExtensionMap.Keys; }
        }

        /// <summary>
        /// Gets the set of disk types supported, as an array of identifiers.
        /// </summary>
        public static ICollection<string> SupportedDiskTypes
        {
            get { return TypeMap.Keys; }
        }

        /// <summary>
        /// Gets the geometry of the disk.
        /// </summary>
        public abstract Geometry Geometry { get; }

        /// <summary>
        /// Gets the geometry of the disk as it is anticipated a hypervisor BIOS will represent it.
        /// </summary>
        public virtual Geometry BiosGeometry
        {
            get { return Geometry.MakeBiosSafe(Geometry, Capacity); }
        }

        /// <summary>
        /// Gets the type of disk represented by this object.
        /// </summary>
        public abstract VirtualDiskClass DiskClass { get; }

        /// <summary>
        /// Gets the capacity of the disk (in bytes).
        /// </summary>
        public abstract long Capacity { get; }

        /// <summary>
        /// Gets the size of the disk's logical blocks (aka sector size), in bytes.
        /// </summary>
        public virtual int BlockSize
        {
            get { return Sizes.Sector; }
        }

        /// <summary>
        /// Gets the logical sector size of the disk, in bytes.
        /// </summary>
        /// <remarks>This is an alias for the <c>BlockSize</c> property.</remarks>
        public int SectorSize
        {
            get { return BlockSize; }
        }

        /// <summary>
        /// Gets the content of the disk as a stream.
        /// </summary>
        /// <remarks>Note the returned stream is not guaranteed to be at any particular position. The actual position
        /// will depend on the last partition table/file system activity, since all access to the disk contents pass
        /// through a single stream instance. Set the stream position before accessing the stream.</remarks>
        public abstract SparseStream Content { get; }

        /// <summary>
        /// Gets the layers that make up the disk.
        /// </summary>
        public abstract IEnumerable<VirtualDiskLayer> Layers { get; }

        /// <summary>
        /// Gets or sets the Windows disk signature of the disk, which uniquely identifies the disk.
        /// </summary>
        public virtual int Signature
        {
            get
            {
                // The MBR disk signature lives at offset 0x01B8 of the master boot record.
                return Utilities.ToInt32LittleEndian(GetMasterBootRecord(), 0x01B8);
            }

            set
            {
                byte[] mbr = GetMasterBootRecord();
                Utilities.WriteBytesLittleEndian(value, mbr, 0x01B8);
                SetMasterBootRecord(mbr);
            }
        }

        /// <summary>
        /// Gets a value indicating whether the disk appears to have a valid partition table.
        /// </summary>
        /// <remarks>There is no reliable way to determine whether a disk has a valid partition
        /// table. The 'guess' consists of checking for basic indicators and looking for obviously
        /// invalid data, such as overlapping partitions.</remarks>
        public virtual bool IsPartitioned
        {
            get { return PartitionTable.IsPartitioned(Content); }
        }

        /// <summary>
        /// Gets the object that interprets the partition structure.
        /// </summary>
        /// <remarks>It is theoretically possible for a disk to contain two independent partition structures - a
        /// BIOS/GPT one and an Apple one, for example. This method will return in order of preference,
        /// a GUID partition table, a BIOS partition table, then in undefined preference one of any other partition
        /// tables found. See PartitionTable.GetPartitionTables to gain access to all the discovered partition
        /// tables on a disk.</remarks>
        public virtual PartitionTable Partitions
        {
            get
            {
                IList<PartitionTable> tables = PartitionTable.GetPartitionTables(this);
                if (tables == null || tables.Count == 0)
                {
                    return null;
                }
                else if (tables.Count == 1)
                {
                    return tables[0];
                }
                else
                {
                    // Score the discovered tables: GPT (2) beats BIOS/MBR (1)
                    // beats anything else (0); first highest score wins.
                    PartitionTable best = null;
                    int bestScore = -1;
                    for (int i = 0; i < tables.Count; ++i)
                    {
                        int newScore = 0;
                        if (tables[i] is GuidPartitionTable)
                        {
                            newScore = 2;
                        }
                        else if (tables[i] is BiosPartitionTable)
                        {
                            newScore = 1;
                        }

                        if (newScore > bestScore)
                        {
                            bestScore = newScore;
                            best = tables[i];
                        }
                    }

                    return best;
                }
            }
        }

        /// <summary>
        /// Gets the parameters of the disk.
        /// </summary>
        /// <remarks>Most of the parameters are also available individually, such as DiskType and Capacity.</remarks>
        public virtual VirtualDiskParameters Parameters
        {
            get
            {
                return new VirtualDiskParameters()
                {
                    DiskType = DiskClass,
                    Capacity = Capacity,
                    Geometry = Geometry,
                    BiosGeometry = BiosGeometry,
                    AdapterType = GenericDiskAdapterType.Ide
                };
            }
        }

        /// <summary>
        /// Gets information about the type of disk.
        /// </summary>
        /// <remarks>This property provides access to meta-data about the disk format, for example whether the
        /// BIOS geometry is preserved in the disk file.</remarks>
        public abstract VirtualDiskTypeInfo DiskTypeInfo { get; }

        // NOTE(review): the three lazy-init properties below are not thread-safe;
        // concurrent first use could run InitializeMaps()/reflection twice.
        private static Dictionary<string, VirtualDiskFactory> ExtensionMap
        {
            get
            {
                if (s_extensionMap == null)
                {
                    InitializeMaps();
                }

                return s_extensionMap;
            }
        }

        private static Dictionary<string, VirtualDiskFactory> TypeMap
        {
            get
            {
                if (s_typeMap == null)
                {
                    InitializeMaps();
                }

                return s_typeMap;
            }
        }

        private static Dictionary<string, Type> DiskTransports
        {
            get
            {
                if (s_diskTransports == null)
                {
                    // Discover transports by scanning this assembly for types
                    // carrying VirtualDiskTransportAttribute; keyed by
                    // upper-cased URI scheme.
                    Dictionary<string, Type> transports = new Dictionary<string, Type>();

                    foreach (var type in ReflectionHelper.GetAssembly(typeof(VirtualDisk)).GetTypes())
                    {
                        foreach (VirtualDiskTransportAttribute attr in ReflectionHelper.GetCustomAttributes(type, typeof(VirtualDiskTransportAttribute), false))
                        {
                            transports.Add(attr.Scheme.ToUpperInvariant(), type);
                        }
                    }

                    s_diskTransports = transports;
                }

                return s_diskTransports;
            }
        }

        /// <summary>
        /// Gets the set of supported variants of a type of virtual disk.
        /// </summary>
        /// <param name="type">A type, as returned by <see cref="SupportedDiskTypes"/>.</param>
        /// <returns>A collection of identifiers, or empty if there is no variant concept for this type of disk.</returns>
        public static ICollection<string> GetSupportedDiskVariants(string type)
        {
            return TypeMap[type].Variants;
        }

        /// <summary>
        /// Gets information about disk type.
        /// </summary>
        /// <param name="type">The disk type, as returned by <see cref="SupportedDiskTypes"/>.</param>
        /// <param name="variant">The variant of the disk type.</param>
        /// <returns>Information about the disk type.</returns>
        public static VirtualDiskTypeInfo GetDiskType(string type, string variant)
        {
            return TypeMap[type].GetDiskTypeInformation(variant);
        }

        /// <summary>
        /// Create a new virtual disk, possibly within an existing disk.
        /// </summary>
        /// <param name="fileSystem">The file system to create the disk on.</param>
        /// <param name="type">The type of disk to create (see <see cref="SupportedDiskTypes"/>).</param>
        /// <param name="variant">The variant of the type to create (see <see cref="GetSupportedDiskVariants"/>).</param>
        /// <param name="path">The path (or URI) for the disk to create.</param>
        /// <param name="capacity">The capacity of the new disk.</param>
        /// <param name="geometry">The geometry of the new disk (or null).</param>
        /// <param name="parameters">Untyped parameters controlling the creation process (TBD).</param>
        /// <returns>The newly created disk.</returns>
        public static VirtualDisk CreateDisk(DiscFileSystem fileSystem, string type, string variant, string path, long capacity, Geometry geometry, Dictionary<string, string> parameters)
        {
            VirtualDiskFactory factory = TypeMap[type];

            VirtualDiskParameters diskParams = new VirtualDiskParameters()
            {
                AdapterType = GenericDiskAdapterType.Scsi,
                Capacity = capacity,
                Geometry = geometry,
            };

            // Copy any untyped extras through to the factory.
            if (parameters != null)
            {
                foreach (var key in parameters.Keys)
                {
                    diskParams.ExtendedParameters[key] = parameters[key];
                }
            }

            return factory.CreateDisk(new DiscFileLocator(fileSystem, Utilities.GetDirectoryFromPath(path)), variant.ToLowerInvariant(), Utilities.GetFileFromPath(path), diskParams);
        }

        /// <summary>
        /// Create a new virtual disk.
        /// </summary>
        /// <param name="type">The type of disk to create (see <see cref="SupportedDiskTypes"/>).</param>
        /// <param name="variant">The variant of the type to create (see <see cref="GetSupportedDiskVariants"/>).</param>
        /// <param name="path">The path (or URI) for the disk to create.</param>
        /// <param name="capacity">The capacity of the new disk.</param>
        /// <param name="geometry">The geometry of the new disk (or null).</param>
        /// <param name="parameters">Untyped parameters controlling the creation process (TBD).</param>
        /// <returns>The newly created disk.</returns>
        public static VirtualDisk CreateDisk(string type, string variant, string path, long capacity, Geometry geometry, Dictionary<string, string> parameters)
        {
            // Convenience overload: no user/password credentials.
            return CreateDisk(type, variant, path, capacity, geometry, null, null, parameters);
        }

        /// <summary>
        /// Create a new virtual disk.
        /// </summary>
        /// <param name="type">The type of disk to create (see <see cref="SupportedDiskTypes"/>).</param>
        /// <param name="variant">The variant of the type to create (see <see cref="GetSupportedDiskVariants"/>).</param>
        /// <param name="path">The path (or URI) for the disk to create.</param>
        /// <param name="capacity">The capacity of the new disk.</param>
        /// <param name="geometry">The geometry of the new disk (or null).</param>
        /// <param name="user">The user identity to use when accessing the <c>path</c> (or null).</param>
        /// <param name="password">The password to use when accessing the <c>path</c> (or null).</param>
        /// <param name="parameters">Untyped parameters controlling the creation process (TBD).</param>
        /// <returns>The newly created disk.</returns>
        public static VirtualDisk CreateDisk(string type, string variant, string path, long capacity, Geometry geometry, string user, string password, Dictionary<string, string> parameters)
        {
            VirtualDiskParameters diskParams = new VirtualDiskParameters()
            {
                AdapterType = GenericDiskAdapterType.Scsi,
                Capacity = capacity,
                Geometry = geometry,
            };

            if (parameters != null)
            {
                foreach (var key in parameters.Keys)
                {
                    diskParams.ExtendedParameters[key] = parameters[key];
                }
            }

            return CreateDisk(type, variant, path, diskParams, user, password);
        }

        /// <summary>
        /// Create a new virtual disk.
        /// </summary>
        /// <param name="type">The type of disk to create (see <see cref="SupportedDiskTypes"/>).</param>
        /// <param name="variant">The variant of the type to create (see <see cref="GetSupportedDiskVariants"/>).</param>
        /// <param name="path">The path (or URI) for the disk to create.</param>
        /// <param name="diskParameters">Parameters controlling the capacity, geometry, etc of the new disk.</param>
        /// <param name="user">The user identity to use when accessing the <c>path</c> (or null).</param>
        /// <param name="password">The password to use when accessing the <c>path</c> (or null).</param>
        /// <returns>The newly created disk.</returns>
        public static VirtualDisk CreateDisk(string type, string variant, string path, VirtualDiskParameters diskParameters, string user, string password)
        {
            Uri uri = PathToUri(path);
            VirtualDisk result = null;

            // Resolve the transport from the URI scheme (e.g. file, iscsi, nfs).
            Type transportType;
            if (!DiskTransports.TryGetValue(uri.Scheme.ToUpperInvariant(), out transportType))
            {
                throw new FileNotFoundException(string.Format(CultureInfo.InvariantCulture, "Unable to parse path '{0}'", path), path);
            }

            VirtualDiskTransport transport = (VirtualDiskTransport)Activator.CreateInstance(transportType);

            try
            {
                transport.Connect(uri, user, password);

                if (transport.IsRawDisk)
                {
                    result = transport.OpenDisk(FileAccess.ReadWrite);
                }
                else
                {
                    VirtualDiskFactory factory = TypeMap[type];

                    result = factory.CreateDisk(transport.GetFileLocator(), variant.ToLowerInvariant(), Utilities.GetFileFromPath(path), diskParameters);
                }

                if (result != null)
                {
                    // The disk takes ownership of the transport; null it so the
                    // finally block doesn't dispose a transport still in use.
                    result._transport = transport;
                    transport = null;
                }

                return result;
            }
            finally
            {
                if (transport != null)
                {
                    transport.Dispose();
                }
            }
        }

        /// <summary>
        /// Opens an existing virtual disk.
/// </summary>
/// <param name="path">The path of the virtual disk to open, can be a URI.</param>
/// <param name="access">The desired access to the disk.</param>
/// <returns>The Virtual Disk, or <c>null</c> if an unknown disk format.</returns>
public static VirtualDisk OpenDisk(string path, FileAccess access)
{
    // Convenience overload: auto-detect the disk type, no credentials.
    return OpenDisk(path, null, access, null, null);
}

/// <summary>
/// Opens an existing virtual disk.
/// </summary>
/// <param name="path">The path of the virtual disk to open, can be a URI.</param>
/// <param name="access">The desired access to the disk.</param>
/// <param name="user">The user name to use for authentication (if necessary).</param>
/// <param name="password">The password to use for authentication (if necessary).</param>
/// <returns>The Virtual Disk, or <c>null</c> if an unknown disk format.</returns>
public static VirtualDisk OpenDisk(string path, FileAccess access, string user, string password)
{
    // Convenience overload: disk type is auto-detected.
    return OpenDisk(path, null, access, user, password);
}

/// <summary>
/// Opens an existing virtual disk.
/// </summary>
/// <param name="path">The path of the virtual disk to open, can be a URI.</param>
/// <param name="forceType">Force the detected disk type (<c>null</c> to detect).</param>
/// <param name="access">The desired access to the disk.</param>
/// <param name="user">The user name to use for authentication (if necessary).</param>
/// <param name="password">The password to use for authentication (if necessary).</param>
/// <returns>The Virtual Disk, or <c>null</c> if an unknown disk format.</returns>
/// <remarks>
/// The detected disk type can be forced by specifying a known disk type:
/// RAW, VHD, VMDK, etc.
/// </remarks>
public static VirtualDisk OpenDisk(string path, string forceType, FileAccess access, string user, string password)
{
    Uri uri = PathToUri(path);
    VirtualDisk result = null;

    // The URI scheme selects the transport used to reach the disk location.
    Type transportType;
    if (!DiskTransports.TryGetValue(uri.Scheme.ToUpperInvariant(), out transportType))
    {
        throw new FileNotFoundException(string.Format(CultureInfo.InvariantCulture, "Unable to parse path '{0}'", path), path);
    }

    VirtualDiskTransport transport = (VirtualDiskTransport)Activator.CreateInstance(transportType);
    try
    {
        transport.Connect(uri, user, password);

        if (transport.IsRawDisk)
        {
            result = transport.OpenDisk(access);
        }
        else
        {
            bool foundFactory;
            VirtualDiskFactory factory;

            if (!string.IsNullOrEmpty(forceType))
            {
                // Caller forced a specific disk type (e.g. "VHD").
                foundFactory = TypeMap.TryGetValue(forceType, out factory);
            }
            else
            {
                // Detect by file extension (without the leading dot).
                string extension = Path.GetExtension(uri.AbsolutePath).ToUpperInvariant();
                if (extension.StartsWith(".", StringComparison.Ordinal))
                {
                    extension = extension.Substring(1);
                }

                foundFactory = ExtensionMap.TryGetValue(extension, out factory);
            }

            // Unknown format: result stays null, per the documented contract.
            if (foundFactory)
            {
                result = factory.OpenDisk(transport.GetFileLocator(), transport.GetFileName(), access);
            }
        }

        if (result != null)
        {
            // Ownership of the transport passes to the returned disk; clear the
            // local so the finally block below does not dispose it.
            result._transport = transport;
            transport = null;
        }

        return result;
    }
    finally
    {
        if (transport != null)
        {
            transport.Dispose();
        }
    }
}

/// <summary>
/// Opens an existing virtual disk, possibly from within an existing disk.
/// </summary>
/// <param name="fs">The file system to open the disk on.</param>
/// <param name="path">The path of the virtual disk to open.</param>
/// <param name="access">The desired access to the disk.</param>
/// <returns>The Virtual Disk, or <c>null</c> if an unknown disk format.</returns>
public static VirtualDisk OpenDisk(DiscFileSystem fs, string path, FileAccess access)
{
    // Without a containing file system, fall back to the path-based overload.
    if (fs == null)
    {
        return OpenDisk(path, access);
    }

    // Detect the disk format from the file extension (without the leading dot).
    string ext = Path.GetExtension(path).ToUpperInvariant();
    if (ext.StartsWith(".", StringComparison.Ordinal))
    {
        ext = ext.Substring(1);
    }

    VirtualDiskFactory factory;
    return ExtensionMap.TryGetValue(ext, out factory)
               ? factory.OpenDisk(fs, path, access)
               : null;
}

/// <summary>
/// Disposes of this instance, freeing underlying resources.
/// </summary>
public void Dispose()
{
    // Standard dispose pattern: delegate to the virtual overload and
    // suppress finalization.
    Dispose(true);
    GC.SuppressFinalize(this);
}

/// <summary>
/// Reads the first sector of the disk, known as the Master Boot Record.
/// </summary>
/// <returns>The MBR as a byte array.</returns>
public virtual byte[] GetMasterBootRecord()
{
    byte[] mbr = new byte[Sizes.Sector];

    // Remember where the caller left the stream so the read is transparent.
    long savedPosition = Content.Position;
    Content.Position = 0;
    Utilities.ReadFully(Content, mbr, 0, Sizes.Sector);
    Content.Position = savedPosition;

    return mbr;
}

/// <summary>
/// Overwrites the first sector of the disk, known as the Master Boot Record.
/// </summary>
/// <param name="data">The master boot record, must be 512 bytes in length.</param>
public virtual void SetMasterBootRecord(byte[] data)
{
    if (data == null)
    {
        throw new ArgumentNullException("data");
    }
    else if (data.Length != Sizes.Sector)
    {
        throw new ArgumentException("The Master Boot Record must be exactly 512 bytes in length", "data");
    }

    // Preserve the caller's stream position across the write.
    long savedPosition = Content.Position;
    Content.Position = 0;
    Content.Write(data, 0, Sizes.Sector);
    Content.Position = savedPosition;
}

/// <summary>
/// Create a new differencing disk, possibly within an existing disk.
/// </summary>
/// <param name="fileSystem">The file system to create the disk on.</param>
/// <param name="path">The path (or URI) for the disk to create.</param>
/// <returns>The newly created disk.</returns>
public abstract VirtualDisk CreateDifferencingDisk(DiscFileSystem fileSystem, string path);

/// <summary>
/// Create a new differencing disk.
/// </summary>
/// <param name="path">The path (or URI) for the disk to create.</param>
/// <returns>The newly created disk.</returns>
public abstract VirtualDisk CreateDifferencingDisk(string path);

// Opens a single layer of a disk (e.g. one file of a differencing chain),
// resolving the factory from the file extension.  Returns null for an
// unrecognised extension.
internal static VirtualDiskLayer OpenDiskLayer(FileLocator locator, string path, FileAccess access)
{
    string extension = Path.GetExtension(path).ToUpperInvariant();
    if (extension.StartsWith(".", StringComparison.Ordinal))
    {
        extension = extension.Substring(1);
    }

    VirtualDiskFactory factory;
    if (ExtensionMap.TryGetValue(extension, out factory))
    {
        return factory.OpenDiskLayer(locator, path, access);
    }

    return null;
}

/// <summary>
/// Disposes of underlying resources.
/// </summary>
/// <param name="disposing"><c>true</c> if running inside Dispose(), indicating
/// graceful cleanup of all managed objects should be performed, or <c>false</c>
/// if running inside destructor.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        // The transport (if still owned) is the only managed resource held here.
        if (_transport != null)
        {
            _transport.Dispose();
        }

        _transport = null;
    }
}

// Converts a path or URI string into a Uri, normalising relative and
// UNC-style paths into file: URIs.
private static Uri PathToUri(string path)
{
    if (string.IsNullOrEmpty(path))
    {
        throw new ArgumentException("Path must not be null or empty", "path");
    }

    // Anything with an explicit scheme separator is already a URI.
    if (path.Contains("://"))
    {
        return new Uri(path);
    }

    if (!Path.IsPathRooted(path))
    {
        path = Path.GetFullPath(path);
    }

    // Built-in Uri class does not cope well with query params on file Uris, so do some
    // parsing ourselves...
    if (path.Length >= 1 && path[0] == '\\')
    {
        // UNC path (\\server\share\...): convert separators for a file: URI.
        UriBuilder builder = new UriBuilder("file:" + path.Replace('\\', '/'));
        return builder.Uri;
    }
    else if (path.StartsWith("//", StringComparison.OrdinalIgnoreCase))
    {
        // Forward-slash UNC path (//server/share/...).
        UriBuilder builder = new UriBuilder("file:" + path);
        return builder.Uri;
    }
    else if (path.Length >= 2 && path[1] == ':')
    {
        // Drive-rooted path (C:\...).
        UriBuilder builder = new UriBuilder("file:///" + path.Replace('\\', '/'));
        return builder.Uri;
    }
    else
    {
        return new Uri(path);
    }
}

// Scans this assembly for types decorated with VirtualDiskFactoryAttribute
// and builds the type-name and file-extension lookup maps used by
// CreateDisk/OpenDisk.  Extensions are stored upper-cased.
private static void InitializeMaps()
{
    Dictionary<string, VirtualDiskFactory> typeMap = new Dictionary<string, VirtualDiskFactory>();
    Dictionary<string, VirtualDiskFactory> extensionMap = new Dictionary<string, VirtualDiskFactory>();

    foreach (var type in ReflectionHelper.GetAssembly(typeof(VirtualDisk)).GetTypes())
    {
        VirtualDiskFactoryAttribute attr = (VirtualDiskFactoryAttribute)ReflectionHelper.GetCustomAttribute(type, typeof(VirtualDiskFactoryAttribute), false);
        if (attr != null)
        {
            VirtualDiskFactory factory = (VirtualDiskFactory)Activator.CreateInstance(type);

            typeMap.Add(attr.Type, factory);

            foreach (var extension in attr.FileExtensions)
            {
                extensionMap.Add(extension.ToUpperInvariant(), factory);
            }
        }
    }

    // Publish both maps atomically at the end of the scan.
    s_typeMap = typeMap;
    s_extensionMap = extensionMap;
}
}
}
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True

// NOTE: generated model class — properties mirror the Salesforce
// ContactPointTypeConsent SObject fields one-to-one; avoid hand edits.

using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;

namespace NetCoreForce.Models
{
    ///<summary>
    /// Contact Point Type Consent
    ///<para>SObject Name: ContactPointTypeConsent</para>
    ///<para>Custom Object: False</para>
    ///</summary>
    public class SfContactPointTypeConsent : SObject
    {
        // API name of the SObject, used when composing REST/SOQL requests.
        [JsonIgnore]
        public static string SObjectTypeName
        {
            get { return "ContactPointTypeConsent"; }
        }

        ///<summary>
        /// Contact Point Type Consent ID
        /// <para>Name: Id</para>
        /// <para>SF Type: id</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "id")]
        [Updateable(false), Createable(false)]
        public string Id { get; set; }

        ///<summary>
        /// Owner ID
        /// <para>Name: OwnerId</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "ownerId")]
        public string OwnerId { get; set; }

        ///<summary>
        /// Deleted
        /// <para>Name: IsDeleted</para>
        /// <para>SF Type: boolean</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "isDeleted")]
        [Updateable(false), Createable(false)]
        public bool? IsDeleted { get; set; }

        ///<summary>
        /// Name
        /// <para>Name: Name</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "name")]
        public string Name { get; set; }

        ///<summary>
        /// Created Date
        /// <para>Name: CreatedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdDate")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? CreatedDate { get; set; }

        ///<summary>
        /// Created By ID
        /// <para>Name: CreatedById</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdById")]
        [Updateable(false), Createable(false)]
        public string CreatedById { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: CreatedBy</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdBy")]
        [Updateable(false), Createable(false)]
        public SfUser CreatedBy { get; set; }

        ///<summary>
        /// Last Modified Date
        /// <para>Name: LastModifiedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedDate")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? LastModifiedDate { get; set; }

        ///<summary>
        /// Last Modified By ID
        /// <para>Name: LastModifiedById</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedById")]
        [Updateable(false), Createable(false)]
        public string LastModifiedById { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: LastModifiedBy</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedBy")]
        [Updateable(false), Createable(false)]
        public SfUser LastModifiedBy { get; set; }

        ///<summary>
        /// System Modstamp
        /// <para>Name: SystemModstamp</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "systemModstamp")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? SystemModstamp { get; set; }

        ///<summary>
        /// Last Viewed Date
        /// <para>Name: LastViewedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastViewedDate")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? LastViewedDate { get; set; }

        ///<summary>
        /// Last Referenced Date
        /// <para>Name: LastReferencedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastReferencedDate")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? LastReferencedDate { get; set; }

        ///<summary>
        /// Party ID
        /// <para>Name: PartyId</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "partyId")]
        public string PartyId { get; set; }

        ///<summary>
        /// ReferenceTo: Individual
        /// <para>RelationshipName: Party</para>
        ///</summary>
        [JsonProperty(PropertyName = "party")]
        [Updateable(false), Createable(false)]
        public SfIndividual Party { get; set; }

        ///<summary>
        /// Contact Point Type ID
        /// <para>Name: ContactPointType</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "contactPointType")]
        public string ContactPointType { get; set; }

        ///<summary>
        /// Data Use Purpose ID
        /// <para>Name: DataUsePurposeId</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "dataUsePurposeId")]
        public string DataUsePurposeId { get; set; }

        ///<summary>
        /// ReferenceTo: DataUsePurpose
        /// <para>RelationshipName: DataUsePurpose</para>
        ///</summary>
        [JsonProperty(PropertyName = "dataUsePurpose")]
        [Updateable(false), Createable(false)]
        public SfDataUsePurpose DataUsePurpose { get; set; }

        ///<summary>
        /// Privacy Consent Status
        /// <para>Name: PrivacyConsentStatus</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "privacyConsentStatus")]
        public string PrivacyConsentStatus { get; set; }

        ///<summary>
        /// Effective From
        /// <para>Name: EffectiveFrom</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "effectiveFrom")]
        public DateTimeOffset? EffectiveFrom { get; set; }

        ///<summary>
        /// Effective To
        /// <para>Name: EffectiveTo</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "effectiveTo")]
        public DateTimeOffset? EffectiveTo { get; set; }

        ///<summary>
        /// Capture Date
        /// <para>Name: CaptureDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "captureDate")]
        public DateTimeOffset? CaptureDate { get; set; }

        ///<summary>
        /// Capture Contact Point Type
        /// <para>Name: CaptureContactPointType</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "captureContactPointType")]
        public string CaptureContactPointType { get; set; }

        ///<summary>
        /// Capture Source
        /// <para>Name: CaptureSource</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "captureSource")]
        public string CaptureSource { get; set; }

        ///<summary>
        /// Double Consent Capture Date
        /// <para>Name: DoubleConsentCaptureDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "doubleConsentCaptureDate")]
        public DateTimeOffset? DoubleConsentCaptureDate { get; set; }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using LoreSoft.MathExpressions.Properties;
using System.Globalization;

namespace LoreSoft.MathExpressions
{
    /// <summary>
    /// Evaluate math expressions
    /// </summary>
    /// <example>Using the MathEvaluator to calculate a math expression.
    /// <code>
    /// MathEvaluator eval = new MathEvaluator();
    /// //basic math
    /// double result = eval.Evaluate("(2 + 1) * (1 + 2)");
    /// //calling a function
    /// result = eval.Evaluate("sqrt(4)");
    /// //evaluate trigonometric
    /// result = eval.Evaluate("cos(pi * 45 / 180.0)");
    /// //convert inches to feet
    /// result = eval.Evaluate("12 [in->ft]");
    /// //use variable
    /// result = eval.Evaluate("answer * 10");
    /// </code>
    /// </example>
    public class MathEvaluator : IDisposable
    {
        /// <summary>The name of the answer variable.</summary>
        /// <seealso cref="Variables"/>
        public const string AnswerVariable = "answer";

        //instance scope to optimize reuse
        private Stack<string> _symbolStack;
        private Queue<IExpression> _expressionQueue;
        private Dictionary<string, IExpression> _expressionCache;
        private StringBuilder _buffer;
        private Stack<double> _calculationStack;
        private Stack<double> _parameters;
        private List<string> _innerFunctions;
        private StringReader _expressionReader;

        /// <summary>
        /// Initializes a new instance of the <see cref="MathEvaluator"/> class.
        /// </summary>
        public MathEvaluator()
        {
            _variables = new VariableDictionary(this);
            _innerFunctions = new List<string>(FunctionExpression.GetFunctionNames());

            // FIX: keep the function-name list sorted with the SAME comparer used
            // by the case-insensitive BinarySearch in IsFunction/RegisterFunction.
            // Previously the list was never sorted here (and re-sorted with the
            // default comparer in RegisterFunction), so the binary search operated
            // on a list not ordered by its comparer — an undefined result per the
            // List&lt;T&gt;.BinarySearch contract.
            _innerFunctions.Sort(StringComparer.OrdinalIgnoreCase);

            _functions = new ReadOnlyCollection<string>(_innerFunctions);
            _expressionCache = new Dictionary<string, IExpression>(StringComparer.OrdinalIgnoreCase);
            _symbolStack = new Stack<string>();
            _expressionQueue = new Queue<IExpression>();
            _buffer = new StringBuilder();
            _calculationStack = new Stack<double>();
            _parameters = new Stack<double>(2);
        }

        private VariableDictionary _variables;

        /// <summary>
        /// Gets the variables collections.
        /// </summary>
        /// <value>The variables for <see cref="MathEvaluator"/>.</value>
        public VariableDictionary Variables
        {
            get { return _variables; }
        }

        private ReadOnlyCollection<string> _functions;

        /// <summary>Gets the functions available to <see cref="MathEvaluator"/>.</summary>
        /// <value>The functions for <see cref="MathEvaluator"/>.</value>
        /// <seealso cref="RegisterFunction"/>
        public ReadOnlyCollection<string> Functions
        {
            get { return _functions; }
        }

        /// <summary>Gets the answer from the last evaluation.</summary>
        /// <value>The answer variable value.</value>
        /// <seealso cref="Variables"/>
        public double Answer
        {
            get { return _variables[AnswerVariable]; }
        }

        /// <summary>Evaluates the specified expression.</summary>
        /// <param name="expression">The expression to evaluate.</param>
        /// <returns>The result of the evaluated expression.</returns>
        /// <exception cref="ArgumentNullException">When expression is null or empty.</exception>
        /// <exception cref="ParseException">When there is an error parsing the expression.</exception>
        public double Evaluate(string expression)
        {
            if (string.IsNullOrEmpty(expression))
                throw new ArgumentNullException("expression");

            // Release the reader from any previous evaluation before replacing it.
            if (_expressionReader != null)
                _expressionReader.Dispose();

            _expressionReader = new StringReader(expression);
            _symbolStack.Clear();
            _expressionQueue.Clear();

            // Shunting-yard: parse infix input into a postfix expression queue,
            // then evaluate the queue with a value stack.
            ParseExpressionToQueue();

            double result = CalculateFromQueue();

            // The result is always stored in the "answer" variable for reuse.
            _variables[AnswerVariable] = result;
            return result;
        }

        /// <summary>Registers a function for the <see cref="MathEvaluator"/>.</summary>
        /// <param name="functionName">Name of the function.</param>
        /// <param name="expression">An instance of <see cref="IExpression"/> for the function.</param>
        /// <exception cref="ArgumentNullException">When functionName or expression are null.</exception>
        /// <exception cref="ArgumentException">When IExpression.Evaluate property is null or the functionName is already registered.</exception>
        /// <seealso cref="Functions"/>
        /// <seealso cref="IExpression"/>
        public void RegisterFunction(string functionName, IExpression expression)
        {
            if (string.IsNullOrEmpty(functionName))
                throw new ArgumentNullException("functionName");
            if (expression == null)
                throw new ArgumentNullException("expression");
            if (expression.Evaluate == null)
                throw new ArgumentException(Resources.EvaluatePropertyCanNotBeNull, "expression");

            // FIX: duplicate detection now uses the same case-insensitive
            // comparer as IsFunction and _expressionCache.  The previous
            // default-comparer BinarySearch could miss a case-variant duplicate,
            // after which _expressionCache.Add (OrdinalIgnoreCase) threw with a
            // misleading error.
            if (_innerFunctions.BinarySearch(functionName, StringComparer.OrdinalIgnoreCase) >= 0)
                throw new ArgumentException(
                    string.Format(CultureInfo.CurrentCulture, Resources.FunctionNameRegistered, functionName),
                    "functionName");

            _innerFunctions.Add(functionName);
            // FIX: re-sort with the search comparer (was default Sort()).
            _innerFunctions.Sort(StringComparer.OrdinalIgnoreCase);
            _expressionCache.Add(functionName, expression);
        }

        /// <summary>Determines whether the specified name is a function.</summary>
        /// <param name="name">The name of the function.</param>
        /// <returns><c>true</c> if the specified name is function; otherwise, <c>false</c>.</returns>
        internal bool IsFunction(string name)
        {
            // Valid because _innerFunctions is kept sorted with this comparer.
            return (_innerFunctions.BinarySearch(name, StringComparer.OrdinalIgnoreCase) >= 0);
        }

        // Tokenizes the expression and fills _expressionQueue (postfix order)
        // using _symbolStack for pending operators/functions/parentheses.
        private void ParseExpressionToQueue()
        {
            char l = '\0';
            char c = '\0';

            do
            {
                // last non white space char
                if (!char.IsWhiteSpace(c))
                    l = c;

                c = (char)_expressionReader.Read();
                if (char.IsWhiteSpace(c))
                    continue;

                // Each Try* either consumes a complete token and returns true,
                // or leaves the reader untouched and returns false.
                if (TryNumber(c, l))
                    continue;

                if (TryString(c))
                    continue;

                if (TryStartGroup(c))
                    continue;

                if (TryOperator(c))
                    continue;

                if (TryEndGroup(c))
                    continue;

                if (TryConvert(c))
                    continue;

                throw new ParseException(Resources.InvalidCharacterEncountered + c);
            } while (_expressionReader.Peek() != -1);

            ProcessSymbolStack();
        }

        // Parses a unit-conversion token of the form "[in->ft]".
        private bool TryConvert(char c)
        {
            if (c != '[')
                return false;

            _buffer.Length = 0;
            _buffer.Append(c);

            char p = (char)_expressionReader.Peek();
            while (char.IsLetter(p) || char.IsWhiteSpace(p) || p == '-' || p == '>' || p == ']')
            {
                // Whitespace inside the brackets is skipped, not buffered.
                if (!char.IsWhiteSpace(p))
                    _buffer.Append((char)_expressionReader.Read());
                else
                    _expressionReader.Read();

                if (p == ']')
                    break;

                p = (char)_expressionReader.Peek();
            }

            if (ConvertExpression.IsConvertExpression(_buffer.ToString()))
            {
                IExpression e = GetExpressionFromSymbol(_buffer.ToString());
                _expressionQueue.Enqueue(e);
                return true;
            }

            throw new ParseException(Resources.InvalidConvertionExpression + _buffer);
        }

        // Parses an identifier: either a known variable (enqueued as its
        // current value) or a function name (pushed for later application).
        private bool TryString(char c)
        {
            if (!char.IsLetter(c))
                return false;

            _buffer.Length = 0;
            _buffer.Append(c);

            char p = (char)_expressionReader.Peek();
            while (char.IsLetter(p))
            {
                _buffer.Append((char)_expressionReader.Read());
                p = (char)_expressionReader.Peek();
            }

            if (_variables.ContainsKey(_buffer.ToString()))
            {
                // Variables are bound to their value at parse time.
                double value = _variables[_buffer.ToString()];
                NumberExpression expression = new NumberExpression(value);
                _expressionQueue.Enqueue(expression);
                return true;
            }

            if (IsFunction(_buffer.ToString()))
            {
                _symbolStack.Push(_buffer.ToString());
                return true;
            }

            throw new ParseException(Resources.InvalidVariableEncountered + _buffer);
        }

        // Pushes an opening parenthesis onto the symbol stack.
        private bool TryStartGroup(char c)
        {
            if (c != '(')
                return false;

            _symbolStack.Push(c.ToString());
            return true;
        }

        // On ')': pop symbols to the output queue until the matching '(';
        // if a function name sits under the '(', apply it too.
        private bool TryEndGroup(char c)
        {
            if (c != ')')
                return false;

            bool hasStart = false;

            while (_symbolStack.Count > 0)
            {
                string p = _symbolStack.Pop();
                if (p == "(")
                {
                    hasStart = true;

                    if (_symbolStack.Count == 0)
                        break;

                    string n = _symbolStack.Peek();
                    if (FunctionExpression.IsFunction(n))
                    {
                        p = _symbolStack.Pop();
                        IExpression f = GetExpressionFromSymbol(p);
                        _expressionQueue.Enqueue(f);
                    }

                    break;
                }

                IExpression e = GetExpressionFromSymbol(p);
                _expressionQueue.Enqueue(e);
            }

            if (!hasStart)
                throw new ParseException(Resources.UnbalancedParentheses);

            return true;
        }

        // Standard shunting-yard operator handling: pop higher-or-equal
        // precedence operators to the queue before pushing this one.
        private bool TryOperator(char c)
        {
            if (!OperatorExpression.IsSymbol(c))
                return false;

            bool repeat;
            string s = c.ToString();

            do
            {
                string p = _symbolStack.Count == 0 ? string.Empty : _symbolStack.Peek();
                repeat = false;

                if (_symbolStack.Count == 0)
                    _symbolStack.Push(s);
                else if (p == "(")
                    _symbolStack.Push(s);
                else if (Precedence(s) > Precedence(p))
                    _symbolStack.Push(s);
                else
                {
                    IExpression e = GetExpressionFromSymbol(_symbolStack.Pop());
                    _expressionQueue.Enqueue(e);
                    repeat = true;
                }
            } while (repeat);

            return true;
        }

        // Parses a numeric literal; a leading '-' is treated as a sign only
        // when the previous significant char allows it (start, '(' or operator).
        private bool TryNumber(char c, char l)
        {
            bool isNumber = NumberExpression.IsNumber(c);
            // only negative when last char is group start or symbol
            bool isNegative = NumberExpression.IsNegativeSign(c) && (l == '\0' || l == '(' || OperatorExpression.IsSymbol(l));

            if (!isNumber && !isNegative)
                return false;

            _buffer.Length = 0;
            _buffer.Append(c);

            char p = (char)_expressionReader.Peek();
            while (NumberExpression.IsNumber(p))
            {
                _buffer.Append((char)_expressionReader.Read());
                p = (char)_expressionReader.Peek();
            }

            double value;
            if (!(double.TryParse(_buffer.ToString(), out value)))
                throw new ParseException(Resources.InvalidNumberFormat + _buffer);

            NumberExpression expression = new NumberExpression(value);
            _expressionQueue.Enqueue(expression);
            return true;
        }

        // Drains remaining operators into the output queue; a leftover '('
        // means the parentheses never balanced.
        private void ProcessSymbolStack()
        {
            while (_symbolStack.Count > 0)
            {
                string p = _symbolStack.Pop();
                if (p.Length == 1 && p == "(")
                    throw new ParseException(Resources.UnbalancedParentheses);

                IExpression e = GetExpressionFromSymbol(p);
                _expressionQueue.Enqueue(e);
            }
        }

        // Resolves a symbol to an IExpression, caching instances so repeated
        // operators/functions reuse the same expression object.
        private IExpression GetExpressionFromSymbol(string p)
        {
            IExpression e;

            if (_expressionCache.ContainsKey(p))
                e = _expressionCache[p];
            else if (OperatorExpression.IsSymbol(p))
            {
                e = new OperatorExpression(p);
                _expressionCache.Add(p, e);
            }
            else if (FunctionExpression.IsFunction(p))
            {
                e = new FunctionExpression(p, false);
                _expressionCache.Add(p, e);
            }
            else if (ConvertExpression.IsConvertExpression(p))
            {
                e = new ConvertExpression(p);
                _expressionCache.Add(p, e);
            }
            else
                throw new ParseException(Resources.InvalidSymbolOnStack + p);

            return e;
        }

        // Two precedence levels only: multiplicative (2) and everything else (1).
        private static int Precedence(string c)
        {
            if (c.Length == 1 && (c[0] == '*' || c[0] == '/' || c[0] == '%'))
                return 2;

            return 1;
        }

        // Evaluates the postfix queue with a value stack; each expression pops
        // its arguments and pushes its result.
        private double CalculateFromQueue()
        {
            double result;
            _calculationStack.Clear();

            foreach (IExpression expression in _expressionQueue)
            {
                if (_calculationStack.Count < expression.ArgumentCount)
                    throw new ParseException(Resources.NotEnoughNumbers + expression);

                _parameters.Clear();
                for (int i = 0; i < expression.ArgumentCount; i++)
                    _parameters.Push(_calculationStack.Pop());

                _calculationStack.Push(expression.Evaluate.Invoke(_parameters.ToArray()));
            }

            result = _calculationStack.Pop();
            return result;
        }

        #region IDisposable Members

        /// <summary>
        /// Releases unmanaged and - optionally - managed resources
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Releases unmanaged and managed resources
        /// </summary>
        /// <param name="disposing">
        /// <c>true</c> to release both managed and unmanaged resources;
        /// <c>false</c> to release only unmanaged resources.
        /// </param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (_expressionReader != null)
                {
                    _expressionReader.Dispose();
                    _expressionReader = null;
                }
            }
        }

        #endregion
    }
}
using System;

using NUnit.Framework;

using Org.BouncyCastle.Crypto.Agreement;
using Org.BouncyCastle.Crypto.Engines;
using Org.BouncyCastle.Crypto.Generators;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Math;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Utilities.Test;

namespace Org.BouncyCastle.Crypto.Tests
{
    /// <summary>
    /// Tests Diffie-Hellman key agreement (DHAgreement and DHBasicAgreement)
    /// using fixed 512/768/1024-bit generator/prime test vectors.
    /// </summary>
    [TestFixture]
    public class DHTest
        : SimpleTest
    {
        // Fixed (generator, prime) test vectors — hex-encoded.
        private static readonly IBigInteger g512 = new BigInteger("153d5d6172adb43045b68ae8e1de1070b6137005686d29d3d73a7749199681ee5b212c9b96bfdcfa5b20cd5e3fd2044895d609cf9b410b7a0f12ca1cb9a428cc", 16);
        private static readonly IBigInteger p512 = new BigInteger("9494fec095f3b85ee286542b3836fc81a5dd0a0349b4c239dd38744d488cf8e31db8bcb7d33b41abb9e5a33cca9144b1cef332c94bf0573bf047a3aca98cdf3b", 16);

        private static readonly IBigInteger g768 = new BigInteger("7c240073c1316c621df461b71ebb0cdcc90a6e5527e5e126633d131f87461c4dc4afc60c2cb0f053b6758871489a69613e2a8b4c8acde23954c08c81cbd36132cfd64d69e4ed9f8e51ed6e516297206672d5c0a69135df0a5dcf010d289a9ca1", 16);
        private static readonly IBigInteger p768 = new BigInteger("8c9dd223debed1b80103b8b309715be009d48860ed5ae9b9d5d8159508efd802e3ad4501a7f7e1cfec78844489148cd72da24b21eddd01aa624291c48393e277cfc529e37075eccef957f3616f962d15b44aeab4039d01b817fde9eaa12fd73f", 16);

        private static readonly IBigInteger g1024 = new BigInteger("1db17639cdf96bc4eabba19454f0b7e5bd4e14862889a725c96eb61048dcd676ceb303d586e30f060dbafd8a571a39c4d823982117da5cc4e0f89c77388b7a08896362429b94a18a327604eb7ff227bffbc83459ade299e57b5f77b50fb045250934938efa145511166e3197373e1b5b1e52de713eb49792bedde722c6717abf", 16);
        private static readonly IBigInteger p1024 = new BigInteger("a00e283b3c624e5b2b4d9fbc2653b5185d99499b00fd1bf244c6f0bb817b4d1c451b2958d62a0f8a38caef059fb5ecd25d75ed9af403f5b5bdab97a642902f824e3c13789fed95fa106ddfe0ff4a707c85e2eb77d49e68f2808bcea18ce128b178cd287c6bc00efa9a1ad2a673fe0dceace53166f75b81d6709d5f8af7c66bb7", 16);

        public override string Name
        {
            get { return "DH"; }
        }

        // Generates two key pairs and checks both sides of a DHAgreement
        // (message-based) exchange derive the same shared secret.
        private void doTestDH(
            int size,
            IBigInteger g,
            IBigInteger p)
        {
            DHKeyPairGenerator kpGen = getDHKeyPairGenerator(g, p);

            //
            // generate first pair
            //
            IAsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu1 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv1 = (DHPrivateKeyParameters)pair.Private;

            //
            // generate second pair
            //
            pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu2 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv2 = (DHPrivateKeyParameters)pair.Private;

            //
            // two way
            //
            DHAgreement e1 = new DHAgreement();
            DHAgreement e2 = new DHAgreement();

            e1.Init(pv1);
            e2.Init(pv2);

            IBigInteger m1 = e1.CalculateMessage();
            IBigInteger m2 = e2.CalculateMessage();

            IBigInteger k1 = e1.CalculateAgreement(pu2, m2);
            IBigInteger k2 = e2.CalculateAgreement(pu1, m1);

            if (!k1.Equals(k2))
            {
                Fail(size + " bit 2-way test failed");
            }
        }

        // Same agreement check for DHBasicAgreement, optionally constraining
        // the private value bit length (privateValueSize == 0 means unconstrained).
        private void doTestDHBasic(
            int size,
            int privateValueSize,
            IBigInteger g,
            IBigInteger p)
        {
            DHBasicKeyPairGenerator kpGen = getDHBasicKeyPairGenerator(g, p, privateValueSize);

            //
            // generate first pair
            //
            IAsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu1 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv1 = (DHPrivateKeyParameters)pair.Private;

            checkKeySize(privateValueSize, pv1);

            //
            // generate second pair
            //
            pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu2 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv2 = (DHPrivateKeyParameters)pair.Private;

            checkKeySize(privateValueSize, pv2);

            //
            // two way
            //
            DHBasicAgreement e1 = new DHBasicAgreement();
            DHBasicAgreement e2 = new DHBasicAgreement();

            e1.Init(pv1);
            e2.Init(pv2);

            IBigInteger k1 = e1.CalculateAgreement(pu2);
            IBigInteger k2 = e2.CalculateAgreement(pu1);

            if (!k1.Equals(k2))
            {
                Fail("basic " + size + " bit 2-way test failed");
            }
        }

        // When a private value size is requested, the generated private X must
        // have exactly that bit length.
        private void checkKeySize(
            int privateValueSize,
            DHPrivateKeyParameters priv)
        {
            if (privateValueSize != 0)
            {
                if (priv.X.BitLength != privateValueSize)
                {
                    Fail("limited key check failed for key size " + privateValueSize);
                }
            }
        }

        // DHAgreement exchange with explicit ParametersWithRandom initialisation.
        private void doTestGPWithRandom(
            DHKeyPairGenerator kpGen)
        {
            //
            // generate first pair
            //
            IAsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu1 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv1 = (DHPrivateKeyParameters)pair.Private;

            //
            // generate second pair
            //
            pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu2 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv2 = (DHPrivateKeyParameters)pair.Private;

            //
            // two way
            //
            DHAgreement e1 = new DHAgreement();
            DHAgreement e2 = new DHAgreement();

            e1.Init(new ParametersWithRandom(pv1, new SecureRandom()));
            e2.Init(new ParametersWithRandom(pv2, new SecureRandom()));

            IBigInteger m1 = e1.CalculateMessage();
            IBigInteger m2 = e2.CalculateMessage();

            IBigInteger k1 = e1.CalculateAgreement(pu2, m2);
            IBigInteger k2 = e2.CalculateAgreement(pu1, m1);

            if (!k1.Equals(k2))
            {
                Fail("basic with random 2-way test failed");
            }
        }

        // DHBasicAgreement exchange with explicit ParametersWithRandom initialisation.
        private void doTestSimpleWithRandom(
            DHBasicKeyPairGenerator kpGen)
        {
            //
            // generate first pair
            //
            IAsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu1 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv1 = (DHPrivateKeyParameters)pair.Private;

            //
            // generate second pair
            //
            pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu2 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv2 = (DHPrivateKeyParameters)pair.Private;

            //
            // two way
            //
            DHBasicAgreement e1 = new DHBasicAgreement();
            DHBasicAgreement e2 = new DHBasicAgreement();

            e1.Init(new ParametersWithRandom(pv1, new SecureRandom()));
            e2.Init(new ParametersWithRandom(pv2, new SecureRandom()));

            IBigInteger k1 = e1.CalculateAgreement(pu2);
            IBigInteger k2 = e2.CalculateAgreement(pu1);

            if (!k1.Equals(k2))
            {
                Fail("basic with random 2-way test failed");
            }
        }

        // Builds a DHBasicKeyPairGenerator for the given group and private value size.
        private DHBasicKeyPairGenerator getDHBasicKeyPairGenerator(
            IBigInteger g,
            IBigInteger p,
            int privateValueSize)
        {
            DHParameters dhParams = new DHParameters(p, g, null, privateValueSize);
            DHKeyGenerationParameters dhkgParams = new DHKeyGenerationParameters(new SecureRandom(), dhParams);
            DHBasicKeyPairGenerator kpGen = new DHBasicKeyPairGenerator();

            kpGen.Init(dhkgParams);

            return kpGen;
        }

        // Builds a DHKeyPairGenerator for the given group.
        private DHKeyPairGenerator getDHKeyPairGenerator(
            IBigInteger g,
            IBigInteger p)
        {
            DHParameters dhParams = new DHParameters(p, g);
            DHKeyGenerationParameters dhkgParams = new DHKeyGenerationParameters(new SecureRandom(), dhParams);
            DHKeyPairGenerator kpGen = new DHKeyPairGenerator();

            kpGen.Init(dhkgParams);

            return kpGen;
        }

        /**
         * this test can take quite a while
         */
        private void doTestGeneration(
            int size)
        {
            DHParametersGenerator pGen = new DHParametersGenerator();

            pGen.Init(size, 10, new SecureRandom());

            DHParameters dhParams = pGen.GenerateParameters();

            if (dhParams.L != 0)
            {
                Fail("DHParametersGenerator failed to set J to 0 in generated DHParameters");
            }

            DHKeyGenerationParameters dhkgParams = new DHKeyGenerationParameters(new SecureRandom(), dhParams);

            DHBasicKeyPairGenerator kpGen = new DHBasicKeyPairGenerator();

            kpGen.Init(dhkgParams);

            //
            // generate first pair
            //
            IAsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu1 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv1 = (DHPrivateKeyParameters)pair.Private;

            //
            // generate second pair
            //
            // second generator is seeded from the first pair's parameters to
            // confirm generated parameters round-trip correctly
            dhkgParams = new DHKeyGenerationParameters(new SecureRandom(), pu1.Parameters);

            kpGen.Init(dhkgParams);

            pair = kpGen.GenerateKeyPair();

            DHPublicKeyParameters pu2 = (DHPublicKeyParameters)pair.Public;
            DHPrivateKeyParameters pv2 = (DHPrivateKeyParameters)pair.Private;

            //
            // two way
            //
            DHBasicAgreement e1 = new DHBasicAgreement();
            DHBasicAgreement e2 = new DHBasicAgreement();

            e1.Init(new ParametersWithRandom(pv1, new SecureRandom()));
            e2.Init(new ParametersWithRandom(pv2, new SecureRandom()));

            IBigInteger k1 = e1.CalculateAgreement(pu2);
            IBigInteger k2 = e2.CalculateAgreement(pu1);

            if (!k1.Equals(k2))
            {
                Fail("basic with " + size + " bit 2-way test failed");
            }
        }

        public override void PerformTest()
        {
            doTestDHBasic(512, 0, g512, p512);
            doTestDHBasic(768, 0, g768, p768);
            doTestDHBasic(1024, 0, g1024, p1024);

            doTestDHBasic(512, 64, g512, p512);
            doTestDHBasic(768, 128, g768, p768);
            doTestDHBasic(1024, 256, g1024, p1024);

            doTestDH(512, g512, p512);
            doTestDH(768, g768, p768);
            doTestDH(1024, g1024, p1024);

            //
            // generation test.
            //
            doTestGeneration(256);

            //
            // with random test
            //
            DHBasicKeyPairGenerator kpBasicGen = getDHBasicKeyPairGenerator(g512, p512, 0);

            doTestSimpleWithRandom(kpBasicGen);

            DHKeyPairGenerator kpGen = getDHKeyPairGenerator(g512, p512);

            doTestGPWithRandom(kpGen);

            //
            // parameter tests
            //
            // initialising with a PUBLIC key must be rejected
            DHAgreement dh = new DHAgreement();
            IAsymmetricCipherKeyPair dhPair = kpGen.GenerateKeyPair();

            try
            {
                dh.Init(dhPair.Public);
                Fail("DHAgreement key check failed");
            }
            catch (ArgumentException)
            {
                // ignore
            }

            // agreement across mismatched groups (512 vs 768 bit) must be rejected
            DHKeyPairGenerator kpGen768 = getDHKeyPairGenerator(g768, p768);

            try
            {
                dh.Init(dhPair.Private);

                dh.CalculateAgreement((DHPublicKeyParameters)kpGen768.GenerateKeyPair().Public, BigInteger.ValueOf(100));

                Fail("DHAgreement agreement check failed");
            }
            catch (ArgumentException)
            {
                // ignore
            }

            DHBasicAgreement dhBasic = new DHBasicAgreement();
            IAsymmetricCipherKeyPair dhBasicPair = kpBasicGen.GenerateKeyPair();

            try
            {
                dhBasic.Init(dhBasicPair.Public);
                Fail("DHBasicAgreement key check failed");
            }
            catch (ArgumentException)
            {
                // expected
            }

            DHBasicKeyPairGenerator kpBasicGen768 = getDHBasicKeyPairGenerator(g768, p768, 0);

            try
            {
                dhBasic.Init(dhPair.Private);

                dhBasic.CalculateAgreement((DHPublicKeyParameters)kpBasicGen768.GenerateKeyPair().Public);

                Fail("DHBasicAgreement agreement check failed");
            }
            catch (ArgumentException)
            {
                // expected
            }
        }

        public static void Main(
            string[] args)
        {
            ITest test = new DHTest();
            ITestResult result = test.Perform();

            Console.WriteLine(result);
        }

        [Test]
        public void TestFunction()
        {
            string resultText = Perform().ToString();

            Assert.AreEqual(Name + ": Okay", resultText);
        }
    }
}
namespace Appleseed.Framework.Content.Data
{
    using System;
    using System.Data;
    using System.Data.SqlClient;

    using Appleseed.Framework.Settings;

    /// <summary>
    /// This class encapsulates the basic attributes of a Question, and is used
    /// by the administration pages when manipulating questions. QuestionItem implements
    /// the IComparable interface so that an ArrayList of QuestionItems may be sorted
    /// by QuestionOrder, using the ArrayList's Sort() method.
    /// </summary>
    public class QuestionItem : IComparable
    {
        #region Properties

        /// <summary>
        /// Gets or sets the question id.
        /// </summary>
        public int QuestionID { get; set; }

        /// <summary>
        /// Gets or sets the question name.
        /// </summary>
        public string QuestionName { get; set; }

        /// <summary>
        /// Gets or sets the question order (position of the question within the survey).
        /// </summary>
        public int QuestionOrder { get; set; }

        /// <summary>
        /// Gets or sets the type option.
        /// </summary>
        public string TypeOption { get; set; }

        #endregion

        #region Implemented Interfaces

        #region IComparable

        /// <summary>
        /// Compares this item to another <see cref="QuestionItem"/> by <see cref="QuestionOrder"/>.
        /// </summary>
        /// <param name="value">The other <see cref="QuestionItem"/>; null sorts before any item.</param>
        /// <returns>Negative, zero or positive, per the <see cref="IComparable"/> contract.</returns>
        public virtual int CompareTo(object value)
        {
            // Per the IComparable contract, any instance compares greater than null.
            if (value == null)
            {
                return 1;
            }

            // Delegate to Int32.CompareTo instead of the original three-branch
            // if-chain, whose trailing "return 0" was unreachable.
            return this.QuestionOrder.CompareTo(((QuestionItem)value).QuestionOrder);
        }

        #endregion

        #endregion
    }

    /// <summary>
    /// This class encapsulates the basic attributes of an Option, and is used
    /// by the administration pages when manipulating questions/options. OptionItem implements
    /// the IComparable interface so that an ArrayList of OptionItems may be sorted
    /// by OptionOrder, using the ArrayList's Sort() method.
/// </summary> public class OptionItem : IComparable { #region Properties /// <summary> /// Gets or sets the option id. /// </summary> public int OptionID { get; set; } /// <summary> /// Gets or sets the option name. /// </summary> public string OptionName { get; set; } /// <summary> /// Gets or sets the option order. /// </summary> public int OptionOrder { get; set; } #endregion #region Implemented Interfaces #region IComparable /// <summary> /// The compare to. /// </summary> /// <param name="value">The value.</param> /// <returns>The compare to.</returns> /// <remarks> /// public virtual int : IComparable.CompareTo CompareTo( object value) // JLH!! /// </remarks> public virtual int CompareTo(object value) { if (value == null) { return 1; } var compareOrder = ((OptionItem)value).OptionOrder; if (this.OptionOrder == compareOrder) { return 0; } if (this.OptionOrder < compareOrder) { return -1; } if (this.OptionOrder > compareOrder) { return 1; } return 0; } #endregion #endregion } /// <summary> /// IBS Tasks module /// Class that encapsulates all data logic necessary to add/query/delete /// surveys within the Portal database. /// Moved into Appleseed by Jakob Hansen /// </summary> public class SurveyDB { #region Public Methods /// <summary> /// The AddAnswer method add a record in rb_SurveyAnswers table /// for a specific SurveyID and QuestionID. /// Other relevant sources: /// rb_AddSurveyAnswer Stored Procedure /// </summary> /// <param name="surveyId"> /// The survey ID. /// </param> /// <param name="questionId"> /// The question ID. /// </param> /// <param name="optionId"> /// The option ID. /// </param> /// <returns> /// The add answer. 
/// </returns> public int AddAnswer(int surveyId, int questionId, int optionId) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_AddSurveyAnswer", connection) { CommandType = CommandType.StoredProcedure }; // Add Parameters to SPROC var parameterSurveyID = new SqlParameter("@SurveyID", SqlDbType.Int, 4) { Value = surveyId }; command.Parameters.Add(parameterSurveyID); var parameterQuestionID = new SqlParameter("@QuestionID", SqlDbType.Int, 4) { Value = questionId }; command.Parameters.Add(parameterQuestionID); var parameterOptionID = new SqlParameter("@OptionID", SqlDbType.Int, 4) { Value = optionId }; command.Parameters.Add(parameterOptionID); var parameterItemID = new SqlParameter("@ItemID", SqlDbType.Int, 4) { Direction = ParameterDirection.Output }; command.Parameters.Add(parameterItemID); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterItemID.Value; } /// <summary> /// The AddOption method add a record in rb_SurveyOptions table /// for a specific QuestionID. /// Other relevant sources: /// rb_AddSurveyOption Stored Procedure /// </summary> /// <param name="QuestionID"> /// The question ID. /// </param> /// <param name="OptionDesc"> /// The option desc. /// </param> /// <param name="ViewOrder"> /// The view order. /// </param> /// <returns> /// The add option. 
/// </returns> public int AddOption(int QuestionID, string OptionDesc, int ViewOrder) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_AddSurveyOption", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterQuestionID = new SqlParameter("@QuestionID", SqlDbType.Int, 4); parameterQuestionID.Value = QuestionID; command.Parameters.Add(parameterQuestionID); var parameterOptionDesc = new SqlParameter("@OptionDesc", SqlDbType.NVarChar, 500); parameterOptionDesc.Value = OptionDesc; command.Parameters.Add(parameterOptionDesc); var parameterViewOrder = new SqlParameter("@ViewOrder", SqlDbType.Int, 4); parameterViewOrder.Value = ViewOrder; command.Parameters.Add(parameterViewOrder); var parameterOptionID = new SqlParameter("@OptionID", SqlDbType.Int, 4); parameterOptionID.Direction = ParameterDirection.Output; command.Parameters.Add(parameterOptionID); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterOptionID.Value; } /// <summary> /// The AddQuestion method add a record in rb_SurveyQuestions table /// for a specific SurveyID. /// Other relevant sources: /// rb_AddSurveyQuestion Stored Procedure /// </summary> /// <param name="ModuleID"> /// The module ID. /// </param> /// <param name="Question"> /// The question. /// </param> /// <param name="ViewOrder"> /// The view order. /// </param> /// <param name="TypeOption"> /// The type option. /// </param> /// <returns> /// The add question. 
/// </returns> public int AddQuestion(int ModuleID, string Question, int ViewOrder, string TypeOption) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_AddSurveyQuestion", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = ModuleID; command.Parameters.Add(parameterModuleId); var parameterQuestion = new SqlParameter("@Question", SqlDbType.NVarChar, 500); parameterQuestion.Value = Question; command.Parameters.Add(parameterQuestion); var parameterViewOrder = new SqlParameter("@ViewOrder", SqlDbType.Int, 4); parameterViewOrder.Value = ViewOrder; command.Parameters.Add(parameterViewOrder); var parameterTypeOption = new SqlParameter("@TypeOption", SqlDbType.NVarChar, 2); parameterTypeOption.Value = TypeOption; command.Parameters.Add(parameterTypeOption); var parameterQuestionID = new SqlParameter("@QuestionID", SqlDbType.Int, 4); parameterQuestionID.Direction = ParameterDirection.Output; command.Parameters.Add(parameterQuestionID); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterQuestionID.Value; } /// <summary> /// The DelOption method delete a record in rb_SurveyOptions table /// for a specific OptionID. /// Other relevant sources: /// rb_DelSurveyOption Stored Procedure /// </summary> /// <param name="OptionID"> /// The option ID. /// </param> /// <returns> /// The del option. 
/// </returns> public int DelOption(int OptionID) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_DelSurveyOption", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterOptionID = new SqlParameter("@OptionID", SqlDbType.Int, 4); parameterOptionID.Value = OptionID; command.Parameters.Add(parameterOptionID); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return 1; } /// <summary> /// The DelQuestion method delete a record in rb_SurveyQuestions table /// for a specific QuestionID. /// Other relevant sources: /// rb_DelSurveyQuestion Stored Procedure /// </summary> /// <param name="QuestionID"> /// The question ID. /// </param> /// <returns> /// The del question. /// </returns> public int DelQuestion(int QuestionID) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_DelSurveyQuestion", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterQuestionID = new SqlParameter("@QuestionID", SqlDbType.Int, 4); parameterQuestionID.Value = QuestionID; command.Parameters.Add(parameterQuestionID); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return 1; } /// <summary> /// The ExistAddSurvey method checks whether the Survey exists in rb_Surveys /// table for a specific ModuleID, if not it creates a new one. /// Other relevant sources: /// rb_ExistAddSurvey Stored Procedure /// </summary> /// <param name="ModuleID"> /// The module ID. /// </param> /// <param name="CreatedByUser"> /// The created by user. /// </param> /// <returns> /// The exist add survey. 
/// </returns> public string ExistAddSurvey(int ModuleID, string CreatedByUser) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_ExistAddSurvey", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = ModuleID; command.Parameters.Add(parameterModuleId); var parameterCreatedByUser = new SqlParameter("@CreatedByUser", SqlDbType.NVarChar, 100); parameterCreatedByUser.Value = CreatedByUser; command.Parameters.Add(parameterCreatedByUser); var parameterSurveyDesc = new SqlParameter("@SurveyDesc", SqlDbType.NVarChar, 500); parameterSurveyDesc.Direction = ParameterDirection.Output; command.Parameters.Add(parameterSurveyDesc); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (string)parameterSurveyDesc.Value; } /// <summary> /// The ExistSurvey method checks whether the Survey exists in rb_Surveys /// table for a specific ModuleID. /// Other relevant sources: /// rb_ExistSurvey Stored Procedure /// </summary> /// <param name="ModuleID"> /// The module ID. /// </param> /// <returns> /// The exist survey. 
/// </returns> public int ExistSurvey(int ModuleID) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_ExistSurvey", connection); // Mark the Command as a SPROC command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = ModuleID; command.Parameters.Add(parameterModuleId); var parameterRowCount = new SqlParameter("@RowCount", SqlDbType.Int, 4); parameterRowCount.Direction = ParameterDirection.Output; command.Parameters.Add(parameterRowCount); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterRowCount.Value; } /// <summary> /// The GetAnswerNum method get the number of answers /// for a specific SurveyID and QuestionID. /// Other relevant sources: /// rb_GetSurveyAnswersNum Stored Procedure /// </summary> /// <param name="SurveyID"> /// The survey ID. /// </param> /// <param name="QuestionID"> /// The question ID. /// </param> /// <returns> /// The get answer num. 
/// </returns> public int GetAnswerNum(int SurveyID, int QuestionID) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyAnswersNum", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterSurveyID = new SqlParameter("@SurveyID", SqlDbType.Int, 4); parameterSurveyID.Value = SurveyID; command.Parameters.Add(parameterSurveyID); var parameterQuestionID = new SqlParameter("@QuestionID", SqlDbType.Int, 4); parameterQuestionID.Value = QuestionID; command.Parameters.Add(parameterQuestionID); var parameterNum = new SqlParameter("@NumAnswer", SqlDbType.Int, 4); parameterNum.Direction = ParameterDirection.Output; command.Parameters.Add(parameterNum); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterNum.Value; } /// <summary> /// The GetAnswers method returns a SqlDataReader containing all of the /// answers for a specific SurveyID. /// Other relevant sources: /// rb_GetSurveyAnswers Stored Procedure /// </summary> /// <param name="surveyId"> /// The survey ID. /// </param> /// <returns> /// </returns> public SqlDataReader GetAnswers(int surveyId) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyAnswers", connection) { CommandType = CommandType.StoredProcedure }; // Add Parameters to SPROC var parameterSurveyId = new SqlParameter("@SurveyID", SqlDbType.Int, 4) { Value = surveyId }; command.Parameters.Add(parameterSurveyId); // Execute the command and return the datareader connection.Open(); var result = command.ExecuteReader(CommandBehavior.CloseConnection); return result; } /// <summary> /// The GetDimArrays method get the dimensionof the arrays /// for a specific ModuleID and TypeOption. 
/// Other relevant sources: /// rb_GetSurveyDimArray Stored Procedure /// </summary> /// <param name="ModuleID"> /// The module ID. /// </param> /// <param name="TypeOption"> /// The type option. /// </param> /// <returns> /// The get dim array. /// </returns> public int GetDimArray(int ModuleID, string TypeOption) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyDimArray", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = ModuleID; command.Parameters.Add(parameterModuleId); var parameterTypeOption = new SqlParameter("@TypeOption", SqlDbType.NChar, 2); parameterTypeOption.Value = TypeOption; command.Parameters.Add(parameterTypeOption); var parameterDimArray = new SqlParameter("@DimArray", SqlDbType.Int, 4); parameterDimArray.Direction = ParameterDirection.Output; command.Parameters.Add(parameterDimArray); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterDimArray.Value; } /// <summary> /// The GetOptionList method returns a SqlDataReader containing all of the /// options for a specific QuestionID. /// Other relevant sources: /// GetSurveyOptionList Stored Procedure /// </summary> /// <param name="questionId"> /// The question ID. 
/// </param> /// <returns> /// </returns> public SqlDataReader GetOptionList(int questionId) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyOptionList", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterQuestionId = new SqlParameter("@QuestionID", SqlDbType.Int, 4) { Value = questionId }; command.Parameters.Add(parameterQuestionId); // Execute the command and return the datareader connection.Open(); var result = command.ExecuteReader(CommandBehavior.CloseConnection); return result; } /// <summary> /// The GetOptions method returns a SqlDataReader containing all of the /// options for a specific portal module. /// Other relevant sources: /// GetSurveyOptions Stored Procedure /// </summary> /// <param name="moduleID"> /// The module ID. /// </param> /// <param name="TypeOption"> /// The type option. /// </param> /// <returns> /// </returns> public SqlDataReader GetOptions(int moduleID, string TypeOption) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyOptions", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = moduleID; command.Parameters.Add(parameterModuleId); var parameterTypeOption = new SqlParameter("@TypeOption", SqlDbType.NVarChar, 2); parameterTypeOption.Value = TypeOption; command.Parameters.Add(parameterTypeOption); // Execute the command and return the datareader connection.Open(); var result = command.ExecuteReader(CommandBehavior.CloseConnection); return result; } /// <summary> /// The GetQuestionList method returns a SqlDataReader containing all of the /// questions for a specific SurveyID. 
/// Other relevant sources: /// GetSurveyQuestionList Stored Procedure /// </summary> /// <param name="ModuleID"> /// The module ID. /// </param> /// <returns> /// </returns> public SqlDataReader GetQuestionList(int ModuleID) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyQuestionList", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = ModuleID; command.Parameters.Add(parameterModuleId); // Execute the command and return the datareader connection.Open(); var result = command.ExecuteReader(CommandBehavior.CloseConnection); return result; } /// <summary> /// The GetQuestions method returns a SqlDataReader containing all of the /// questions for a specific portal module. /// Other relevant sources: /// GetSurveyQuestions Stored Procedure /// </summary> /// <param name="moduleId"> /// The module ID. /// </param> /// <returns> /// </returns> public SqlDataReader GetQuestions(int moduleId) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyQuestions", connection) { CommandType = CommandType.StoredProcedure }; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4) { Value = moduleId }; command.Parameters.Add(parameterModuleId); // Execute the command and return the datareader connection.Open(); var result = command.ExecuteReader(CommandBehavior.CloseConnection); return result; } /// <summary> /// The GetSurveyID method returns the SurveyID from rb_Surveys table /// for a specific ModuleID. /// Other relevant sources: /// rb_GetSurveyID Stored Procedure /// </summary> /// <param name="moduleId"> /// The module ID. /// </param> /// <returns> /// The get survey id. 
/// </returns> public int GetSurveyID(int moduleId) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_GetSurveyID", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterModuleId = new SqlParameter("@ModuleID", SqlDbType.Int, 4); parameterModuleId.Value = moduleId; command.Parameters.Add(parameterModuleId); var parameterSurveyID = new SqlParameter("@SurveyID", SqlDbType.Int, 4); parameterSurveyID.Direction = ParameterDirection.Output; command.Parameters.Add(parameterSurveyID); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return (int)parameterSurveyID.Value; } /// <summary> /// The UpdateOptionOrder method set the new ViewOrder in the /// rb_SurveyOptions table for a specific OptionID. /// Other relevant sources: /// rb_UpdateSurveyOptionOrder Stored Procedure /// </summary> /// <param name="OptionID"> /// The option ID. /// </param> /// <param name="Order"> /// The order. /// </param> /// <returns> /// The update option order. /// </returns> public int UpdateOptionOrder(int OptionID, int Order) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_UpdateSurveyOptionOrder", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameteroptionID = new SqlParameter("@OptionID", SqlDbType.Int, 4); parameteroptionID.Value = OptionID; command.Parameters.Add(parameteroptionID); var parameterOrder = new SqlParameter("@Order", SqlDbType.Int, 4); parameterOrder.Value = Order; command.Parameters.Add(parameterOrder); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return 1; } /// <summary> /// The UpdateQuestionOrder method set the new ViewOrder in the /// rb_SurveyQuestions table for a specific QuestionID. 
/// Other relevant sources: /// rb_UpdateSurveyQuestionOrder Stored Procedure /// </summary> /// <param name="QuestionID"> /// The question ID. /// </param> /// <param name="Order"> /// The order. /// </param> /// <returns> /// The update question order. /// </returns> public int UpdateQuestionOrder(int QuestionID, int Order) { // Create Instance of Connection and Command object var connection = Config.SqlConnectionString; var command = new SqlCommand("rb_UpdateSurveyQuestionOrder", connection); command.CommandType = CommandType.StoredProcedure; // Add Parameters to SPROC var parameterQuestionID = new SqlParameter("@QuestionID", SqlDbType.Int, 4); parameterQuestionID.Value = QuestionID; command.Parameters.Add(parameterQuestionID); var parameterOrder = new SqlParameter("@Order", SqlDbType.Int, 4); parameterOrder.Value = Order; command.Parameters.Add(parameterOrder); connection.Open(); try { command.ExecuteNonQuery(); } finally { connection.Close(); } return 1; } #endregion } }
//
// AddinPropertyCollection.cs
//
// Author:
//       Lluis Sanchez Gual <lluis@novell.com>
//
// Copyright (c) 2011 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

using System;
using System.Linq;
using System.Collections.Generic;

namespace Mono.Addins.Description
{
	/// <summary>
	/// A collection of add-in properties. Properties are name/value pairs which
	/// may additionally carry a locale for localized values.
	/// </summary>
	public interface AddinPropertyCollection: IEnumerable<AddinProperty>
	{
		/// <summary>
		/// Gets the value of a property
		/// </summary>
		/// <returns>
		/// The property value.
		/// </returns>
		/// <param name='name'>
		/// Name of the property.
		/// </param>
		/// <remarks>
		/// If the property is localized, it will return the value for the current language if exists, or the
		/// default value if it doesn't.
		/// </remarks>
		string GetPropertyValue (string name);

		/// <summary>
		/// Gets the value of a property
		/// </summary>
		/// <returns>
		/// The property value.
		/// </returns>
		/// <param name='name'>
		/// Name of the property.
		/// </param>
		/// <param name='locale'>
		/// Locale for which the value must be returned.
		/// </param>
		string GetPropertyValue (string name, string locale);

		/// <summary>
		/// Sets the value of a property
		/// </summary>
		/// <param name='name'>
		/// Name of the property
		/// </param>
		/// <param name='value'>
		/// New value.
		/// </param>
		void SetPropertyValue (string name, string value);

		/// <summary>
		/// Sets the value of a property for a specific locale
		/// </summary>
		/// <param name='name'>
		/// Name of the property.
		/// </param>
		/// <param name='value'>
		/// New value.
		/// </param>
		/// <param name='locale'>
		/// Locale of the property to be set.
		/// </param>
		void SetPropertyValue (string name, string value, string locale);

		/// <summary>
		/// Removes a property.
		/// </summary>
		/// <param name='name'>
		/// Name of the property.
		/// </param>
		/// <remarks>
		/// This method only removes properties which have no locale set.
		/// </remarks>
		void RemoveProperty (string name);

		/// <summary>
		/// Removes a property with a specified locale
		/// </summary>
		/// <param name='name'>
		/// Name of the property
		/// </param>
		/// <param name='locale'>
		/// Locale of the property
		/// </param>
		void RemoveProperty (string name, string locale);

		/// <summary>
		/// Checks whether this collection contains a property
		/// </summary>
		/// <returns><c>true</c>, if the collection has the property, <c>false</c> otherwise.</returns>
		/// <param name="name">Name of the property</param>
		bool HasProperty (string name);
	}

	/// <summary>
	/// List-backed implementation of <see cref="AddinPropertyCollection"/> with
	/// locale-aware lookup (exact locale, then same language, then no locale).
	/// </summary>
	public class AddinPropertyCollectionImpl: List<AddinProperty>, AddinPropertyCollection
	{
		// Owning description, used only to expand strings in ParseString; may be null.
		AddinDescription desc;

		public AddinPropertyCollectionImpl ()
		{
		}

		public AddinPropertyCollectionImpl (AddinDescription desc)
		{
			this.desc = desc;
		}

		public AddinPropertyCollectionImpl (AddinPropertyCollection col)
		{
			// Copy constructor: takes the properties, not the source's description.
			AddRange (col);
		}

		public string GetPropertyValue (string name)
		{
			// Look up using the current thread's culture name (e.g. "en-US").
			return GetPropertyValue (name, System.Threading.Thread.CurrentThread.CurrentCulture.ToString ());
		}

		public string GetPropertyValue (string name, string locale)
		{
			// Resolution precedence (checked in this order after the scan):
			//   1. exact locale match (returned immediately inside the loop)
			//   2. same language, no country (e.g. "en" for "en-US")
			//   3. same language, different country (e.g. "en-GB" for "en-US")
			//   4. property with no locale at all
			//   5. string.Empty if nothing matched
			locale = NormalizeLocale (locale);
			string lang = GetLocaleLang (locale);
			AddinProperty sameLangDifCountry = null;
			AddinProperty sameLang = null;
			AddinProperty defaultLoc = null;

			foreach (var p in this) {
				if (p.Name == name) {
					if (p.Locale == locale)
						return ParseString (p.Value);
					string plang = GetLocaleLang (p.Locale);
					if (plang == p.Locale && plang == lang) // No country specified
						sameLang = p;
					else if (plang == lang)
						sameLangDifCountry = p;
					else if (p.Locale == null)
						defaultLoc = p;
				}
			}

			if (sameLang != null)
				return ParseString (sameLang.Value);
			else if (sameLangDifCountry != null)
				return ParseString (sameLangDifCountry.Value);
			else if (defaultLoc != null)
				return ParseString (defaultLoc.Value);
			else
				return string.Empty;
		}

		// Expands the value through the owning description when one is attached;
		// otherwise returns the raw value.
		string ParseString (string s)
		{
			if (desc != null)
				return desc.ParseString (s);
			else
				return s;
		}

		// Canonicalizes a locale string: empty becomes null, '_' becomes '-'
		// (so "en_US" and "en-US" compare equal).
		string NormalizeLocale (string loc)
		{
			if (string.IsNullOrEmpty (loc))
				return null;
			return loc.Replace ('_','-');
		}

		// Returns the language part of a locale ("en" from "en-US"), or the
		// locale itself when there is no country suffix. Null-safe.
		string GetLocaleLang (string loc)
		{
			if (loc == null)
				return null;
			int i = loc.IndexOf ('-');
			if (i != -1)
				return loc.Substring (0, i);
			else
				return loc;
		}

		public void SetPropertyValue (string name, string value)
		{
			SetPropertyValue (name, value, null);
		}

		public void SetPropertyValue (string name, string value, string locale)
		{
			if (string.IsNullOrEmpty (name))
				throw new ArgumentException ("name can't be null or empty");

			if (value == null)
				throw new ArgumentNullException ("value");

			locale = NormalizeLocale (locale);

			// Update in place when a property with the same name and locale exists.
			foreach (var p in this) {
				if (p.Name == name && p.Locale == locale) {
					p.Value = value;
					return;
				}
			}
			AddinProperty prop = new AddinProperty ();
			prop.Name = name;
			prop.Value = value;
			prop.Locale = locale;
			Add (prop);
		}

		public void RemoveProperty (string name)
		{
			RemoveProperty (name, null);
		}

		public void RemoveProperty (string name, string locale)
		{
			locale = NormalizeLocale (locale);

			// Removing while enumerating is safe here only because we return
			// immediately after the Remove.
			foreach (var p in this) {
				if (p.Name == name && p.Locale == locale) {
					Remove (p);
					return;
				}
			}
		}

		public bool HasProperty (string name)
		{
			// Matches any locale variant of the property.
			foreach (var p in this) {
				if (p.Name == name)
					return true;
			}
			return false;
		}

		// Returns (and optionally removes) the value of a locale-less property.
		// Used internally to pull "core" properties out of the general bag.
		internal string ExtractCoreProperty (string name, bool removeProperty)
		{
			foreach (var p in this) {
				if (p.Name == name && p.Locale == null) {
					if (removeProperty)
						Remove (p);
					return p.Value;
				}
			}
			return null;
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#pragma warning disable 0420

// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// A spin lock is a mutual exclusion lock primitive where a thread trying to acquire the lock waits in a loop ("spins")
// repeatedly checking until the lock becomes available. As the thread remains active performing a non-useful task,
// the use of such a lock is a kind of busy waiting and consumes CPU resources without performing real work.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;

namespace System.Threading
{
    /// <summary>
    /// Provides a mutual exclusion lock primitive where a thread trying to acquire the lock waits in a loop
    /// repeatedly checking until the lock becomes available.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Spin locks can be used for leaf-level locks where the object allocation implied by using a <see
    /// cref="System.Threading.Monitor"/>, in size or due to garbage collection pressure, is overly
    /// expensive. Avoiding blocking is another reason that a spin lock can be useful, however if you expect
    /// any significant amount of blocking, you are probably best not using spin locks due to excessive
    /// spinning. Spinning can be beneficial when locks are fine grained and large in number (for example, a
    /// lock per node in a linked list) as well as when lock hold times are always extremely short. In
    /// general, while holding a spin lock, one should avoid blocking, calling anything that itself may
    /// block, holding more than one spin lock at once, making dynamically dispatched calls (interface and
    /// virtuals), making statically dispatched calls into any code one doesn't own, or allocating memory.
    /// </para>
    /// <para>
    /// <see cref="SpinLock"/> should only be used when it's been determined that doing so will improve an
    /// application's performance. It's also important to note that <see cref="SpinLock"/> is a value type,
    /// for performance reasons. As such, one must be very careful not to accidentally copy a SpinLock
    /// instance, as the two instances (the original and the copy) would then be completely independent of
    /// one another, which would likely lead to erroneous behavior of the application. If a SpinLock instance
    /// must be passed around, it should be passed by reference rather than by value.
    /// </para>
    /// <para>
    /// Do not store <see cref="SpinLock"/> instances in readonly fields.
    /// </para>
    /// <para>
    /// All members of <see cref="SpinLock"/> are thread-safe and may be used from multiple threads
    /// concurrently.
    /// </para>
    /// </remarks>
    [DebuggerTypeProxy(typeof(SystemThreading_SpinLockDebugView))]
    [DebuggerDisplay("IsHeld = {IsHeld}")]
    public struct SpinLock
    {
        // The current ownership state is a single signed int. There are two modes:
        //
        // 1) Ownership tracking enabled: the high bit is 0, and the remaining bits
        //    store the managed thread ID of the current owner.  When the 31 low bits
        //    are 0, the lock is available.
        // 2) Performance mode: when the high bit is 1, lock availability is indicated by the low bit.
        //    When the low bit is 1 -- the lock is held; 0 -- the lock is available.
        //
        // There are several masks and constants below for convenience.

        private volatile int m_owner;

        // After how many yields, call Sleep(1)
        private const int SLEEP_ONE_FREQUENCY = 40;

        // After how many yields, check the timeout
        private const int TIMEOUT_CHECK_FREQUENCY = 10;

        // The thread tracking disabled mask
        private const int LOCK_ID_DISABLE_MASK = unchecked((int)0x80000000);        //1000 0000 0000 0000 0000 0000 0000 0000

        // The lock is held by some thread, but we don't know which
        private const int LOCK_ANONYMOUS_OWNED = 0x1;                               //0000 0000 0000 0000 0000 0000 0000 0001

        // Waiters mask if the thread tracking is disabled
        private const int WAITERS_MASK = ~(LOCK_ID_DISABLE_MASK | 1);               //0111 1111 1111 1111 1111 1111 1111 1110

        // The thread tracking is disabled and the lock bit is set; used in the Enter fast path to make sure the id is disabled and lock is available
        private const int ID_DISABLED_AND_ANONYMOUS_OWNED = unchecked((int)0x80000001); //1000 0000 0000 0000 0000 0000 0000 0001

        // The lock is unowned if:
        // m_owner is zero and the thread tracking is enabled
        // m_owner & LOCK_ANONYMOUS_OWNED is zero and the thread tracking is disabled
        private const int LOCK_UNOWNED = 0;

        // The maximum number of waiters (only used if the thread tracking is disabled)
        // The actual maximum waiters count is this number divided by two because each waiter increments the waiters count by 2
        // The waiters count is calculated by m_owner & WAITERS_MASK 01111....110
        private static int MAXIMUM_WAITERS = WAITERS_MASK;

        // CAS wrapper that also reports success via a ref parameter; the ref-bool form lets
        // the callers set lockTaken atomically-with-respect-to asynchronous exceptions.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private int CompareExchange(ref int location, int value, int comparand, ref bool success)
        {
            int result = Interlocked.CompareExchange(ref location, value, comparand);
            success = (result == comparand);
            return result;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="T:System.Threading.SpinLock"/>
        /// structure with the option to track thread IDs to improve debugging.
        /// </summary>
        /// <remarks>
        /// The default constructor for <see cref="SpinLock"/> tracks thread ownership.
        /// </remarks>
        /// <param name="enableThreadOwnerTracking">Whether to capture and use thread IDs for debugging
        /// purposes.</param>
        public SpinLock(bool enableThreadOwnerTracking)
        {
            m_owner = LOCK_UNOWNED;
            if (!enableThreadOwnerTracking)
            {
                // Flip the high bit to indicate "performance mode" (no owner-thread-ID tracking).
                m_owner |= LOCK_ID_DISABLE_MASK;
                Debug.Assert(!IsThreadOwnerTrackingEnabled, "property should be false by now");
            }
        }

        /// <summary>
        /// Acquires the lock in a reliable manner, such that even if an exception occurs within the method
        /// call, <paramref name="lockTaken"/> can be examined reliably to determine whether the lock was
        /// acquired.
        /// </summary>
        /// <remarks>
        /// <see cref="SpinLock"/> is a non-reentrant lock, meaning that if a thread holds the lock, it is
        /// not allowed to enter the lock again. If thread ownership tracking is enabled (whether it's
        /// enabled is available through <see cref="IsThreadOwnerTrackingEnabled"/>), an exception will be
        /// thrown when a thread tries to re-enter a lock it already holds. However, if thread ownership
        /// tracking is disabled, attempting to enter a lock already held will result in deadlock.
        /// </remarks>
        /// <param name="lockTaken">True if the lock is acquired; otherwise, false. <paramref
        /// name="lockTaken"/> must be initialized to false prior to calling this method.</param>
        /// <exception cref="T:System.Threading.LockRecursionException">
        /// Thread ownership tracking is enabled, and the current thread has already acquired this lock.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// The <paramref name="lockTaken"/> argument must be initialized to false prior to calling Enter.
        /// </exception>
        public void Enter(ref bool lockTaken)
        {
            //Try to keep the code and branching in this method as small as possible in order to inline the method
            int observedOwner = m_owner;
            if (lockTaken || //invalid parameter
                (observedOwner & ID_DISABLED_AND_ANONYMOUS_OWNED) != LOCK_ID_DISABLE_MASK || //thread tracking is enabled or the lock is already acquired
                CompareExchange(ref m_owner, observedOwner | LOCK_ANONYMOUS_OWNED, observedOwner, ref lockTaken) != observedOwner) //acquiring the lock failed
                ContinueTryEnter(Timeout.Infinite, ref lockTaken); // Then try the slow path if any of the above conditions is met
        }

        /// <summary>
        /// Attempts to acquire the lock in a reliable manner, such that even if an exception occurs within
        /// the method call, <paramref name="lockTaken"/> can be examined reliably to determine whether the
        /// lock was acquired.
        /// </summary>
        /// <remarks>
        /// Unlike <see cref="Enter"/>, TryEnter will not block waiting for the lock to be available. If the
        /// lock is not available when TryEnter is called, it will return immediately without any further
        /// spinning.
        /// </remarks>
        /// <param name="lockTaken">True if the lock is acquired; otherwise, false. <paramref
        /// name="lockTaken"/> must be initialized to false prior to calling this method.</param>
        /// <exception cref="T:System.Threading.LockRecursionException">
        /// Thread ownership tracking is enabled, and the current thread has already acquired this lock.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// The <paramref name="lockTaken"/> argument must be initialized to false prior to calling TryEnter.
        /// </exception>
        public void TryEnter(ref bool lockTaken)
        {
            int observedOwner = m_owner;
            // NOTE: non-short-circuiting '|' is deliberate here; it avoids a branch on this hot path.
            if (((observedOwner & LOCK_ID_DISABLE_MASK) == 0) | lockTaken)
            {
                // Thread tracking enabled or invalid arg. Take slow path.
                ContinueTryEnter(0, ref lockTaken);
            }
            else if ((observedOwner & LOCK_ANONYMOUS_OWNED) != 0)
            {
                // Lock already held by someone
                lockTaken = false;
            }
            else
            {
                // Lock wasn't held; try to acquire it.
                CompareExchange(ref m_owner, observedOwner | LOCK_ANONYMOUS_OWNED, observedOwner, ref lockTaken);
            }
        }

        /// <summary>
        /// Attempts to acquire the lock in a reliable manner, such that even if an exception occurs within
        /// the method call, <paramref name="lockTaken"/> can be examined reliably to determine whether the
        /// lock was acquired.
        /// </summary>
        /// <remarks>
        /// Unlike <see cref="Enter"/>, TryEnter will not block indefinitely waiting for the lock to be
        /// available. It will block until either the lock is available or until the <paramref
        /// name="timeout"/>
        /// has expired.
        /// </remarks>
        /// <param name="timeout">A <see cref="System.TimeSpan"/> that represents the number of milliseconds
        /// to wait, or a <see cref="System.TimeSpan"/> that represents -1 milliseconds to wait indefinitely.
        /// </param>
        /// <param name="lockTaken">True if the lock is acquired; otherwise, false. <paramref
        /// name="lockTaken"/> must be initialized to false prior to calling this method.</param>
        /// <exception cref="T:System.Threading.LockRecursionException">
        /// Thread ownership tracking is enabled, and the current thread has already acquired this lock.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// The <paramref name="lockTaken"/> argument must be initialized to false prior to calling TryEnter.
        /// </exception>
        /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="timeout"/> is a negative
        /// number other than -1 milliseconds, which represents an infinite time-out -or- timeout is greater
        /// than <see cref="System.Int32.MaxValue"/> milliseconds.
        /// </exception>
        public void TryEnter(TimeSpan timeout, ref bool lockTaken)
        {
            // Validate the timeout
            Int64 totalMilliseconds = (Int64)timeout.TotalMilliseconds;
            if (totalMilliseconds < -1 || totalMilliseconds > int.MaxValue)
            {
                throw new System.ArgumentOutOfRangeException(
                    nameof(timeout), timeout, SR.SpinLock_TryEnter_ArgumentOutOfRange);
            }

            // Call reliable enter with the int-based timeout milliseconds
            TryEnter((int)timeout.TotalMilliseconds, ref lockTaken);
        }

        /// <summary>
        /// Attempts to acquire the lock in a reliable manner, such that even if an exception occurs within
        /// the method call, <paramref name="lockTaken"/> can be examined reliably to determine whether the
        /// lock was acquired.
        /// </summary>
        /// <remarks>
        /// Unlike <see cref="Enter"/>, TryEnter will not block indefinitely waiting for the lock to be
        /// available. It will block until either the lock is available or until the <paramref
        /// name="millisecondsTimeout"/> has expired.
        /// </remarks>
        /// <param name="millisecondsTimeout">The number of milliseconds to wait, or <see
        /// cref="System.Threading.Timeout.Infinite"/> (-1) to wait indefinitely.</param>
        /// <param name="lockTaken">True if the lock is acquired; otherwise, false. <paramref
        /// name="lockTaken"/> must be initialized to false prior to calling this method.</param>
        /// <exception cref="T:System.Threading.LockRecursionException">
        /// Thread ownership tracking is enabled, and the current thread has already acquired this lock.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// The <paramref name="lockTaken"/> argument must be initialized to false prior to calling TryEnter.
        /// </exception>
        /// <exception cref="T:System.ArgumentOutOfRangeException"><paramref name="millisecondsTimeout"/> is
        /// a negative number other than -1, which represents an infinite time-out.</exception>
        public void TryEnter(int millisecondsTimeout, ref bool lockTaken)
        {
            int observedOwner = m_owner;
            if (millisecondsTimeout < -1 || //invalid parameter
                lockTaken || //invalid parameter
                (observedOwner & ID_DISABLED_AND_ANONYMOUS_OWNED) != LOCK_ID_DISABLE_MASK ||  //thread tracking is enabled or the lock is already acquired
                CompareExchange(ref m_owner, observedOwner | LOCK_ANONYMOUS_OWNED, observedOwner, ref lockTaken) != observedOwner) // acquiring the lock failed
                ContinueTryEnter(millisecondsTimeout, ref lockTaken); // Then call the slow path
        }

        /// <summary>
        /// Try to acquire the lock via the long path; this is usually called after the fast path in Enter and
        /// TryEnter failed. The reason for the short fast path is to make it inlinable by the runtime, which improves
        /// performance. This method assumes that the parameters were validated in the Enter or TryEnter method.
        /// </summary>
        /// <param name="millisecondsTimeout">The timeout milliseconds</param>
        /// <param name="lockTaken">The lockTaken param</param>
        private void ContinueTryEnter(int millisecondsTimeout, ref bool lockTaken)
        {
            // The fast path doesn't throw any exception, so we have to validate the parameters here
            if (lockTaken)
            {
                lockTaken = false;
                throw new System.ArgumentException(SR.SpinLock_TryReliableEnter_ArgumentException);
            }

            if (millisecondsTimeout < -1)
            {
                throw new ArgumentOutOfRangeException(
                    nameof(millisecondsTimeout), millisecondsTimeout, SR.SpinLock_TryEnter_ArgumentOutOfRange);
            }

            uint startTime = 0;
            if (millisecondsTimeout != Timeout.Infinite && millisecondsTimeout != 0)
            {
                startTime = TimeoutHelper.GetTime();
            }

            if (IsThreadOwnerTrackingEnabled)
            {
                // Slow path for enabled thread tracking mode
                ContinueTryEnterWithThreadTracking(millisecondsTimeout, startTime, ref lockTaken);
                return;
            }

            // then thread tracking is disabled
            // In this case there are three ways to acquire the lock
            // 1- the first way the thread either tries to get the lock if it's free or updates the waiters; if the turn >= the processors count then go to 3, else go to 2
            // 2- In this step the waiter thread spins and tries to acquire the lock; the number of spin iterations and spin count depends on the thread turn:
            //    the later the thread arrives the more it spins and the less frequently it checks the lock availability
            //    Also the spin count increases each iteration
            //    If the spin iterations finished and it failed to acquire the lock, go to step 3
            // 3- This is the yielding step; there are two ways of yielding: Thread.Yield and Sleep(1)
            // If the timeout expired after step 1, we need to decrement the waiters count before returning

            int observedOwner;
            int turn = int.MaxValue;
            //***Step 1, take the lock or update the waiters

            // try to acquire the lock directly if possible or update the waiters count
            observedOwner = m_owner;
            if ((observedOwner & LOCK_ANONYMOUS_OWNED) == LOCK_UNOWNED)
            {
                if (CompareExchange(ref m_owner, observedOwner | 1, observedOwner, ref lockTaken) == observedOwner)
                {
                    // Acquired lock
                    return;
                }

                if (millisecondsTimeout == 0)
                {
                    // Did not acquire lock in CompareExchange and timeout is 0 so fail fast
                    return;
                }
            }
            else if (millisecondsTimeout == 0)
            {
                // Did not acquire lock as owned and timeout is 0 so fail fast
                return;
            }
            else //failed to acquire the lock, then try to update the waiters. If the waiters count reached the maximum, just break the loop to avoid overflow
            {
                if ((observedOwner & WAITERS_MASK) != MAXIMUM_WAITERS)
                {
                    // This can still overflow, but maybe there will never be that many waiters
                    turn = (Interlocked.Add(ref m_owner, 2) & WAITERS_MASK) >> 1;
                }
            }

            // lock acquisition failed and waiters updated

            //*** Step 2, Spinning and Yielding
            var spinner = new SpinWait();
            if (turn > PlatformHelper.ProcessorCount)
            {
                spinner.Count = SpinWait.YieldThreshold;
            }
            while (true)
            {
                spinner.SpinOnce(SLEEP_ONE_FREQUENCY);

                observedOwner = m_owner;
                if ((observedOwner & LOCK_ANONYMOUS_OWNED) == LOCK_UNOWNED)
                {
                    int newOwner = (observedOwner & WAITERS_MASK) == 0 ? // Gets the number of waiters, if zero
                        observedOwner | 1 // don't decrement it, just set the lock bit; it is zero because a previous call of Exit(false) corrupted the waiters
                        : (observedOwner - 2) | 1; // otherwise decrement the waiters and set the lock bit
                    Debug.Assert((newOwner & WAITERS_MASK) >= 0);

                    if (CompareExchange(ref m_owner, newOwner, observedOwner, ref lockTaken) == observedOwner)
                    {
                        return;
                    }
                }

                if (spinner.Count % TIMEOUT_CHECK_FREQUENCY == 0)
                {
                    //Check the timeout.
                    if (millisecondsTimeout != Timeout.Infinite && TimeoutHelper.UpdateTimeOut(startTime, millisecondsTimeout) <= 0)
                    {
                        DecrementWaiters();
                        return;
                    }
                }
            }
        }

        /// <summary>
        /// Decrements the waiters, in case the timeout has expired.
        /// </summary>
        private void DecrementWaiters()
        {
            SpinWait spinner = new SpinWait();
            while (true)
            {
                int observedOwner = m_owner;
                if ((observedOwner & WAITERS_MASK) == 0)
                    return; // don't decrement the waiters if it's corrupted by previous call of Exit(false)
                if (Interlocked.CompareExchange(ref m_owner, observedOwner - 2, observedOwner) == observedOwner)
                {
                    // Make sure the waiters count never goes negative, which would cause the thread tracking bit to be flipped
                    Debug.Assert(!IsThreadOwnerTrackingEnabled);
                    break;
                }
                spinner.SpinOnce();
            }
        }

        /// <summary>
        /// ContinueTryEnter for the thread tracking mode enabled
        /// </summary>
        private void ContinueTryEnterWithThreadTracking(int millisecondsTimeout, uint startTime, ref bool lockTaken)
        {
            Debug.Assert(IsThreadOwnerTrackingEnabled);

            int lockUnowned = 0;
            // We are using thread IDs to mark ownership. Snap the thread ID and check for recursion.
            // We also must or the ID enablement bit, to ensure we propagate when we CAS it in.
            int m_newOwner = Environment.CurrentManagedThreadId;
            if (m_owner == m_newOwner)
            {
                // We don't allow lock recursion.
                throw new LockRecursionException(SR.SpinLock_TryEnter_LockRecursionException);
            }

            SpinWait spinner = new SpinWait();

            // Loop until the lock has been successfully acquired or, if specified, the timeout expires.
            do
            {
                // We failed to get the lock, either from the fast route or the last iteration
                // and the timeout hasn't expired; spin once and try again.
                spinner.SpinOnce();

                // Test before trying to CAS, to avoid acquiring the line exclusively unnecessarily.
                if (m_owner == lockUnowned)
                {
                    if (CompareExchange(ref m_owner, m_newOwner, lockUnowned, ref lockTaken) == lockUnowned)
                    {
                        return;
                    }
                }
                // Check the timeout. We only RDTSC if the next spin will yield, to amortize the cost.
                if (millisecondsTimeout == 0 ||
                    (millisecondsTimeout != Timeout.Infinite && spinner.NextSpinWillYield &&
                    TimeoutHelper.UpdateTimeOut(startTime, millisecondsTimeout) <= 0))
                {
                    return;
                }
            } while (true);
        }

        /// <summary>
        /// Releases the lock.
        /// </summary>
        /// <remarks>
        /// The default overload of <see cref="Exit()"/> provides the same behavior as if calling <see
        /// cref="Exit(Boolean)"/> using true as the argument, but Exit() could be slightly faster than Exit(true).
        /// </remarks>
        /// <exception cref="SynchronizationLockException">
        /// Thread ownership tracking is enabled, and the current thread is not the owner of this lock.
        /// </exception>
        public void Exit()
        {
            //This is the fast path when thread tracking is disabled; otherwise go to the slow path
            if ((m_owner & LOCK_ID_DISABLE_MASK) == 0)
                ExitSlowPath(true);
            else
                Interlocked.Decrement(ref m_owner);
        }

        /// <summary>
        /// Releases the lock.
        /// </summary>
        /// <param name="useMemoryBarrier">
        /// A Boolean value that indicates whether a memory fence should be issued in order to immediately
        /// publish the exit operation to other threads.
        /// </param>
        /// <remarks>
        /// Calling <see cref="Exit(Boolean)"/> with the <paramref name="useMemoryBarrier"/> argument set to
        /// true will improve the fairness of the lock at the expense of some performance. The default <see
        /// cref="Enter"/>
        /// overload behaves as if specifying true for <paramref name="useMemoryBarrier"/>.
        /// </remarks>
        /// <exception cref="SynchronizationLockException">
        /// Thread ownership tracking is enabled, and the current thread is not the owner of this lock.
        /// </exception>
        public void Exit(bool useMemoryBarrier)
        {
            // This is the fast path when thread tracking is disabled and no memory barrier is requested; otherwise go to the slow path
            // The reason not to add an else statement if useMemoryBarrier is true is that it would add more branching in the code and would prevent
            // method inlining, so this is optimized for useMemoryBarrier=false and the Exit() overload is optimized for useMemoryBarrier=true.
            // NOTE: non-short-circuiting '&' is deliberate to keep this path branch-free.
            int tmpOwner = m_owner;
            if ((tmpOwner & LOCK_ID_DISABLE_MASK) != 0 & !useMemoryBarrier)
            {
                m_owner = tmpOwner & (~LOCK_ANONYMOUS_OWNED);
            }
            else
                ExitSlowPath(useMemoryBarrier);
        }

        /// <summary>
        /// The slow path for the exit method if the fast path failed
        /// </summary>
        /// <param name="useMemoryBarrier">
        /// A Boolean value that indicates whether a memory fence should be issued in order to immediately
        /// publish the exit operation to other threads
        /// </param>
        private void ExitSlowPath(bool useMemoryBarrier)
        {
            bool threadTrackingEnabled = (m_owner & LOCK_ID_DISABLE_MASK) == 0;
            if (threadTrackingEnabled && !IsHeldByCurrentThread)
            {
                throw new System.Threading.SynchronizationLockException(
                    SR.SpinLock_Exit_SynchronizationLockException);
            }

            if (useMemoryBarrier)
            {
                if (threadTrackingEnabled)
                    Interlocked.Exchange(ref m_owner, LOCK_UNOWNED);
                else
                    Interlocked.Decrement(ref m_owner);
            }
            else
            {
                if (threadTrackingEnabled)
                    m_owner = LOCK_UNOWNED;
                else
                {
                    int tmpOwner = m_owner;
                    m_owner = tmpOwner & (~LOCK_ANONYMOUS_OWNED);
                }
            }
        }

        /// <summary>
        /// Gets whether the lock is currently held by any thread.
        /// </summary>
        public bool IsHeld
        {
            get
            {
                if (IsThreadOwnerTrackingEnabled)
                    return m_owner != LOCK_UNOWNED;

                return (m_owner & LOCK_ANONYMOUS_OWNED) != LOCK_UNOWNED;
            }
        }

        /// <summary>
        /// Gets whether the lock is held by the current thread.
        /// </summary>
        /// <remarks>
        /// If the lock was initialized to track owner threads, this will return whether the lock is acquired
        /// by the current thread. It is invalid to use this property when the lock was initialized to not
        /// track thread ownership.
        /// </remarks>
        /// <exception cref="T:System.InvalidOperationException">
        /// Thread ownership tracking is disabled.
        /// </exception>
        public bool IsHeldByCurrentThread
        {
            get
            {
                if (!IsThreadOwnerTrackingEnabled)
                {
                    throw new InvalidOperationException(SR.SpinLock_IsHeldByCurrentThread);
                }
                return ((m_owner & (~LOCK_ID_DISABLE_MASK)) == Environment.CurrentManagedThreadId);
            }
        }

        /// <summary>Gets whether thread ownership tracking is enabled for this instance.</summary>
        public bool IsThreadOwnerTrackingEnabled
        {
            get
            {
                return (m_owner & LOCK_ID_DISABLE_MASK) == 0;
            }
        }

        #region Debugger proxy class
        /// <summary>
        /// Internal class used by debug type proxy attribute to display the owner thread ID
        /// </summary>
        internal class SystemThreading_SpinLockDebugView
        {
            // SpinLock object
            private SpinLock m_spinLock;

            /// <summary>
            /// SystemThreading_SpinLockDebugView constructor
            /// </summary>
            /// <param name="spinLock">The SpinLock to be proxied.</param>
            public SystemThreading_SpinLockDebugView(SpinLock spinLock)
            {
                // Note that this makes a copy of the SpinLock (struct). It doesn't hold a reference to it.
                m_spinLock = spinLock;
            }

            /// <summary>
            /// Checks if the lock is held by the current thread or not
            /// </summary>
            public bool? IsHeldByCurrentThread
            {
                get
                {
                    try
                    {
                        return m_spinLock.IsHeldByCurrentThread;
                    }
                    catch (InvalidOperationException)
                    {
                        // Tracking disabled: ownership by the current thread cannot be determined.
                        return null;
                    }
                }
            }

            /// <summary>
            /// Gets the current owner thread, zero if it is released
            /// </summary>
            public int? OwnerThreadID
            {
                get
                {
                    if (m_spinLock.IsThreadOwnerTrackingEnabled)
                    {
                        return m_spinLock.m_owner;
                    }
                    else
                    {
                        return null;
                    }
                }
            }

            /// <summary>
            /// Gets whether the lock is currently held by any thread or not.
            /// </summary>
            public bool IsHeld
            {
                get
                {
                    return m_spinLock.IsHeld;
                }
            }
        }
        #endregion
    }
}
#pragma warning restore 0420
using System;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;

namespace WeifenLuo.WinFormsUI.Docking
{
    /// <summary>
    /// Visual Studio 2012 Light theme.
    /// </summary>
    public class VS2012LightTheme : ThemeBase
    {
        /// <summary>
        /// Applies the specified theme to the dock panel by installing the VS2012-light
        /// factories for captions, strips, splitters, windows, indicators and outlines,
        /// then assigning the matching color skin.
        /// </summary>
        /// <param name="dockPanel">The dock panel.</param>
        /// <exception cref="ArgumentNullException"><paramref name="dockPanel"/> is null.</exception>
        public override void Apply(DockPanel dockPanel)
        {
            if (dockPanel == null)
            {
                // FIX: was "throw new NullReferenceException("dockPanel")" — NullReferenceException is a
                // reserved runtime exception (CA2201); a null argument must raise ArgumentNullException.
                throw new ArgumentNullException(nameof(dockPanel));
            }

            Measures.SplitterSize = 6;
            dockPanel.Extender.DockPaneCaptionFactory = new VS2012LightDockPaneCaptionFactory();
            dockPanel.Extender.AutoHideStripFactory = new VS2012LightAutoHideStripFactory();
            dockPanel.Extender.AutoHideWindowFactory = new VS2012LightAutoHideWindowFactory();
            dockPanel.Extender.DockPaneStripFactory = new VS2012LightDockPaneStripFactory();
            dockPanel.Extender.DockPaneSplitterControlFactory = new VS2012LightDockPaneSplitterControlFactory();
            dockPanel.Extender.DockWindowSplitterControlFactory = new VS2012LightDockWindowSplitterControlFactory();
            dockPanel.Extender.DockWindowFactory = new VS2012LightDockWindowFactory();
            dockPanel.Extender.PaneIndicatorFactory = new VS2012LightPaneIndicatorFactory();
            dockPanel.Extender.PanelIndicatorFactory = new VS2012LightPanelIndicatorFactory();
            dockPanel.Extender.DockOutlineFactory = new VS2012LightDockOutlineFactory();
            dockPanel.Skin = CreateVisualStudio2012Light();
        }

        /// <summary>Factory producing the semi-transparent drag outline used while docking.</summary>
        private class VS2012LightDockOutlineFactory : DockPanelExtender.IDockOutlineFactory
        {
            public DockOutlineBase CreateDockOutline()
            {
                return new VS2012LightDockOutline();
            }

            /// <summary>
            /// Drag outline implemented as a half-transparent blue <see cref="DragForm"/> whose
            /// bounds/region are recalculated from the current drop target.
            /// </summary>
            private class VS2012LightDockOutline : DockOutlineBase
            {
                public VS2012LightDockOutline()
                {
                    m_dragForm = new DragForm();
                    SetDragForm(Rectangle.Empty);
                    DragForm.BackColor = Color.FromArgb(0xff, 91, 173, 255);
                    DragForm.Opacity = 0.5;
                    DragForm.Show(false);
                }

                DragForm m_dragForm;
                private DragForm DragForm
                {
                    get { return m_dragForm; }
                }

                protected override void OnShow()
                {
                    CalculateRegion();
                }

                protected override void OnClose()
                {
                    DragForm.Close();
                }

                // Dispatches to the SetOutline overload matching the current drop target kind.
                private void CalculateRegion()
                {
                    if (SameAsOldValue)
                        return;

                    if (!FloatWindowBounds.IsEmpty)
                        SetOutline(FloatWindowBounds);
                    else if (DockTo is DockPanel)
                        SetOutline(DockTo as DockPanel, Dock, (ContentIndex != 0));
                    else if (DockTo is DockPane)
                        SetOutline(DockTo as DockPane, Dock, ContentIndex);
                    else
                        SetOutline();
                }

                // No valid target: hide the outline.
                private void SetOutline()
                {
                    SetDragForm(Rectangle.Empty);
                }

                // Floating target: outline is the future float-window bounds.
                private void SetOutline(Rectangle floatWindowBounds)
                {
                    SetDragForm(floatWindowBounds);
                }

                // Panel-edge target: compute the strip of the panel the window would occupy.
                private void SetOutline(DockPanel dockPanel, DockStyle dock, bool fullPanelEdge)
                {
                    Rectangle rect = fullPanelEdge ? dockPanel.DockArea : dockPanel.DocumentWindowBounds;
                    rect.Location = dockPanel.PointToScreen(rect.Location);
                    if (dock == DockStyle.Top)
                    {
                        int height = dockPanel.GetDockWindowSize(DockState.DockTop);
                        rect = new Rectangle(rect.X, rect.Y, rect.Width, height);
                    }
                    else if (dock == DockStyle.Bottom)
                    {
                        int height = dockPanel.GetDockWindowSize(DockState.DockBottom);
                        rect = new Rectangle(rect.X, rect.Bottom - height, rect.Width, height);
                    }
                    else if (dock == DockStyle.Left)
                    {
                        int width = dockPanel.GetDockWindowSize(DockState.DockLeft);
                        rect = new Rectangle(rect.X, rect.Y, width, rect.Height);
                    }
                    else if (dock == DockStyle.Right)
                    {
                        int width = dockPanel.GetDockWindowSize(DockState.DockRight);
                        rect = new Rectangle(rect.Right - width, rect.Y, width, rect.Height);
                    }
                    else if (dock == DockStyle.Fill)
                    {
                        rect = dockPanel.DocumentWindowBounds;
                        rect.Location = dockPanel.PointToScreen(rect.Location);
                    }

                    SetDragForm(rect);
                }

                // Pane target: half-pane for edge docks, whole pane for Fill, or a tab
                // outline when dropping onto a specific tab index.
                private void SetOutline(DockPane pane, DockStyle dock, int contentIndex)
                {
                    if (dock != DockStyle.Fill)
                    {
                        Rectangle rect = pane.DisplayingRectangle;
                        if (dock == DockStyle.Right)
                            rect.X += rect.Width / 2;
                        if (dock == DockStyle.Bottom)
                            rect.Y += rect.Height / 2;
                        if (dock == DockStyle.Left || dock == DockStyle.Right)
                            rect.Width -= rect.Width / 2;
                        if (dock == DockStyle.Top || dock == DockStyle.Bottom)
                            rect.Height -= rect.Height / 2;
                        rect.Location = pane.PointToScreen(rect.Location);

                        SetDragForm(rect);
                    }
                    else if (contentIndex == -1)
                    {
                        Rectangle rect = pane.DisplayingRectangle;
                        rect.Location = pane.PointToScreen(rect.Location);
                        SetDragForm(rect);
                    }
                    else
                    {
                        using (GraphicsPath path = pane.TabStripControl.GetOutline(contentIndex))
                        {
                            RectangleF rectF = path.GetBounds();
                            Rectangle rect = new Rectangle((int)rectF.X, (int)rectF.Y, (int)rectF.Width, (int)rectF.Height);
                            using (Matrix matrix = new Matrix(rect, new Point[] { new Point(0, 0), new Point(rect.Width, 0), new Point(0, rect.Height) }))
                            {
                                // Translate the path from screen space into the drag form's client space.
                                path.Transform(matrix);
                            }
                            Region region = new Region(path);
                            SetDragForm(rect, region);
                        }
                    }
                }

                private void SetDragForm(Rectangle rect)
                {
                    DragForm.Bounds = rect;
                    if (rect == Rectangle.Empty)
                        DragForm.Region = new Region(Rectangle.Empty);
                    else if (DragForm.Region != null)
                        DragForm.Region = null;
                }

                private void SetDragForm(Rectangle rect, Region region)
                {
                    DragForm.Bounds = rect;
                    DragForm.Region = region;
                }
            }
        }

        /// <summary>Factory producing the edge (left/right/top/bottom/fill) docking indicators.</summary>
        private class VS2012LightPanelIndicatorFactory : DockPanelExtender.IPanelIndicatorFactory
        {
            public DockPanel.IPanelIndicator CreatePanelIndicator(DockStyle style)
            {
                return new VS2012LightPanelIndicator(style);
            }

            /// <summary>
            /// A picture-box indicator for one panel edge; swaps between an active and an
            /// inactive image as the mouse enters/leaves it.
            /// </summary>
            private class VS2012LightPanelIndicator : PictureBox, DockPanel.IPanelIndicator
            {
                private static Image _imagePanelLeft = Resources.DockIndicator_PanelLeft_VS2012;
                private static Image _imagePanelRight = Resources.DockIndicator_PanelRight_VS2012;
                private static Image _imagePanelTop = Resources.DockIndicator_PanelTop_VS2012;
                private static Image _imagePanelBottom = Resources.DockIndicator_PanelBottom_VS2012;
                private static Image _imagePanelFill = Resources.DockIndicator_PanelFill_VS2012;
                // NOTE: the VS2012 art has no distinct "active" bitmaps; active == inactive by design.
                private static Image _imagePanelLeftActive = Resources.DockIndicator_PanelLeft_VS2012;
                private static Image _imagePanelRightActive = Resources.DockIndicator_PanelRight_VS2012;
                private static Image _imagePanelTopActive = Resources.DockIndicator_PanelTop_VS2012;
                private static Image _imagePanelBottomActive = Resources.DockIndicator_PanelBottom_VS2012;
                private static Image _imagePanelFillActive = Resources.DockIndicator_PanelFill_VS2012;

                public VS2012LightPanelIndicator(DockStyle dockStyle)
                {
                    m_dockStyle = dockStyle;
                    SizeMode = PictureBoxSizeMode.AutoSize;
                    Image = ImageInactive;
                }

                private DockStyle m_dockStyle;
                private DockStyle DockStyle
                {
                    get { return m_dockStyle; }
                }

                private DockStyle m_status;
                public DockStyle Status
                {
                    get { return m_status; }
                    set
                    {
                        // Only this indicator's own style or None are valid states.
                        if (value != DockStyle && value != DockStyle.None)
                            throw new InvalidEnumArgumentException();

                        if (m_status == value)
                            return;

                        m_status = value;
                        IsActivated = (m_status != DockStyle.None);
                    }
                }

                private Image ImageInactive
                {
                    get
                    {
                        if (DockStyle == DockStyle.Left)
                            return _imagePanelLeft;
                        else if (DockStyle == DockStyle.Right)
                            return _imagePanelRight;
                        else if (DockStyle == DockStyle.Top)
                            return _imagePanelTop;
                        else if (DockStyle == DockStyle.Bottom)
                            return _imagePanelBottom;
                        else if (DockStyle == DockStyle.Fill)
                            return _imagePanelFill;
                        else
                            return null;
                    }
                }

                private Image ImageActive
                {
                    get
                    {
                        if (DockStyle == DockStyle.Left)
                            return _imagePanelLeftActive;
                        else if (DockStyle == DockStyle.Right)
                            return _imagePanelRightActive;
                        else if (DockStyle == DockStyle.Top)
                            return _imagePanelTopActive;
                        else if (DockStyle == DockStyle.Bottom)
                            return _imagePanelBottomActive;
                        else if (DockStyle == DockStyle.Fill)
                            return _imagePanelFillActive;
                        else
                            return null;
                    }
                }

                private bool m_isActivated = false;
                private bool IsActivated
                {
                    get { return m_isActivated; }
                    set
                    {
                        m_isActivated = value;
                        Image = IsActivated ? ImageActive : ImageInactive;
                    }
                }

                /// <summary>Returns this indicator's dock style when the screen point is over it.</summary>
                public DockStyle HitTest(Point pt)
                {
                    return this.Visible && ClientRectangle.Contains(PointToClient(pt)) ? DockStyle : DockStyle.None;
                }
            }
        }

        /// <summary>Factory producing the central diamond pane indicator.</summary>
        private class VS2012LightPaneIndicatorFactory : DockPanelExtender.IPaneIndicatorFactory
        {
            public DockPanel.IPaneIndicator CreatePaneIndicator()
            {
                return new VS2012LightPaneIndicator();
            }

            /// <summary>
            /// The five-way diamond indicator shown over a pane. Hit testing compares the
            /// pixel under the cursor in the hotspot bitmap against per-direction index pixels.
            /// </summary>
            private class VS2012LightPaneIndicator : PictureBox, DockPanel.IPaneIndicator
            {
                private static Bitmap _bitmapPaneDiamond = Resources.Dockindicator_PaneDiamond_VS2012;
                private static Bitmap _bitmapPaneDiamondLeft = Resources.Dockindicator_PaneDiamond_Fill_VS2012;
                private static Bitmap _bitmapPaneDiamondRight = Resources.Dockindicator_PaneDiamond_Fill_VS2012;
                private static Bitmap _bitmapPaneDiamondTop = Resources.Dockindicator_PaneDiamond_Fill_VS2012;
                private static Bitmap _bitmapPaneDiamondBottom = Resources.Dockindicator_PaneDiamond_Fill_VS2012;
                private static Bitmap _bitmapPaneDiamondFill = Resources.Dockindicator_PaneDiamond_Fill_VS2012;
                private static Bitmap _bitmapPaneDiamondHotSpot = Resources.Dockindicator_PaneDiamond_Hotspot_VS2012;
                private static Bitmap _bitmapPaneDiamondHotSpotIndex = Resources.DockIndicator_PaneDiamond_HotspotIndex_VS2012;

                // Grid coordinates in the hotspot-index bitmap for each docking direction.
                private static DockPanel.HotSpotIndex[] _hotSpots = new[]
                {
                    new DockPanel.HotSpotIndex(1, 0, DockStyle.Top),
                    new DockPanel.HotSpotIndex(0, 1, DockStyle.Left),
                    new DockPanel.HotSpotIndex(1, 1, DockStyle.Fill),
                    new DockPanel.HotSpotIndex(2, 1, DockStyle.Right),
                    new DockPanel.HotSpotIndex(1, 2, DockStyle.Bottom)
                };

                private GraphicsPath _displayingGraphicsPath = DrawHelper.CalculateGraphicsPathFromBitmap(_bitmapPaneDiamond);

                public VS2012LightPaneIndicator()
                {
                    SizeMode = PictureBoxSizeMode.AutoSize;
                    Image = _bitmapPaneDiamond;
                    // Clip the control to the diamond's non-transparent outline.
                    Region = new Region(DisplayingGraphicsPath);
                }

                public GraphicsPath DisplayingGraphicsPath
                {
                    get { return _displayingGraphicsPath; }
                }

                /// <summary>Maps a screen point to the docking direction of the hotspot under it.</summary>
                public DockStyle HitTest(Point pt)
                {
                    if (!Visible)
                        return DockStyle.None;

                    pt = PointToClient(pt);
                    if (!ClientRectangle.Contains(pt))
                        return DockStyle.None;

                    for (int i = _hotSpots.GetLowerBound(0); i <= _hotSpots.GetUpperBound(0); i++)
                    {
                        if (_bitmapPaneDiamondHotSpot.GetPixel(pt.X, pt.Y) == _bitmapPaneDiamondHotSpotIndex.GetPixel(_hotSpots[i].X, _hotSpots[i].Y))
                            return _hotSpots[i].DockStyle;
                    }

                    return DockStyle.None;
                }

                private DockStyle m_status = DockStyle.None;
                public DockStyle Status
                {
                    get { return m_status; }
                    set
                    {
                        m_status = value;
                        if (m_status == DockStyle.None)
                            Image = _bitmapPaneDiamond;
                        else if (m_status == DockStyle.Left)
                            Image = _bitmapPaneDiamondLeft;
                        else if (m_status == DockStyle.Right)
                            Image = _bitmapPaneDiamondRight;
                        else if (m_status == DockStyle.Top)
                            Image = _bitmapPaneDiamondTop;
                        else if (m_status == DockStyle.Bottom)
                            Image = _bitmapPaneDiamondBottom;
                        else if (m_status == DockStyle.Fill)
                            Image = _bitmapPaneDiamondFill;
                    }
                }
            }
        }

        /// <summary>Factory producing the auto-hide window control.</summary>
        private class VS2012LightAutoHideWindowFactory : DockPanelExtender.IAutoHideWindowFactory
        {
            public DockPanel.AutoHideWindowControl CreateAutoHideWindow(DockPanel panel)
            {
                return new VS2012LightAutoHideWindowControl(panel);
            }
        }

        /// <summary>Factory producing the splitter between dock panes.</summary>
        private class VS2012LightDockPaneSplitterControlFactory : DockPanelExtender.IDockPaneSplitterControlFactory
        {
            public DockPane.SplitterControlBase CreateSplitterControl(DockPane pane)
            {
                return new VS2012LightSplitterControl(pane);
            }
        }

        /// <summary>Factory producing the splitter between dock windows.</summary>
        private class VS2012LightDockWindowSplitterControlFactory : DockPanelExtender.IDockWindowSplitterControlFactory
        {
            public SplitterBase CreateSplitterControl()
            {
                return new VS2012LightDockWindow.VS2012LightDockWindowSplitterControl();
            }
        }

        /// <summary>Factory producing the tab strip shown on each pane.</summary>
        private class VS2012LightDockPaneStripFactory : DockPanelExtender.IDockPaneStripFactory
        {
            public DockPaneStripBase CreateDockPaneStrip(DockPane pane)
            {
                return new VS2012LightDockPaneStrip(pane);
            }
        }

        /// <summary>Factory producing the auto-hide tab strip along the panel edges.</summary>
        private class VS2012LightAutoHideStripFactory : DockPanelExtender.IAutoHideStripFactory
        {
            public AutoHideStripBase CreateAutoHideStrip(DockPanel panel)
            {
                return new VS2012LightAutoHideStrip(panel);
            }
        }

        /// <summary>Factory producing the caption bar for tool-window panes.</summary>
        private class VS2012LightDockPaneCaptionFactory : DockPanelExtender.IDockPaneCaptionFactory
        {
            public DockPaneCaptionBase CreateDockPaneCaption(DockPane pane)
            {
                return new VS2012LightDockPaneCaption(pane);
            }
        }

        /// <summary>Factory producing dock windows themed for VS2012 light.</summary>
        private class VS2012LightDockWindowFactory : DockPanelExtender.IDockWindowFactory
        {
            public DockWindow CreateDockWindow(DockPanel dockPanel, DockState dockState)
            {
                return new VS2012LightDockWindow(dockPanel, dockState);
            }
        }

        /// <summary>
        /// Builds the VS2012-light color skin: blue accent for active captions/tabs,
        /// system-control grays for inactive surfaces.
        /// </summary>
        /// <returns>The populated <see cref="DockPanelSkin"/>.</returns>
        public static DockPanelSkin CreateVisualStudio2012Light()
        {
            var specialBlue = Color.FromArgb(0xFF, 0x00, 0x7A, 0xCC);
            var dot = Color.FromArgb(80, 170, 220);
            var activeTab = specialBlue;
            var mouseHoverTab = Color.FromArgb(0xFF, 28, 151, 234);
            var inactiveTab = SystemColors.Control;
            var lostFocusTab = Color.FromArgb(0xFF, 204, 206, 219);
            var skin = new DockPanelSkin();

            skin.AutoHideStripSkin.DockStripGradient.StartColor = specialBlue;
            skin.AutoHideStripSkin.DockStripGradient.EndColor = SystemColors.ControlLight;
            skin.AutoHideStripSkin.TabGradient.TextColor = SystemColors.ControlDarkDark;

            skin.DockPaneStripSkin.DocumentGradient.DockStripGradient.StartColor = SystemColors.Control;
            skin.DockPaneStripSkin.DocumentGradient.DockStripGradient.EndColor = SystemColors.Control;

            skin.DockPaneStripSkin.DocumentGradient.ActiveTabGradient.StartColor = activeTab;
            skin.DockPaneStripSkin.DocumentGradient.ActiveTabGradient.EndColor = lostFocusTab;
            skin.DockPaneStripSkin.DocumentGradient.ActiveTabGradient.TextColor = Color.White;

            skin.DockPaneStripSkin.DocumentGradient.InactiveTabGradient.StartColor = inactiveTab;
            skin.DockPaneStripSkin.DocumentGradient.InactiveTabGradient.EndColor = mouseHoverTab;
            skin.DockPaneStripSkin.DocumentGradient.InactiveTabGradient.TextColor = Color.Black;

            skin.DockPaneStripSkin.ToolWindowGradient.DockStripGradient.StartColor = SystemColors.Control;
            skin.DockPaneStripSkin.ToolWindowGradient.DockStripGradient.EndColor = SystemColors.Control;

            skin.DockPaneStripSkin.ToolWindowGradient.ActiveTabGradient.StartColor = SystemColors.ControlLightLight;
            skin.DockPaneStripSkin.ToolWindowGradient.ActiveTabGradient.EndColor = SystemColors.ControlLightLight;
            skin.DockPaneStripSkin.ToolWindowGradient.ActiveTabGradient.TextColor = specialBlue;

            skin.DockPaneStripSkin.ToolWindowGradient.InactiveTabGradient.StartColor = SystemColors.Control;
            skin.DockPaneStripSkin.ToolWindowGradient.InactiveTabGradient.EndColor = SystemColors.Control;
            skin.DockPaneStripSkin.ToolWindowGradient.InactiveTabGradient.TextColor = SystemColors.GrayText;

            skin.DockPaneStripSkin.ToolWindowGradient.ActiveCaptionGradient.StartColor = specialBlue;
            skin.DockPaneStripSkin.ToolWindowGradient.ActiveCaptionGradient.EndColor = dot;
            skin.DockPaneStripSkin.ToolWindowGradient.ActiveCaptionGradient.LinearGradientMode = LinearGradientMode.Vertical;
            skin.DockPaneStripSkin.ToolWindowGradient.ActiveCaptionGradient.TextColor = Color.White;

            skin.DockPaneStripSkin.ToolWindowGradient.InactiveCaptionGradient.StartColor = SystemColors.Control;
            skin.DockPaneStripSkin.ToolWindowGradient.InactiveCaptionGradient.EndColor = SystemColors.ControlDark;
            skin.DockPaneStripSkin.ToolWindowGradient.InactiveCaptionGradient.LinearGradientMode = LinearGradientMode.Vertical;
            skin.DockPaneStripSkin.ToolWindowGradient.InactiveCaptionGradient.TextColor = SystemColors.GrayText;

            return skin;
        }
    }
}
using System;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows.Forms;
using System.Xml;

namespace FeedBuilder
{
	/// <summary>
	/// Main window of FeedBuilder. Scans an output folder, lets the user select files,
	/// and generates an NAppUpdate feed XML (optionally copying the files alongside it).
	/// Can also run headless when launched with command-line arguments.
	/// </summary>
	public partial class frmMain : Form
	{
		public frmMain()
		{
			InitializeComponent();
		}

		#region " Private constants/variables"

		private const string DialogFilter = "Feed configuration files (*.config)|*.config|All files (*.*)|*.*";
		private const string DefaultFileName = "FeedBuilder.config";
		private OpenFileDialog _openDialog;

		#endregion

		private ArgumentsParser _argParser;

		#region " Properties"

		/// <summary>Path of the currently loaded .config settings file, if any.</summary>
		public string FileName { get; set; }

		/// <summary>Whether the GUI should be shown (set from command-line parsing).</summary>
		public bool ShowGui { get; set; }

		#endregion

		#region " Loading/Initialization/Lifetime"

		private void frmMain_Load(Object sender, EventArgs e)
		{
			Visible = false;
			InitializeFormSettings();

			// The first arg is the path to ourself.
			string[] args = Environment.GetCommandLineArgs();
			_argParser = new ArgumentsParser(args);
			if (!_argParser.HasArgs) {
				// Interactive run: detach from any parent console we were attached to.
				FreeConsole();
				return;
			}

			FileName = _argParser.FileName;
			if (!string.IsNullOrEmpty(FileName)) {
				if (File.Exists(FileName)) {
					FeedBuilderSettingsProvider p = new FeedBuilderSettingsProvider();
					p.LoadFrom(FileName);
					InitializeFormSettings();
				} else {
					// Named config doesn't exist: fall back to interactive mode, don't build.
					_argParser.ShowGui = true;
					_argParser.Build = false;
					UpdateTitle();
				}
			}
			if (_argParser.ShowGui) Show();
			if (_argParser.Build) Build();
			if (!_argParser.ShowGui) Close();
		}

		/// <summary>Populates all form controls from the persisted <c>Settings</c>.</summary>
		private void InitializeFormSettings()
		{
			if (string.IsNullOrEmpty(Settings.Default.OutputFolder)) {
				txtOutputFolder.Text = string.Empty;
			} else {
				// Only restore the saved folder if it still exists on disk.
				string path = GetFullDirectoryPath(Settings.Default.OutputFolder);
				txtOutputFolder.Text = Directory.Exists(path) ? Settings.Default.OutputFolder : string.Empty;
			}

			txtFeedXML.Text = string.IsNullOrEmpty(Settings.Default.FeedXML) ? string.Empty : Settings.Default.FeedXML;
			txtBaseURL.Text = string.IsNullOrEmpty(Settings.Default.BaseURL) ? string.Empty : Settings.Default.BaseURL;

			chkVersion.Checked = Settings.Default.CompareVersion;
			chkSize.Checked = Settings.Default.CompareSize;
			chkDate.Checked = Settings.Default.CompareDate;
			chkHash.Checked = Settings.Default.CompareHash;
			chkIgnoreSymbols.Checked = Settings.Default.IgnoreDebugSymbols;
			chkIgnoreVsHost.Checked = Settings.Default.IgnoreVsHosting;
			chkCopyFiles.Checked = Settings.Default.CopyFiles;
			chkCleanUp.Checked = Settings.Default.CleanUp;
			txtAddExtension.Text = Settings.Default.AddExtension;

			if (Settings.Default.IgnoreFiles == null) Settings.Default.IgnoreFiles = new StringCollection();

			ReadFiles();
			UpdateTitle();
		}

		private void UpdateTitle()
		{
			if (string.IsNullOrEmpty(FileName)) Text = "Feed Builder";
			else Text = "Feed Builder - " + FileName;
		}

		/// <summary>Writes the current control values back into the persisted <c>Settings</c>.</summary>
		private void SaveFormSettings()
		{
			if (!string.IsNullOrEmpty(txtOutputFolder.Text.Trim()) && Directory.Exists(txtOutputFolder.Text.Trim())) Settings.Default.OutputFolder = txtOutputFolder.Text.Trim();
			// ReSharper disable AssignNullToNotNullAttribute
			if (!string.IsNullOrEmpty(txtFeedXML.Text.Trim()) && Directory.Exists(Path.GetDirectoryName(txtFeedXML.Text.Trim()))) Settings.Default.FeedXML = txtFeedXML.Text.Trim();
			// ReSharper restore AssignNullToNotNullAttribute
			if (!string.IsNullOrEmpty(txtBaseURL.Text.Trim())) Settings.Default.BaseURL = txtBaseURL.Text.Trim();
			if (!string.IsNullOrEmpty(txtAddExtension.Text.Trim())) Settings.Default.AddExtension = txtAddExtension.Text.Trim();

			Settings.Default.CompareVersion = chkVersion.Checked;
			Settings.Default.CompareSize = chkSize.Checked;
			Settings.Default.CompareDate = chkDate.Checked;
			Settings.Default.CompareHash = chkHash.Checked;
			Settings.Default.IgnoreDebugSymbols = chkIgnoreSymbols.Checked;
			Settings.Default.IgnoreVsHosting = chkIgnoreVsHost.Checked;
			Settings.Default.CopyFiles = chkCopyFiles.Checked;
			Settings.Default.CleanUp = chkCleanUp.Checked;

			// Unchecked files are remembered as "ignored" for the next session.
			if (Settings.Default.IgnoreFiles == null) Settings.Default.IgnoreFiles = new StringCollection();
			Settings.Default.IgnoreFiles.Clear();
			foreach (ListViewItem thisItem in lstFiles.Items) {
				if (!thisItem.Checked) Settings.Default.IgnoreFiles.Add(thisItem.Text);
			}
		}

		private void frmMain_FormClosing(object sender, FormClosingEventArgs e)
		{
			SaveFormSettings();
			Settings.Default.Save();
		}

		#endregion

		#region " Commands Events"

		private void cmdBuild_Click(Object sender, EventArgs e)
		{
			Build();
		}

		private void btnOpenOutputs_Click(object sender, EventArgs e)
		{
			OpenOutputsFolder();
		}

		private void btnNew_Click(Object sender, EventArgs e)
		{
			Settings.Default.Reset();
			InitializeFormSettings();
		}

		private void btnOpen_Click(Object sender, EventArgs e)
		{
			// Lazily create and then reuse a single OpenFileDialog so it remembers state.
			OpenFileDialog dlg;
			if (_openDialog == null) {
				dlg = new OpenFileDialog {
					CheckFileExists = true,
					FileName = string.IsNullOrEmpty(FileName) ? DefaultFileName : FileName
				};
				_openDialog = dlg;
			} else dlg = _openDialog;
			dlg.Filter = DialogFilter;
			if (dlg.ShowDialog() != DialogResult.OK) return;

			FeedBuilderSettingsProvider p = new FeedBuilderSettingsProvider();
			p.LoadFrom(dlg.FileName);
			FileName = dlg.FileName;
			InitializeFormSettings();
		}

		private void btnSave_Click(Object sender, EventArgs e)
		{
			Save(false);
		}

		private void btnSaveAs_Click(Object sender, EventArgs e)
		{
			Save(true);
		}

		private void btnRefresh_Click(Object sender, EventArgs e)
		{
			ReadFiles();
		}

		#endregion

		#region " Options Events"

		private void cmdOutputFolder_Click(Object sender, EventArgs e)
		{
			fbdOutputFolder.SelectedPath = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
			if (fbdOutputFolder.ShowDialog(this) != DialogResult.OK) return;
			txtOutputFolder.Text = fbdOutputFolder.SelectedPath;
			ReadFiles();
		}

		private void cmdFeedXML_Click(Object sender, EventArgs e)
		{
			sfdFeedXML.InitialDirectory = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
			if (sfdFeedXML.ShowDialog(this) == DialogResult.OK) txtFeedXML.Text = sfdFeedXML.FileName;
		}

		private void chkIgnoreSymbols_CheckedChanged(object sender, EventArgs e)
		{
			ReadFiles();
		}

		private void chkCopyFiles_CheckedChanged(Object sender, EventArgs e)
		{
			// Clean-up only makes sense when files are being copied.
			chkCleanUp.Enabled = chkCopyFiles.Checked;
			if (!chkCopyFiles.Checked) chkCleanUp.Checked = false;
		}

		#endregion

		#region " Helper Methods "

		/// <summary>
		/// Generates the feed XML from the checked items in the file list, optionally
		/// copying the files to the feed's folder and cleaning up unchecked leftovers.
		/// Progress is written to the parent console (when launched from one).
		/// </summary>
		private void Build()
		{
			AttachConsole(ATTACH_PARENT_PROCESS);
			Console.WriteLine("Building NAppUpdater feed '{0}'", txtBaseURL.Text.Trim());

			if (string.IsNullOrEmpty(txtFeedXML.Text)) {
				const string msg = "The feed file location needs to be defined.\n" + "The outputs cannot be generated without this.";
				if (_argParser.ShowGui) MessageBox.Show(msg);
				Console.WriteLine(msg);
				return;
			}

			// If the target folder doesn't exist, create a path to it.
			// GetFullDirectoryPath appends a trailing backslash, so GetParent
			// yields the feed file's own directory.
			string dest = txtFeedXML.Text.Trim();
			var destDir = Directory.GetParent(GetFullDirectoryPath(Path.GetDirectoryName(dest)));
			if (!Directory.Exists(destDir.FullName)) Directory.CreateDirectory(destDir.FullName);

			XmlDocument doc = new XmlDocument();
			XmlDeclaration dec = doc.CreateXmlDeclaration("1.0", "utf-8", null);
			doc.AppendChild(dec);
			XmlElement feed = doc.CreateElement("Feed");
			if (!string.IsNullOrEmpty(txtBaseURL.Text.Trim())) feed.SetAttribute("BaseUrl", txtBaseURL.Text.Trim());
			doc.AppendChild(feed);
			XmlElement tasks = doc.CreateElement("Tasks");

			Console.WriteLine("Processing feed items");
			int itemsCopied = 0;
			int itemsCleaned = 0;
			int itemsSkipped = 0;
			int itemsFailed = 0;
			int itemsMissingConditions = 0;
			foreach (ListViewItem thisItem in lstFiles.Items) {
				string destFile = "";
				string filename = "";
				// Best-effort path combine; invalid characters leave destFile/filename
				// empty and are reported just below.
				try {
					filename = thisItem.Text;
					destFile = Path.Combine(destDir.FullName, filename);
				} catch { }
				if (destFile == "" || filename == "") {
					string msg = string.Format("The file could not be pathed:\nFolder:'{0}'\nFile:{1}", destDir.FullName, filename);
					if (_argParser.ShowGui) MessageBox.Show(msg);
					Console.WriteLine(msg);
					continue;
				}

				if (thisItem.Checked) {
					var fileInfoEx = (FileInfoEx)thisItem.Tag;
					XmlElement task = doc.CreateElement("FileUpdateTask");
					task.SetAttribute("localPath", fileInfoEx.RelativeName);
					if (!string.IsNullOrEmpty(txtAddExtension.Text)) task.SetAttribute("updateTo", fileInfoEx.RelativeName + "." + txtAddExtension.Text.Trim());

					// Generate FileUpdateTask metadata items.
					task.SetAttribute("lastModified", fileInfoEx.FileInfo.LastWriteTime.ToFileTime().ToString(CultureInfo.InvariantCulture));
					task.SetAttribute("fileSize", fileInfoEx.FileInfo.Length.ToString(CultureInfo.InvariantCulture));
					if (!string.IsNullOrEmpty(fileInfoEx.FileVersion)) task.SetAttribute("version", fileInfoEx.FileVersion);

					XmlElement conds = doc.CreateElement("Conditions");
					XmlElement cond;

					// File exists.
					cond = doc.CreateElement("FileExistsCondition");
					cond.SetAttribute("type", "or-not");
					conds.AppendChild(cond);

					// Version.
					if (chkVersion.Checked && !string.IsNullOrEmpty(fileInfoEx.FileVersion)) {
						cond = doc.CreateElement("FileVersionCondition");
						cond.SetAttribute("type", "or");
						cond.SetAttribute("what", "below");
						cond.SetAttribute("version", fileInfoEx.FileVersion);
						conds.AppendChild(cond);
					}

					// Size.
					if (chkSize.Checked) {
						cond = doc.CreateElement("FileSizeCondition");
						cond.SetAttribute("type", "or-not");
						cond.SetAttribute("what", "is");
						cond.SetAttribute("size", fileInfoEx.FileInfo.Length.ToString(CultureInfo.InvariantCulture));
						conds.AppendChild(cond);
					}

					// Date.
					if (chkDate.Checked) {
						cond = doc.CreateElement("FileDateCondition");
						cond.SetAttribute("type", "or");
						cond.SetAttribute("what", "older");
						// Local timestamp, not UTC.
						cond.SetAttribute("timestamp", fileInfoEx.FileInfo.LastWriteTime.ToFileTime().ToString(CultureInfo.InvariantCulture));
						conds.AppendChild(cond);
					}

					// Hash.
					if (chkHash.Checked) {
						cond = doc.CreateElement("FileChecksumCondition");
						cond.SetAttribute("type", "or-not");
						cond.SetAttribute("checksumType", "sha256");
						cond.SetAttribute("checksum", fileInfoEx.Hash);
						conds.AppendChild(cond);
					}

					// NOTE(review): FileExistsCondition is always appended above, so this
					// counter can never increment — confirm whether it was meant to count
					// tasks with no *comparison* conditions.
					if (conds.ChildNodes.Count == 0) itemsMissingConditions++;
					task.AppendChild(conds);
					tasks.AppendChild(task);

					if (chkCopyFiles.Checked) {
						if (CopyFile(fileInfoEx.FileInfo.FullName, destFile)) itemsCopied++;
						else itemsFailed++;
					}
				} else {
					try {
						// Short-circuit && so File.Exists is only probed when clean-up is on.
						if (chkCleanUp.Checked && File.Exists(destFile)) {
							File.Delete(destFile);
							itemsCleaned += 1;
						} else itemsSkipped += 1;
					} catch (IOException) {
						itemsFailed += 1;
					}
				}
			}
			feed.AppendChild(tasks);

			string xmlDest = Path.Combine(destDir.FullName, Path.GetFileName(dest));
			doc.Save(xmlDest);

			// Open the outputs folder if we're running from the GUI or
			// we have an explicit command line option to do so.
			if (!_argParser.HasArgs || _argParser.OpenOutputsFolder) OpenOutputsFolder();

			Console.WriteLine("Done building feed.");
			if (itemsCopied > 0) Console.WriteLine("{0,5} items copied", itemsCopied);
			if (itemsCleaned > 0) Console.WriteLine("{0,5} items cleaned", itemsCleaned);
			if (itemsSkipped > 0) Console.WriteLine("{0,5} items skipped", itemsSkipped);
			if (itemsFailed > 0) Console.WriteLine("{0,5} items failed", itemsFailed);
			if (itemsMissingConditions > 0) Console.WriteLine("{0,5} items without any conditions", itemsMissingConditions);
		}

		/// <summary>
		/// Copies <paramref name="sourceFile"/> to <paramref name="destFile"/> (plus the
		/// optional extra extension), retrying up to three times on transient I/O or
		/// access errors. Returns true on success.
		/// </summary>
		private bool CopyFile(string sourceFile, string destFile)
		{
			// If the target folder doesn't exist, create the path to it.
			var fi = new FileInfo(destFile);
			var d = Directory.GetParent(fi.FullName);
			if (!Directory.Exists(d.FullName)) CreateDirectoryPath(d.FullName);

			if (!string.IsNullOrEmpty(txtAddExtension.Text)) destFile += "." + txtAddExtension.Text.Trim();

			// Copy with delayed retry.
			int retries = 3;
			while (retries > 0) {
				try {
					if (File.Exists(destFile)) File.Delete(destFile);
					File.Copy(sourceFile, destFile);
					return true;
				} catch (IOException) {
					// Failed... let's try sleeping a bit (slow disk maybe).
					retries--;
					Thread.Sleep(200);
				} catch (UnauthorizedAccessException) {
					// Same handling as IOException.
					retries--;
					Thread.Sleep(200);
				}
			}
			return false;
		}

		/// <summary>Creates the folder/path if it doesn't exist, with delayed retry.</summary>
		private void CreateDirectoryPath(string directoryPath)
		{
			int retries = 3;
			while (retries > 0 && !Directory.Exists(directoryPath)) {
				Directory.CreateDirectory(directoryPath);
				// No sleep on the first attempt; back off on subsequent retries.
				if (retries-- < 3) Thread.Sleep(200);
			}
		}

		/// <summary>Opens the configured outputs folder in Explorer, creating it if needed.</summary>
		private void OpenOutputsFolder()
		{
			string path = txtOutputFolder.Text.Trim();
			if (string.IsNullOrEmpty(path)) {
				return;
			}
			string dir = GetFullDirectoryPath(path);
			CreateDirectoryPath(dir);
			Process process = new Process {
				StartInfo = {
					UseShellExecute = true,
					FileName = dir
				}
			};
			process.Start();
		}

		/// <summary>Maps a file extension to its index in the list view's image list.</summary>
		private int GetImageIndex(string ext)
		{
			// Lowercase so ".PDF" and ".pdf" get the same icon (Windows file systems
			// are case-insensitive).
			switch (ext.Trim('.').ToLowerInvariant()) {
				case "bmp":
					return 1;
				case "dll":
					return 2;
				case "doc":
				case "docx":
					return 3;
				case "exe":
					return 4;
				case "htm":
				case "html":
					return 5;
				case "jpg":
				case "jpeg":
					return 6;
				case "pdf":
					return 7;
				case "png":
					return 8;
				case "txt":
					return 9;
				case "wav":
				case "mp3":
					return 10;
				case "wmv":
					return 11;
				case "xls":
				case "xlsx":
					return 12;
				case "zip":
					return 13;
				default:
					return 0;
			}
		}

		/// <summary>Rescans the output folder and repopulates the file list view.</summary>
		private void ReadFiles()
		{
			string outputDir = GetFullDirectoryPath(txtOutputFolder.Text.Trim());
			if (string.IsNullOrEmpty(outputDir) || !Directory.Exists(outputDir)) {
				return;
			}
			outputDir = GetFullDirectoryPath(outputDir);

			lstFiles.BeginUpdate();
			lstFiles.Items.Clear();
			FileSystemEnumerator enumerator = new FileSystemEnumerator(outputDir, "*.*", true);
			foreach (FileInfo fi in enumerator.Matches()) {
				string filePath = fi.FullName;
				if ((IsIgnorable(filePath))) {
					continue;
				}
				FileInfoEx fileInfo = new FileInfoEx(filePath, outputDir.Length);
				ListViewItem item = new ListViewItem(fileInfo.RelativeName, GetImageIndex(fileInfo.FileInfo.Extension));
				item.SubItems.Add(fileInfo.FileVersion);
				item.SubItems.Add(fileInfo.FileInfo.Length.ToString(CultureInfo.InvariantCulture));
				item.SubItems.Add(fileInfo.FileInfo.LastWriteTime.ToString(CultureInfo.InvariantCulture));
				item.SubItems.Add(fileInfo.Hash);
				// Previously-ignored files start unchecked.
				item.Checked = (!Settings.Default.IgnoreFiles.Contains(fileInfo.FileInfo.Name));
				item.Tag = fileInfo;
				lstFiles.Items.Add(item);
			}
			lstFiles.EndUpdate();
		}

		/// <summary>
		/// Resolves a possibly-relative path against the application folder and
		/// normalizes it with a trailing backslash.
		/// </summary>
		private string GetFullDirectoryPath(string path)
		{
			string absolutePath = path;
			if (!Path.IsPathRooted(absolutePath)) {
				absolutePath = Path.Combine(Path.GetDirectoryName(Application.ExecutablePath), path);
			}
			if (!absolutePath.EndsWith("\\")) {
				absolutePath += "\\";
			}
			return Path.GetFullPath(absolutePath);
		}

		/// <summary>
		/// True when the file should be excluded from the feed: debug symbols (.pdb)
		/// or VS hosting executables, depending on the corresponding checkboxes.
		/// Comparisons are case-insensitive to match Windows file-name semantics.
		/// </summary>
		private bool IsIgnorable(string filename)
		{
			string ext = Path.GetExtension(filename);
			if (chkIgnoreSymbols.Checked && string.Equals(ext, ".pdb", StringComparison.OrdinalIgnoreCase)) return true;
			return (chkIgnoreVsHost.Checked && filename.IndexOf("vshost.exe", StringComparison.OrdinalIgnoreCase) >= 0);
		}

		/// <summary>Persists settings; prompts for a file name when forced or none is set.</summary>
		private void Save(bool forceDialog)
		{
			SaveFormSettings();
			if (forceDialog || string.IsNullOrEmpty(FileName)) {
				SaveFileDialog dlg = new SaveFileDialog {
					Filter = DialogFilter,
					FileName = DefaultFileName
				};
				DialogResult result = dlg.ShowDialog();
				if (result == DialogResult.OK) {
					FeedBuilderSettingsProvider p = new FeedBuilderSettingsProvider();
					p.SaveAs(dlg.FileName);
					FileName = dlg.FileName;
				}
			} else {
				FeedBuilderSettingsProvider p = new FeedBuilderSettingsProvider();
				p.SaveAs(FileName);
			}
			UpdateTitle();
		}

		#endregion

		private void frmMain_DragEnter(object sender, DragEventArgs e)
		{
			// Guard: GetData returns null for non-file payloads (e.g. dragged text),
			// which would otherwise throw a NullReferenceException here.
			if (!e.Data.GetDataPresent(DataFormats.FileDrop)) {
				e.Effect = DragDropEffects.None;
				return;
			}
			string[] files = (string[])e.Data.GetData(DataFormats.FileDrop);
			if (files == null || files.Length == 0) return;
			e.Effect = files[0].EndsWith(".config", StringComparison.OrdinalIgnoreCase) ? DragDropEffects.Move : DragDropEffects.None;
		}

		private void frmMain_DragDrop(object sender, DragEventArgs e)
		{
			if (!e.Data.GetDataPresent(DataFormats.FileDrop)) return;
			string[] files = (string[])e.Data.GetData(DataFormats.FileDrop);
			if (files == null || files.Length == 0) return;
			try {
				string fileName = files[0];
				FeedBuilderSettingsProvider p = new FeedBuilderSettingsProvider();
				p.LoadFrom(fileName);
				FileName = fileName;
				InitializeFormSettings();
			} catch (Exception ex) {
				MessageBox.Show("The file could not be opened: \n" + ex.Message);
			}
		}

		// -1 == ATTACH_PARENT_PROCESS: attach to the console of the launching process.
		private const int ATTACH_PARENT_PROCESS = -1;

		[DllImport("kernel32.dll")]
		private static extern bool AttachConsole(int dwProcessId);

		[DllImport("kernel32.dll")]
		private static extern bool FreeConsole();
	}
}
using Autofac;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Infrastructure;
using Miningcore.Api.Extensions;
using Miningcore.Api.Responses;
using Miningcore.Blockchain;
using Miningcore.Configuration;
using Miningcore.Extensions;
using Miningcore.Mining;
using Miningcore.Persistence.Model;
using Miningcore.Persistence.Model.Projections;
using Miningcore.Persistence.Repositories;
using Miningcore.Time;
using System.Collections.Concurrent;
using System.Data;
using System.Globalization;
using System.Net;
using Microsoft.AspNetCore.Mvc.ActionConstraints;
using NLog;

namespace Miningcore.Api.Controllers;

/// <summary>
/// Read-only REST API over pool, block, payment and per-miner statistics.
/// V2 endpoints return the same data wrapped in a paged envelope with a page count.
/// </summary>
[Route("api/pools")]
[ApiController]
public class PoolApiController : ApiControllerBase
{
    public PoolApiController(IComponentContext ctx, IActionDescriptorCollectionProvider _adcp) : base(ctx)
    {
        statsRepo = ctx.Resolve<IStatsRepository>();
        blocksRepo = ctx.Resolve<IBlockRepository>();
        minerRepo = ctx.Resolve<IMinerRepository>();
        shareRepo = ctx.Resolve<IShareRepository>();
        paymentsRepo = ctx.Resolve<IPaymentRepository>();
        clock = ctx.Resolve<IMasterClock>();
        pools = ctx.Resolve<ConcurrentDictionary<string, IMiningPool>>();
        adcp = _adcp;
    }

    private readonly IStatsRepository statsRepo;
    private readonly IBlockRepository blocksRepo;
    private readonly IPaymentRepository paymentsRepo;
    private readonly IMinerRepository minerRepo;
    private readonly IShareRepository shareRepo;
    private readonly IMasterClock clock;
    private readonly IActionDescriptorCollectionProvider adcp;
    // Live pool instances keyed by pool id; a configured pool may not be running.
    private readonly ConcurrentDictionary<string, IMiningPool> pools;

    private static readonly NLog.ILogger logger = LogManager.GetCurrentClassLogger();

    #region Actions

    /// <summary>Lists all enabled pools with their latest stats, totals and top miners.</summary>
    [HttpGet]
    public async Task<GetPoolsResponse> Get()
    {
        var response = new GetPoolsResponse
        {
            Pools = await Task.WhenAll(clusterConfig.Pools.Where(x => x.Enabled).Select(async config =>
            {
                // load stats
                var stats = await cf.Run(con => statsRepo.GetLastPoolStatsAsync(con, config.Id));

                // get pool (may be absent if the pool is configured but not running)
                pools.TryGetValue(config.Id, out var pool);

                // map
                var result = config.ToPoolInfo(mapper, stats, pool);

                // enrich
                result.TotalPaid = await cf.Run(con => statsRepo.GetTotalPoolPaymentsAsync(con, config.Id));
                result.TotalBlocks = await cf.Run(con => blocksRepo.GetPoolBlockCountAsync(con, config.Id));
                result.LastPoolBlockTime = await cf.Run(con => blocksRepo.GetLastPoolBlockTimeAsync(con, config.Id));

                // top miners by hashrate over the last 24 hours
                var from = clock.Now.AddDays(-1);

                var minersByHashrate = await cf.Run(con => statsRepo.PagePoolMinersByHashrateAsync(con, config.Id, from, 0, 15));

                result.TopMiners = minersByHashrate.Select(mapper.Map<MinerPerformanceStats>).ToArray();

                return result;
            }).ToArray())
        };

        return response;
    }

    /// <summary>Plain-text listing of all registered routes and their HTTP methods.</summary>
    [HttpGet("/api/help")]
    public ActionResult GetHelp()
    {
        var tmp = adcp.ActionDescriptors.Items
            .Select(x =>
            {
                // Get and pad http method
                // NOTE(review): x is used with ?. here but dereferenced without a
                // null check on the next line (x.AttributeRouteInfo) — verify items
                // can actually be null.
                var method = x?.ActionConstraints?.OfType<HttpMethodActionConstraint>().FirstOrDefault()?.HttpMethods.First();
                method = $"{method,-5}";

                return $"{method} -> {x.AttributeRouteInfo.Template}";
            });

        // convert curly braces
        var result = string.Join("\n", tmp).Replace("{", "<").Replace("}", ">") + "\n";

        return Content(result);
    }

    /// <summary>Returns a single pool's info enriched with totals and top miners.</summary>
    [HttpGet("{poolId}")]
    public async Task<GetPoolResponse> GetPoolInfoAsync(string poolId)
    {
        var pool = GetPool(poolId);

        // load stats
        var stats = await cf.Run(con => statsRepo.GetLastPoolStatsAsync(con, pool.Id));

        // get pool
        pools.TryGetValue(pool.Id, out var poolInstance);

        var response = new GetPoolResponse
        {
            Pool = pool.ToPoolInfo(mapper, stats, poolInstance)
        };

        // enrich
        response.Pool.TotalPaid = await cf.Run(con => statsRepo.GetTotalPoolPaymentsAsync(con, pool.Id));
        response.Pool.TotalBlocks = await cf.Run(con => blocksRepo.GetPoolBlockCountAsync(con, pool.Id));
        response.Pool.LastPoolBlockTime = await cf.Run(con => blocksRepo.GetLastPoolBlockTimeAsync(con, pool.Id));

        var from = clock.Now.AddDays(-1);

        response.Pool.TopMiners = (await cf.Run(con => statsRepo.PagePoolMinersByHashrateAsync(
                con, pool.Id, from, 0, 15)))
            .Select(mapper.Map<MinerPerformanceStats>)
            .ToArray();

        return response;
    }

    /// <summary>Aggregated pool performance samples over the requested range/interval.</summary>
    [HttpGet("{poolId}/performance")]
    public async Task<GetPoolStatsResponse> GetPoolPerformanceAsync(string poolId,
        [FromQuery(Name = "r")] SampleRange range = SampleRange.Day,
        [FromQuery(Name = "i")] SampleInterval interval = SampleInterval.Hour)
    {
        var pool = GetPool(poolId);

        // set range
        var end = clock.Now;
        DateTime start;

        switch(range)
        {
            case SampleRange.Day:
                start = end.AddDays(-1);
                break;
            case SampleRange.Month:
                start = end.AddDays(-30);
                break;
            default:
                throw new ApiException("invalid interval");
        }

        var stats = await cf.Run(con => statsRepo.GetPoolPerformanceBetweenAsync(
            con, pool.Id, interval, start, end));

        var response = new GetPoolStatsResponse
        {
            Stats = stats.Select(mapper.Map<AggregatedPoolStats>).ToArray()
        };

        return response;
    }

    /// <summary>Pages the pool's miners ordered by hashrate over the last 24 hours.</summary>
    [HttpGet("{poolId}/miners")]
    public async Task<MinerPerformanceStats[]> PagePoolMinersAsync(
        string poolId, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        // set range
        var end = clock.Now;
        var start = end.AddDays(-1);

        var miners = (await cf.Run(con => statsRepo.PagePoolMinersByHashrateAsync(
                con, pool.Id, start, page, pageSize)))
            .Select(mapper.Map<MinerPerformanceStats>)
            .ToArray();

        return miners;
    }

    /// <summary>Pages pool blocks, enriching each with an explorer info link.</summary>
    [HttpGet("{poolId}/blocks")]
    public async Task<Responses.Block[]> PagePoolBlocksAsync(
        string poolId, [FromQuery] int page, [FromQuery] int pageSize = 15, [FromQuery] BlockStatus[] state = null)
    {
        var pool = GetPool(poolId);

        // default to all interesting states when none requested
        var blockStates = state != null && state.Length > 0 ?
            state :
            new[] { BlockStatus.Confirmed, BlockStatus.Pending, BlockStatus.Orphaned };

        var blocks = (await cf.Run(con => blocksRepo.PageBlocksAsync(con, pool.Id, blockStates, page, pageSize)))
            .Select(mapper.Map<Responses.Block>)
            .ToArray();

        // enrich blocks
        var blockInfobaseDict = pool.Template.ExplorerBlockLinks;

        foreach(var block in blocks)
        {
            // compute infoLink: substitute height or hash into the explorer URL template
            if(blockInfobaseDict != null)
            {
                blockInfobaseDict.TryGetValue(!string.IsNullOrEmpty(block.Type) ? block.Type : "block", out var blockInfobaseUrl);

                if(!string.IsNullOrEmpty(blockInfobaseUrl))
                {
                    if(blockInfobaseUrl.Contains(CoinMetaData.BlockHeightPH))
                        block.InfoLink = blockInfobaseUrl.Replace(CoinMetaData.BlockHeightPH, block.BlockHeight.ToString(CultureInfo.InvariantCulture));
                    else if(blockInfobaseUrl.Contains(CoinMetaData.BlockHashPH) && !string.IsNullOrEmpty(block.Hash))
                        block.InfoLink = blockInfobaseUrl.Replace(CoinMetaData.BlockHashPH, block.Hash);
                }
            }
        }

        return blocks;
    }

    /// <summary>V2 of block paging: same data wrapped in a paged envelope with page count.</summary>
    [HttpGet("/api/v2/pools/{poolId}/blocks")]
    public async Task<PagedResultResponse<Responses.Block[]>> PagePoolBlocksV2Async(
        string poolId, [FromQuery] int page, [FromQuery] int pageSize = 15, [FromQuery] BlockStatus[] state = null)
    {
        var pool = GetPool(poolId);

        var blockStates = state != null && state.Length > 0 ?
            state :
            new[] { BlockStatus.Confirmed, BlockStatus.Pending, BlockStatus.Orphaned };

        // NOTE(review): Floor drops a trailing partial page from the count
        // (e.g. 16 items / pageSize 15 -> 1); verify Ceiling wasn't intended.
        uint pageCount = (uint) Math.Floor((await cf.Run(con => blocksRepo.GetPoolBlockCountAsync(con, poolId))) / (double) pageSize);

        var blocks = (await cf.Run(con => blocksRepo.PageBlocksAsync(con, pool.Id, blockStates, page, pageSize)))
            .Select(mapper.Map<Responses.Block>)
            .ToArray();

        // enrich blocks
        var blockInfobaseDict = pool.Template.ExplorerBlockLinks;

        foreach(var block in blocks)
        {
            // compute infoLink
            if(blockInfobaseDict != null)
            {
                blockInfobaseDict.TryGetValue(!string.IsNullOrEmpty(block.Type) ? block.Type : "block", out var blockInfobaseUrl);

                if(!string.IsNullOrEmpty(blockInfobaseUrl))
                {
                    if(blockInfobaseUrl.Contains(CoinMetaData.BlockHeightPH))
                        block.InfoLink = blockInfobaseUrl.Replace(CoinMetaData.BlockHeightPH, block.BlockHeight.ToString(CultureInfo.InvariantCulture));
                    else if(blockInfobaseUrl.Contains(CoinMetaData.BlockHashPH) && !string.IsNullOrEmpty(block.Hash))
                        block.InfoLink = blockInfobaseUrl.Replace(CoinMetaData.BlockHashPH, block.Hash);
                }
            }
        }

        var response = new PagedResultResponse<Responses.Block[]>(blocks, pageCount);
        return response;
    }

    /// <summary>Pages all pool payments, enriching each with explorer links.</summary>
    [HttpGet("{poolId}/payments")]
    public async Task<Responses.Payment[]> PagePoolPaymentsAsync(
        string poolId, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        var payments = (await cf.Run(con => paymentsRepo.PagePaymentsAsync(
                con, pool.Id, null, page, pageSize)))
            .Select(mapper.Map<Responses.Payment>)
            .ToArray();

        // enrich payments
        var txInfobaseUrl = pool.Template.ExplorerTxLink;
        var addressInfobaseUrl = pool.Template.ExplorerAccountLink;

        foreach(var payment in payments)
        {
            // compute transaction infoLink
            if(!string.IsNullOrEmpty(txInfobaseUrl))
                payment.TransactionInfoLink = string.Format(txInfobaseUrl, payment.TransactionConfirmationData);

            // pool wallet link
            if(!string.IsNullOrEmpty(addressInfobaseUrl))
                payment.AddressInfoLink = string.Format(addressInfobaseUrl, payment.Address);
        }

        return payments;
    }

    /// <summary>V2 of payment paging: paged envelope with page count.</summary>
    [HttpGet("/api/v2/pools/{poolId}/payments")]
    public async Task<PagedResultResponse<Responses.Payment[]>> PagePoolPaymentsV2Async(
        string poolId, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        // NOTE(review): same Floor-vs-Ceiling question as in PagePoolBlocksV2Async.
        uint pageCount = (uint) Math.Floor((await cf.Run(con => paymentsRepo.GetPaymentsCountAsync(con, poolId))) / (double) pageSize);

        var payments = (await cf.Run(con => paymentsRepo.PagePaymentsAsync(
                con, pool.Id, null, page, pageSize)))
            .Select(mapper.Map<Responses.Payment>)
            .ToArray();

        // enrich payments
        var txInfobaseUrl = pool.Template.ExplorerTxLink;
        var addressInfobaseUrl = pool.Template.ExplorerAccountLink;

        foreach(var payment in payments)
        {
            // compute transaction infoLink
            if(!string.IsNullOrEmpty(txInfobaseUrl))
                payment.TransactionInfoLink = string.Format(txInfobaseUrl, payment.TransactionConfirmationData);

            // pool wallet link
            if(!string.IsNullOrEmpty(addressInfobaseUrl))
                payment.AddressInfoLink = string.Format(addressInfobaseUrl, payment.Address);
        }

        var response = new PagedResultResponse<Responses.Payment[]>(payments, pageCount);
        return response;
    }

    /// <summary>Stats for a single miner, including last payment and performance samples.</summary>
    [HttpGet("{poolId}/miners/{address}")]
    public async Task<Responses.MinerStats> GetMinerInfoAsync(
        string poolId, string address, [FromQuery] SampleRange perfMode = SampleRange.Day)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        // Ethereum addresses are stored lowercase
        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        var statsResult = await cf.RunTx((con, tx) =>
            statsRepo.GetMinerStatsAsync(con, tx, pool.Id, address), true, IsolationLevel.Serializable);

        Responses.MinerStats stats = null;

        if(statsResult != null)
        {
            stats = mapper.Map<Responses.MinerStats>(statsResult);

            // optional fields
            if(statsResult.LastPayment != null)
            {
                // Set timestamp of last payment
                stats.LastPayment = statsResult.LastPayment.Created;

                // Compute info link
                var baseUrl = pool.Template.ExplorerTxLink;
                if(!string.IsNullOrEmpty(baseUrl))
                    stats.LastPaymentLink = string.Format(baseUrl, statsResult.LastPayment.TransactionConfirmationData);
            }

            stats.PerformanceSamples = await GetMinerPerformanceInternal(perfMode, pool, address);
        }

        return stats;
    }

    /// <summary>Pages payments for a single miner, enriched with explorer links.</summary>
    [HttpGet("{poolId}/miners/{address}/payments")]
    public async Task<Responses.Payment[]> PageMinerPaymentsAsync(
        string poolId, string address, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        var payments = (await cf.Run(con => paymentsRepo.PagePaymentsAsync(
                con, pool.Id, address, page, pageSize)))
            .Select(mapper.Map<Responses.Payment>)
            .ToArray();

        // enrich payments
        var txInfobaseUrl = pool.Template.ExplorerTxLink;
        var addressInfobaseUrl = pool.Template.ExplorerAccountLink;

        foreach(var payment in payments)
        {
            // compute transaction infoLink
            if(!string.IsNullOrEmpty(txInfobaseUrl))
                payment.TransactionInfoLink = string.Format(txInfobaseUrl, payment.TransactionConfirmationData);

            // pool wallet link
            if(!string.IsNullOrEmpty(addressInfobaseUrl))
                payment.AddressInfoLink = string.Format(addressInfobaseUrl, payment.Address);
        }

        return payments;
    }

    /// <summary>V2 of per-miner payment paging: paged envelope with page count.</summary>
    [HttpGet("/api/v2/pools/{poolId}/miners/{address}/payments")]
    public async Task<PagedResultResponse<Responses.Payment[]>> PageMinerPaymentsV2Async(
        string poolId, string address, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        // NOTE(review): Floor-based page count — see PagePoolBlocksV2Async.
        uint pageCount = (uint) Math.Floor((await cf.Run(con => paymentsRepo.GetPaymentsCountAsync(con, poolId, address))) / (double) pageSize);

        var payments = (await cf.Run(con => paymentsRepo.PagePaymentsAsync(
                con, pool.Id, address, page, pageSize)))
            .Select(mapper.Map<Responses.Payment>)
            .ToArray();

        // enrich payments
        var txInfobaseUrl = pool.Template.ExplorerTxLink;
        var addressInfobaseUrl = pool.Template.ExplorerAccountLink;

        foreach(var payment in payments)
        {
            // compute transaction infoLink
            if(!string.IsNullOrEmpty(txInfobaseUrl))
                payment.TransactionInfoLink = string.Format(txInfobaseUrl, payment.TransactionConfirmationData);

            // pool wallet link
            if(!string.IsNullOrEmpty(addressInfobaseUrl))
                payment.AddressInfoLink = string.Format(addressInfobaseUrl, payment.Address);
        }

        var response = new PagedResultResponse<Responses.Payment[]>(payments, pageCount);
        return response;
    }

    /// <summary>Pages balance changes (credits/debits) for a single miner.</summary>
    [HttpGet("{poolId}/miners/{address}/balancechanges")]
    public async Task<Responses.BalanceChange[]> PageMinerBalanceChangesAsync(
        string poolId, string address, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        var balanceChanges = (await cf.Run(con => paymentsRepo.PageBalanceChangesAsync(
                con, pool.Id, address, page, pageSize)))
            .Select(mapper.Map<Responses.BalanceChange>)
            .ToArray();

        return balanceChanges;
    }

    /// <summary>V2 of balance-change paging: paged envelope with page count.</summary>
    [HttpGet("/api/v2/pools/{poolId}/miners/{address}/balancechanges")]
    public async Task<PagedResultResponse<Responses.BalanceChange[]>> PageMinerBalanceChangesV2Async(
        string poolId, string address, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        // NOTE(review): Floor-based page count — see PagePoolBlocksV2Async.
        uint pageCount = (uint) Math.Floor((await cf.Run(con => paymentsRepo.GetBalanceChangesCountAsync(con, poolId, address))) / (double) pageSize);

        var balanceChanges = (await cf.Run(con => paymentsRepo.PageBalanceChangesAsync(
                con, pool.Id, address, page, pageSize)))
            .Select(mapper.Map<Responses.BalanceChange>)
            .ToArray();

        var response = new PagedResultResponse<Responses.BalanceChange[]>(balanceChanges, pageCount);
        return response;
    }

    /// <summary>Pages a miner's earnings aggregated per day.</summary>
    [HttpGet("{poolId}/miners/{address}/earnings/daily")]
    public async Task<AmountByDate[]> PageMinerEarningsByDayAsync(
        string poolId, string address, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        var earnings = (await cf.Run(con => paymentsRepo.PageMinerPaymentsByDayAsync(
                con, pool.Id, address, page, pageSize)))
            .ToArray();

        return earnings;
    }

    /// <summary>V2 of daily earnings paging: paged envelope with page count.</summary>
    [HttpGet("/api/v2/pools/{poolId}/miners/{address}/earnings/daily")]
    public async Task<PagedResultResponse<AmountByDate[]>> PageMinerEarningsByDayV2Async(
        string poolId, string address, [FromQuery] int page, [FromQuery] int pageSize = 15)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        // NOTE(review): Floor-based page count — see PagePoolBlocksV2Async.
        uint pageCount = (uint) Math.Floor((await cf.Run(con => paymentsRepo.GetMinerPaymentsByDayCountAsync(con, poolId, address))) / (double) pageSize);

        var earnings = (await cf.Run(con => paymentsRepo.PageMinerPaymentsByDayAsync(
                con, pool.Id, address, page, pageSize)))
            .ToArray();

        var response = new PagedResultResponse<AmountByDate[]>(earnings, pageCount);
        return response;
    }

    /// <summary>Performance samples for a single miner over the requested range.</summary>
    [HttpGet("{poolId}/miners/{address}/performance")]
    public async Task<Responses.WorkerPerformanceStatsContainer[]> GetMinerPerformanceAsync(
        string poolId, string address, [FromQuery] SampleRange mode = SampleRange.Day)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        if(pool.Template.Family == CoinFamily.Ethereum)
            address = address.ToLower();

        var result = await GetMinerPerformanceInternal(mode, pool, address);

        return result;
    }

    /// <summary>Returns a miner's stored settings (payout threshold etc.).</summary>
    [HttpGet("{poolId}/miners/{address}/settings")]
    public async Task<Responses.MinerSettings> GetMinerSettingsAsync(string poolId, string address)
    {
        var pool = GetPool(poolId);

        if(string.IsNullOrEmpty(address))
            throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound);

        var result = await cf.Run(con=> minerRepo.GetSettings(con, null, pool.Id, address));

        if(result == null) throw
new ApiException("No settings found", HttpStatusCode.NotFound); return mapper.Map<Responses.MinerSettings>(result); } [HttpPost("{poolId}/miners/{address}/settings")] public async Task<Responses.MinerSettings> SetMinerSettingsAsync(string poolId, string address, [FromBody] Requests.UpdateMinerSettingsRequest request) { var pool = GetPool(poolId); if(string.IsNullOrEmpty(address)) throw new ApiException("Invalid or missing miner address", HttpStatusCode.NotFound); if(request?.Settings == null) throw new ApiException("Invalid or missing settings", HttpStatusCode.BadRequest); if(!IPAddress.TryParse(request.IpAddress, out var requestIp)) throw new ApiException("Invalid IP address", HttpStatusCode.BadRequest); // fetch recent IPs var ips = await cf.Run(con=> shareRepo.GetRecentyUsedIpAddresses(con, null, poolId, address)); // any known ips? if(ips == null || ips.Length == 0) throw new ApiException("Address not recently used for mining", HttpStatusCode.NotFound); // match? if(!ips.Any(x=> IPAddress.TryParse(x, out var ipAddress) && ipAddress.IsEqual(requestIp))) throw new ApiException("None of the recently used IP addresses matches the request", HttpStatusCode.Forbidden); // map settings var mapped = mapper.Map<Persistence.Model.MinerSettings>(request.Settings); // clamp limit if(pool.PaymentProcessing != null) mapped.PaymentThreshold = Math.Max(mapped.PaymentThreshold, pool.PaymentProcessing.MinimumPayment); mapped.PoolId = pool.Id; mapped.Address = address; // finally update the settings return await cf.RunTx(async (con, tx) => { await minerRepo.UpdateSettings(con, tx, mapped); logger.Info(()=> $"Updated settings for pool {pool.Id}, miner {address}"); var result = await minerRepo.GetSettings(con, tx, mapped.PoolId, mapped.Address); return mapper.Map<Responses.MinerSettings>(result); }); } #endregion // Actions private async Task<Responses.WorkerPerformanceStatsContainer[]> GetMinerPerformanceInternal( SampleRange mode, PoolConfig pool, string address) { 
Persistence.Model.Projections.WorkerPerformanceStatsContainer[] stats = null; var end = clock.Now; DateTime start; switch(mode) { case SampleRange.Hour: end = end.AddSeconds(-end.Second); start = end.AddHours(-1); stats = await cf.Run(con => statsRepo.GetMinerPerformanceBetweenThreeMinutelyAsync(con, pool.Id, address, start, end)); break; case SampleRange.Day: // set range if(end.Minute < 30) end = end.AddHours(-1); end = end.AddMinutes(-end.Minute); end = end.AddSeconds(-end.Second); start = end.AddDays(-1); stats = await cf.Run(con => statsRepo.GetMinerPerformanceBetweenHourlyAsync(con, pool.Id, address, start, end)); break; case SampleRange.Month: if(end.Hour < 12) end = end.AddDays(-1); end = end.Date; // set range start = end.AddMonths(-1); stats = await cf.Run(con => statsRepo.GetMinerPerformanceBetweenDailyAsync(con, pool.Id, address, start, end)); break; } // map var result = mapper.Map<Responses.WorkerPerformanceStatsContainer[]>(stats); return result; } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Reflection;
#if ENABLE_CECIL
using C = Mono.Cecil;
#endif

namespace Mono.Debugger.Soft
{
    /// <summary>
    /// Client-side mirror of a method in the debuggee. All data is fetched
    /// lazily over the wire protocol and cached in fields.
    /// </summary>
    public class MethodMirror : Mirror
    {
        // lazily-populated caches; null means "not fetched yet"
        string name;
        MethodInfo info;
        TypeMirror declaring_type;
        DebugInfo debug_info;
        CustomAttributeDataMirror[] cattrs;
        ParameterInfoMirror[] param_info;
        ParameterInfoMirror ret_param;
        LocalVariable[] locals;
        LocalScope[] scopes;
        IList<Location> locations;
        MethodBodyMirror body;
        MethodMirror gmd;
        TypeMirror[] type_args;
#if ENABLE_CECIL
        C.MethodDefinition meta;
#endif

        internal MethodMirror (VirtualMachine vm, long id) : base (vm, id) {
        }

        public long GetId () {
            return id;
        }

        public string Name {
            get {
                if (name == null)
                    name = vm.conn.Method_GetName (id);
                return name;
            }
        }

        public TypeMirror DeclaringType {
            get {
                if (declaring_type == null)
                    declaring_type = vm.GetType (vm.conn.Method_GetDeclaringType (id));
                return declaring_type;
            }
        }

        public TypeMirror ReturnType {
            get {
                return ReturnParameter.ParameterType;
            }
        }

        // FIXME:
        // Builds "RetType Namespace.Type:Name (ParamType1, ParamType2, ...)"
        public string FullName {
            get {
                string type_namespace = DeclaringType.Namespace;
                string type_name = DeclaringType.Name;
                StringBuilder sb = new StringBuilder ();
                sb.Append (ReturnType.Name);
                sb.Append (' ');
                if (type_namespace != String.Empty)
                    sb.Append (type_namespace).Append (".");
                sb.Append (type_name);
                sb.Append (":");
                sb.Append (Name);
                sb.Append (" ");
                sb.Append ("(");
                var parameters = GetParameters ();
                for (var i = 0; i < parameters.Length; i++) {
                    sb.Append (parameters[i].ParameterType.Name);
                    if (i != parameters.Length - 1)
                        sb.Append (", ");
                }
                sb.Append (")");
                return sb.ToString ();
            }
        }

        /*
         * Creating the custom attributes themselves could modify the behavior of the
         * debuggee, so we return objects similar to the CustomAttributeData objects
         * used by the reflection-only functionality on .net.
         * Since protocol version 2.21
         */
        public CustomAttributeDataMirror[] GetCustomAttributes (bool inherit) {
            return GetCAttrs (null, inherit);
        }

        /* Since protocol version 2.21 */
        public CustomAttributeDataMirror[] GetCustomAttributes (TypeMirror attributeType, bool inherit) {
            if (attributeType == null)
                throw new ArgumentNullException ("attributeType");
            return GetCAttrs (attributeType, inherit);
        }

        CustomAttributeDataMirror[] GetCAttrs (TypeMirror type, bool inherit) {
#if ENABLE_CECIL
            // shortcut: metadata already tells us there are no attributes
            if (cattrs == null && meta != null && !Metadata.HasCustomAttributes)
                cattrs = new CustomAttributeDataMirror [0];
#endif

            // FIXME: Handle inherit
            if (cattrs == null) {
                CattrInfo[] info = vm.conn.Method_GetCustomAttributes (id, 0, false);
                cattrs = CustomAttributeDataMirror.Create (vm, info);
            }
            var res = new List<CustomAttributeDataMirror> ();
            foreach (var attr in cattrs)
                if (type == null || attr.Constructor.DeclaringType == type)
                    res.Add (attr);
            return res.ToArray ();
        }

        // Fetches and caches the method info structure (token, attributes, generic info)
        MethodInfo GetInfo () {
            if (info == null)
                info = vm.conn.Method_GetInfo (id);
            return info;
        }

        public int MetadataToken {
            get {
                return GetInfo ().token;
            }
        }

        public MethodAttributes Attributes {
            get {
                return (MethodAttributes) GetInfo ().attributes;
            }
        }

        public bool IsPublic {
            get {
                return (Attributes & MethodAttributes.MemberAccessMask) == MethodAttributes.Public;
            }
        }

        public bool IsPrivate {
            get {
                return (Attributes & MethodAttributes.MemberAccessMask) == MethodAttributes.Private;
            }
        }

        public bool IsFamily {
            get {
                return (Attributes & MethodAttributes.MemberAccessMask) == MethodAttributes.Family;
            }
        }

        public bool IsAssembly {
            get {
                return (Attributes & MethodAttributes.MemberAccessMask) == MethodAttributes.Assembly;
            }
        }

        public bool IsFamilyAndAssembly {
            get {
                return (Attributes & MethodAttributes.MemberAccessMask) == MethodAttributes.FamANDAssem;
            }
        }

        public bool IsFamilyOrAssembly {
            get {
                return (Attributes & MethodAttributes.MemberAccessMask) == MethodAttributes.FamORAssem;
            }
        }

        public bool IsStatic {
            get {
                return (Attributes & MethodAttributes.Static) != 0;
            }
        }

        public bool IsFinal {
            get {
                return (Attributes & MethodAttributes.Final) != 0;
            }
        }

        public bool IsVirtual {
            get {
                return (Attributes & MethodAttributes.Virtual) != 0;
            }
        }

        public bool IsHideBySig {
            get {
                return (Attributes & MethodAttributes.HideBySig) != 0;
            }
        }

        public bool IsAbstract {
            get {
                return (Attributes & MethodAttributes.Abstract) != 0;
            }
        }

        public bool IsSpecialName {
            get {
                return (Attributes & MethodAttributes.SpecialName) != 0;
            }
        }

        public bool IsConstructor {
            get {
                int attr = (int)Attributes;
                return ((attr & (int)MethodAttributes.RTSpecialName) != 0
                        && (Name == ".ctor"));
            }
        }

        // Since protocol version 2.12
        public bool IsGenericMethodDefinition {
            get {
                vm.CheckProtocolVersion (2, 12);
                return GetInfo ().is_gmd;
            }
        }

        // Since protocol version 2.12
        public bool IsGenericMethod {
            get {
                vm.CheckProtocolVersion (2, 12);
                return GetInfo ().is_generic_method;
            }
        }

        public MethodImplAttributes GetMethodImplementationFlags () {
            return (MethodImplAttributes)GetInfo ().iattributes;
        }

        public ParameterInfoMirror[] GetParameters () {
            if (param_info == null) {
                var pi = vm.conn.Method_GetParamInfo (id);
                param_info = new ParameterInfoMirror [pi.param_count];

                // Return
                ret_param = new ParameterInfoMirror (this, -1, vm.GetType (pi.ret_type), null, ParameterAttributes.Retval);

                // FIXME: this
                // FIXME: Attributes
                for (int i = 0; i < pi.param_count; ++i) {
                    param_info [i] = new ParameterInfoMirror (this, i, vm.GetType (pi.param_types [i]), pi.param_names [i], 0);
                }
            }

            return param_info;
        }

        public ParameterInfoMirror ReturnParameter {
            get {
                if (ret_param == null)
                    GetParameters ();
                return ret_param;
            }
        }

        // Drops cached locals/debug-info so they are re-fetched on next access
        public void ClearCachedLocalsDebugInfo () {
            locals = null;
            debug_info = null;
            locations = null;
        }

        public LocalScope [] GetScopes () {
            vm.CheckProtocolVersion (2, 43);
            GetLocals ();
            return scopes;
        }

        /// <summary>
        /// Returns the method's local variables; the parameters are prepended
        /// as pseudo-locals. Throws AbsentInformationException when the
        /// debuggee has no debug info for this method.
        /// </summary>
        public LocalVariable[] GetLocals () {
            if (locals == null) {
                LocalsInfo li = new LocalsInfo ();
                try {
                    li = vm.conn.Method_GetLocalsInfo (id);
                } catch (CommandException) {
                    throw new AbsentInformationException ();
                }

                // Add the arguments as well
                var pi = GetParameters ();

                locals = new LocalVariable [pi.Length + li.names.Length];

                for (int i = 0; i < pi.Length; ++i)
                    locals [i] = new LocalVariable (vm, this, i, pi[i].ParameterType.Id, pi[i].Name, -1, -1, true);

                for (int i = 0; i < li.names.Length; ++i)
                    locals [i + pi.Length] = new LocalVariable (vm, this, i, li.types [i], li.names [i], li.live_range_start [i], li.live_range_end [i], false);

                if (vm.Version.AtLeast (2, 43)) {
                    scopes = new LocalScope [li.scopes_start.Length];
                    for (int i = 0; i < scopes.Length; ++i)
                        scopes [i] = new LocalScope (vm, this, li.scopes_start [i], li.scopes_end [i]);
                }
            }
            return locals;
        }

        public LocalVariable GetLocal (string name) {
            if (name == null)
                throw new ArgumentNullException ("name");

            GetLocals ();

            LocalVariable res = null;
            for (int i = 0; i < locals.Length; ++i) {
                if (locals [i].Name == name) {
                    if (res != null)
                        throw new AmbiguousMatchException ("More that one local has the name '" + name + "'.");
                    res = locals [i];
                }
            }

            return res;
        }

        public MethodBodyMirror GetMethodBody () {
            if (body == null) {
                MethodBodyInfo info = vm.conn.Method_GetBody (id);

                body = new MethodBodyMirror (vm, this, info);
            }
            return body;
        }

        public MethodMirror GetGenericMethodDefinition () {
            vm.CheckProtocolVersion (2, 12);
            if (gmd == null) {
                // FIX: go through GetInfo () so the info structure is fetched
                // on demand; reading the 'info' field directly throws
                // NullReferenceException when no other accessor ran first.
                if (GetInfo ().gmd == 0)
                    throw new InvalidOperationException ();
                gmd = vm.GetMethod (GetInfo ().gmd);
            }
            return gmd;
        }

        // Since protocol version 2.15
        public TypeMirror[] GetGenericArguments () {
            vm.CheckProtocolVersion (2, 15);
            if (type_args == null)
                type_args = vm.GetTypes (GetInfo ().type_args);
            return type_args;
        }

        // Since protocol version 2.24
        public MethodMirror MakeGenericMethod (TypeMirror[] args) {
            if (args == null)
                throw new ArgumentNullException ("args");
            foreach (var a in args)
                if (a == null)
                    throw new ArgumentNullException ("args");

            if (!IsGenericMethodDefinition)
                throw new InvalidOperationException ("not a generic method definition");

            if (GetGenericArguments ().Length != args.Length)
                throw new ArgumentException ("Incorrect length");

            vm.CheckProtocolVersion (2, 24);
            long id = -1;
            try {
                id = vm.conn.Method_MakeGenericMethod (Id, args.Select (t => t.Id).ToArray ());
            } catch (CommandException) {
                throw new InvalidOperationException ();
            }
            return vm.GetMethod (id);
        }

        public IList<int> ILOffsets {
            get {
                if (debug_info == null)
                    debug_info = vm.conn.Method_GetDebugInfo (id);
                return Array.AsReadOnly (debug_info.il_offsets);
            }
        }

        public IList<int> LineNumbers {
            get {
                if (debug_info == null)
                    debug_info = vm.conn.Method_GetDebugInfo (id);
                return Array.AsReadOnly (debug_info.line_numbers);
            }
        }

        public string SourceFile {
            get {
                if (debug_info == null)
                    debug_info = vm.conn.Method_GetDebugInfo (id);
                return debug_info.source_files.Length > 0 ? debug_info.source_files [0].source_file : null;
            }
        }

        public IList<Location> Locations {
            get {
                if (locations == null) {
                    var il_offsets = ILOffsets;
                    var line_numbers = LineNumbers;
                    IList<Location> res = new Location [ILOffsets.Count];
                    for (int i = 0; i < il_offsets.Count; ++i)
                        res [i] = new Location (vm, this, -1, il_offsets [i], debug_info.source_files [i].source_file, line_numbers [i], debug_info.column_numbers [i], debug_info.end_line_numbers [i], debug_info.end_column_numbers [i], debug_info.source_files [i].hash);
                    locations = res;
                }
                return locations;
            }
        }

        // Maps an IL offset to sequence-point data by scanning the cached
        // debug info backwards for the closest preceding offset.
        internal int il_offset_to_line_number (int il_offset, out string src_file, out byte[] src_hash, out int column_number, out int end_line_number, out int end_column_number) {
            if (debug_info == null)
                debug_info = vm.conn.Method_GetDebugInfo (id);

            // FIXME: Optimize this
            src_file = null;
            src_hash = null;
            column_number = 0;
            end_line_number = -1;
            end_column_number = -1;
            for (int i = debug_info.il_offsets.Length - 1; i >= 0; --i) {
                if (debug_info.il_offsets [i] <= il_offset) {
                    src_file = debug_info.source_files [i].source_file;
                    src_hash = debug_info.source_files [i].hash;
                    column_number = debug_info.column_numbers [i];
                    end_line_number = debug_info.end_line_numbers [i];
                    end_column_number = debug_info.end_column_numbers [i];
                    return debug_info.line_numbers [i];
                }
            }
            return -1;
        }

        public Location LocationAtILOffset (int il_offset) {
            IList<Location> locs = Locations;

            // FIXME: Optimize this
            for (int i = locs.Count - 1; i >= 0; --i) {
                if (locs [i].ILOffset <= il_offset)
                    return locs [i];
            }

            return null;
        }

#if ENABLE_CECIL
        public C.MethodDefinition Metadata {
            get {
                if (meta == null)
                    meta = (C.MethodDefinition)DeclaringType.Assembly.Metadata.MainModule.LookupToken (MetadataToken);
                return meta;
            }
        }
#endif

        //
        // Evaluate the method on the client using an IL interpreter.
        // Only supports a subset of IL instructions. Doesn't change
        // debuggee state.
        // Returns the result of the evaluation, or null for methods
        // which return void.
        // Throws a NotSupportedException if the method body contains
        // unsupported IL instructions, or if evaluating the method
        // would change debuggee state.
        //
        public Value Evaluate (Value this_val, Value[] args) {
            var interp = new ILInterpreter (this);

            return interp.Evaluate (this_val, args);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// This RegexRunner class is a base class for compiled regex code.

// Implementation notes:
// It provides the driver code that calls the subclass's Go()
// method for either scanning or direct execution.
//
// It also maintains memory allocation for the backtracking stack,
// the grouping stack and the longjump crawlstack, and provides
// methods to push new subpattern match results into (or remove
// backtracked results from) the Match instance.

using System.Diagnostics;
using System.Globalization;

namespace System.Text.RegularExpressions
{
    public abstract class RegexRunner
    {
        protected internal int runtextbeg;          // beginning of text to search
        protected internal int runtextend;          // end of text to search
        protected internal int runtextstart;        // starting point for search

        protected internal String runtext;          // text to search
        protected internal int runtextpos;          // current position in text

        protected internal int[] runtrack;          // The backtracking stack.  Opcodes use this to store data regarding
        protected internal int runtrackpos;         // what they have matched and where to backtrack to.  Each "frame" on
                                                    // the stack takes the form of [CodePosition Data1 Data2...], where
                                                    // CodePosition is the position of the current opcode and
                                                    // the data values are all optional.  The CodePosition can be negative, and
                                                    // these values (also called "back2") are used by the BranchMark family of opcodes
                                                    // to indicate whether they are backtracking after a successful or failed
                                                    // match.
                                                    // When we backtrack, we pop the CodePosition off the stack, set the current
                                                    // instruction pointer to that code position, and mark the opcode
                                                    // with a backtracking flag ("Back").  Each opcode then knows how to
                                                    // handle its own data.

        protected internal int[] runstack;          // This stack is used to track text positions across different opcodes.
        protected internal int runstackpos;         // For example, in /(a*b)+/, the parentheses result in a SetMark/CaptureMark
                                                    // pair. SetMark records the text position before we match a*b.  Then
                                                    // CaptureMark uses that position to figure out where the capture starts.
                                                    // Opcodes which push onto this stack are always paired with other opcodes
                                                    // which will pop the value from it later.  A successful match should mean
                                                    // that this stack is empty.

        protected internal int[] runcrawl;          // The crawl stack is used to keep track of captures.  Every time a group
        protected internal int runcrawlpos;         // has a capture, we push its group number onto the runcrawl stack.  In
                                                    // the case of a balanced match, we push BOTH groups onto the stack.

        protected internal int runtrackcount;       // count of states that may do backtracking

        protected internal Match runmatch;          // result object
        protected internal Regex runregex;          // regex object

        private Int32 _timeout;                     // timeout in milliseconds (needed for actual)
        private bool _ignoreTimeout;
        private Int32 _timeoutOccursAt;

        // We have determined this value in a series of experiments where x86 retail
        // builds (ono-lab-optimized) were run on different pattern/input pairs. Larger values
        // of TimeoutCheckFrequency did not tend to increase performance; smaller values
        // of TimeoutCheckFrequency tended to slow down the execution.
        private const int TimeoutCheckFrequency = 1000;
        private int _timeoutChecksToSkip;

        protected internal RegexRunner() { }

        /// <summary>
        /// Scans the string to find the first match. Uses the Match object
        /// both to feed text in and as a place to store matches that come out.
        ///
        /// All the action is in the abstract Go() method defined by subclasses. Our
        /// responsibility is to load up the class members (as done here) before
        /// calling Go.
        ///
        /// The optimizer can compute a set of candidate starting characters,
        /// and we could use a separate method Skip() that will quickly scan past
        /// any characters that we know can't match.
        /// </summary>
        protected internal Match Scan(Regex regex, String text, int textbeg, int textend, int textstart, int prevlen, bool quick)
        {
            // delegate to the timeout-aware overload with the regex's configured timeout
            return Scan(regex, text, textbeg, textend, textstart, prevlen, quick, regex.MatchTimeout);
        }

        protected internal Match Scan(Regex regex, String text, int textbeg, int textend, int textstart, int prevlen, bool quick, TimeSpan timeout)
        {
            int bump;
            int stoppos;
            bool initted = false;

            // We need to re-validate timeout here because Scan is historically protected and
            // thus there is a possibility it is called from user code:
            Regex.ValidateMatchTimeout(timeout);

            _ignoreTimeout = (Regex.InfiniteMatchTimeout == timeout);
            _timeout = _ignoreTimeout
                                    ? (Int32)Regex.InfiniteMatchTimeout.TotalMilliseconds
                                    : (Int32)(timeout.TotalMilliseconds + 0.5); // Round

            runregex = regex;
            runtext = text;
            runtextbeg = textbeg;
            runtextend = textend;
            runtextstart = textstart;

            // scan direction depends on RightToLeft
            bump = runregex.RightToLeft ? -1 : 1;
            stoppos = runregex.RightToLeft ? runtextbeg : runtextend;

            runtextpos = textstart;

            // If previous match was empty or failed, advance by one before matching
            if (prevlen == 0)
            {
                if (runtextpos == stoppos)
                    return Match.Empty;

                runtextpos += bump;
            }

            StartTimeoutWatch();

            for (; ;)
            {
#if DEBUG
                if (runregex.Debug)
                {
                    Debug.WriteLine("");
                    Debug.WriteLine("Search range: from " + runtextbeg.ToString(CultureInfo.InvariantCulture) + " to " + runtextend.ToString(CultureInfo.InvariantCulture));
                    Debug.WriteLine("Firstchar search starting at " + runtextpos.ToString(CultureInfo.InvariantCulture) + " stopping at " + stoppos.ToString(CultureInfo.InvariantCulture));
                }
#endif

                if (FindFirstChar())
                {
                    CheckTimeout();

                    if (!initted)
                    {
                        InitMatch();
                        initted = true;
                    }
#if DEBUG
                    if (runregex.Debug)
                    {
                        Debug.WriteLine("Executing engine starting at " + runtextpos.ToString(CultureInfo.InvariantCulture));
                        Debug.WriteLine("");
                    }
#endif
                    Go();

                    if (runmatch._matchcount[0] > 0)
                    {
                        // We'll return a match even if it touches a previous empty match
                        return TidyMatch(quick);
                    }

                    // reset state for another go
                    runtrackpos = runtrack.Length;
                    runstackpos = runstack.Length;
                    runcrawlpos = runcrawl.Length;
                }

                // failure!

                if (runtextpos == stoppos)
                {
                    TidyMatch(true);
                    return Match.Empty;
                }

                // Recognize leading []* and various anchors, and bump on failure accordingly

                // Bump by one and start again

                runtextpos += bump;
            }
            // We never get here
        }

        private void StartTimeoutWatch()
        {
            if (_ignoreTimeout)
                return;

            _timeoutChecksToSkip = TimeoutCheckFrequency;

            // We are using Environment.TickCount and not Timewatch for performance reasons.
            // Environment.TickCount is an int that cycles. We intentionally let _timeoutOccursAt
            // overflow; it will still stay ahead of Environment.TickCount for comparisons made
            // in DoCheckTimeout():
            unchecked
            {
                _timeoutOccursAt = Environment.TickCount + _timeout;
            }
        }

        protected void CheckTimeout()
        {
            if (_ignoreTimeout)
                return;

            // only pay for the real check every TimeoutCheckFrequency calls
            if (--_timeoutChecksToSkip != 0)
                return;

            _timeoutChecksToSkip = TimeoutCheckFrequency;
            DoCheckTimeout();
        }

        private void DoCheckTimeout()
        {
            // Note that both, Environment.TickCount and timeoutOccursAt are ints and can overflow and become negative.
            // See the comment in StartTimeoutWatch().

            int currentMillis = Environment.TickCount;

            if (currentMillis < _timeoutOccursAt)
                return;

            // deadline wrapped negative but the clock has not — not yet expired
            if (0 > _timeoutOccursAt && 0 < currentMillis)
                return;

#if DEBUG
            if (runregex.Debug)
            {
                Debug.WriteLine("");
                Debug.WriteLine("RegEx match timeout occurred!");
                Debug.WriteLine("Specified timeout:       " + TimeSpan.FromMilliseconds(_timeout).ToString());
                Debug.WriteLine("Timeout check frequency: " + TimeoutCheckFrequency);
                Debug.WriteLine("Search pattern:          " + runregex.pattern);
                Debug.WriteLine("Input:                   " + runtext);
                Debug.WriteLine("About to throw RegexMatchTimeoutException.");
            }
#endif

            throw new RegexMatchTimeoutException(runtext, runregex.pattern, TimeSpan.FromMilliseconds(_timeout));
        }

        /// <summary>
        /// The responsibility of Go() is to run the regular expression at
        /// runtextpos and call Capture() on all the captured subexpressions,
        /// then to leave runtextpos at the ending position. It should leave
        /// runtextpos where it started if there was no match.
        /// </summary>
        protected abstract void Go();

        /// <summary>
        /// The responsibility of FindFirstChar() is to advance runtextpos
        /// until it is at the next position which is a candidate for the
        /// beginning of a successful match.
        /// </summary>
        protected abstract bool FindFirstChar();

        /// <summary>
        /// InitTrackCount must initialize the runtrackcount field; this is
        /// used to know how large the initial runtrack and runstack arrays
        /// must be.
        /// </summary>
        protected abstract void InitTrackCount();

        /// <summary>
        /// Initializes all the data members that are used by Go()
        /// </summary>
        private void InitMatch()
        {
            // Use a hashtable'ed Match object if the capture numbers are sparse

            if (runmatch == null)
            {
                if (runregex.caps != null)
                    runmatch = new MatchSparse(runregex, runregex.caps, runregex.capsize, runtext, runtextbeg, runtextend - runtextbeg, runtextstart);
                else
                    runmatch = new Match(runregex, runregex.capsize, runtext, runtextbeg, runtextend - runtextbeg, runtextstart);
            }
            else
            {
                runmatch.Reset(runregex, runtext, runtextbeg, runtextend, runtextstart);
            }

            // note we test runcrawl, because it is the last one to be allocated
            // If there is an alloc failure in the middle of the three allocations,
            // we may still return to reuse this instance, and we want to behave
            // as if the allocations didn't occur. (we used to test _trackcount != 0)

            if (runcrawl != null)
            {
                runtrackpos = runtrack.Length;
                runstackpos = runstack.Length;
                runcrawlpos = runcrawl.Length;
                return;
            }

            InitTrackCount();

            int tracksize = runtrackcount * 8;
            int stacksize = runtrackcount * 8;

            if (tracksize < 32)
                tracksize = 32;
            if (stacksize < 16)
                stacksize = 16;

            runtrack = new int[tracksize];
            runtrackpos = tracksize;

            runstack = new int[stacksize];
            runstackpos = stacksize;

            runcrawl = new int[32];
            runcrawlpos = 32;
        }

        /// <summary>
        /// Put match in its canonical form before returning it.
        /// </summary>
        private Match TidyMatch(bool quick)
        {
            if (!quick)
            {
                Match match = runmatch;

                runmatch = null;

                match.Tidy(runtextpos);
                return match;
            }
            else
            {
                // in quick mode, a successful match returns null, and
                // the allocated match object is left in the cache

                return null;
            }
        }

        /// <summary>
        /// Called by the implementation of Go() to increase the size of storage
        /// </summary>
        protected void EnsureStorage()
        {
            if (runstackpos < runtrackcount * 4)
                DoubleStack();
            if (runtrackpos < runtrackcount * 4)
                DoubleTrack();
        }

        /// <summary>
        /// Called by the implementation of Go() to decide whether the pos
        /// at the specified index is a boundary or not. It's just not worth
        /// emitting inline code for this logic.
        /// </summary>
        protected bool IsBoundary(int index, int startpos, int endpos)
        {
            return (index > startpos && RegexCharClass.IsWordChar(runtext[index - 1])) !=
                   (index < endpos && RegexCharClass.IsWordChar(runtext[index]));
        }

        protected bool IsECMABoundary(int index, int startpos, int endpos)
        {
            return (index > startpos && RegexCharClass.IsECMAWordChar(runtext[index - 1])) !=
                   (index < endpos && RegexCharClass.IsECMAWordChar(runtext[index]));
        }

        protected static bool CharInSet(char ch, String set, String category)
        {
            string charClass = RegexCharClass.ConvertOldStringsToClass(set, category);
            return RegexCharClass.CharInClass(ch, charClass);
        }

        protected static bool CharInClass(char ch, String charClass)
        {
            return RegexCharClass.CharInClass(ch, charClass);
        }

        /// <summary>
        /// Called by the implementation of Go() to increase the size of the
        /// backtracking stack.
        /// </summary>
        protected void DoubleTrack()
        {
            int[] newtrack;

            newtrack = new int[runtrack.Length * 2];

            // stacks grow downward: copy old contents into the upper half
            System.Array.Copy(runtrack, 0, newtrack, runtrack.Length, runtrack.Length);
            runtrackpos += runtrack.Length;
            runtrack = newtrack;
        }

        /// <summary>
        /// Called by the implementation of Go() to increase the size of the
        /// grouping stack.
        /// </summary>
        protected void DoubleStack()
        {
            int[] newstack;

            newstack = new int[runstack.Length * 2];

            // stacks grow downward: copy old contents into the upper half
            System.Array.Copy(runstack, 0, newstack, runstack.Length, runstack.Length);
            runstackpos += runstack.Length;
            runstack = newstack;
        }

        /// <summary>
        /// Increases the size of the longjump unrolling stack.
        /// </summary>
        protected void DoubleCrawl()
        {
            int[] newcrawl;

            newcrawl = new int[runcrawl.Length * 2];

            // stacks grow downward: copy old contents into the upper half
            System.Array.Copy(runcrawl, 0, newcrawl, runcrawl.Length, runcrawl.Length);
            runcrawlpos += runcrawl.Length;
            runcrawl = newcrawl;
        }

        /// <summary>
        /// Save a number on the longjump unrolling stack
        /// </summary>
        protected void Crawl(int i)
        {
            if (runcrawlpos == 0)
                DoubleCrawl();

            runcrawl[--runcrawlpos] = i;
        }

        /// <summary>
        /// Remove a number from the longjump unrolling stack
        /// </summary>
        protected int Popcrawl()
        {
            return runcrawl[runcrawlpos++];
        }

        /// <summary>
        /// Get the height of the stack
        /// </summary>
        protected int Crawlpos()
        {
            return runcrawl.Length - runcrawlpos;
        }

        /// <summary>
        /// Called by Go() to capture a subexpression. Note that the
        /// capnum used here has already been mapped to a non-sparse
        /// index (by the code generator RegexWriter).
        /// </summary>
        protected void Capture(int capnum, int start, int end)
        {
            // normalize so start <= end (right-to-left matches arrive reversed)
            if (end < start)
            {
                int T;

                T = end;
                end = start;
                start = T;
            }

            Crawl(capnum);
            runmatch.AddMatch(capnum, start, end - start);
        }

        /// <summary>
        /// Called by Go() to capture a subexpression. Note that the
        /// capnum used here has already been mapped to a non-sparse
        /// index (by the code generator RegexWriter).
        /// </summary>
        protected void TransferCapture(int capnum, int uncapnum, int start, int end)
        {
            int start2;
            int end2;

            // these are the two intervals that are cancelling each other

            if (end < start)
            {
                int T;

                T = end;
                end = start;
                start = T;
            }

            start2 = MatchIndex(uncapnum);
            end2 = start2 + MatchLength(uncapnum);

            // The new capture gets the innermost defined interval

            if (start >= end2)
            {
                end = start;
                start = end2;
            }
            else if (end <= start2)
            {
                start = start2;
            }
            else
            {
                if (end > end2)
                    end = end2;
                if (start2 > start)
                    start = start2;
            }

            Crawl(uncapnum);
            runmatch.BalanceMatch(uncapnum);

            if (capnum != -1)
            {
                Crawl(capnum);
                runmatch.AddMatch(capnum, start, end - start);
            }
        }

        /*
         * Called by Go() to revert the last capture
         */
        protected void Uncapture()
        {
            int capnum = Popcrawl();
            runmatch.RemoveMatch(capnum);
        }

        /// <summary>
        /// Call out to runmatch to get around visibility issues
        /// </summary>
        protected bool IsMatched(int cap)
        {
            return runmatch.IsMatched(cap);
        }

        /// <summary>
        /// Call out to runmatch to get around visibility issues
        /// </summary>
        protected int MatchIndex(int cap)
        {
            return runmatch.MatchIndex(cap);
        }

        /// <summary>
        /// Call out to runmatch to get around visibility issues
        /// </summary>
        protected int MatchLength(int cap)
        {
            return runmatch.MatchLength(cap);
        }

#if DEBUG
        /// <summary>
        /// Dump the current state
        /// </summary>
        internal virtual void DumpState()
        {
            Debug.WriteLine("Text:  " + TextposDescription());
            Debug.WriteLine("Track: " + StackDescription(runtrack, runtrackpos));
            Debug.WriteLine("Stack: " + StackDescription(runstack, runstackpos));
        }

        internal static String StackDescription(int[] a, int index)
        {
            var sb = new StringBuilder();

            sb.Append(a.Length - index);
            sb.Append('/');
            sb.Append(a.Length);

            if (sb.Length < 8)
                sb.Append(' ', 8 - sb.Length);

            sb.Append('(');

            for (int i = index; i < a.Length; i++)
            {
                if (i > index)
                    sb.Append(' ');
                sb.Append(a[i]);
            }

            sb.Append(')');

            return sb.ToString();
        }

        internal virtual String TextposDescription()
        {
            var sb = new StringBuilder();
            int remaining;

            sb.Append(runtextpos);

            if (sb.Length < 8)
                sb.Append(' ', 8 - sb.Length);

            if (runtextpos > runtextbeg)
                sb.Append(RegexCharClass.CharDescription(runtext[runtextpos - 1]));
            else
                sb.Append('^');

            sb.Append('>');

            remaining = runtextend - runtextpos;

            for (int i = runtextpos; i < runtextend; i++)
            {
                sb.Append(RegexCharClass.CharDescription(runtext[i]));
            }
            if (sb.Length >= 64)
            {
                sb.Length = 61;
                sb.Append("...");
            }
            else
            {
                sb.Append('$');
            }

            return sb.ToString();
        }
#endif
    }
}
#if !XAMARIN && !WINDOWS_UWP
//-----------------------------------------------------------------------
// <copyright file="CslaDataProvider.cs" company="Marimer LLC">
// Copyright (c) Marimer LLC. All rights reserved.
// Website: https://cslanet.com
// </copyright>
// <summary>Wraps and creates a CSLA .NET-style object </summary>
//-----------------------------------------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Windows.Data;
using System.Reflection;
using Csla.Reflection;
using Csla.Properties;

namespace Csla.Xaml
{
  /// <summary>
  /// Wraps and creates a CSLA .NET-style object
  /// that you can use as a binding source.
  /// </summary>
  public class CslaDataProvider : DataSourceProvider
  {
    /// <summary>
    /// Creates an instance of the object.
    /// </summary>
    public CslaDataProvider()
    {
      _commandManager = new CslaDataProviderCommandManager(this);
      _factoryParameters = new ObservableCollection<object>();
      // Any change to the parameter collection triggers a re-query.
      _factoryParameters.CollectionChanged += new System.Collections.Specialized.NotifyCollectionChangedEventHandler(_factoryParameters_CollectionChanged);
    }

    /// <summary>
    /// Event raised when the object has been saved.
    /// </summary>
    public event EventHandler<Csla.Core.SavedEventArgs> Saved;

    /// <summary>
    /// Raise the Saved event when the object has been saved.
    /// </summary>
    /// <param name="newObject">New object reference as a result
    /// of the save operation.</param>
    /// <param name="error">Reference to an exception object if
    /// an error occurred.</param>
    /// <param name="userState">Reference to a user state object.</param>
    protected virtual void OnSaved(object newObject, Exception error, object userState)
    {
      if (Saved != null)
        Saved(this, new Csla.Core.SavedEventArgs(newObject, error, userState));
    }

    // Re-run the query whenever a factory parameter is added/removed/replaced.
    void _factoryParameters_CollectionChanged(
      object sender, System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
    {
      BeginQuery();
    }

    #region Properties

    private Type _objectType = null;
    private bool _manageLifetime;
    private string _factoryMethod = string.Empty;
    private ObservableCollection<object> _factoryParameters;
    private bool _isAsynchronous;
    private CslaDataProviderCommandManager _commandManager;
    private bool _isBusy;

    /// <summary>
    /// Gets an object that can be used to execute
    /// Save and Undo commands on this CslaDataProvider
    /// through XAML command bindings.
    /// </summary>
    public CslaDataProviderCommandManager CommandManager
    {
      get
      {
        return _commandManager;
      }
    }

    /// <summary>
    /// Gets or sets the type of object
    /// to create an instance of.
    /// </summary>
    public Type ObjectType
    {
      get
      {
        return _objectType;
      }
      set
      {
        _objectType = value;
        OnPropertyChanged(new PropertyChangedEventArgs("ObjectType"));
      }
    }

    /// <summary>
    /// Gets or sets a value indicating whether the
    /// data control should manage the lifetime of
    /// the business object, including using n-level
    /// undo.
    /// </summary>
    public bool ManageObjectLifetime
    {
      get
      {
        return _manageLifetime;
      }
      set
      {
        _manageLifetime = value;
        OnPropertyChanged(new PropertyChangedEventArgs("ManageObjectLifetime"));
      }
    }

    private object _dataChangedHandler;

    /// <summary>
    /// Gets or sets a reference to an object that
    /// will handle the DataChanged event raised
    /// by this data provider.
    /// </summary>
    /// <remarks>
    /// This property is designed to
    /// reference an IErrorDialog control.
    /// </remarks>
    public object DataChangedHandler
    {
      get
      {
        return _dataChangedHandler;
      }
      set
      {
        _dataChangedHandler = value;
        // If the handler is an IErrorDialog, let it hook this provider's events.
        var dialog = value as IErrorDialog;
        if (dialog != null)
          dialog.Register(this);
        OnPropertyChanged(new PropertyChangedEventArgs("DataChangedHandler"));
      }
    }

    /// <summary>
    /// Gets or sets the name of the static
    /// (Shared in Visual Basic) factory method
    /// that should be called to create the
    /// object instance.
    /// </summary>
    public string FactoryMethod
    {
      get
      {
        return _factoryMethod;
      }
      set
      {
        _factoryMethod = value;
        OnPropertyChanged(new PropertyChangedEventArgs("FactoryMethod"));
      }
    }

    /// <summary>
    /// Get the list of parameters to pass
    /// to the factory method.
    /// </summary>
    public IList FactoryParameters
    {
      get
      {
        return _factoryParameters;
      }
    }

    /// <summary>
    /// Gets or sets a value that indicates
    /// whether to perform object creation in
    /// a worker thread or in the active context.
    /// </summary>
    public bool IsAsynchronous
    {
      get { return _isAsynchronous; }
      set { _isAsynchronous = value; }
    }

    /// <summary>
    /// Gets or sets a reference to the data
    /// object.
    /// </summary>
    public object ObjectInstance
    {
      get { return Data; }
      set
      {
        // Push the value straight through as a completed "query" result.
        OnQueryFinished(value, null, null, null);
        OnPropertyChanged(new PropertyChangedEventArgs("ObjectInstance"));
      }
    }

    /// <summary>
    /// Gets a value indicating if this object is busy.
    /// </summary>
    public bool IsBusy
    {
      get { return _isBusy; }
      protected set
      {
        _isBusy = value;
        OnPropertyChanged(new PropertyChangedEventArgs("IsBusy"));
      }
    }

    /// <summary>
    /// Triggers WPF data binding to rebind to the
    /// data object.
    /// </summary>
    public void Rebind()
    {
      // Setting ObjectInstance to null and back forces bindings to refresh.
      object tmp = ObjectInstance;
      ObjectInstance = null;
      ObjectInstance = tmp;
    }

    #endregion

    #region Query

    private bool _firstRun = true;
    private bool _init = false;
    // NOTE(review): field name is misspelled ("Compete" vs "Complete");
    // it is private, so renaming would be safe but is left for a code change.
    private bool _endInitCompete = false;
    private bool _endInitError = false;

    /// <summary>
    /// Indicates that the control is about to initialize.
    /// </summary>
    protected override void BeginInit()
    {
      _init = true;
      base.BeginInit();
    }

    /// <summary>
    /// Indicates that the control has initialized.
    /// </summary>
    protected override void EndInit()
    {
      _init = false;
      base.EndInit();
      _endInitCompete = true;
    }

    /// <summary>
    /// Overridden. Starts to create the requested object,
    /// either immediately or on a background thread,
    /// based on the value of the IsAsynchronous property.
    /// </summary>
    protected override void BeginQuery()
    {
      // Don't query while BeginInit/EndInit is still in progress.
      if (_init)
        return;

      if (_firstRun)
      {
        _firstRun = false;
        if (!IsInitialLoadEnabled)
          return;
      }

      if (_endInitError)
      {
        // this handles a case where the WPF form initializer
        // invokes the data provider twice when an exception
        // occurs - we really don't want to try the query twice
        // or report the error twice
        _endInitError = false;
        OnQueryFinished(null);
        return;
      }

      if (this.IsRefreshDeferred)
        return;

      // Snapshot the request so the worker thread sees a stable copy.
      QueryRequest request = new QueryRequest();
      request.ObjectType = _objectType;
      request.FactoryMethod = _factoryMethod;
      request.FactoryParameters = _factoryParameters;
      request.ManageObjectLifetime = _manageLifetime;

      IsBusy = true;

      if (IsAsynchronous)
        System.Threading.ThreadPool.QueueUserWorkItem(DoQuery, request);
      else
        DoQuery(request);
    }

    // Locates and invokes the static factory method described by the request,
    // then reports the result (or exception) back to the base class.
    // Runs on a thread-pool thread when IsAsynchronous is true.
    private void DoQuery(object state)
    {
      QueryRequest request = (QueryRequest)state;
      object result = null;
      Exception exceptionResult = null;
      object[] parameters = new List<object>(request.FactoryParameters).ToArray();

      try
      {
        // get factory method info
        BindingFlags flags = BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy;
        System.Reflection.MethodInfo factory = request.ObjectType.GetMethod(
          request.FactoryMethod, flags, null,
          MethodCaller.GetParameterTypes(parameters), null);

        if (factory == null)
        {
          // strongly typed factory couldn't be found
          // so find one with the correct number of
          // parameters
          int parameterCount = parameters.Length;
          System.Reflection.MethodInfo[] methods = request.ObjectType.GetMethods(flags);
          foreach (System.Reflection.MethodInfo method in methods)
            if (method.Name == request.FactoryMethod && method.GetParameters().Length == parameterCount)
            {
              factory = method;
              break;
            }
        }

        if (factory == null)
        {
          // no matching factory could be found
          // so throw exception
          throw new InvalidOperationException(
            string.Format(Resources.NoSuchFactoryMethod, request.FactoryMethod));
        }

        // invoke factory method
        try
        {
          result = factory.Invoke(null, parameters);
        }
        catch (Csla.DataPortalException ex)
        {
          // surface the business exception rather than the portal wrapper
          exceptionResult = ex.BusinessException;
        }
        catch (System.Reflection.TargetInvocationException ex)
        {
          // unwrap the reflection wrapper, then unwrap a DataPortalException
          // inside it if present
          if (ex.InnerException != null)
          {
            exceptionResult = ex.InnerException;
            var dpe = exceptionResult as Csla.DataPortalException;
            if (dpe != null && dpe.BusinessException != null)
              exceptionResult = dpe.BusinessException;
          }
          else
            exceptionResult = ex;
        }
        catch (Exception ex)
        {
          exceptionResult = ex;
        }
      }
      catch (Exception ex)
      {
        exceptionResult = ex;
      }

      // Start n-level undo on the new object if we manage its lifetime.
      if (request.ManageObjectLifetime && result != null)
      {
        Csla.Core.ISupportUndo undo = result as Csla.Core.ISupportUndo;
        if (undo != null)
          undo.BeginEdit();
      }

      //if (!System.Windows.Application.Current.Dispatcher.CheckAccess())
      //  System.Windows.Application.Current.Dispatcher.Invoke(
      //    new Action(() => { IsBusy = false; }),
      //    new object[] { });

      // Remember that an error occurred before EndInit completed so
      // BeginQuery can suppress the duplicate initializer-driven query.
      if (!_endInitCompete && exceptionResult != null)
        _endInitError = true;

      // return result to base class; the callback clears the busy flag
      // once the result has been dispatched
      OnQueryFinished(result, exceptionResult, (o) =>
      {
        IsBusy = false;
        return null;
      }, null);
    }

    #region QueryRequest Class

    // Immutable-enough snapshot of the query inputs handed to DoQuery,
    // so a background query isn't affected by later property changes.
    private class QueryRequest
    {
      private Type _objectType;

      public Type ObjectType
      {
        get { return _objectType; }
        set { _objectType = value; }
      }

      private string _factoryMethod;

      public string FactoryMethod
      {
        get { return _factoryMethod; }
        set { _factoryMethod = value; }
      }

      private ObservableCollection<object> _factoryParameters;

      public ObservableCollection<object> FactoryParameters
      {
        get { return _factoryParameters; }
        // Copy the incoming collection so the snapshot is isolated from
        // further changes made by the UI thread.
        set { _factoryParameters = new ObservableCollection<object>(new List<object>(value)); }
      }

      private bool _manageLifetime;

      public bool ManageObjectLifetime
      {
        get { return _manageLifetime; }
        set { _manageLifetime = value; }
      }
    }

    #endregion

    #endregion

    #region Cancel/Update/New/Remove

    /// <summary>
    /// Cancels changes to the business object, returning
    /// it to its previous state.
    /// </summary>
    /// <remarks>
    /// This method does nothing unless ManageLifetime is
    /// set to true and the object supports n-level undo.
    /// </remarks>
    public void Cancel()
    {
      Csla.Core.ISupportUndo undo = this.Data as Csla.Core.ISupportUndo;
      if (undo != null && _manageLifetime)
      {
        IsBusy = true;
        // Roll back, then immediately start a fresh edit session.
        undo.CancelEdit();
        undo.BeginEdit();
        IsBusy = false;
      }
    }

    /// <summary>
    /// Accepts changes to the business object, and
    /// commits them by calling the object's Save()
    /// method.
    /// </summary>
    /// <remarks>
    /// <para>
    /// This method does nothing unless the object
    /// implements Csla.Core.ISavable.
    /// </para><para>
    /// If the object implements ICloneable, it
    /// will be cloned, and the clone will be
    /// saved.
    /// </para><para>
    /// If the object supports n-level undo and
    /// ManageLifetime is true, then this method
    /// will automatically call ApplyEdit() and
    /// BeginEdit() appropriately.
    /// </para>
    /// </remarks>
    public void Save()
    {
      // only do something if the object implements
      // ISavable
      Csla.Core.ISavable savable = this.Data as Csla.Core.ISavable;
      if (savable != null)
      {
        object result = savable;
        Exception exceptionResult = null;
        try
        {
          IsBusy = true;

          // clone the object if possible
          ICloneable clonable = savable as ICloneable;
          if (clonable != null)
            savable = (Csla.Core.ISavable)clonable.Clone();

          // apply edits in memory
          Csla.Core.ISupportUndo undo = savable as Csla.Core.ISupportUndo;
          if (undo != null && _manageLifetime)
            undo.ApplyEdit();

          // save the clone
          result = savable.Save();

          if (!ReferenceEquals(savable, this.Data) && !Csla.ApplicationContext.AutoCloneOnUpdate)
          {
            // raise Saved event from original object
            Core.ISavable original = this.Data as Core.ISavable;
            if (original != null)
              original.SaveComplete(result);
          }

          // start editing the resulting object
          undo = result as Csla.Core.ISupportUndo;
          if (undo != null && _manageLifetime)
            undo.BeginEdit();
        }
        catch (Exception ex)
        {
          exceptionResult = ex;
        }

        // clear previous object
        OnQueryFinished(null, exceptionResult, null, null);
        // return result to base class
        OnQueryFinished(result, null, null, null);
        IsBusy = false;
        OnSaved(result, exceptionResult, null);
      }
    }

    /// <summary>
    /// Adds a new item to the object if the object
    /// implements IBindingList and AllowNew is true.
    /// </summary>
    public object AddNew()
    {
      // only do something if the object implements
      // IBindingList
      IBindingList list = this.Data as IBindingList;
      if (list != null && list.AllowNew)
        return list.AddNew();
      else
        return null;
    }

    /// <summary>
    /// Removes an item from the list if the object
    /// implements IBindingList and AllowRemove is true.
    /// </summary>
    /// <param name="sender">Object invoking this method.</param>
    /// <param name="e">
    /// ExecuteEventArgs, where MethodParameter contains
    /// the item to be removed from the list.
    /// </param>
    public void RemoveItem(object sender, ExecuteEventArgs e)
    {
      var item = e.MethodParameter;
      // only do something if the object implements
      // IBindingList
      IBindingList list;
      // Prefer the item's parent list (for child business objects);
      // otherwise fall back to the provider's own data object.
      Csla.Core.BusinessBase bb = item as Csla.Core.BusinessBase;
      if (bb != null)
        list = bb.Parent as IBindingList;
      else
        list = this.Data as IBindingList;
      if (list != null && list.AllowRemove)
        list.Remove(item);
    }

    #endregion
  }
}
#endif
using System;
using System.Collections.Generic;
using DarkMultiPlayerCommon;
using MessageStream2;

namespace DarkMultiPlayer
{
    /// <summary>Callback fired when the server reports the result of a lock acquisition.</summary>
    public delegate void AcquireEvent(string playerName, string lockName, bool lockResult);
    /// <summary>Callback fired when a lock is released.</summary>
    public delegate void ReleaseEvent(string playerName, string lockName);

    /// <summary>
    /// Client-side mirror of the server's lock table. Sends acquire/release
    /// requests over the network, tracks which player owns which lock, and
    /// raises registered hooks when the table changes.
    /// </summary>
    public class LockSystem
    {
        // Minimum seconds between repeated acquire requests for the same lock.
        private const float ACQUIRE_THROTTLE_SECONDS = 5f;
        // Lock name -> owning player name, as last reported by the server.
        private Dictionary<string, string> serverLocks = new Dictionary<string, string>();
        private List<AcquireEvent> lockAcquireEvents = new List<AcquireEvent>();
        private List<ReleaseEvent> lockReleaseEvents = new List<ReleaseEvent>();
        // Lock name -> realtimeSinceStartup of the last acquire request (throttling).
        private Dictionary<string, double> lastAcquireTime = new Dictionary<string, double>();
        // Guards all mutable state above.
        private object lockObject = new object();
        //Services
        private Settings dmpSettings;
        private NetworkWorker networkWorker;

        public LockSystem(Settings dmpSettings, NetworkWorker networkWorker)
        {
            this.dmpSettings = dmpSettings;
            this.networkWorker = networkWorker;
        }

        /// <summary>
        /// Requests a lock, but at most once per ACQUIRE_THROTTLE_SECONDS per lock name.
        /// </summary>
        public void ThrottledAcquireLock(string lockname)
        {
            // Guarded like the rest of the shared state; the nested AcquireLock
            // call re-enters lockObject, which C# locks permit on the same thread.
            lock (lockObject)
            {
                // TryGetValue avoids the ContainsKey + indexer double lookup.
                double lastTime;
                bool throttled = lastAcquireTime.TryGetValue(lockname, out lastTime) &&
                    (Client.realtimeSinceStartup - lastTime) <= ACQUIRE_THROTTLE_SECONDS;
                if (!throttled)
                {
                    lastAcquireTime[lockname] = Client.realtimeSinceStartup;
                    AcquireLock(lockname, false);
                }
            }
        }

        /// <summary>
        /// Sends an acquire request to the server. The result arrives later
        /// via HandleLockMessage (ACQUIRE), which fires the acquire hooks.
        /// </summary>
        /// <param name="lockName">Name of the lock to acquire.</param>
        /// <param name="force">If true, asks the server to take the lock even if owned.</param>
        public void AcquireLock(string lockName, bool force)
        {
            lock (lockObject)
            {
                using (MessageWriter mw = new MessageWriter())
                {
                    mw.Write<int>((int)LockMessageType.ACQUIRE);
                    mw.Write<string>(dmpSettings.playerName);
                    mw.Write<string>(lockName);
                    mw.Write<bool>(force);
                    networkWorker.SendLockSystemMessage(mw.GetMessageBytes());
                }
            }
        }

        /// <summary>
        /// Sends a release request to the server and, if we hold the lock,
        /// optimistically removes it from the local mirror.
        /// </summary>
        public void ReleaseLock(string lockName)
        {
            lock (lockObject)
            {
                using (MessageWriter mw = new MessageWriter())
                {
                    mw.Write<int>((int)LockMessageType.RELEASE);
                    mw.Write<string>(dmpSettings.playerName);
                    mw.Write<string>(lockName);
                    networkWorker.SendLockSystemMessage(mw.GetMessageBytes());
                }
                if (LockIsOurs(lockName))
                {
                    serverLocks.Remove(lockName);
                }
            }
        }

        /// <summary>
        /// Drops every lock held by the given player from the local mirror
        /// and fires the release hooks for each one.
        /// </summary>
        public void ReleasePlayerLocks(string playerName)
        {
            lock (lockObject)
            {
                // Collect first: the dictionary cannot be modified mid-enumeration.
                List<string> removeList = new List<string>();
                foreach (KeyValuePair<string, string> kvp in serverLocks)
                {
                    if (kvp.Value == playerName)
                    {
                        removeList.Add(kvp.Key);
                    }
                }
                foreach (string removeValue in removeList)
                {
                    serverLocks.Remove(removeValue);
                    FireReleaseEvent(playerName, removeValue);
                }
            }
        }

        /// <summary>
        /// Releases all of a player's locks whose names start with the given
        /// prefix. Our own locks are released via the server (ReleaseLock);
        /// other players' locks are only dropped from the local mirror.
        /// </summary>
        public void ReleasePlayerLocksWithPrefix(string playerName, string prefix)
        {
            DarkLog.Debug("Releasing lock with prefix " + prefix + " for " + playerName);
            lock (lockObject)
            {
                List<string> removeList = new List<string>();
                foreach (KeyValuePair<string, string> kvp in serverLocks)
                {
                    if (kvp.Key.StartsWith(prefix) && kvp.Value == playerName)
                    {
                        removeList.Add(kvp.Key);
                    }
                }
                foreach (string removeValue in removeList)
                {
                    if (playerName == dmpSettings.playerName)
                    {
                        DarkLog.Debug("Releasing lock " + removeValue);
                        ReleaseLock(removeValue);
                    }
                    else
                    {
                        serverLocks.Remove(removeValue);
                        FireReleaseEvent(playerName, removeValue);
                    }
                }
            }
        }

        /// <summary>
        /// Processes a lock-system message from the server, updating the local
        /// mirror and firing the relevant hooks.
        /// </summary>
        public void HandleLockMessage(byte[] messageData)
        {
            lock (lockObject)
            {
                using (MessageReader mr = new MessageReader(messageData))
                {
                    LockMessageType lockMessageType = (LockMessageType)mr.Read<int>();
                    switch (lockMessageType)
                    {
                        case LockMessageType.LIST:
                            {
                                //We shouldn't need to clear this as LIST is only sent once, but better safe than sorry.
                                serverLocks.Clear();
                                string[] lockKeys = mr.Read<string[]>();
                                string[] lockValues = mr.Read<string[]>();
                                for (int i = 0; i < lockKeys.Length; i++)
                                {
                                    serverLocks.Add(lockKeys[i], lockValues[i]);
                                }
                            }
                            break;
                        case LockMessageType.ACQUIRE:
                            {
                                string playerName = mr.Read<string>();
                                string lockName = mr.Read<string>();
                                bool lockResult = mr.Read<bool>();
                                if (lockResult)
                                {
                                    serverLocks[lockName] = playerName;
                                }
                                FireAcquireEvent(playerName, lockName, lockResult);
                            }
                            break;
                        case LockMessageType.RELEASE:
                            {
                                string playerName = mr.Read<string>();
                                string lockName = mr.Read<string>();
                                // Remove is a no-op when the key is absent, so no
                                // ContainsKey pre-check (avoids a double lookup).
                                serverLocks.Remove(lockName);
                                FireReleaseEvent(playerName, lockName);
                            }
                            break;
                    }
                }
            }
        }

        /// <summary>Registers a hook fired on every lock acquisition result.</summary>
        public void RegisterAcquireHook(AcquireEvent methodObject)
        {
            lockAcquireEvents.Add(methodObject);
        }

        /// <summary>Unregisters a previously registered acquire hook.</summary>
        public void UnregisterAcquireHook(AcquireEvent methodObject)
        {
            // Remove is a no-op (returns false) when absent; Contains pre-check
            // would just double the lookup cost.
            lockAcquireEvents.Remove(methodObject);
        }

        /// <summary>Registers a hook fired on every lock release.</summary>
        public void RegisterReleaseHook(ReleaseEvent methodObject)
        {
            lockReleaseEvents.Add(methodObject);
        }

        /// <summary>Unregisters a previously registered release hook.</summary>
        public void UnregisterReleaseHook(ReleaseEvent methodObject)
        {
            lockReleaseEvents.Remove(methodObject);
        }

        // Fires all acquire hooks; exceptions in one hook don't stop the others.
        private void FireAcquireEvent(string playerName, string lockName, bool lockResult)
        {
            // Iterate a snapshot so a handler may safely unregister itself
            // (or another handler) during the callback.
            foreach (AcquireEvent methodObject in lockAcquireEvents.ToArray())
            {
                try
                {
                    methodObject(playerName, lockName, lockResult);
                }
                catch (Exception e)
                {
                    DarkLog.Debug("Error thrown in acquire lock event, exception " + e);
                }
            }
        }

        // Fires all release hooks; exceptions in one hook don't stop the others.
        private void FireReleaseEvent(string playerName, string lockName)
        {
            // Snapshot for the same reason as FireAcquireEvent.
            foreach (ReleaseEvent methodObject in lockReleaseEvents.ToArray())
            {
                try
                {
                    methodObject(playerName, lockName);
                }
                catch (Exception e)
                {
                    DarkLog.Debug("Error thrown in release lock event, exception " + e);
                }
            }
        }

        /// <summary>Returns true if we currently hold the named lock.</summary>
        public bool LockIsOurs(string lockName)
        {
            lock (lockObject)
            {
                // Single TryGetValue instead of ContainsKey + indexer.
                string owner;
                return serverLocks.TryGetValue(lockName, out owner) &&
                    owner == dmpSettings.playerName;
            }
        }

        /// <summary>Returns true if any player holds the named lock.</summary>
        public bool LockExists(string lockName)
        {
            lock (lockObject)
            {
                return serverLocks.ContainsKey(lockName);
            }
        }

        /// <summary>
        /// Returns the name of the player holding the lock, or "" if unheld.
        /// </summary>
        public string LockOwner(string lockName)
        {
            lock (lockObject)
            {
                string owner;
                if (serverLocks.TryGetValue(lockName, out owner))
                {
                    return owner;
                }
                return "";
            }
        }
    }
}
using System;
using Windows.UI;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using RadialMenuControl.Components;
using Windows.Foundation;
using Windows.UI.Xaml.Media.Animation;

namespace RadialMenuControl.UserControl
{
    /// <summary>
    /// One wedge of a radial menu: an inner and outer arc with configurable
    /// colors, an optional indication arc, and a label/icon.
    /// </summary>
    public sealed partial class PieSlice : Windows.UI.Xaml.Controls.UserControl
    {
        // Arcs
        // Exposes the XAML-defined outer arc path element to callers.
        public OuterPieSlicePath OuterArcElement => OuterPieSlicePath;

        // Inner Arc Colors
        public static readonly DependencyProperty InnerNormalColorProperty =
            DependencyProperty.Register("InnerNormalColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty InnerHoverColorProperty =
            DependencyProperty.Register("InnerHoverColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty InnerTappedColorProperty =
            DependencyProperty.Register("InnerTappedColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty InnerReleasedColorProperty =
            DependencyProperty.Register("InnerReleasedColor", typeof(Color), typeof(PieSlice), null);

        /// <summary>
        /// Hover color for the inner portion of the PieSlice
        /// </summary>
        public Color InnerHoverColor
        {
            get { return (Color)GetValue(InnerHoverColorProperty); }
            set { SetValue(InnerHoverColorProperty, value); }
        }

        /// <summary>
        /// Normal color for the inner portion of the PieSlice
        /// </summary>
        public Color InnerNormalColor
        {
            get { return (Color)GetValue(InnerNormalColorProperty); }
            set { SetValue(InnerNormalColorProperty, value); }
        }

        /// <summary>
        /// Tapped color for the inner portion of the PieSlice
        /// </summary>
        public Color InnerTappedColor
        {
            get { return (Color)GetValue(InnerTappedColorProperty); }
            set { SetValue(InnerTappedColorProperty, value); }
        }

        /// <summary>
        /// Released color for the inner portion of the PieSlice
        /// </summary>
        public Color InnerReleasedColor
        {
            get { return (Color)GetValue(InnerReleasedColorProperty); }
            set { SetValue(InnerReleasedColorProperty, value); }
        }

        // Indication Arc
        // NOTE(review): the registered names below are misspelled
        // ("Inidication…" vs the CLR wrappers "Indication…"). XAML that sets
        // these properties by name will not resolve them via the registered
        // name — confirm nothing relies on the typo'd names before fixing.
        public static readonly DependencyProperty IndicationArcStartPointProperty =
            DependencyProperty.Register("InidicationArcStartPoint", typeof(Point), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcEndPointProperty =
            DependencyProperty.Register("InidicationArcEndPoint", typeof(Point), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcSizeProperty =
            DependencyProperty.Register("InidicationArcSize", typeof(Size), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcIsLargeArcProperty =
            DependencyProperty.Register("IndicationArcIsLargeArc", typeof(bool), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcColorProperty =
            DependencyProperty.Register("IndicationArcColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcSweepAngleProperty =
            DependencyProperty.Register("IndicationArcSweepAngle", typeof(double), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcStrokeThicknessProperty =
            DependencyProperty.Register("IndicationArcStrokeThickness", typeof(double), typeof(PieSlice), null);
        public static readonly DependencyProperty IndicationArcDistanceFromEdgeProperty =
            DependencyProperty.Register("IndicationArcDistanceFromEdge", typeof(double), typeof(PieSlice), null);
        // NOTE(review): field name is plural ("Arcs") while the registered
        // name and CLR wrapper are singular — cosmetic inconsistency only.
        public static readonly DependencyProperty UseIndicationArcsProperty =
            DependencyProperty.Register("UseIndicationArc", typeof(bool), typeof(PieSlice), null);

        // Start point of the indication arc geometry.
        public Point IndicationArcStartPoint
        {
            get { return (Point)GetValue(IndicationArcStartPointProperty); }
            set { SetValue(IndicationArcStartPointProperty, value); }
        }

        // End point of the indication arc geometry.
        public Point IndicationArcEndPoint
        {
            get { return (Point)GetValue(IndicationArcEndPointProperty); }
            set { SetValue(IndicationArcEndPointProperty, value); }
        }

        // Radii of the indication arc segment.
        public Size IndicationArcSize
        {
            get { return (Size)GetValue(IndicationArcSizeProperty); }
            set { SetValue(IndicationArcSizeProperty, value); }
        }

        // Stroke color of the indication arc.
        public Color IndicationArcColor
        {
            get { return (Color)GetValue(IndicationArcColorProperty); }
            set { SetValue(IndicationArcColorProperty, value); }
        }

        // Sweep angle of the indication arc.
        public double IndicationArcSweepAngle
        {
            get { return (double)GetValue(IndicationArcSweepAngleProperty); }
            set { SetValue(IndicationArcSweepAngleProperty, value); }
        }

        // Stroke thickness of the indication arc.
        public double IndicationArcStrokeThickness
        {
            get { return (double)GetValue(IndicationArcStrokeThicknessProperty); }
            set { SetValue(IndicationArcStrokeThicknessProperty, value); }
        }

        // Whether the indication arc spans more than 180 degrees.
        public bool IndicationArcIsLargeArc
        {
            get { return (bool)GetValue(IndicationArcIsLargeArcProperty); }
            set { SetValue(IndicationArcIsLargeArcProperty, value); }
        }

        // Inset of the indication arc from the slice's outer edge.
        public double IndicationArcDistanceFromEdge
        {
            get { return (double)GetValue(IndicationArcDistanceFromEdgeProperty); }
            set { SetValue(IndicationArcDistanceFromEdgeProperty, value); }
        }

        // Whether an indication arc should be drawn for this slice.
        public bool UseIndicationArc
        {
            get { return (bool)GetValue(UseIndicationArcsProperty); }
            set { SetValue(UseIndicationArcsProperty, value); }
        }

        // Outer Arc Colors
        public static readonly DependencyProperty OuterNormalColorProperty =
            DependencyProperty.Register("OuterNormalColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty OuterHoverColorProperty =
            DependencyProperty.Register("OuterHoverColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty OuterTappedColorProperty =
            DependencyProperty.Register("OuterTappedColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty OuterDisabledColorProperty =
            DependencyProperty.Register("OuterDisabledColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty StrokeColorProperty =
            DependencyProperty.Register("StrokeColor", typeof(Color), typeof(PieSlice), null);
        public static readonly DependencyProperty StrokeThicknessProperty =
            DependencyProperty.Register("StrokeThickness", typeof(double), typeof(PieSlice), null);

        /// <summary>
        /// Hover color for the outer portion of the PieSlice
        /// </summary>
        public Color OuterHoverColor
        {
            get { return (Color)GetValue(OuterHoverColorProperty); }
            set { SetValue(OuterHoverColorProperty, value); }
        }

        /// <summary>
        /// Normal color for the outer portion of the PieSlice
        /// </summary>
        public Color OuterNormalColor
        {
            get { return (Color)GetValue(OuterNormalColorProperty); }
            set { SetValue(OuterNormalColorProperty, value); }
        }

        /// <summary>
        /// Disabled color for the outer portion of the PieSlice
        /// </summary>
        public Color OuterDisabledColor
        {
            get { return (Color)GetValue(OuterDisabledColorProperty); }
            set { SetValue(OuterDisabledColorProperty, value); }
        }

        /// <summary>
        /// Tapped color for the outer portion of the PieSlice
        /// </summary>
        public Color OuterTappedColor
        {
            get { return (Color)GetValue(OuterTappedColorProperty); }
            set { SetValue(OuterTappedColorProperty, value); }
        }

        // Stroke
        /// <summary>
        /// Color of the stroke around the PieSlice
        /// </summary>
        public Color StrokeColor
        {
            get { return (Color)GetValue(StrokeColorProperty); }
            set { SetValue(StrokeColorProperty, value); }
        }

        /// <summary>
        /// Thickness of the stroke around the PieSlice
        /// </summary>
        public double StrokeThickness
        {
            get { return (double)GetValue(StrokeThicknessProperty); }
            set { SetValue(StrokeThicknessProperty, value); }
        }

        // Angles & Radius
        public static readonly DependencyProperty StartAngleProperty =
            DependencyProperty.Register("StartAngle", typeof(double), typeof(PieSlice), null);
        public static readonly DependencyProperty AngleProperty =
            DependencyProperty.Register("Angle", typeof(double), typeof(PieSlice), null);
        public static readonly DependencyProperty RadiusProperty =
            DependencyProperty.Register("Radius", typeof(double), typeof(PieSlice), null);
        // NOTE(review): registered as "OuterThickness" while the CLR wrapper
        // is OuterArcThickness — same name-mismatch issue as the
        // "Inidication…" registrations above; confirm before renaming.
        public static readonly DependencyProperty OuterArcThicknessProperty =
            DependencyProperty.Register("OuterThickness", typeof(double), typeof(PieSlice), null);

        /// <summary>
        /// Starting angle of this PieSlice (with 0 being the "north top")
        /// </summary>
        public double StartAngle
        {
            get { return (double)GetValue(StartAngleProperty); }
            set { SetValue(StartAngleProperty, value); }
        }

        /// <summary>
        /// Angle (aka size) of this PieSlice
        /// </summary>
        public double Angle
        {
            get { return (double)GetValue(AngleProperty); }
            set { SetValue(AngleProperty, value); }
        }

        /// <summary>
        /// Radius of the (invisible) full circle used as drawing basis for this PieSlice
        /// </summary>
        public double Radius
        {
            get { return (double)GetValue(RadiusProperty); }
            set { SetValue(RadiusProperty, value); }
        }

        /// <summary>
        /// Thickness of the outer arc
        /// </summary>
        public double OuterArcThickness
        {
            get { return (double)GetValue(OuterArcThicknessProperty); }
            set { SetValue(OuterArcThicknessProperty, value); }
        }

        // Label
        public static readonly DependencyProperty IsLabelHiddenProperty =
            DependencyProperty.Register("IsLabelHidden", typeof(bool), typeof(PieSlice), null);
        public static readonly DependencyProperty InnerAccessKeyProperty =
            DependencyProperty.Register("InnerAccessKey", typeof(string), typeof(PieSlice), null);
        public static readonly DependencyProperty OuterAccessKeyProperty =
            DependencyProperty.Register("OuterAccessKey", typeof(string), typeof(PieSlice), null);
        public static readonly DependencyProperty LabelProperty =
            DependencyProperty.Register("Label", typeof(string), typeof(PieSlice), null);
        public static readonly DependencyProperty LabelSizeProperty =
            DependencyProperty.Register("LabelSize", typeof(int), typeof(PieSlice), null);
        public static readonly DependencyProperty IconProperty =
            DependencyProperty.Register("Icon", typeof(string), typeof(PieSlice), null);
        public static readonly DependencyProperty IconForegroundBrushProperty =
            DependencyProperty.Register("IconForegroundBrush", typeof(Brush), typeof(PieSlice), null);
        public static readonly DependencyProperty IconFontFamilyProperty =
            DependencyProperty.Register("IconFontFamily", typeof(FontFamily), typeof(PieSlice), null);
        public static readonly DependencyProperty IconSizeProperty =
            DependencyProperty.Register("IconSize", typeof(int), typeof(PieSlice), null);
        public static readonly DependencyProperty IconImageProperty =
            DependencyProperty.Register("IconImage", typeof(ImageSource), typeof(PieSlice), null);
        // NOTE(review): registered with typeof(ImageSource), but the CLR
        // wrapper IconImageSideLength reads/writes a double — the cast in the
        // getter will fail once a value flows through the DP system. Looks
        // like this should be typeof(double); confirm before changing.
        public static readonly DependencyProperty IconImageSideLengthProperty =
            DependencyProperty.Register("IconImageSideLength", typeof(ImageSource), typeof(PieSlice), null);
        public static readonly DependencyProperty CustomValueProperty =
            DependencyProperty.Register("CustomValue", typeof(string), typeof(PieSlice), null);

        /// <summary>
        /// Outer slice path access key
        /// </summary>
        public string OuterAccessKey
        {
            get { return (string)GetValue(OuterAccessKeyProperty); }
            set { SetValue(OuterAccessKeyProperty, value); }
        }

        /// <summary>
        /// Inner slice path access key
        /// </summary>
        public string InnerAccessKey
        {
            get { return (string)GetValue(InnerAccessKeyProperty); }
            set { SetValue(InnerAccessKeyProperty, value); }
        }

        /// <summary>
        /// Text label
        /// </summary>
        public string Label
        {
            get { return (string)GetValue(LabelProperty); }
            set { SetValue(LabelProperty, value); }
        }

        /// <summary>
        /// Font size for the text label
        /// </summary>
        public int LabelSize
        {
            get { return (int)GetValue(LabelSizeProperty); }
            set { SetValue(LabelSizeProperty, value); }
        }

        /// <summary>
        /// Should the label be hidden?
        /// </summary>
        public bool IsLabelHidden
        {
            get { return (bool)GetValue(IsLabelHiddenProperty); }
            set { SetValue(IsLabelHiddenProperty, value); }
        }

        /// <summary>
        /// Icon (string-based), to be used with icon fonts like Segoe Symbol
        /// </summary>
        public string Icon
        {
            get { return (string)GetValue(IconProperty); }
            set { SetValue(IconProperty, value); }
        }

        /// <summary>
        /// Foreground brush for the icon, allowing color change for text-based icons
        /// </summary>
        public Brush IconForegroundBrush
        {
            get { return (Brush)GetValue(IconForegroundBrushProperty); }
            set { SetValue(IconForegroundBrushProperty, value); }
        }

        /// <summary>
        /// Font family for the text-based icon
        /// </summary>
        /// <remarks>
        /// NOTE(review): the setter also writes directly to
        /// IconTextElement.FontFamily as a side effect — unlike the other
        /// property wrappers here, which only call SetValue.
        /// </remarks>
        public FontFamily IconFontFamily
        {
            get { return (FontFamily)GetValue(IconFontFamilyProperty); }
            set
            {
                IconTextElement.FontFamily = value;
                SetValue(IconFontFamilyProperty, value);
            }
        }

        /// <summary>
        /// Font size for the text-based icon
        /// </summary>
        public int IconSize
        {
            get { return (int)GetValue(IconSizeProperty); }
            set { SetValue(IconSizeProperty, value); }
        }

        /// <summary>
        /// ImageSource for an image icon - if set, the text-based icon will not be displayed
        /// </summary>
        public ImageSource IconImage
        {
            get { return (ImageSource)GetValue(IconImageProperty); }
            set { SetValue(IconImageProperty, value); }
        }

        /// <summary>
        /// Length of the image-based icon
        /// </summary>
        public double IconImageSideLength
        {
            get { return (double)GetValue(IconImageSideLengthProperty); }
            set { SetValue(IconImageSideLengthProperty, value); }
        }

        /// <summary>
        /// Value for custom button
        /// </summary>
        /// <remarks>
        /// NOTE(review): the setter writes through to
        /// OriginalRadialMenuButton.Value; that field is assigned elsewhere,
        /// so setting CustomValue before it is populated would throw a
        /// NullReferenceException — confirm the initialization order.
        /// </remarks>
        public string CustomValue
        {
            get { return (string)GetValue(CustomValueProperty); }
            set
            {
                SetValue(CustomValueProperty, value);
                OriginalRadialMenuButton.Value = value;
            }
        }

        /// <summary>
        /// Visibility of the text block - determined by checking whether or not an IconImage is set
        /// </summary>
        public Visibility TextBlockVisibility => IconImage == null ? Visibility.Visible : Visibility.Collapsed;

        // NOTE(review): a DependencyProperty is registered here, but the
        // member below is a plain public field rather than a GetValue/SetValue
        // wrapper, so the DP appears unused by this accessor.
        public static readonly DependencyProperty OriginalRadialMenuButtonProperty =
            DependencyProperty.Register("OriginalRadialMenuButton", typeof(RadialMenuButton), typeof(PieSlice), null);

        /// <summary>
        /// Reference to the original RadialMenuButton that was used to create this PieSlice
        /// </summary>
        public RadialMenuButton OriginalRadialMenuButton;

        /// <summary>
        /// Reference to the TextBox control for this PieSlice when the button type is custom
        /// </summary>
        private TextBox CustomTextBox;

        /// <summary>
        /// Delegate for a ChangeMenuRequest, asking the parent RadialMenu to change the menu to a submenu on a button
        /// </summary>
        /// <param name="sender">Sending object</param>
        /// <param name="submenu">RadialMenu to change to</param>
        public delegate void ChangeMenuRequestHandler(object sender, MenuBase submenu);
        public event ChangeMenuRequestHandler ChangeMenuRequestEvent;

        /// <summary>
        /// A delegate for the ChangeSelected event, fired whenever a radio or toggle button changes its value
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="slice"></param>
        public delegate void ChangeSelectedHandler(object sender, PieSlice slice);
        public event ChangeSelectedHandler ChangeSelectedEvent;

        // Tooltips
        private bool _areAccessKeyToolTipsVisible = false;
        /// <summary>
        /// Are little popups showing the access keys for the inner and outer arc visible?
/// </summary>
        public bool AreAccessKeyToolTipsVisible
        {
            get { return _areAccessKeyToolTipsVisible; }
            set
            {
                _areAccessKeyToolTipsVisible = value;
                // The outer popup is only opened when the outer arc actually has an action
                // and an outer access key is assigned; the inner popup only needs a key.
                OuterAccessKeyPopup.IsOpen = OriginalRadialMenuButton.HasOuterArcAction && (OuterAccessKey != null) && value;
                InnerAccessKeyPopup.IsOpen = (InnerAccessKey != null) && value;
            }
        }

        /// <summary>
        /// Forwards a submenu-change request to subscribers (typically the parent RadialMenu).
        /// </summary>
        private void OnChangeMenuRequest(object s, MenuBase sm)
        {
            ChangeMenuRequestEvent?.Invoke(s, sm);
        }

        /// <summary>
        /// Creates the PieSlice; all geometry is computed later in <see cref="OnLoaded"/>.
        /// </summary>
        public PieSlice()
        {
            InitializeComponent();
            DataContext = this;
            Loaded += OnLoaded;
        }

        /// <summary>
        /// Computes and applies the geometry for the indication arc (a thin arc drawn inside
        /// the slice, spanning from 10% to 90% of the slice's angle) and wires up the
        /// fade-in animation that shows it when the inner arc is released.
        /// </summary>
        private void SetupIndicationArc()
        {
            var indicationArcRadius = Radius - OuterArcThickness - IndicationArcDistanceFromEdge;
            IndicationArc.Size = new Size(indicationArcRadius, indicationArcRadius);
            // Start/end angles in radians, inset 10% from either edge of the slice.
            double startAngle = (StartAngle + .10 * Angle) * (Math.PI / 180),
                endAngle = (StartAngle + .90 * Angle) * (Math.PI / 180);
            // Convert polar coordinates (angle measured from the top, clockwise) into
            // canvas coordinates centered at (Radius, Radius).
            double startX = Radius + indicationArcRadius * Math.Sin(startAngle),
                startY = Radius - indicationArcRadius * Math.Cos(startAngle),
                endX = Radius + indicationArcRadius * Math.Sin(endAngle),
                endY = Radius - indicationArcRadius * Math.Cos(endAngle);
            IndicationArcPathFigure.StartPoint = new Point(startX, startY);
            IndicationArc.Point = new Point(endX, endY);
            IndicationArcPath.StrokeThickness = IndicationArcStrokeThickness;
            IndicationArcPath.Stroke = new SolidColorBrush(IndicationArcColor);
            // make all arcs initially invisible
            IndicationArcPath.Opacity = 0.0;
            var appearAnimation = new DoubleAnimation()
            {
                From = 0.0,
                To = 1.0,
            };
            appearAnimation.SetValue(Storyboard.TargetNameProperty, "IndicationArcPath");
            appearAnimation.SetValue(Storyboard.TargetPropertyProperty, "Opacity");
            // Append the fade-in to the existing "InnerReleased" storyboard so the arc
            // appears together with that visual state.
            InnerReleasedStoryBoard.Stop();
            InnerReleasedStoryBoard.Children.Add(appearAnimation);
        }

        /// <summary>
        /// Math and drawing operations for the path elements are handled in this OnLoaded event handler
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="routedEventArgs"></param>
        private void OnLoaded(object sender, RoutedEventArgs routedEventArgs)
        {
            if (UseIndicationArc)
            {
                SetupIndicationArc();
            }
            // Setup outer arc
            OuterPieSlicePath.Radius = Radius;
            OuterPieSlicePath.StartAngle = StartAngle;
            OuterPieSlicePath.Angle = Angle;
            OuterPieSlicePath.Thickness = OuterArcThickness;
            // Angle bisector of the slice, in radians; used to position caret, icon and popups.
            var middleRadian = (Math.PI / 180) * (StartAngle + (Angle / 2));
            if (!OriginalRadialMenuButton.HasOuterArcAction)
            {
                OuterPieSlicePath.Fill = new SolidColorBrush(OuterDisabledColor);
            }
            else
            {
                OuterPieSlicePath.Fill = new SolidColorBrush(OuterNormalColor);
                OuterPieSlicePath.PointerPressed += outerPieSlicePath_PointerPressed;
                OuterPieSlicePath.PointerReleased += outerPieSlicePath_PointerReleased;
                OuterPieSlicePath.PointerEntered += outerPieSlicePath_PointerEntered;
                OuterPieSlicePath.PointerExited += outerPieSlicePath_PointerExited;
                // Setup Caret: rotate to the bisector and push it just outside the middle
                // of the outer arc band.
                CaretRotateTransform.Angle = (StartAngle + (Angle / 2));
                CaretTranslate.X = (Radius - OuterArcThickness / 2 + 3) * Math.Sin(middleRadian);
                CaretTranslate.Y = -(Radius - OuterArcThickness / 2 + 3) * Math.Cos(middleRadian);
            }
            // Setup inner arc
            InnerPieSlicePath.Radius = Radius - OuterArcThickness;
            InnerPieSlicePath.StartAngle = StartAngle;
            InnerPieSlicePath.Angle = Angle;
            InnerPieSlicePath.Fill = new SolidColorBrush(InnerNormalColor);
            // Setup textbox for custom type RadialMenuButton
            if (OriginalRadialMenuButton.Type == RadialMenuButton.ButtonType.Custom) CreateCustomTextBox();
            // Stroke
            OuterPieSlicePath.StrokeThickness = StrokeThickness;
            OuterPieSlicePath.Stroke = new SolidColorBrush(StrokeColor);
            InnerPieSlicePath.StrokeThickness = StrokeThickness;
            InnerPieSlicePath.Stroke = new SolidColorBrush(StrokeColor);
            // Setup icon and text: centered along the bisector, offset 20px outward from
            // the middle of the inner area.
            IconTranslate.X = ((Radius - OuterArcThickness) / 2 + 20) * Math.Sin(middleRadian);
            IconTranslate.Y = -((Radius - OuterArcThickness) / 2 + 20) * Math.Cos(middleRadian);
            // Setup Access Key Popups
            InnerAccessKeyPopup.HorizontalOffset = Radius;
            InnerAccessKeyPopup.VerticalOffset = Radius;
            OuterAccessKeyPopup.HorizontalOffset = Radius;
            OuterAccessKeyPopup.VerticalOffset = Radius;
            OuterAccessKeyPopupTranslate.X = (Radius - OuterArcThickness / 2 + 3) * Math.Sin(middleRadian);
            OuterAccessKeyPopupTranslate.Y = -(Radius - OuterArcThickness / 2 + 3) * Math.Cos(middleRadian);
            // Go to correct visual state
            UpdateSliceForToggle();
            UpdateSliceForRadio();
        }

        /// <summary>
        /// Creates a textbox to allow input of custom values for custom type RadialMenuButtons
        /// </summary>
        private void CreateCustomTextBox()
        {
            CustomTextBox = new TextBox
            {
                Name = "CustomTextBox",
                FontSize = LabelSize,
                Margin = new Thickness(0, 67, 0, 0),
                HorizontalAlignment = HorizontalAlignment.Center,
                VerticalAlignment = VerticalAlignment.Center,
                BorderThickness = new Thickness(0),
                TextAlignment = TextAlignment.Center,
                Background = new SolidColorBrush(Colors.Transparent),
                AcceptsReturn = false,
                Style = (Style)this.Resources["TransparentTextBox"]
            };
            CustomTextBox.Padding = new Thickness(0);
            // Hide the regular label while the textbox has focus, and push the edited
            // value back to the originating button when focus leaves.
            CustomTextBox.GotFocus += (sender, args) => LabelTextElement.Opacity = 0;
            CustomTextBox.LostFocus += (sender, args) =>
            {
                OriginalRadialMenuButton.Value = ((TextBox)sender).Text;
                LabelTextElement.Opacity = 1;
                OriginalRadialMenuButton.OnValueChanged(args);
            };
            // NOTE(review): Source is set to the current string value of CustomValue (no Path),
            // so this is a one-time snapshot rather than a live binding to the dependency
            // property — confirm that is intended.
            CustomTextBox.SetBinding(TextBox.TextProperty, new Windows.UI.Xaml.Data.Binding() { Source = this.CustomValue });
            TextLabelGrid.Children.Add(CustomTextBox);
        }

        /// <summary>
        /// Programmatically "click" the inner arc in the PieSlice.
        /// NOTE(review): the `as PointerRoutedEventArgs` cast below always yields null
        /// (RoutedEventArgs is not a PointerRoutedEventArgs), so the handlers receive
        /// e == null here and must tolerate it — confirm for the Custom button type,
        /// where the pressed handler dereferences e.
        /// </summary>
        public void ClickInner()
        {
            innerPieSlicePath_PointerPressed(this, new RoutedEventArgs() as PointerRoutedEventArgs);
            innerPieSlicePath_PointerReleased(this, new RoutedEventArgs() as PointerRoutedEventArgs);
        }

        /// <summary>
        /// Programmatically "click" the outer arc in the PieSlice.
        /// NOTE(review): as in ClickInner, the handlers receive e == null here.
        /// </summary>
        public void ClickOuter()
        {
            outerPieSlicePath_PointerPressed(this, new RoutedEventArgs() as PointerRoutedEventArgs);
            outerPieSlicePath_PointerReleased(this, new RoutedEventArgs() as PointerRoutedEventArgs);
        }

        /// <summary>
        /// Event handler for a pointer press (mouse, touch, stylus) on the outer arc of the PieSlice
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void outerPieSlicePath_PointerPressed(object sender, PointerRoutedEventArgs e)
        {
            VisualStateManager.GoToState(this, "OuterPressed", true);
            // Check for Submenu & Custom Menu
            if (OriginalRadialMenuButton.Submenu != null)
            {
                OnChangeMenuRequest(OriginalRadialMenuButton, OriginalRadialMenuButton.Submenu);
            }
            else if (OriginalRadialMenuButton.CustomMenu != null)
            {
                OnChangeMenuRequest(OriginalRadialMenuButton, OriginalRadialMenuButton.CustomMenu);
            }
            OriginalRadialMenuButton.OnOuterArcPressed(e);
        }

        /// <summary>
        /// Event handler for a pointer release (mouse, touch, stylus) on the outer arc of the PieSlice
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void outerPieSlicePath_PointerReleased(object sender, PointerRoutedEventArgs e)
        {
            VisualStateManager.GoToState(this, "OuterHover", true);
            OriginalRadialMenuButton.OnOuterArcReleased(e);
        }

        /// <summary>
        /// Event handler for a pointer enter (mouse, touch, stylus) on the outer arc of the PieSlice
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void outerPieSlicePath_PointerEntered(object sender, PointerRoutedEventArgs e)
        {
            VisualStateManager.GoToState(this, "OuterHover", true);
        }

        /// <summary>
        /// Event handler for a pointer exit (mouse, touch, stylus) on the outer arc of the PieSlice
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void outerPieSlicePath_PointerExited(object sender, PointerRoutedEventArgs e)
        {
            VisualStateManager.GoToState(this, "OuterNormal", true);
        }

        /// <summary>
        /// Event handler for a pointer enter (mouse, touch, stylus) on the inner arc of the PieSlice.
        /// Toggle/radio buttons that are currently "on" keep the released look while hovered.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void innerPieSlicePath_PointerEntered(object sender, PointerRoutedEventArgs e)
        {
            switch (OriginalRadialMenuButton.Type)
            {
                case RadialMenuButton.ButtonType.Toggle:
                    VisualStateManager.GoToState(this, (OriginalRadialMenuButton.Value != null && ((bool)OriginalRadialMenuButton.Value)) ? "InnerReleased" : "InnerHover", true);
                    break;
                case RadialMenuButton.ButtonType.Radio:
                    VisualStateManager.GoToState(this, OriginalRadialMenuButton.MenuSelected ? "InnerReleased" : "InnerHover", true);
                    break;
                default:
                    VisualStateManager.GoToState(this, "InnerHover", true);
                    break;
            }
        }

        /// <summary>
        /// Event handler for a pointer exit (mouse, touch, stylus) on the inner arc of the PieSlice
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void innerPieSlicePath_PointerExited(object sender, PointerRoutedEventArgs e)
        {
            switch (OriginalRadialMenuButton.Type)
            {
                case RadialMenuButton.ButtonType.Toggle:
                    UpdateSliceForToggle();
                    break;
                case RadialMenuButton.ButtonType.Radio:
                    UpdateSliceForRadio();
                    break;
                default:
                    VisualStateManager.GoToState(this, "InnerNormal", true);
                    break;
            }
        }

        /// <summary>
        /// Event handler for a pointer press (mouse, touch, stylus) on the inner arc of the PieSlice.
        /// For custom buttons the press only focuses the textbox; for all other types it forwards
        /// the press to the originating button and updates the visual state.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void innerPieSlicePath_PointerPressed(object sender, PointerRoutedEventArgs e)
        {
            if (OriginalRadialMenuButton.Type == RadialMenuButton.ButtonType.Custom)
            {
                CustomTextBox.Focus(FocusState.Keyboard);
                CustomTextBox.SelectAll();
                e.Handled = true;
            }
            else
            {
                VisualStateManager.GoToState(this, "InnerPressed", true);
                OriginalRadialMenuButton.OnInnerArcPressed(e);
                switch (OriginalRadialMenuButton.Type)
                {
                    case RadialMenuButton.ButtonType.Toggle:
                        VisualStateManager.GoToState(this, (OriginalRadialMenuButton.Value != null && ((bool)OriginalRadialMenuButton.Value)) ? "InnerReleased" : "InnerNormal", true);
                        break;
                    case RadialMenuButton.ButtonType.Radio:
                        VisualStateManager.GoToState(this, "InnerReleased", true);
                        // get all other menus to release now that this menu has been selected
                        ChangeSelectedEvent?.Invoke(sender, this);
                        break;
                    default:
                        VisualStateManager.GoToState(this, "InnerNormal", true);
                        break;
                }
            }
        }

        /// <summary>
        /// Event handler for a pointer release (mouse, touch, stylus) on the inner arc of the PieSlice
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void innerPieSlicePath_PointerReleased(object sender, PointerRoutedEventArgs e)
        {
            OriginalRadialMenuButton.OnInnerArcReleased(e);
            switch (OriginalRadialMenuButton.Type)
            {
                case RadialMenuButton.ButtonType.Toggle:
                    VisualStateManager.GoToState(this, (OriginalRadialMenuButton.Value != null && ((bool) OriginalRadialMenuButton.Value)) ? "InnerReleased" : "InnerNormal", true);
                    break;
                case RadialMenuButton.ButtonType.Radio:
                    VisualStateManager.GoToState(this, "InnerReleased", true);
                    // get all other menus to release now that this menu has been selected
                    ChangeSelectedEvent?.Invoke(sender, this);
                    break;
                default:
                    VisualStateManager.GoToState(this, "InnerNormal", true);
                    break;
            }
        }

        /// <summary>
        /// If the PieSlice has been generated by a "radio" RadialMenuButton, this method ensures the correct visual state
        /// </summary>
        public void UpdateSliceForRadio()
        {
            if (OriginalRadialMenuButton.Type != RadialMenuButton.ButtonType.Radio) return;
            VisualStateManager.GoToState(this, OriginalRadialMenuButton.MenuSelected ? "InnerReleased" : "InnerNormal", true);
        }

        /// <summary>
        /// If the PieSlice has been generated by a "toggle" RadialMenuButton, this method ensures the correct visual state
        /// </summary>
        public void UpdateSliceForToggle()
        {
            if (OriginalRadialMenuButton.Type != RadialMenuButton.ButtonType.Toggle) return;
            VisualStateManager.GoToState(this, (OriginalRadialMenuButton.Value != null && ((bool)OriginalRadialMenuButton.Value)) ? "InnerReleased" : "InnerNormal", true);
        }
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using FLS.Data.WebApi;
using FLS.Server.Data.Enums;

namespace FLS.Server.Data.DbEntities
{
    /// <summary>
    /// Entity for a single flight (glider, tow or motor flight), including crew,
    /// locations, timing information and the workflow process state.
    /// </summary>
    public partial class Flight : IFLSMetaData
    {
        public Flight()
        {
            FlightCrews = new HashSet<FlightCrew>();
            TowedFlights = new HashSet<Flight>();
            Deliveries = new HashSet<Delivery>();
            DeliveryCreationTests = new HashSet<DeliveryCreationTest>();
            // New flights always start in the "not processed" workflow state.
            ProcessStateId = (int) FLS.Data.WebApi.Flight.FlightProcessState.NotProcessed;
        }

        public Guid FlightId { get; set; }

        public Guid AircraftId { get; set; }

        public int? StartPosition { get; set; }

        [Column(TypeName = "Date")]
        public DateTime? FlightDate { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? StartDateTime { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? LdgDateTime { get; set; }

        /// <summary>
        /// Gets or sets the engine operating counter before start engine (units see EngineOperatingCounterUnitTypeId)
        /// </summary>
        public Nullable<long> EngineStartOperatingCounterInSeconds { get; set; }

        /// <summary>
        /// Gets or sets the engine operating counter after engine shutdown (units see EngineOperatingCounterUnitTypeId)
        /// </summary>
        public Nullable<long> EngineEndOperatingCounterInSeconds { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? BlockStartDateTime { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? BlockEndDateTime { get; set; }

        public Guid? StartLocationId { get; set; }

        public Guid? LdgLocationId { get; set; }

        [StringLength(5)]
        public string StartRunway { get; set; }

        [StringLength(5)]
        public string LdgRunway { get; set; }

        [StringLength(50)]
        public string OutboundRoute { get; set; }

        [StringLength(50)]
        public string InboundRoute { get; set; }

        public Guid? FlightTypeId { get; set; }

        public bool IsSoloFlight { get; set; }

        [Column("StartType")]
        public int? StartTypeId { get; set; }

        public Guid? TowFlightId { get; set; }

        public int? NrOfLdgs { get; set; }

        public int? NrOfLdgsOnStartLocation { get; set; }

        public bool NoStartTimeInformation { get; set; }

        public bool NoLdgTimeInformation { get; set; }

        public int AirStateId { get; set; }

        public int ProcessStateId { get; set; }

        public int FlightAircraftType { get; set; }

        public string Comment { get; set; }

        public string IncidentComment { get; set; }

        [StringLength(20)]
        public string CouponNumber { get; set; }

        [Column("FlightCostBalanceType")]
        public int? FlightCostBalanceTypeId { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? DeliveryCreatedOn { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? ValidatedOn { get; set; }

        public string ValidationErrors { get; set; }

        public int? NrOfPassengers { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime CreatedOn { get; set; }

        public Guid CreatedByUserId { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? ModifiedOn { get; set; }

        public Guid? ModifiedByUserId { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? DeletedOn { get; set; }

        public Guid? DeletedByUserId { get; set; }

        public int? RecordState { get; set; }

        public Guid OwnerId { get; set; }

        public int OwnershipType { get; set; }

        public bool IsDeleted { get; set; }

        [Column(TypeName = "datetime2")]
        public DateTime? FlightReportSentOn { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether [do not update meta data].
        /// Used for workflow processes to not create a modified user error when trying to save records.
        /// </summary>
        /// <value>
        ///   <c>true</c> if [do not update meta data]; otherwise, <c>false</c>.
        /// </value>
        public bool DoNotUpdateMetaData { get; set; }

        // Navigation properties
        public virtual Aircraft Aircraft { get; set; }

        public virtual FlightCostBalanceType FlightCostBalanceType { get; set; }

        public virtual ICollection<FlightCrew> FlightCrews { get; set; }

        public virtual ICollection<Flight> TowedFlights { get; set; }

        public virtual ICollection<Delivery> Deliveries { get; set; }

        public virtual ICollection<DeliveryCreationTest> DeliveryCreationTests { get; set; }

        public virtual Flight TowFlight { get; set; }

        public virtual FlightAirState FlightAirState { get; set; }

        public virtual FlightProcessState FlightProcessState { get; set; }

        public virtual FlightType FlightType { get; set; }

        public virtual Location LdgLocation { get; set; }

        public virtual Location StartLocation { get; set; }

        public virtual StartType StartType { get; set; }

        #region additional methods
        /// <summary>
        /// Derives the air state of the flight from the available start/landing time
        /// information, falling back to the stored AirStateId for open flight plans.
        /// </summary>
        internal int GetCalculatedFlightAirStateId()
        {
            if (LdgDateTime.HasValue)
            {
                return (int)FLS.Data.WebApi.Flight.FlightAirState.Landed;
            }

            if (NoLdgTimeInformation)
            {
                if (StartDateTime.HasValue)
                {
                    return (int)FLS.Data.WebApi.Flight.FlightAirState.MightBeLandedOrInAir;
                }
            }

            if (StartDateTime.HasValue)
            {
                return (int)FLS.Data.WebApi.Flight.FlightAirState.Started;
            }

            if (NoStartTimeInformation)
            {
                return (int)FLS.Data.WebApi.Flight.FlightAirState.MightBeStarted;
            }

            if (AirStateId == (int)FLS.Data.WebApi.Flight.FlightAirState.FlightPlanOpen)
            {
                return (int)FLS.Data.WebApi.Flight.FlightAirState.FlightPlanOpen;
            }

            return (int)FLS.Data.WebApi.Flight.FlightAirState.New;
        }

        /// <summary>
        /// Gets the calculated nr of landings based on the current FlightState.
        /// </summary>
        /// <param name="isTowingOrWinchRequired">true when the aircraft cannot start on its own
        /// (glider without engine/selfstart), limiting the flight to exactly one landing.</param>
        /// <returns>The calculated number of landings, or the stored value when no rule applies.</returns>
        internal int? GetCalculatedNrOfLandings(bool isTowingOrWinchRequired)
        {
            var currentFlightAirStateId = GetCalculatedFlightAirStateId();

            if (currentFlightAirStateId <= (int)FLS.Data.WebApi.Flight.FlightAirState.Started)
            {
                // not or might be started or just started flights do not have a landing
                return 0;
            }

            if (currentFlightAirStateId == (int)FLS.Data.WebApi.Flight.FlightAirState.Landed
                && (NrOfLdgs.HasValue == false || NrOfLdgs.Value <= 0))
            {
                // only set a value if it is landed and has no value
                return 1;
            }

            if (isTowingOrWinchRequired)
            {
                // it is a glider flight without engine or selfstart ability, so maximum landings is fix 1!
                return 1;
            }

            return NrOfLdgs;
        }

        /// <summary>
        /// returns difference between landing and starting of flight (flight duration),
        /// or since how long flight is started, or zero when flight is not started
        /// </summary>
        [NotMapped]
        public TimeSpan FlightDurationZeroBased
        {
            get
            {
                if (StartDateTime.HasValue == false)
                {
                    return TimeSpan.Zero;
                }

                TimeSpan ret;

                if (LdgDateTime.HasValue)
                {
                    ret = LdgDateTime.Value - StartDateTime.Value;
                }
                else
                {
                    // Flight is still in the air: measure against "now" using the same
                    // kind (local/UTC) as the stored start time.
                    if (StartDateTime.Value.Kind == DateTimeKind.Local)
                    {
                        ret = DateTime.Now - StartDateTime.Value;
                    }
                    else
                    {
                        ret = DateTime.UtcNow - StartDateTime.Value;
                    }
                }

                return TimeSpan.FromSeconds(Math.Round(ret.TotalSeconds));
            }
        }

        /// <summary>
        /// Exact flight duration, or null when start/landing time is missing or explicitly unknown.
        /// </summary>
        [NotMapped]
        public TimeSpan? FlightDuration
        {
            get
            {
                if (StartDateTime.HasValue == false || LdgDateTime.HasValue == false
                    || NoStartTimeInformation || NoLdgTimeInformation)
                {
                    return null;
                }

                return LdgDateTime.Value - StartDateTime.Value;
            }
        }

        /// <summary>
        /// Engine running time derived from the operating counters, or zero when a counter is missing.
        /// </summary>
        [NotMapped]
        public TimeSpan EngineDurationZeroBased
        {
            get
            {
                if (EngineStartOperatingCounterInSeconds.HasValue && EngineEndOperatingCounterInSeconds.HasValue)
                {
                    return TimeSpan.FromSeconds(EngineEndOperatingCounterInSeconds.Value - EngineStartOperatingCounterInSeconds.Value);
                }

                return TimeSpan.Zero;
            }
        }

        /// <summary>
        /// returns the pilot of the flightcrew
        /// </summary>
        public FlightCrew Pilot
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.PilotOrStudent)); }
        }

        /// <summary>
        /// returns the copilot of the flightcrew
        /// </summary>
        public FlightCrew CoPilot
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.CoPilot)); }
        }

        /// <summary>
        /// returns the instructor of the flightcrew
        /// </summary>
        public FlightCrew Instructor
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.FlightInstructor)); }
        }

        /// <summary>
        /// returns the observer of the flightcrew
        /// </summary>
        public FlightCrew ObserverPerson
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.Observer)); }
        }

        /// <summary>
        /// returns the first passenger of the flightcrew.
        /// </summary>
        public FlightCrew Passenger
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.Passenger)); }
        }

        /// <summary>
        /// returns all passengers of the flightcrew.
        /// </summary>
        public List<FlightCrew> Passengers
        {
            get { return FlightCrews.Where(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.Passenger)).ToList(); }
        }

        /// <summary>
        /// returns the winch operator of the flightcrew
        /// </summary>
        public FlightCrew WinchOperator
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.WinchOperator)); }
        }

        /// <summary>
        /// returns the invoice recipient of the flightcrew
        /// </summary>
        public FlightCrew InvoiceRecipient
        {
            get { return FlightCrews.FirstOrDefault(crew => crew.FlightCrewTypeId.Equals((int)FLS.Data.WebApi.Flight.FlightCrewType.FlightCostInvoiceRecipient)); }
        }

        /// <summary>
        /// returns if flight is towed by aircraft. If the StartType is not set, it returns null.
        /// </summary>
        public bool? IsTowed
        {
            get
            {
                if (StartTypeId.HasValue == false) return null;
                return StartTypeId == (int)AircraftStartType.TowingByAircraft;
            }
        }

        /// <summary>
        /// IFLSMetaData id, mapped onto <see cref="FlightId"/>.
        /// </summary>
        public Guid Id
        {
            get { return FlightId; }
            set { FlightId = value; }
        }

        public bool IsGliderFlight
        {
            get { return FlightAircraftType == (int)FlightAircraftTypeValue.GliderFlight; }
        }

        public bool IsTowFlight
        {
            get { return FlightAircraftType == (int)FlightAircraftTypeValue.TowFlight; }
        }

        public bool IsMotorFlight
        {
            get { return FlightAircraftType == (int)FlightAircraftTypeValue.MotorFlight; }
        }

        public string PilotDisplayName
        {
            get
            {
                if (Pilot != null && Pilot.Person != null)
                {
                    return Pilot.Person.DisplayName;
                }

                return string.Empty;
            }
        }

        public string InstructorDisplayName
        {
            get
            {
                if (Instructor != null && Instructor.Person != null)
                {
                    return Instructor.Person.DisplayName;
                }

                return string.Empty;
            }
        }

        public string CoPilotDisplayName
        {
            get
            {
                if (CoPilot != null && CoPilot.Person != null)
                {
                    return CoPilot.Person.DisplayName;
                }

                return string.Empty;
            }
        }

        public string PassengerDisplayName
        {
            get
            {
                if (Passenger != null && Passenger.Person != null)
                {
                    return Passenger.Person.DisplayName;
                }

                return string.Empty;
            }
        }

        public string AircraftImmatriculation
        {
            get
            {
                if (Aircraft != null)
                {
                    return Aircraft.Immatriculation;
                }

                return string.Empty;
            }
        }

        /// <summary>
        /// A flight counts as started when it has a start time but no landing time yet.
        /// </summary>
        public bool IsStarted
        {
            get { return StartDateTime.HasValue && LdgDateTime.HasValue == false; }
        }

        public override string ToString()
        {
            var sb = new StringBuilder();

            if (IsGliderFlight)
            {
                sb.Append("Glider-Flight: ");
            }
            else if (IsTowFlight)
            {
                sb.Append("Tow-Flight: ");
            }
            else if (IsMotorFlight)
            {
                sb.Append("Motor-Flight: ");
            }
            else
            {
                sb.Append("Unknown Flight: ");
            }

            sb.Append(AircraftImmatriculation);
            sb.Append(", Pilot: ");
            sb.Append(PilotDisplayName);

            if (StartDateTime.HasValue)
            {
                sb.Append(", Flight-Date: ");
                sb.Append(StartDateTime.Value.ToShortDateString());
                sb.Append(", Starttime: ");
                sb.Append(StartDateTime.Value.ToShortTimeString());
            }

            if (LdgDateTime.HasValue)
            {
                sb.Append(", LdgTime: ");
                sb.Append(LdgDateTime.Value.ToShortTimeString());
            }

            if (FlightType != null)
            {
                sb.Append(", Flightcode: ");
                sb.Append(FlightType.FlightCode);
            }

            sb.Append($", Nr of Ldgs: {NrOfLdgs.GetValueOrDefault(0)}/{NrOfLdgsOnStartLocation.GetValueOrDefault(0)}");
            sb.Append(", Flight-Air-State: ");
            sb.Append(AirStateId);
            sb.Append(", Flight-Process-State: ");
            sb.Append(ProcessStateId);

            if (TowFlight != null)
            {
                sb.Append(", TowFlight: ");
                sb.Append(TowFlight.AircraftImmatriculation);
                sb.Append(", TowPilot: ");
                sb.Append(TowFlight.PilotDisplayName);
                sb.Append(", Tow-Flight-Air-State: ");
                sb.Append(TowFlight.AirStateId);
                sb.Append(", Tow-Flight-Process-State: ");
                sb.Append(TowFlight.ProcessStateId);
            }

            return sb.ToString();
        }

        /// <summary>
        /// Calculates whether this is a solo flight: true for single-seaters and solo
        /// flight types, false for passenger flight types, null when undecidable.
        /// </summary>
        public bool? GetCalculatedIsSoloFlight(Aircraft aircraft = null, FlightType flightType = null)
        {
            if (aircraft != null)
            {
                if (aircraft.NrOfSeats.HasValue && aircraft.NrOfSeats.Value == 1)
                {
                    return true;
                }
            }

            if (flightType != null)
            {
                if (flightType.IsSoloFlight)
                {
                    // BUGFIX: previously only the property was set as a side effect and the
                    // method fell through to return null. The side effect is kept for
                    // backward compatibility, but the method now also reports true.
                    IsSoloFlight = true;
                    return true;
                }
                else if (flightType.IsPassengerFlight)
                {
                    return false;
                }
            }

            return null;
        }

        /// <summary>
        /// Validates the flight and sets <see cref="ProcessStateId"/> to Valid or Invalid.
        /// A referenced tow flight is validated recursively.
        /// </summary>
        internal void ValidateFlight()
        {
            ValidatedOn = DateTime.UtcNow;

            if (AircraftId == Guid.Empty
                || Pilot == null
                || Pilot.PersonId == Guid.Empty
                || (StartDateTime.HasValue == false && NoStartTimeInformation == false)
                || (LdgDateTime.HasValue == false && NoLdgTimeInformation == false)
                || StartLocationId.HasValue == false
                || LdgLocationId.HasValue == false
                || StartTypeId.HasValue == false
                || FlightTypeId.HasValue == false
                || NrOfLdgs.HasValue == false
                || NrOfLdgs.Value < 1)
            {
                ProcessStateId = (int) FLS.Data.WebApi.Flight.FlightProcessState.Invalid;
                return;
            }

            if (FlightAircraftType == (int) FlightAircraftTypeValue.TowFlight)
            {
                //validation finished
                ProcessStateId = (int)FLS.Data.WebApi.Flight.FlightProcessState.Valid;
                return;
            }

            // Start-type specific checks (SelfStart and MotorFlightStart need no extra checks).
            var isInvalid = false;

            if (StartTypeId.Value == (int)AircraftStartType.TowingByAircraft)
            {
                if (TowFlightId == Guid.Empty || TowFlight == null)
                {
                    isInvalid = true;
                }

                if (TowFlight != null)
                {
                    TowFlight.ValidateFlight();
                }
            }
            else if (StartTypeId.Value == (int)AircraftStartType.ExternalStart)
            {
                if (TowFlightId.HasValue)
                {
                    isInvalid = true;
                }
            }
            else if (StartTypeId.Value == (int)AircraftStartType.WinchLaunch)
            {
                if (WinchOperator == null || WinchOperator.HasPerson == false)
                {
                    isInvalid = true;
                }
            }

            // BUGFIX: ProcessStateId was previously set to Valid unconditionally here,
            // silently overwriting the Invalid state set by the checks above.
            ProcessStateId = isInvalid
                ? (int)FLS.Data.WebApi.Flight.FlightProcessState.Invalid
                : (int)FLS.Data.WebApi.Flight.FlightProcessState.Valid;
        }

        /// <summary>
        /// This method sets the flight process state back to the locked state, to create a new delivery during next workflow process.
        /// </summary>
        public void DeletedDeliveryForFlight()
        {
            ProcessStateId = (int) FLS.Data.WebApi.Flight.FlightProcessState.Locked;
        }
        #endregion additional methods
    }
}
using System;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;

namespace Wall
{
    /// <summary>
    /// Extension methods for String class.
    /// </summary>
    public static class StringExtensions
    {
        /// <summary>
        /// Adds a char to end of given string if it does not ends with the char.
        /// Uses ordinal comparison.
        /// </summary>
        public static string EnsureEndsWith(this string str, char c)
            => str.EnsureEndsWith(c, StringComparison.Ordinal);

        /// <summary>
        /// Adds a char to end of given string if it does not ends with the char,
        /// using the given comparison type.
        /// </summary>
        public static string EnsureEndsWith(this string str, char c, StringComparison comparisonType)
        {
            if (str == null)
                throw new ArgumentNullException(nameof(str));

            return str.EndsWith(c.ToString(), comparisonType)
                ? str
                : str + c;
        }

        /// <summary>
        /// Adds a char to beginning of given string if it does not starts with the char.
        /// Uses ordinal comparison.
        /// </summary>
        public static string EnsureStartsWith(this string str, char c)
            => str.EnsureStartsWith(c, StringComparison.Ordinal);

        /// <summary>
        /// Adds a char to beginning of given string if it does not starts with the char,
        /// using the given comparison type.
        /// </summary>
        public static string EnsureStartsWith(this string str, char c, StringComparison comparisonType)
        {
            if (str == null)
                throw new ArgumentNullException(nameof(str));

            return str.StartsWith(c.ToString(), comparisonType)
                ? str
                : c + str;
        }

        /// <summary>
        /// Indicates whether this string is null or an System.String.Empty string.
        /// </summary>
        public static bool IsNullOrEmpty(this string str) => string.IsNullOrEmpty(str);

        /// <summary>
        /// Indicates whether this string is null, empty, or consists only of white-space characters.
        /// </summary>
        public static bool IsNullOrWhiteSpace(this string str) => string.IsNullOrWhiteSpace(str);

        /// <summary>
        /// Gets a substring of a string from beginning of the string.
/// </summary>
        /// <exception cref="ArgumentNullException">Thrown if <paramref name="str"/> is null</exception>
        /// <exception cref="ArgumentException">Thrown if <paramref name="len"/> is bigger that string's length</exception>
        public static string Left(this string str, int len)
        {
            if (str == null)
                throw new ArgumentNullException(nameof(str));
            if (str.Length < len)
                throw new ArgumentException("len argument can not be bigger than given string's length!");

            return str.Substring(0, len);
        }

        /// <summary>
        /// Converts line endings in the string to <see cref="Environment.NewLine"/>.
        /// </summary>
        public static string NormalizeLineEndings(this string str)
        {
            if (str == null)
                throw new ArgumentNullException(nameof(str));

            // First unify everything to "\n", then expand to the platform line ending.
            var unified = str.Replace("\r\n", "\n").Replace("\r", "\n");
            return unified.Replace("\n", Environment.NewLine);
        }

        /// <summary>
        /// Gets a substring of a string from end of the string.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown if <paramref name="str"/> is null</exception>
        /// <exception cref="ArgumentException">Thrown if <paramref name="len"/> is bigger that string's length</exception>
        public static string Right(this string str, int len)
        {
            if (str == null)
                throw new ArgumentNullException(nameof(str));
            if (str.Length < len)
                throw new ArgumentException("len argument can not be bigger than given string's length!");

            return str.Substring(str.Length - len, len);
        }

        /// <summary>
        /// Uses string.Split method to split given string by given separator.
        /// Empty entries are kept.
        /// </summary>
        public static string[] Split(this string str, string separator)
        {
            if (str == null)
                throw new ArgumentNullException(nameof(str));

            var separators = new[] { separator };
            return str.Split(separators, StringSplitOptions.None);
        }

        /// <summary>
        /// Uses string.Split method to split given string by given separator.
/// </summary> public static string[] Split(this string str, string separator, StringSplitOptions options) { if (str == null) throw new ArgumentNullException(nameof(str)); return str.Split(new[] { separator }, options); } /// <summary> /// Converts PascalCase string to camelCase string. /// </summary> /// <param name="str">String to convert</param> /// <param name="invariantCulture">Invariant culture</param> /// <returns>camelCase of the string</returns> public static string ToCamelCase(this string str, bool invariantCulture = true) { if (string.IsNullOrWhiteSpace(str)) return str; if (str.Length == 1) return invariantCulture ? str.ToLowerInvariant() : str.ToLower(); return (invariantCulture ? char.ToLowerInvariant(str[0]) : char.ToLower(str[0])) + str.Substring(1); } /// <summary> /// Converts given PascalCase/camelCase string to sentence (by splitting words by space). /// Example: "ThisIsSampleSentence" is converted to "This is a sample sentence". /// </summary> /// <param name="str">String to convert.</param> /// <param name="invariantCulture">Invariant culture</param> public static string ToSentenceCase(this string str, bool invariantCulture = false) { if (string.IsNullOrWhiteSpace(str)) return str; return Regex.Replace( str, "[a-z][A-Z]", m => m.Value[0] + " " + (invariantCulture ? char.ToLowerInvariant(m.Value[1]) : char.ToLower(m.Value[1])) ); } /// <summary> /// Converts string to enum value. /// </summary> /// <typeparam name="T">Type of enum</typeparam> /// <param name="value">String value to convert</param> /// <returns>Returns enum object</returns> public static T ToEnum<T>(this string value) where T : struct { if (value == null) throw new ArgumentNullException(nameof(value)); return (T)Enum.Parse(typeof(T), value); } /// <summary> /// Converts string to enum value. 
/// </summary> /// <typeparam name="T">Type of enum</typeparam> /// <param name="value">String value to convert</param> /// <param name="ignoreCase">Ignore case</param> /// <returns>Returns enum object</returns> public static T ToEnum<T>(this string value, bool ignoreCase) where T : struct { if (value == null) throw new ArgumentNullException(nameof(value)); return (T)Enum.Parse(typeof(T), value, ignoreCase); } public static string ToMd5(this string str) { using (var md5 = MD5.Create()) { var inputBytes = Encoding.UTF8.GetBytes(str); var hashBytes = md5.ComputeHash(inputBytes); var sb = new StringBuilder(); foreach (var hashByte in hashBytes) sb.Append(hashByte.ToString("X2")); return sb.ToString(); } } /// <summary> /// Converts camelCase string to PascalCase string. /// </summary> /// <param name="str">String to convert</param> /// <param name="invariantCulture">Invariant culture</param> /// <returns>PascalCase of the string</returns> public static string ToPascalCase(this string str, bool invariantCulture = true) { if (string.IsNullOrWhiteSpace(str)) return str; if (str.Length == 1) return invariantCulture ? str.ToUpperInvariant() : str.ToUpper(); return (invariantCulture ? char.ToUpperInvariant(str[0]) : char.ToUpper(str[0])) + str.Substring(1); } /// <summary> /// Gets a substring of a string from beginning of the string if it exceeds maximum length. /// </summary> /// <exception cref="ArgumentNullException">Thrown if <paramref name="str"/> is null</exception> public static string Truncate(this string str, int maxLength) { if (str == null) return null; return str.Length <= maxLength ? str : str.Left(maxLength); } /// <summary> /// Gets a substring of a string from beginning of the string if it exceeds maximum length. /// It adds a "..." postfix to end of the string if it's truncated. /// Returning string can not be longer than maxLength. 
/// </summary> /// <exception cref="ArgumentNullException">Thrown if <paramref name="str"/> is null</exception> public static string TruncateWithPostfix(this string str, int maxLength) { return TruncateWithPostfix(str, maxLength, "..."); } /// <summary> /// Gets a substring of a string from beginning of the string if it exceeds maximum length. /// It adds given <paramref name="postfix"/> to end of the string if it's truncated. /// Returning string can not be longer than maxLength. /// </summary> /// <exception cref="ArgumentNullException">Thrown if <paramref name="str"/> is null</exception> public static string TruncateWithPostfix(this string str, int maxLength, string postfix) { if (str == null) return null; if (str == string.Empty || maxLength == 0) return string.Empty; if (str.Length <= maxLength) return str; if (maxLength <= postfix.Length) return postfix.Left(maxLength); return str.Left(maxLength - postfix.Length) + postfix; } } }
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
using FormattingEnum = Newtonsoft.Json.Formatting;

namespace WebApiContrib.Formatting.Xlsx.Sample.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// Samples come from three sources, in priority order: samples registered directly per action
    /// (<see cref="ActionSamples"/>), sample objects registered per type (<see cref="SampleObjects"/>),
    /// and objects created on demand by <see cref="SampleObjectFactories"/>.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // DefaultSampleObjectFactory (an ObjectGenerator wrapper) is the initial, lowest-priority factory.
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langword="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// Directly-registered action samples take priority; formatter-generated samples fill in
        /// any media types not already covered.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        /// <exception cref="ArgumentNullException">Thrown if <paramref name="api"/> is null.</exception>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Directly-provided action samples (added above) win over generated ones.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters, or null if none was registered.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object, or null if no registration or factory produced one.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        /// <exception cref="InvalidEnumArgumentException">Thrown if <paramref name="sampleDirection"/> is not a defined <see cref="SampleDirection"/> value.</exception>
        /// <exception cref="ArgumentNullException">Thrown if <paramref name="api"/> is null.</exception>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters",
            Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // An explicit registration in ActualHttpMessageTypes overrides what the ApiDescription reports.
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <see cref="TextSample"/> on success; an <see cref="InvalidSample"/> describing the failure otherwise.</returns>
        /// <exception cref="ArgumentNullException">Thrown if <paramref name="formatter"/> or <paramref name="mediaType"/> is null.</exception>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // NOTE(review): synchronous .Wait() on the async write — presumably acceptable in
                    // this help-page generation context; confirm there is no sync-context deadlock risk.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print XML/JSON payloads based on the media type name; leave others as-is.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Returns the innermost exception of a flattened AggregateException (e.g. from Task.Wait),
        // or the exception itself for any other type.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, FormattingEnum.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // True if the formatter can handle the type in the given direction
        // (read for requests, write for responses).
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Lazily yields the ActionSamples entries whose key matches the given controller/action/direction,
        // accepting either an exact (case-insensitive) parameter-name set match or the "*" wildcard.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped in a TextSample so the help page renders them as preformatted text.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
//#define ASTAR_FAST_NO_EXCEPTIONS //@SHOWINEDITOR Needs to be enabled for the iPhone build setting Fast But No Exceptions to work. using UnityEngine; using System.Collections; using System.Collections.Generic; //using Pathfinding; using Pathfinding.Util; using System.Reflection; #if UNITY_WINRT && !UNITY_EDITOR //using MarkerMetro.Unity.WinLegacy.IO; //using MarkerMetro.Unity.WinLegacy.Reflection; #endif namespace Pathfinding { [System.Serializable] /** Stores the navigation graphs for the A* Pathfinding System. * \ingroup relevant * * An instance of this class is assigned to AstarPath.astarData, from it you can access all graphs loaded through the #graphs variable.\n * This class also handles a lot of the high level serialization. */ public class AstarData { /** Shortcut to AstarPath.active */ public AstarPath active { get { return AstarPath.active; } } #region Fields [System.NonSerialized] public NavMeshGraph navmesh; /**< Shortcut to the first NavMeshGraph. Updated at scanning time. This is the only reference to NavMeshGraph in the core pathfinding scripts */ [System.NonSerialized] public GridGraph gridGraph; /**< Shortcut to the first GridGraph. Updated at scanning time. This is the only reference to GridGraph in the core pathfinding scripts */ [System.NonSerialized] public PointGraph pointGraph; /**< Shortcut to the first PointGraph. Updated at scanning time. This is the only reference to PointGraph in the core pathfinding scripts */ /** All supported graph types. Populated through reflection search */ public System.Type[] graphTypes = null; #if ASTAR_FAST_NO_EXCEPTIONS || UNITY_WINRT /** Graph types to use when building with Fast But No Exceptions for iPhone. * If you add any custom graph types, you need to add them to this hard-coded list. */ public static readonly System.Type[] DefaultGraphTypes = new System.Type[] { typeof(GridGraph), typeof(PointGraph), typeof(NavMeshGraph), }; #endif [System.NonSerialized] /** All graphs this instance holds. 
* This will be filled only after deserialization has completed. * May contain null entries if graph have been removed. */ public NavGraph[] graphs = new NavGraph[0]; /** Links placed by the user in the scene view. */ [System.NonSerialized] public UserConnection[] userConnections = new UserConnection[0]; //Serialization Settings /** Has the data been reverted by an undo operation. * Used by the editor's undo logic to check if the AstarData has been reverted by an undo operation and should be deserialized. * \version Only used by Unity versions < U4.5 */ public bool hasBeenReverted = false; [SerializeField] /** Serialized data for all graphs and settings. */ private byte[] data; public uint dataChecksum; /** Backup data if deserialization failed. */ public byte[] data_backup; /** Serialized data for cached startup. * If set, on start the graphs will be deserialized from this file. */ public TextAsset file_cachedStartup; /** Serialized data for cached startup. * * \deprecated */ public byte[] data_cachedStartup; /** Should graph-data be cached. * Caching the startup means saving the whole graphs, not only the settings to an internal array (#data_cachedStartup) which can * be loaded faster than scanning all graphs at startup. This is setup from the editor. */ [SerializeField] public bool cacheStartup = false; //End Serialization Settings #endregion public byte[] GetData () { return data; } public void SetData (byte[] data, uint checksum) { this.data = data; dataChecksum = checksum; } /** Loads the graphs from memory, will load cached graphs if any exists */ public void Awake () { /* Set up default values, to not throw null reference errors */ userConnections = new UserConnection[0]; graphs = new NavGraph[0]; /* End default values */ if (cacheStartup && file_cachedStartup != null) { LoadFromCache (); } else { DeserializeGraphs (); } } /** Updates shortcuts to the first graph of different types. * Hard coding references to some graph types is not really a good thing imo. 
I want to keep it dynamic and flexible. * But these references ease the use of the system, so I decided to keep them.\n */ public void UpdateShortcuts () { navmesh = (NavMeshGraph)FindGraphOfType (typeof(NavMeshGraph)); gridGraph = (GridGraph)FindGraphOfType (typeof(GridGraph)); pointGraph = (PointGraph)FindGraphOfType (typeof(PointGraph)); } /** Load from data from #file_cachedStartup */ public void LoadFromCache () { AstarPath.active.BlockUntilPathQueueBlocked(); if (file_cachedStartup != null) { var bytes = file_cachedStartup.bytes; DeserializeGraphs (bytes); GraphModifier.TriggerEvent (GraphModifier.EventType.PostCacheLoad); } else { Debug.LogError ("Can't load from cache since the cache is empty"); } } #region Serialization /** Serializes all graphs settings to a byte array. * \see DeserializeGraphs(byte[]) */ public byte[] SerializeGraphs () { return SerializeGraphs (Pathfinding.Serialization.SerializeSettings.Settings); } /** Serializes all graphs settings and optionally node data to a byte array. * \see DeserializeGraphs(byte[]) * \see Pathfinding.Serialization.SerializeSettings */ public byte[] SerializeGraphs (Pathfinding.Serialization.SerializeSettings settings) { uint checksum; return SerializeGraphs (settings, out checksum); } /** Main serializer function. * Serializes all graphs to a byte array * A similar function exists in the AstarPathEditor.cs script to save additional info */ public byte[] SerializeGraphs (Pathfinding.Serialization.SerializeSettings settings, out uint checksum) { AstarPath.active.BlockUntilPathQueueBlocked(); Pathfinding.Serialization.AstarSerializer sr = new Pathfinding.Serialization.AstarSerializer(this, settings); sr.OpenSerialize(); SerializeGraphsPart (sr); byte[] bytes = sr.CloseSerialize(); checksum = sr.GetChecksum (); return bytes; } /** Serializes common info to the serializer. * Common info is what is shared between the editor serialization and the runtime serializer. 
* This is mostly everything except the graph inspectors which serialize some extra data in the editor */ public void SerializeGraphsPart (Pathfinding.Serialization.AstarSerializer sr) { sr.SerializeGraphs(graphs); sr.SerializeUserConnections (userConnections); sr.SerializeNodes(); sr.SerializeExtraInfo(); } /** Deserializes graphs from #data */ public void DeserializeGraphs () { if (data != null) { DeserializeGraphs (data); } } /** Destroys all graphs and sets graphs to null */ void ClearGraphs () { if ( graphs == null ) return; for (int i=0;i<graphs.Length;i++) { if (graphs[i] != null) graphs[i].OnDestroy (); } graphs = null; UpdateShortcuts (); } public void OnDestroy () { ClearGraphs (); } /** Deserializes graphs from the specified byte array. * If an error occured, it will try to deserialize using the old deserializer. * A warning will be logged if all deserializers failed. */ public void DeserializeGraphs (byte[] bytes) { AstarPath.active.BlockUntilPathQueueBlocked(); try { if (bytes != null) { Pathfinding.Serialization.AstarSerializer sr = new Pathfinding.Serialization.AstarSerializer(this); if (sr.OpenDeserialize(bytes)) { DeserializeGraphsPart (sr); sr.CloseDeserialize(); UpdateShortcuts (); } else { Debug.Log ("Invalid data file (cannot read zip).\nThe data is either corrupt or it was saved using a 3.0.x or earlier version of the system"); } } else { throw new System.ArgumentNullException ("Bytes should not be null when passed to DeserializeGraphs"); } active.VerifyIntegrity (); } catch (System.Exception e) { Debug.LogWarning ("Caught exception while deserializing data.\n"+e); data_backup = bytes; } } /** Deserializes graphs from the specified byte array additively. * If an error ocurred, it will try to deserialize using the old deserializer. * A warning will be logged if all deserializers failed. 
* This function will add loaded graphs to the current ones */ public void DeserializeGraphsAdditive (byte[] bytes) { AstarPath.active.BlockUntilPathQueueBlocked(); try { if (bytes != null) { Pathfinding.Serialization.AstarSerializer sr = new Pathfinding.Serialization.AstarSerializer(this); if (sr.OpenDeserialize(bytes)) { DeserializeGraphsPartAdditive (sr); sr.CloseDeserialize(); } else { Debug.Log ("Invalid data file (cannot read zip)."); } } else { throw new System.ArgumentNullException ("Bytes should not be null when passed to DeserializeGraphs"); } active.VerifyIntegrity (); } catch (System.Exception e) { Debug.LogWarning ("Caught exception while deserializing data.\n"+e); } } /** Deserializes common info. * Common info is what is shared between the editor serialization and the runtime serializer. * This is mostly everything except the graph inspectors which serialize some extra data in the editor */ public void DeserializeGraphsPart (Pathfinding.Serialization.AstarSerializer sr) { ClearGraphs (); graphs = sr.DeserializeGraphs (); if ( graphs != null ) for ( int i = 0; i<graphs.Length;i++ ) if ( graphs[i] != null ) graphs[i].graphIndex = (uint)i; userConnections = sr.DeserializeUserConnections(); //sr.DeserializeNodes(); sr.DeserializeExtraInfo(); //Assign correct graph indices. for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; graphs[i].GetNodes (delegate (GraphNode node) { node.GraphIndex = (uint)i; return true; }); } sr.PostDeserialization(); } /** Deserializes common info additively * Common info is what is shared between the editor serialization and the runtime serializer. 
* This is mostly everything except the graph inspectors which serialize some extra data in the editor */ public void DeserializeGraphsPartAdditive (Pathfinding.Serialization.AstarSerializer sr) { if (graphs == null) graphs = new NavGraph[0]; if (userConnections == null) userConnections = new UserConnection[0]; List<NavGraph> gr = new List<NavGraph>(graphs); gr.AddRange (sr.DeserializeGraphs ()); graphs = gr.ToArray(); if ( graphs != null ) for ( int i = 0; i<graphs.Length;i++ ) if ( graphs[i] != null ) graphs[i].graphIndex = (uint)i; List<UserConnection> conns = new List<UserConnection>(userConnections); conns.AddRange (sr.DeserializeUserConnections()); userConnections = conns.ToArray (); sr.DeserializeNodes(); //Assign correct graph indices. Issue #21 for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; graphs[i].GetNodes (delegate (GraphNode node) { //GraphNode[] nodes = graphs[i].nodes; node.GraphIndex = (uint)i; return true; }); } sr.DeserializeExtraInfo(); sr.PostDeserialization(); for (int i=0;i<graphs.Length;i++) { for (int j=i+1;j<graphs.Length;j++) { if (graphs[i] != null && graphs[j] != null && graphs[i].guid == graphs[j].guid) { Debug.LogWarning ("Guid Conflict when importing graphs additively. Imported graph will get a new Guid.\nThis message is (relatively) harmless."); graphs[i].guid = Pathfinding.Util.Guid.NewGuid (); break; } } } } #endregion /** Find all graph types supported in this build. * Using reflection, the assembly is searched for types which inherit from NavGraph. 
*/ public void FindGraphTypes () { #if !ASTAR_FAST_NO_EXCEPTIONS && !UNITY_WINRT System.Reflection.Assembly asm = System.Reflection.Assembly.GetAssembly (typeof(AstarPath)); System.Type[] types = asm.GetTypes (); List<System.Type> graphList = new List<System.Type> (); foreach (System.Type type in types) { #if NETFX_CORE && !UNITY_EDITOR System.Type baseType = type.GetTypeInfo().BaseType; #else System.Type baseType = type.BaseType; #endif while (baseType != null) { if (System.Type.Equals ( baseType, typeof(NavGraph) )) { graphList.Add (type); break; } #if NETFX_CORE && !UNITY_EDITOR baseType = baseType.GetTypeInfo().BaseType; #else baseType = baseType.BaseType; #endif } } graphTypes = graphList.ToArray (); #else graphTypes = DefaultGraphTypes; #endif } #region GraphCreation /** \returns A System.Type which matches the specified \a type string. If no mathing graph type was found, null is returned */ public System.Type GetGraphType (string type) { for (int i=0;i<graphTypes.Length;i++) { if (graphTypes[i].Name == type) { return graphTypes[i]; } } return null; } /** Creates a new instance of a graph of type \a type. 
If no matching graph type was found, an error is logged and null is returned
 * \returns The created graph
 * \see CreateGraph(System.Type) */
public NavGraph CreateGraph (string type) {
	Debug.Log ("Creating Graph of type '"+type+"'");

	// Resolve the type by name against the cached graphTypes array.
	for (int i=0;i<graphTypes.Length;i++) {
		if (graphTypes[i].Name == type) {
			return CreateGraph (graphTypes[i]);
		}
	}
	Debug.LogError ("Graph type ("+type+") wasn't found");
	return null;
}

/** Creates a new graph instance of type \a type
 * \see CreateGraph(string) */
public NavGraph CreateGraph (System.Type type) {
	// NOTE(review): if 'type' is not a NavGraph subclass the 'as' cast yields null and the
	// next line throws NullReferenceException — consider validating the argument; confirm callers.
	NavGraph g = System.Activator.CreateInstance (type) as NavGraph;
	g.active = active;
	return g;
}

/** Adds a graph of type \a type to the #graphs array */
public NavGraph AddGraph (string type) {
	NavGraph graph = null;

	for (int i=0;i<graphTypes.Length;i++) {
		if (graphTypes[i].Name == type) {
			graph = CreateGraph (graphTypes[i]);
		}
	}

	if (graph == null) {
		Debug.LogError ("No NavGraph of type '"+type+"' could be found");
		return null;
	}

	AddGraph (graph);

	return graph;
}

/** Adds a graph of type \a type to the #graphs array */
public NavGraph AddGraph (System.Type type) {
	NavGraph graph = null;

	for (int i=0;i<graphTypes.Length;i++) {
		if (System.Type.Equals (graphTypes[i], type)) {
			graph = CreateGraph (graphTypes[i]);
		}
	}

	if (graph == null) {
		Debug.LogError ("No NavGraph of type '"+type+"' could be found, "+graphTypes.Length+" graph types are avaliable");
		return null;
	}

	AddGraph (graph);

	return graph;
}

/** Adds the specified graph to the #graphs array */
public void AddGraph (NavGraph graph) {
	// Make sure to not interfere with pathfinding
	AstarPath.active.BlockUntilPathQueueBlocked();

	//Try to fill in an empty position
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] == null) {
			graphs[i] = graph;
			graph.active = active;
			graph.Awake ();
			graph.graphIndex = (uint)i;
			UpdateShortcuts ();
			return;
		}
	}

	// No free slot: growing the array would exceed the maximum representable graph index.
	if (graphs != null && graphs.Length >= GraphNode.MaxGraphIndex) {
		throw new System.Exception("Graph Count Limit Reached. You cannot have more than " + GraphNode.MaxGraphIndex + " graphs. Some compiler directives can change this limit, e.g ASTAR_MORE_AREAS, look under the " +
			"'Optimizations' tab in the A* Inspector");
	}

	//Add a new entry to the list
	List<NavGraph> ls = new List<NavGraph> (graphs);
	ls.Add (graph);
	graphs = ls.ToArray ();

	UpdateShortcuts ();

	graph.active = active;
	graph.Awake ();
	graph.graphIndex = (uint)(graphs.Length-1);
}

/** Removes the specified graph from the #graphs array and Destroys it in a safe manner.
 * To avoid changing graph indices for the other graphs, the graph is simply nulled in the array instead
 * of actually removing it from the array.
 * The empty position will be reused if a new graph is added.
 *
 * \returns True if the graph was successfully removed (i.e it did exist in the #graphs array). False otherwise.
 *
 * \see NavGraph.SafeOnDestroy
 *
 * \version Changed in 3.2.5 to call SafeOnDestroy before removing
 * and nulling it in the array instead of removing the element completely in the #graphs array.
 * */
public bool RemoveGraph (NavGraph graph) {
	//Safe OnDestroy is called since there is a risk that the pathfinding is searching through the graph right now,
	//and if we don't wait until the search has completed we could end up with evil NullReferenceExceptions
	graph.SafeOnDestroy ();

	// Find the graph's slot; if not present, report failure.
	int i=0;
	for (;i<graphs.Length;i++) if (graphs[i] == graph) break;
	if (i == graphs.Length) {
		return false;
	}

	// Null the slot (instead of shrinking the array) so other graphs keep their indices.
	graphs[i] = null;
	UpdateShortcuts ();
	return true;
}

#endregion

#region GraphUtility

/** Returns the graph which contains the specified node. The graph must be in the #graphs array.
 * \returns Returns the graph which contains the node.
Null if the graph wasn't found */
public static NavGraph GetGraph (GraphNode node) {
	// Defensive null-chain: any missing link (node, active AstarPath, data, graphs) yields null.
	if (node == null) return null;

	AstarPath script = AstarPath.active;

	if (script == null) return null;

	AstarData data = script.astarData;

	if (data == null) return null;

	if (data.graphs == null) return null;

	uint graphIndex = node.GraphIndex;

	if (graphIndex >= data.graphs.Length) {
		return null;
	}

	return data.graphs[(int)graphIndex];
}

/** Returns the node at \a graphs[graphIndex].nodes[nodeIndex]. All kinds of error checking is done to make sure no exceptions are thrown. */
public GraphNode GetNode (int graphIndex, int nodeIndex) {
	// Convenience overload forwarding to the array-taking overload with this instance's graphs.
	return GetNode (graphIndex,nodeIndex, graphs);
}

/** Returns the node at \a graphs[graphIndex].nodes[nodeIndex]. The graphIndex refers to the specified graphs array.\n
 * All kinds of error checking is done to make sure no exceptions are thrown */
public GraphNode GetNode (int graphIndex, int nodeIndex, NavGraph[] graphs) {
	// NOTE(review): intentionally unimplemented — the pre-rewrite implementation is preserved
	// below in a comment for reference; callers will always get NotImplementedException.
	throw new System.NotImplementedException ();
	/*
	if (graphs == null) {
		return null;
	}

	if (graphIndex < 0 || graphIndex >= graphs.Length) {
		Debug.LogError ("Graph index is out of range"+graphIndex+ " [0-"+(graphs.Length-1)+"]");
		return null;
	}

	NavGraph graph = graphs[graphIndex];

	if (graph.nodes == null) {
		return null;
	}

	if (nodeIndex < 0 || nodeIndex >= graph.nodes.Length) {
		Debug.LogError ("Node index is out of range : "+nodeIndex+ " [0-"+(graph.nodes.Length-1)+"]"+" (graph "+graphIndex+")");
		return null;
	}

	return graph.nodes[nodeIndex];*/
}

/** Returns the first graph of type \a type found in the #graphs array.
Returns null if none was found */
public NavGraph FindGraphOfType (System.Type type) {
	if ( graphs != null ) {
		// Exact runtime-type match (not IsAssignableFrom): subclasses of 'type' are NOT returned.
		for (int i=0;i<graphs.Length;i++) {
			if (graphs[i] != null && System.Type.Equals (graphs[i].GetType (), type)) {
				return graphs[i];
			}
		}
	}
	return null;
}

/** Loop through this function to get all graphs of type 'type'
 * \code foreach (GridGraph graph in AstarPath.astarData.FindGraphsOfType (typeof(GridGraph))) {
 * //Do something with the graph
 * } \endcode
 * \see AstarPath.RegisterSafeNodeUpdate */
public IEnumerable FindGraphsOfType (System.Type type) {
	// Lazily yields every non-null graph whose runtime type is exactly 'type'.
	if (graphs == null) { yield break; }
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && System.Type.Equals (graphs[i].GetType (), type)) {
			yield return graphs[i];
		}
	}
}

/** All graphs which implements the UpdateableGraph interface
 * \code foreach (IUpdatableGraph graph in AstarPath.astarData.GetUpdateableGraphs ()) {
 * //Do something with the graph
 * } \endcode
 * \see AstarPath.RegisterSafeNodeUpdate
 * \see Pathfinding.IUpdatableGraph */
public IEnumerable GetUpdateableGraphs () {
	// Lazily yields every non-null graph implementing IUpdatableGraph.
	if (graphs == null) { yield break; }
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && graphs[i] is IUpdatableGraph) {
			yield return graphs[i];
		}
	}
}

/** All graphs which implements the IRaycastableGraph interface
 * \code foreach (IRaycastableGraph graph in AstarPath.astarData.GetRaycastableGraphs ()) {
 * //Do something with the graph
 * } \endcode
 * \see Pathfinding.IRaycastableGraph*/
public IEnumerable GetRaycastableGraphs () {
	// Lazily yields every non-null graph implementing IRaycastableGraph.
	if (graphs == null) { yield break; }
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && graphs[i] is IRaycastableGraph) {
			yield return graphs[i];
		}
	}
}

/** Gets the index of the NavGraph in the #graphs array */
public int GetGraphIndex (NavGraph graph) {
	if (graph == null) throw new System.ArgumentNullException ("graph");

	if ( graphs != null ) {
		// Reference-equality search; logs an error and returns -1 when the graph is absent.
		for (int i=0;i<graphs.Length;i++) {
			if (graph == graphs[i]) {
				return i;
			}
		}
	}
	Debug.LogError ("Graph doesn't exist");
	return -1;
}

/**
Tries to find a graph with the specified GUID in the #graphs array.
 * If a graph is found it returns its index, otherwise it returns -1
 * \see GuidToGraph */
public int GuidToIndex (Guid guid) {
	if (graphs == null) {
		return -1;
		//CollectGraphs ();
	}

	// Linear scan, skipping empty (nulled-out) slots left by RemoveGraph.
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] == null) {
			continue;
		}
		if (graphs[i].guid == guid) {
			return i;
		}
	}
	return -1;
}

/** Tries to find a graph with the specified GUID in the #graphs array. Returns null if none is found
 * \see GuidToIndex */
public NavGraph GuidToGraph (Guid guid) {
	if (graphs == null) {
		return null;
		//CollectGraphs ();
	}

	// Same scan as GuidToIndex but returns the graph itself rather than its slot index.
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] == null) {
			continue;
		}
		if (graphs[i].guid == guid) {
			return graphs[i];
		}
	}
	return null;
}

#endregion
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Diagnostics;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.SolutionCrawler
{
    internal partial class SolutionCrawlerRegistrationService
    {
        private partial class WorkCoordinator
        {
            /// <summary>
            /// Base class for the work-coordinator's producer/consumer queues.
            /// Producers call <see cref="AddOrReplace"/>; consumers await <see cref="WaitAsync"/>
            /// and then drain via <see cref="TryTake"/> / <see cref="TryTakeAnyWork"/>.
            /// All mutable state is guarded by <c>_gate</c>; the semaphore's count mirrors the
            /// number of pending items. Methods suffixed <c>_NoLock</c> must be called while
            /// holding <c>_gate</c>.
            /// </summary>
            private abstract class AsyncWorkItemQueue<TKey> : IDisposable
                where TKey : class
            {
                private readonly object _gate;
                private readonly SemaphoreSlim _semaphore;

                private readonly Workspace _workspace;
                private readonly SolutionCrawlerProgressReporter _progressReporter;

                // map containing cancellation source for the item given out.
                private readonly Dictionary<object, CancellationTokenSource> _cancellationMap;

                public AsyncWorkItemQueue(SolutionCrawlerProgressReporter progressReporter, Workspace workspace)
                {
                    _gate = new object();
                    _semaphore = new SemaphoreSlim(initialCount: 0);
                    _cancellationMap = new Dictionary<object, CancellationTokenSource>();

                    _workspace = workspace;
                    _progressReporter = progressReporter;
                }

                protected abstract int WorkItemCount_NoLock { get; }

                protected abstract void Dispose_NoLock();

                protected abstract bool AddOrReplace_NoLock(WorkItem item);

                protected abstract bool TryTake_NoLock(TKey key, out WorkItem workInfo);

                protected abstract bool TryTakeAnyWork_NoLock(ProjectId preferableProjectId, ProjectDependencyGraph dependencyGraph, IDiagnosticAnalyzerService service, out WorkItem workItem);

                public bool HasAnyWork
                {
                    get
                    {
                        lock (_gate)
                        {
                            return HasAnyWork_NoLock;
                        }
                    }
                }

                public void RemoveCancellationSource(object key)
                {
                    lock (_gate)
                    {
                        // just remove cancellation token from the map.
                        // the cancellation token might be passed out to other service
                        // so don't call cancel on the source only because we are done using it.
                        _cancellationMap.Remove(key);
                    }
                }

                /// <summary>Waits until at least one item is available (the semaphore count mirrors pending items).</summary>
                public virtual Task WaitAsync(CancellationToken cancellationToken)
                {
                    return _semaphore.WaitAsync(cancellationToken);
                }

                /// <summary>
                /// Adds the item (or replaces an existing one for the same key, per the subclass's
                /// AddOrReplace_NoLock). Returns true when a new item was actually added, in which
                /// case the semaphore is released to wake a waiting consumer.
                /// </summary>
                public bool AddOrReplace(WorkItem item)
                {
                    // NOTE(review): HasAnyWork is sampled outside the lock below, so Start() can
                    // race with a concurrent take draining the queue — presumably benign given the
                    // progress reporter's semantics; confirm against SolutionCrawlerProgressReporter.
                    if (!HasAnyWork)
                    {
                        // first work is added.
                        _progressReporter.Start();
                    }

                    lock (_gate)
                    {
                        if (AddOrReplace_NoLock(item))
                        {
                            // increase count
                            _semaphore.Release();
                            return true;
                        }

                        return false;
                    }
                }

                public void RequestCancellationOnRunningTasks()
                {
                    List<CancellationTokenSource> cancellations;
                    lock (_gate)
                    {
                        // request to cancel all running works
                        cancellations = CancelAll_NoLock();
                    }

                    RaiseCancellation_NoLock(cancellations);
                }

                public void Dispose()
                {
                    List<CancellationTokenSource> cancellations;
                    lock (_gate)
                    {
                        // here we don't need to care about progress reporter since
                        // it will be only called when host is shutting down.
                        // we do the below since we want to kill any pending tasks
                        Dispose_NoLock();
                        cancellations = CancelAll_NoLock();
                    }

                    RaiseCancellation_NoLock(cancellations);
                }

                private bool HasAnyWork_NoLock => WorkItemCount_NoLock > 0;

                protected Workspace Workspace => _workspace;

                private static void RaiseCancellation_NoLock(List<CancellationTokenSource> cancellations)
                {
                    if (cancellations == null)
                    {
                        return;
                    }

                    // cancel can cause outer code to be run inlined, run it outside of the lock.
                    cancellations.Do(s => s.Cancel());
                }

                /// <summary>Snapshots and clears the cancellation map; returns null when there was nothing to cancel.</summary>
                private List<CancellationTokenSource> CancelAll_NoLock()
                {
                    // nothing to do
                    if (_cancellationMap.Count == 0)
                    {
                        return null;
                    }

                    // make a copy
                    var cancellations = _cancellationMap.Values.ToList();

                    // clear cancellation map
                    _cancellationMap.Clear();

                    return cancellations;
                }

                protected void Cancel_NoLock(object key)
                {
                    if (_cancellationMap.TryGetValue(key, out var source))
                    {
                        source.Cancel();
                        _cancellationMap.Remove(key);
                    }
                }

                /// <summary>
                /// Takes the item for the given key, if present. On success also hands out a fresh
                /// CancellationTokenSource registered under the key (release it later via
                /// RemoveCancellationSource), and stops the progress reporter if the queue drained.
                /// </summary>
                public bool TryTake(TKey key, out WorkItem workInfo, out CancellationTokenSource source)
                {
                    lock (_gate)
                    {
                        if (TryTake_NoLock(key, out workInfo))
                        {
                            if (!HasAnyWork_NoLock)
                            {
                                // last work is done.
                                _progressReporter.Stop();
                            }

                            source = GetNewCancellationSource_NoLock(key);
                            workInfo.AsyncToken.Dispose();
                            return true;
                        }
                        else
                        {
                            source = null;
                            return false;
                        }
                    }
                }

                /// <summary>
                /// Takes any pending item, preferring work related to <paramref name="preferableProjectId"/>
                /// (see GetBestProjectId_NoLock). Same cancellation-source/progress semantics as TryTake.
                /// </summary>
                public bool TryTakeAnyWork(
                    ProjectId preferableProjectId, ProjectDependencyGraph dependencyGraph, IDiagnosticAnalyzerService analyzerService,
                    out WorkItem workItem, out CancellationTokenSource source)
                {
                    lock (_gate)
                    {
                        // there must be at least one item in the map when this is called unless host is shutting down.
                        if (TryTakeAnyWork_NoLock(preferableProjectId, dependencyGraph, analyzerService, out workItem))
                        {
                            if (!HasAnyWork_NoLock)
                            {
                                // last work is done.
                                _progressReporter.Stop();
                            }

                            source = GetNewCancellationSource_NoLock(workItem.Key);
                            workItem.AsyncToken.Dispose();
                            return true;
                        }
                        else
                        {
                            source = null;
                            return false;
                        }
                    }
                }

                protected CancellationTokenSource GetNewCancellationSource_NoLock(object key)
                {
                    // A key may have at most one outstanding cancellation source at a time.
                    Contract.Requires(!_cancellationMap.ContainsKey(key));

                    var source = new CancellationTokenSource();
                    _cancellationMap.Add(key, source);

                    return source;
                }

                /// <summary>
                /// Picks the next project to process, in priority order: the requested project itself;
                /// a direct dependent of it that has diagnostics; any queued project with diagnostics;
                /// otherwise the first queued project. The queue must be non-empty.
                /// </summary>
                protected ProjectId GetBestProjectId_NoLock<T>(
                    Dictionary<ProjectId, T> workQueue, ProjectId projectId,
                    ProjectDependencyGraph dependencyGraph, IDiagnosticAnalyzerService analyzerService)
                {
                    if (projectId != null)
                    {
                        if (workQueue.ContainsKey(projectId))
                        {
                            return projectId;
                        }

                        // prefer project that directly depends on the given project and has diagnostics as next project to
                        // process
                        foreach (var dependingProjectId in dependencyGraph.GetProjectsThatDirectlyDependOnThisProject(projectId))
                        {
                            if (workQueue.ContainsKey(dependingProjectId) && analyzerService?.ContainsDiagnostics(Workspace, dependingProjectId) == true)
                            {
                                return dependingProjectId;
                            }
                        }
                    }

                    // prefer a project that has diagnostics as next project to process.
                    foreach (var pendingProjectId in workQueue.Keys)
                    {
                        if (analyzerService?.ContainsDiagnostics(Workspace, pendingProjectId) == true)
                        {
                            return pendingProjectId;
                        }
                    }

                    // explicitly iterate so that we can use struct enumerator
                    foreach (var pair in workQueue)
                    {
                        return pair.Key;
                    }

                    return Contract.FailWithReturn<ProjectId>("Shouldn't reach here");
                }
            }
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using NUnit.Framework;
using osu.Framework.Extensions;
using osu.Framework.Extensions.ObjectExtensions;
using osu.Framework.Logging;
using osu.Game.Beatmaps;
using osu.Game.Database;
using osu.Game.IO.Archives;
using osu.Game.Models;
using osu.Game.Stores;
using osu.Game.Tests.Resources;
using Realms;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers.Zip;

#nullable enable

namespace osu.Game.Tests.Database
{
    /// <summary>
    /// Tests for <c>BeatmapImporter</c> against a realm-backed store: importing archives,
    /// re-importing (de-duplication by hash), deletion, rollback on failure, and archive
    /// structure edge cases.
    /// </summary>
    [TestFixture]
    public class BeatmapImporterTests : RealmTest
    {
        [Test]
        public void TestImportBeatmapThenCleanup()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using (var importer = new BeatmapImporter(realmFactory, storage))
                using (new RealmRulesetStore(realmFactory, storage))
                {
                    ILive<RealmBeatmapSet>? imported;

                    using (var reader = new ZipArchiveReader(TestResources.GetTestBeatmapStream()))
                        imported = await importer.Import(reader);

                    Assert.AreEqual(1, realmFactory.Context.All<RealmBeatmapSet>().Count());
                    Assert.NotNull(imported);
                    Debug.Assert(imported != null);

                    imported.PerformWrite(s => s.DeletePending = true);

                    Assert.AreEqual(1, realmFactory.Context.All<RealmBeatmapSet>().Count(s => s.DeletePending));
                }
            });

            Logger.Log("Running with no work to purge pending deletions");

            // A fresh run should purge the delete-pending set created above.
            RunTestWithRealm((realmFactory, _) => { Assert.AreEqual(0, realmFactory.Context.All<RealmBeatmapSet>().Count()); });
        }

        [Test]
        public void TestImportWhenClosed()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                await LoadOszIntoStore(importer, realmFactory.Context);
            });
        }

        [Test]
        public void TestImportThenDelete()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                deleteBeatmapSet(imported, realmFactory.Context);
            });
        }

        [Test]
        public void TestImportThenDeleteFromStream()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var tempPath = TestResources.GetTestBeatmapForImport();

                ILive<RealmBeatmapSet>? importedSet;

                using (var stream = File.OpenRead(tempPath))
                {
                    importedSet = await importer.Import(new ImportTask(stream, Path.GetFileName(tempPath)));
                    ensureLoaded(realmFactory.Context);
                }

                Assert.NotNull(importedSet);
                Debug.Assert(importedSet != null);

                // Importing from a stream must not take ownership of / lock the source file.
                Assert.IsTrue(File.Exists(tempPath), "Stream source file somehow went missing");
                File.Delete(tempPath);

                var imported = realmFactory.Context.All<RealmBeatmapSet>().First(beatmapSet => beatmapSet.ID == importedSet.ID);

                deleteBeatmapSet(imported, realmFactory.Context);
            });
        }

        [Test]
        public void TestImportThenImport()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var imported = await LoadOszIntoStore(importer, realmFactory.Context);
                var importedSecondTime = await LoadOszIntoStore(importer, realmFactory.Context);

                // check the newly "imported" beatmap is actually just the restored previous import. since it matches hash.
                Assert.IsTrue(imported.ID == importedSecondTime.ID);
                Assert.IsTrue(imported.Beatmaps.First().ID == importedSecondTime.Beatmaps.First().ID);

                checkBeatmapSetCount(realmFactory.Context, 1);
                checkSingleReferencedFileCount(realmFactory.Context, 18);
            });
        }

        [Test]
        public void TestImportThenImportWithReZip()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                Directory.CreateDirectory(extractedFolder);

                try
                {
                    var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                    string hashBefore = hashFile(temp);

                    // Round-trip the archive through extraction and recompression so the container
                    // bytes change while the contained files stay identical.
                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(extractedFolder);

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    // zip files differ because different compression or encoder.
                    Assert.AreNotEqual(hashBefore, hashFile(temp));

                    var importedSecondTime = await importer.Import(new ImportTask(temp));

                    ensureLoaded(realmFactory.Context);

                    Assert.NotNull(importedSecondTime);
                    Debug.Assert(importedSecondTime != null);

                    // but contents doesn't, so existing should still be used.
                    Assert.IsTrue(imported.ID == importedSecondTime.ID);
                    Assert.IsTrue(imported.Beatmaps.First().ID == importedSecondTime.PerformRead(s => s.Beatmaps.First().ID));
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        public void TestImportThenImportWithChangedHashedFile()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                Directory.CreateDirectory(extractedFolder);

                try
                {
                    var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                    await createScoreForBeatmap(realmFactory.Context, imported.Beatmaps.First());

                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(extractedFolder);

                    // arbitrary write to hashed file
                    // this triggers the special BeatmapManager.PreImport deletion/replacement flow.
                    using (var sw = new FileInfo(Directory.GetFiles(extractedFolder, "*.osu").First()).AppendText())
                        await sw.WriteLineAsync("// changed");

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    var importedSecondTime = await importer.Import(new ImportTask(temp));

                    ensureLoaded(realmFactory.Context);

                    // check the newly "imported" beatmap is not the original.
                    Assert.NotNull(importedSecondTime);
                    Debug.Assert(importedSecondTime != null);

                    Assert.IsTrue(imported.ID != importedSecondTime.ID);
                    Assert.IsTrue(imported.Beatmaps.First().ID != importedSecondTime.PerformRead(s => s.Beatmaps.First().ID));
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        [Ignore("intentionally broken by import optimisations")]
        public void TestImportThenImportWithChangedFile()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                Directory.CreateDirectory(extractedFolder);

                try
                {
                    var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(extractedFolder);

                    // arbitrary write to non-hashed file
                    using (var sw = new FileInfo(Directory.GetFiles(extractedFolder, "*.mp3").First()).AppendText())
                        await sw.WriteLineAsync("text");

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    var importedSecondTime = await importer.Import(new ImportTask(temp));

                    ensureLoaded(realmFactory.Context);

                    Assert.NotNull(importedSecondTime);
                    Debug.Assert(importedSecondTime != null);

                    // check the newly "imported" beatmap is not the original.
                    Assert.IsTrue(imported.ID != importedSecondTime.ID);
                    Assert.IsTrue(imported.Beatmaps.First().ID != importedSecondTime.PerformRead(s => s.Beatmaps.First().ID));
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        public void TestImportThenImportWithDifferentFilename()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                Directory.CreateDirectory(extractedFolder);

                try
                {
                    var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(extractedFolder);

                    // change filename
                    var firstFile = new FileInfo(Directory.GetFiles(extractedFolder).First());
                    firstFile.MoveTo(Path.Combine(firstFile.DirectoryName.AsNonNull(), $"{firstFile.Name}-changed{firstFile.Extension}"));

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    var importedSecondTime = await importer.Import(new ImportTask(temp));

                    ensureLoaded(realmFactory.Context);

                    Assert.NotNull(importedSecondTime);
                    Debug.Assert(importedSecondTime != null);

                    // check the newly "imported" beatmap is not the original.
                    Assert.IsTrue(imported.ID != importedSecondTime.ID);
                    Assert.IsTrue(imported.Beatmaps.First().ID != importedSecondTime.PerformRead(s => s.Beatmaps.First().ID));
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        [Ignore("intentionally broken by import optimisations")]
        public void TestImportCorruptThenImport()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                var firstFile = imported.Files.First();

                long originalLength;
                using (var stream = storage.GetStream(firstFile.File.StoragePath))
                    originalLength = stream.Length;

                // Corrupt the stored file on disk, then re-import to verify it gets restored.
                using (var stream = storage.GetStream(firstFile.File.StoragePath, FileAccess.Write, FileMode.Create))
                    stream.WriteByte(0);

                var importedSecondTime = await LoadOszIntoStore(importer, realmFactory.Context);

                using (var stream = storage.GetStream(firstFile.File.StoragePath))
                    Assert.AreEqual(stream.Length, originalLength, "Corruption was not fixed on second import");

                // check the newly "imported" beatmap is actually just the restored previous import. since it matches hash.
                Assert.IsTrue(imported.ID == importedSecondTime.ID);
                Assert.IsTrue(imported.Beatmaps.First().ID == importedSecondTime.Beatmaps.First().ID);

                checkBeatmapSetCount(realmFactory.Context, 1);
                checkSingleReferencedFileCount(realmFactory.Context, 18);
            });
        }

        [Test]
        public void TestRollbackOnFailure()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                int loggedExceptionCount = 0;

                Logger.NewEntry += l =>
                {
                    if (l.Target == LoggingTarget.Database && l.Exception != null)
                        Interlocked.Increment(ref loggedExceptionCount);
                };

                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                realmFactory.Context.Write(() => imported.Hash += "-changed");

                checkBeatmapSetCount(realmFactory.Context, 1);
                checkBeatmapCount(realmFactory.Context, 12);
                checkSingleReferencedFileCount(realmFactory.Context, 18);

                var brokenTempFilename = TestResources.GetTestBeatmapForImport();

                // Build an archive containing an intentionally empty/broken .osu entry.
                MemoryStream brokenOsu = new MemoryStream();
                MemoryStream brokenOsz = new MemoryStream(await File.ReadAllBytesAsync(brokenTempFilename));

                File.Delete(brokenTempFilename);

                using (var outStream = File.Open(brokenTempFilename, FileMode.CreateNew))
                using (var zip = ZipArchive.Open(brokenOsz))
                {
                    zip.AddEntry("broken.osu", brokenOsu, false);
                    zip.SaveTo(outStream, CompressionType.Deflate);
                }

                // this will trigger purging of the existing beatmap (online set id match) but should rollback due to broken osu.
                try
                {
                    await importer.Import(new ImportTask(brokenTempFilename));
                }
                catch
                {
                }

                // Nothing should have changed: the failed import must roll back completely.
                checkBeatmapSetCount(realmFactory.Context, 1);
                checkBeatmapCount(realmFactory.Context, 12);

                checkSingleReferencedFileCount(realmFactory.Context, 18);

                Assert.AreEqual(1, loggedExceptionCount);

                File.Delete(brokenTempFilename);
            });
        }

        [Test]
        public void TestImportThenDeleteThenImport()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                deleteBeatmapSet(imported, realmFactory.Context);

                var importedSecondTime = await LoadOszIntoStore(importer, realmFactory.Context);

                // check the newly "imported" beatmap is actually just the restored previous import. since it matches hash.
                Assert.IsTrue(imported.ID == importedSecondTime.ID);
                Assert.IsTrue(imported.Beatmaps.First().ID == importedSecondTime.Beatmaps.First().ID);
            });
        }

        [Test]
        public void TestImportThenDeleteThenImportWithOnlineIDsMissing()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var imported = await LoadOszIntoStore(importer, realmFactory.Context);

                realmFactory.Context.Write(() =>
                {
                    foreach (var b in imported.Beatmaps)
                        b.OnlineID = -1;
                });

                deleteBeatmapSet(imported, realmFactory.Context);

                var importedSecondTime = await LoadOszIntoStore(importer, realmFactory.Context);

                // check the newly "imported" beatmap has been reimported due to mismatch (even though hashes matched)
                Assert.IsTrue(imported.ID != importedSecondTime.ID);
                Assert.IsTrue(imported.Beatmaps.First().ID != importedSecondTime.Beatmaps.First().ID);
            });
        }

        [Test]
        public void TestImportWithDuplicateBeatmapIDs()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var metadata = new RealmBeatmapMetadata
                {
                    Artist = "SomeArtist",
                    Author = "SomeAuthor"
                };

                var ruleset = realmFactory.Context.All<RealmRuleset>().First();

                // Two difficulties claiming the same online ID: both should be reset to -1 on import.
                var toImport = new RealmBeatmapSet
                {
                    OnlineID = 1,
                    Beatmaps =
                    {
                        new RealmBeatmap(ruleset, new RealmBeatmapDifficulty(), metadata)
                        {
                            OnlineID = 2,
                        },
                        new RealmBeatmap(ruleset, new RealmBeatmapDifficulty(), metadata)
                        {
                            OnlineID = 2,
                            Status = BeatmapSetOnlineStatus.Loved,
                        }
                    }
                };

                var imported = await importer.Import(toImport);

                Assert.NotNull(imported);
                Debug.Assert(imported != null);

                Assert.AreEqual(-1, imported.PerformRead(s => s.Beatmaps[0].OnlineID));
                Assert.AreEqual(-1, imported.PerformRead(s => s.Beatmaps[1].OnlineID));
            });
        }

        [Test]
        public void TestImportWhenFileOpen()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                // Keep a read handle open during import; the importer must not require exclusive access.
                using (File.OpenRead(temp))
                    await importer.Import(temp);

                ensureLoaded(realmFactory.Context);

                File.Delete(temp);
                Assert.IsFalse(File.Exists(temp), "We likely held a read lock on the file when we shouldn't");
            });
        }

        [Test]
        public void TestImportWithDuplicateHashes()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                Directory.CreateDirectory(extractedFolder);

                try
                {
                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(extractedFolder);

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        // Duplicate an .osu entry so two archive entries share the same content hash.
                        zip.AddEntry("duplicate.osu", Directory.GetFiles(extractedFolder, "*.osu").First());
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    await importer.Import(temp);

                    ensureLoaded(realmFactory.Context);
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        public void TestImportNestedStructure()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                string subfolder = Path.Combine(extractedFolder, "subfolder");

                Directory.CreateDirectory(subfolder);

                try
                {
                    // Re-pack the beatmap with every file under a common subfolder; the importer
                    // should strip that common prefix.
                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(subfolder);

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    var imported = await importer.Import(new ImportTask(temp));

                    Assert.NotNull(imported);
                    Debug.Assert(imported != null);

                    ensureLoaded(realmFactory.Context);

                    Assert.IsFalse(imported.PerformRead(s => s.Files.Any(f => f.Filename.Contains("subfolder"))), "Files contain common subfolder");
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        public void TestImportWithIgnoredDirectoryInArchive()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();

                string extractedFolder = $"{temp}_extracted";
                string dataFolder = Path.Combine(extractedFolder, "actual_data");
                string resourceForkFolder = Path.Combine(extractedFolder, "__MACOSX");
                string resourceForkFilePath = Path.Combine(resourceForkFolder, ".extracted");

                Directory.CreateDirectory(dataFolder);
                Directory.CreateDirectory(resourceForkFolder);

                using (var resourceForkFile = File.CreateText(resourceForkFilePath))
                {
                    await resourceForkFile.WriteLineAsync("adding content so that it's not empty");
                }

                try
                {
                    using (var zip = ZipArchive.Open(temp))
                        zip.WriteToDirectory(dataFolder);

                    using (var zip = ZipArchive.Create())
                    {
                        zip.AddAllFromDirectory(extractedFolder);
                        zip.SaveTo(temp, new ZipWriterOptions(CompressionType.Deflate));
                    }

                    var imported = await importer.Import(new ImportTask(temp));

                    Assert.NotNull(imported);
                    Debug.Assert(imported != null);

                    ensureLoaded(realmFactory.Context);

                    Assert.IsFalse(imported.PerformRead(s => s.Files.Any(f => f.Filename.Contains("__MACOSX"))), "Files contain resource fork folder, which should be ignored");
                    Assert.IsFalse(imported.PerformRead(s => s.Files.Any(f => f.Filename.Contains("actual_data"))), "Files contain common subfolder");
                }
                finally
                {
                    Directory.Delete(extractedFolder, true);
                }
            });
        }

        [Test]
        public void TestUpdateBeatmapInfo()
        {
            RunTestWithRealmAsync(async (realmFactory, storage) =>
            {
                using var importer = new BeatmapImporter(realmFactory, storage);
                using var store = new RealmRulesetStore(realmFactory, storage);

                var temp = TestResources.GetTestBeatmapForImport();
                await importer.Import(temp);

                // Update via the beatmap, not the beatmap info, to ensure correct linking
                RealmBeatmapSet setToUpdate = realmFactory.Context.All<RealmBeatmapSet>().First();

                var beatmapToUpdate = setToUpdate.Beatmaps.First();

                realmFactory.Context.Write(() => beatmapToUpdate.DifficultyName = "updated");

                RealmBeatmap updatedInfo = realmFactory.Context.All<RealmBeatmap>().First(b => b.ID == beatmapToUpdate.ID);
                Assert.That(updatedInfo.DifficultyName, Is.EqualTo("updated"));
            });
        }

        /// <summary>Imports the quick test beatmap and returns the stored set (or null if not found).</summary>
        public static async Task<RealmBeatmapSet?> LoadQuickOszIntoOsu(BeatmapImporter importer, Realm realm)
        {
            var temp = TestResources.GetQuickTestBeatmapForImport();

            var importedSet = await importer.Import(new ImportTask(temp));

            Assert.NotNull(importedSet);

            ensureLoaded(realm);

            waitForOrAssert(() => !File.Exists(temp), "Temporary file still exists after standard import", 5000);

            return realm.All<RealmBeatmapSet>().FirstOrDefault(beatmapSet => beatmapSet.ID == importedSet!.ID);
        }

        /// <summary>Imports the standard test beatmap (or <paramref name="path"/>) and returns the stored set.</summary>
        public static async Task<RealmBeatmapSet> LoadOszIntoStore(BeatmapImporter importer, Realm realm, string? path = null, bool virtualTrack = false)
        {
            var temp = path ?? TestResources.GetTestBeatmapForImport(virtualTrack);

            var importedSet = await importer.Import(new ImportTask(temp));

            Assert.NotNull(importedSet);
            Debug.Assert(importedSet != null);

            ensureLoaded(realm);

            waitForOrAssert(() => !File.Exists(temp), "Temporary file still exists after standard import", 5000);

            return realm.All<RealmBeatmapSet>().First(beatmapSet => beatmapSet.ID == importedSet.ID);
        }

        // Marks the set delete-pending and asserts it disappears from the visible count
        // while remaining present when delete-pending sets are included.
        private void deleteBeatmapSet(RealmBeatmapSet imported, Realm realm)
        {
            realm.Write(() => imported.DeletePending = true);

            checkBeatmapSetCount(realm, 0);
            checkBeatmapSetCount(realm, 1, true);

            Assert.IsTrue(realm.All<RealmBeatmapSet>().First(_ => true).DeletePending);
        }

        private static Task createScoreForBeatmap(Realm realm, RealmBeatmap beatmap)
        {
            // TODO: reimplement when we have score support in realm.
            // return ImportScoreTest.LoadScoreIntoOsu(osu, new ScoreInfo
            // {
            //     OnlineScoreID = 2,
            //     Beatmap = beatmap,
            //     BeatmapInfoID = beatmap.ID
            // }, new ImportScoreTest.TestArchiveReader());

            return Task.CompletedTask;
        }

        private static void checkBeatmapSetCount(Realm realm, int expected, bool includeDeletePending = false)
        {
            Assert.AreEqual(expected, includeDeletePending
                ? realm.All<RealmBeatmapSet>().Count()
                : realm.All<RealmBeatmapSet>().Count(s => !s.DeletePending));
        }

        private static string hashFile(string filename)
        {
            using (var s = File.OpenRead(filename))
                return s.ComputeMD5Hash();
        }

        private static void checkBeatmapCount(Realm realm, int expected)
        {
            Assert.AreEqual(expected, realm.All<RealmBeatmap>().Where(_ => true).ToList().Count);
        }

        // Counts files referenced by exactly one owner (via realm backlinks).
        private static void checkSingleReferencedFileCount(Realm realm, int expected)
        {
            int singleReferencedCount = 0;

            foreach (var f in realm.All<RealmFile>())
            {
                if (f.BacklinksCount == 1)
                    singleReferencedCount++;
            }

            Assert.AreEqual(expected, singleReferencedCount);
        }

        // Waits until the standard test beatmap set (online ID 241526) and its 12 difficulties
        // are fully present and consistent in the given realm, asserting on timeout.
        private static void ensureLoaded(Realm realm, int timeout = 60000)
        {
            IQueryable<RealmBeatmapSet>? resultSets = null;

            waitForOrAssert(() => (resultSets = realm.All<RealmBeatmapSet>().Where(s => !s.DeletePending && s.OnlineID == 241526)).Any(),
                @"BeatmapSet did not import to the database in allocated time.", timeout);

            // ensure we were stored to beatmap database backing...
            Assert.IsTrue(resultSets?.Count() == 1, $@"Incorrect result count found ({resultSets?.Count()} but should be 1).");

            IEnumerable<RealmBeatmapSet> queryBeatmapSets() => realm.All<RealmBeatmapSet>().Where(s => !s.DeletePending && s.OnlineID == 241526);

            var set = queryBeatmapSets().First();

            // ReSharper disable once PossibleUnintendedReferenceComparison
            IEnumerable<RealmBeatmap> queryBeatmaps() => realm.All<RealmBeatmap>().Where(s => s.BeatmapSet != null && s.BeatmapSet == set);

            waitForOrAssert(() => queryBeatmaps().Count() == 12, @"Beatmaps did not import to the database in allocated time", timeout);
            waitForOrAssert(() => queryBeatmapSets().Count() == 1, @"BeatmapSet did not import to the database in allocated time", timeout);

            int countBeatmapSetBeatmaps = 0;
            int countBeatmaps = 0;

            waitForOrAssert(() =>
                (countBeatmapSetBeatmaps = queryBeatmapSets().First().Beatmaps.Count) ==
                (countBeatmaps = queryBeatmaps().Count()),
                $@"Incorrect database beatmap count post-import ({countBeatmaps} but should be {countBeatmapSetBeatmaps}).", timeout);

            foreach (RealmBeatmap b in set.Beatmaps)
                Assert.IsTrue(set.Beatmaps.Any(c => c.OnlineID == b.OnlineID));

            Assert.IsTrue(set.Beatmaps.Count > 0);
        }

        // Polls the condition every 200ms until it holds or the timeout elapses, then fails.
        private static void waitForOrAssert(Func<bool> result, string failureMessage, int timeout = 60000)
        {
            const int sleep = 200;

            while (timeout > 0)
            {
                Thread.Sleep(sleep);
                timeout -= sleep;

                if (result())
                    return;
            }

            Assert.Fail(failureMessage);
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Threading;
using osu.Framework.Development;
using osu.Framework.Extensions.IEnumerableExtensions;
using osu.Framework.Logging;
using osu.Framework.Threading;

namespace osu.Framework.Platform
{
    /// <summary>
    /// Runs a game host in a specific threading mode.
    /// </summary>
    public class ThreadRunner
    {
        private readonly InputThread mainThread;

        // All registered threads; always accessed under its own lock.
        private readonly List<GameThread> threads = new List<GameThread>();

        /// <summary>
        /// A point-in-time snapshot of all registered threads (safe to enumerate without locking).
        /// </summary>
        public IReadOnlyCollection<GameThread> Threads
        {
            get
            {
                lock (threads)
                    return threads.ToArray();
            }
        }

        private double maximumUpdateHz = GameThread.DEFAULT_ACTIVE_HZ;

        /// <summary>
        /// The requested active frame rate, applied to the main thread while running single-threaded.
        /// </summary>
        public double MaximumUpdateHz
        {
            set
            {
                maximumUpdateHz = value;
                updateMainThreadRates();
            }
        }

        private double maximumInactiveHz = GameThread.DEFAULT_INACTIVE_HZ;

        /// <summary>
        /// The requested inactive (unfocused) frame rate, applied to the main thread while running single-threaded.
        /// </summary>
        public double MaximumInactiveHz
        {
            set
            {
                maximumInactiveHz = value;
                updateMainThreadRates();
            }
        }

        // Guards execution-mode transitions; ensureCorrectExecutionMode/Suspend may race from different threads.
        private readonly object startStopLock = new object();

        /// <summary>
        /// Construct a new ThreadRunner instance.
        /// </summary>
        /// <param name="mainThread">The main window thread. Used for input in multi-threaded execution; all game logic in single-threaded execution.</param>
        public ThreadRunner(InputThread mainThread)
        {
            this.mainThread = mainThread;
            AddThread(mainThread);
        }

        /// <summary>
        /// Add a new non-main thread. In single-threaded execution, threads will be executed in the order they are added.
        /// </summary>
        public void AddThread(GameThread thread)
        {
            lock (threads)
            {
                if (!threads.Contains(thread))
                    threads.Add(thread);
            }
        }

        /// <summary>
        /// Remove a non-main thread.
        /// </summary>
        public void RemoveThread(GameThread thread)
        {
            lock (threads)
                threads.Remove(thread);
        }

        // The mode currently in effect; null means "not started / suspended", forcing
        // ensureCorrectExecutionMode to (re-)apply the requested mode on next run.
        private ExecutionMode? activeExecutionMode;

        /// <summary>
        /// The requested execution mode. Applied at a safe point in frame execution.
        /// </summary>
        public ExecutionMode ExecutionMode { private get; set; } = ExecutionMode.MultiThreaded;

        /// <summary>
        /// Runs one iteration of the main loop on the calling (window) thread.
        /// </summary>
        public virtual void RunMainLoop()
        {
            // propagate any requested change in execution mode at a safe point in frame execution
            ensureCorrectExecutionMode();

            Debug.Assert(activeExecutionMode != null);

            switch (activeExecutionMode.Value)
            {
                case ExecutionMode.SingleThread:
                {
                    // In single-threaded mode every registered thread is pumped in registration order.
                    lock (threads)
                    {
                        foreach (var t in threads)
                            t.RunSingleFrame();
                    }

                    break;
                }

                case ExecutionMode.MultiThreaded:
                    // still need to run the main/input thread on the window loop
                    mainThread.RunSingleFrame();
                    break;
            }

            ThreadSafety.ResetAllForCurrentThread();
        }

        /// <summary>
        /// Starts (or resumes) execution in the currently requested mode.
        /// </summary>
        public void Start() => ensureCorrectExecutionMode();

        /// <summary>
        /// Pauses all threads. Execution resumes (and the mode is re-applied) on the next Start/RunMainLoop.
        /// </summary>
        public void Suspend()
        {
            lock (startStopLock)
            {
                pauseAllThreads();
                activeExecutionMode = null;
            }
        }

        /// <summary>
        /// Permanently stops all threads, blocking until each has exited (or its join times out).
        /// </summary>
        public void Stop()
        {
            const int thread_join_timeout = 30000;

            Threads.ForEach(t => t.Exit());
            Threads.Where(t => t.Running).ForEach(t =>
            {
                var thread = t.Thread;

                if (thread == null)
                {
                    // has already been cleaned up (or never started)
                    return;
                }

                if (!thread.Join(thread_join_timeout))
                    Logger.Log($"Thread {t.Name} failed to exit in allocated time ({thread_join_timeout}ms).", LoggingTarget.Runtime, LogLevel.Important);
            });

            // as the input thread isn't actually handled by a thread, the above join does not necessarily mean it has been completed to an exiting state.
            mainThread.WaitForState(GameThreadState.Exited);

            ThreadSafety.ResetAllForCurrentThread();
        }

        // Applies the requested ExecutionMode if it differs from the active one:
        // pauses everything, then either starts native threads (multi) or initialises
        // threads for cooperative frame pumping (single).
        private void ensureCorrectExecutionMode()
        {
            // locking is required as this method may be called from two different threads.
            lock (startStopLock)
            {
                // pull into a local variable as the property is not locked during writes.
                var executionMode = ExecutionMode;

                if (executionMode == activeExecutionMode)
                    return;

                activeExecutionMode = ThreadSafety.ExecutionMode = executionMode;
                Logger.Log($"Execution mode changed to {activeExecutionMode}");
            }

            pauseAllThreads();

            switch (activeExecutionMode)
            {
                case ExecutionMode.MultiThreaded:
                {
                    // switch to multi-threaded
                    foreach (var t in Threads)
                        t.Start();

                    break;
                }

                case ExecutionMode.SingleThread:
                {
                    // switch to single-threaded.
                    foreach (var t in Threads)
                    {
                        // only throttle for the main thread
                        t.Initialize(withThrottling: t == mainThread);
                    }

                    // this is usually done in the execution loop, but required here for the initial game startup,
                    // which would otherwise leave values in an incorrect state.
                    ThreadSafety.ResetAllForCurrentThread();
                    break;
                }
            }

            updateMainThreadRates();
        }

        private void pauseAllThreads()
        {
            // shut down threads in reverse to ensure audio stops last (other threads may be waiting on a queued event otherwise)
            foreach (var t in Threads.Reverse())
                t.Pause();
        }

        // In single-threaded mode the main thread drives every frame, so it takes the
        // user-requested rates; otherwise it falls back to the framework defaults.
        private void updateMainThreadRates()
        {
            if (activeExecutionMode == ExecutionMode.SingleThread)
            {
                mainThread.ActiveHz = maximumUpdateHz;
                mainThread.InactiveHz = maximumInactiveHz;
            }
            else
            {
                mainThread.ActiveHz = GameThread.DEFAULT_ACTIVE_HZ;
                mainThread.InactiveHz = GameThread.DEFAULT_INACTIVE_HZ;
            }
        }

        /// <summary>
        /// Sets the current culture of all threads to the supplied <paramref name="culture"/>.
        /// </summary>
        public void SetCulture(CultureInfo culture)
        {
            // for single-threaded mode, switch the current (assumed to be main) thread's culture, since it's actually the one that's running the frames.
            Thread.CurrentThread.CurrentCulture = culture;

            // for multi-threaded mode, schedule the culture change on all threads.
            // note that if the threads haven't been created yet (e.g. if the game started single-threaded), this will only store the culture in GameThread.CurrentCulture.
            // in that case, the stored value will be set on the actual threads after the next Start() call.
            foreach (var t in Threads)
            {
                t.Scheduler.Add(() => t.CurrentCulture = culture);
            }
        }
    }
}
//----------------------------------------------------------------------------- // Copyright (c) 2012 GarageGames, LLC // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
//-----------------------------------------------------------------------------

// Path of the preset applied when no user preset is loaded.
$PostFXManager::defaultPreset = "core/scripts/client/postFx/default.postfxpreset.cs";

// Master on/off switch: when enabling, each individual effect is applied according
// to its own Enable* flag; when disabling, every effect is turned off.
function PostFXManager::settingsSetEnabled(%this, %bEnablePostFX)
{
   $PostFXManager::PostFX::Enabled = %bEnablePostFX;

   //if to enable the postFX, apply the ones that are enabled
   if ( %bEnablePostFX )
   {
      //SSAO, HDR, LightRays, DOF
      if ( $PostFXManager::PostFX::EnableSSAO )
         SSAOPostFx.enable();
      else
         SSAOPostFx.disable();

      if ( $PostFXManager::PostFX::EnableHDR )
         HDRPostFX.enable();
      else
         HDRPostFX.disable();

      if ( $PostFXManager::PostFX::EnableLightRays )
         LightRayPostFX.enable();
      else
         LightRayPostFX.disable();

      if ( $PostFXManager::PostFX::EnableDOF )
         DOFPostEffect.enable();
      else
         DOFPostEffect.disable();

      postVerbose("% - PostFX Manager - PostFX enabled");
   }
   else
   {
      //Disable all postFX
      SSAOPostFx.disable();
      HDRPostFX.disable();
      LightRayPostFX.disable();
      DOFPostEffect.disable();

      postVerbose("% - PostFX Manager - PostFX disabled");
   }
}

// Enables/disables one named effect ("SSAO", "HDR", "LightRays" or "DOF") and
// records the state in the corresponding $PostFXManager::PostFX::Enable* flag.
function PostFXManager::settingsEffectSetEnabled(%this, %sName, %bEnable)
{
   %postEffect = 0;

   //Determine the postFX to enable, and apply the boolean
   if(%sName $= "SSAO")
   {
      %postEffect = SSAOPostFx;
      $PostFXManager::PostFX::EnableSSAO = %bEnable;
      //$pref::PostFX::SSAO::Enabled = %bEnable;
   }
   else if(%sName $= "HDR")
   {
      %postEffect = HDRPostFX;
      $PostFXManager::PostFX::EnableHDR = %bEnable;
      //$pref::PostFX::HDR::Enabled = %bEnable;
   }
   else if(%sName $= "LightRays")
   {
      %postEffect = LightRayPostFX;
      $PostFXManager::PostFX::EnableLightRays = %bEnable;
      //$pref::PostFX::LightRays::Enabled = %bEnable;
   }
   else if(%sName $= "DOF")
   {
      %postEffect = DOFPostEffect;
      $PostFXManager::PostFX::EnableDOF = %bEnable;
      //$pref::PostFX::DOF::Enabled = %bEnable;
   }

   // Apply the change
   // NOTE(review): an unrecognised %sName leaves %postEffect = 0 and calls
   // enable()/disable() on it — presumably a silent no-op in TorqueScript; confirm.
   if ( %bEnable == true )
   {
      %postEffect.enable();
      postVerbose("% - PostFX Manager - " @ %sName @ " enabled");
   }
   else
   {
      %postEffect.disable();
      postVerbose("% - PostFX Manager - " @ %sName @ " disabled");
   }
}

// Pushes the current $SSAOPostFx::* values into the SSAO GUI controls.
function PostFXManager::settingsRefreshSSAO(%this)
{
   //Apply the enabled flag
   ppOptionsEnableSSAO.setValue($PostFXManager::PostFX::EnableSSAO);

   //Add the items we need to display
   ppOptionsSSAOQuality.clear();
   ppOptionsSSAOQuality.add("Low", 0);
   ppOptionsSSAOQuality.add("Medium", 1);
   ppOptionsSSAOQuality.add("High", 2);

   //Set the selected, after adding the items!
   ppOptionsSSAOQuality.setSelected($SSAOPostFx::quality);

   //SSAO - Set the values of the sliders, General Tab
   ppOptionsSSAOOverallStrength.setValue($SSAOPostFx::overallStrength);
   ppOptionsSSAOBlurDepth.setValue($SSAOPostFx::blurDepthTol);
   ppOptionsSSAOBlurNormal.setValue($SSAOPostFx::blurNormalTol);

   //SSAO - Set the values for the near tab
   ppOptionsSSAONearDepthMax.setValue($SSAOPostFx::sDepthMax);
   ppOptionsSSAONearDepthMin.setValue($SSAOPostFx::sDepthMin);
   ppOptionsSSAONearRadius.setValue($SSAOPostFx::sRadius);
   ppOptionsSSAONearStrength.setValue($SSAOPostFx::sStrength);
   ppOptionsSSAONearToleranceNormal.setValue($SSAOPostFx::sNormalTol);
   ppOptionsSSAONearTolerancePower.setValue($SSAOPostFx::sNormalPow);

   //SSAO - Set the values for the far tab
   ppOptionsSSAOFarDepthMax.setValue($SSAOPostFx::lDepthMax);
   ppOptionsSSAOFarDepthMin.setValue($SSAOPostFx::lDepthMin);
   ppOptionsSSAOFarRadius.setValue($SSAOPostFx::lRadius);
   ppOptionsSSAOFarStrength.setValue($SSAOPostFx::lStrength);
   ppOptionsSSAOFarToleranceNormal.setValue($SSAOPostFx::lNormalTol);
   ppOptionsSSAOFarTolerancePower.setValue($SSAOPostFx::lNormalPow);
}

// Pushes the current $HDRPostFX::* values into the HDR GUI controls.
function PostFXManager::settingsRefreshHDR(%this)
{
   //Apply the enabled flag
   ppOptionsEnableHDR.setValue($PostFXManager::PostFX::EnableHDR);

   ppOptionsHDRBloom.setValue($HDRPostFX::enableBloom);
   ppOptionsHDRBloomBlurBrightPassThreshold.setValue($HDRPostFX::brightPassThreshold);
   ppOptionsHDRBloomBlurMean.setValue($HDRPostFX::gaussMean);
   ppOptionsHDRBloomBlurMultiplier.setValue($HDRPostFX::gaussMultiplier);
   ppOptionsHDRBloomBlurStdDev.setValue($HDRPostFX::gaussStdDev);
   ppOptionsHDRBrightnessAdaptRate.setValue($HDRPostFX::adaptRate);
   ppOptionsHDREffectsBlueShift.setValue($HDRPostFX::enableBlueShift);
   ppOptionsHDREffectsBlueShiftColor.BaseColor = $HDRPostFX::blueShiftColor;
   ppOptionsHDREffectsBlueShiftColor.PickColor = $HDRPostFX::blueShiftColor;
   ppOptionsHDRKeyValue.setValue($HDRPostFX::keyValue);
   // NOTE: "minLuminace" is the engine's own (misspelled) variable name — do not "fix" it here.
   ppOptionsHDRMinLuminance.setValue($HDRPostFX::minLuminace);
   ppOptionsHDRToneMapping.setValue($HDRPostFX::enableToneMapping);
   // NOTE(review): the "amount" slider is fed from the enable flag — looks like a
   // copy/paste slip; presumably a dedicated amount pref was intended. Confirm before changing.
   ppOptionsHDRToneMappingAmount.setValue($HDRPostFX::enableToneMapping);
   ppOptionsHDRWhiteCutoff.setValue($HDRPostFX::whiteCutoff);

   %this-->ColorCorrectionFileName.Text = $HDRPostFX::colorCorrectionRamp;
}

// Pushes the current $LightRayPostFX::* values into the light-ray GUI controls.
function PostFXManager::settingsRefreshLightrays(%this)
{
   //Apply the enabled flag
   ppOptionsEnableLightRays.setValue($PostFXManager::PostFX::EnableLightRays);

   ppOptionsLightRaysBrightScalar.setValue($LightRayPostFX::brightScalar);
}

// Pushes the current $DOFPostFx::* values into the depth-of-field GUI controls.
function PostFXManager::settingsRefreshDOF(%this)
{
   //Apply the enabled flag
   ppOptionsEnableDOF.setValue($PostFXManager::PostFX::EnableDOF);
   //ppOptionsDOFEnableDOF.setValue($PostFXManager::PostFX::EnableDOF);

   ppOptionsDOFEnableAutoFocus.setValue($DOFPostFx::EnableAutoFocus);
   ppOptionsDOFFarBlurMinSlider.setValue($DOFPostFx::BlurMin);
   ppOptionsDOFFarBlurMaxSlider.setValue($DOFPostFx::BlurMax);
   ppOptionsDOFFocusRangeMinSlider.setValue($DOFPostFx::FocusRangeMin);
   ppOptionsDOFFocusRangeMaxSlider.setValue($DOFPostFx::FocusRangeMax);
   ppOptionsDOFBlurCurveNearSlider.setValue($DOFPostFx::BlurCurveNear);
   ppOptionsDOFBlurCurveFarSlider.setValue($DOFPostFx::BlurCurveFar);
}

// Re-reads the live effect state into the manager flags and refreshes every GUI tab.
function PostFXManager::settingsRefreshAll(%this)
{
   $PostFXManager::PostFX::Enabled = $pref::enablePostEffects;
   $PostFXManager::PostFX::EnableSSAO = SSAOPostFx.isEnabled();
   $PostFXManager::PostFX::EnableHDR = HDRPostFX.isEnabled();
   $PostFXManager::PostFX::EnableLightRays = LightRayPostFX.isEnabled();
   $PostFXManager::PostFX::EnableDOF = DOFPostEffect.isEnabled();

   //For all the postFX here, apply the active settings in the system
   //to the gui controls.
   %this.settingsRefreshSSAO();
   %this.settingsRefreshHDR();
   %this.settingsRefreshLightrays();
   %this.settingsRefreshDOF();

   ppOptionsEnable.setValue($PostFXManager::PostFX::Enabled);

   postVerbose("% - PostFX Manager - GUI values updated.");
}

// Copies $PostFXManager::Settings::* (a loaded preset) into the live effect globals,
// optionally forcing the enable flags, and refreshes the GUI if open.
function PostFXManager::settingsApplyFromPreset(%this)
{
   postVerbose("% - PostFX Manager - Applying from preset");

   //SSAO Settings
   $SSAOPostFx::blurDepthTol = $PostFXManager::Settings::SSAO::blurDepthTol;
   $SSAOPostFx::blurNormalTol = $PostFXManager::Settings::SSAO::blurNormalTol;
   $SSAOPostFx::lDepthMax = $PostFXManager::Settings::SSAO::lDepthMax;
   $SSAOPostFx::lDepthMin = $PostFXManager::Settings::SSAO::lDepthMin;
   $SSAOPostFx::lDepthPow = $PostFXManager::Settings::SSAO::lDepthPow;
   $SSAOPostFx::lNormalPow = $PostFXManager::Settings::SSAO::lNormalPow;
   $SSAOPostFx::lNormalTol = $PostFXManager::Settings::SSAO::lNormalTol;
   $SSAOPostFx::lRadius = $PostFXManager::Settings::SSAO::lRadius;
   $SSAOPostFx::lStrength = $PostFXManager::Settings::SSAO::lStrength;
   $SSAOPostFx::overallStrength = $PostFXManager::Settings::SSAO::overallStrength;
   $SSAOPostFx::quality = $PostFXManager::Settings::SSAO::quality;
   $SSAOPostFx::sDepthMax = $PostFXManager::Settings::SSAO::sDepthMax;
   $SSAOPostFx::sDepthMin = $PostFXManager::Settings::SSAO::sDepthMin;
   $SSAOPostFx::sDepthPow = $PostFXManager::Settings::SSAO::sDepthPow;
   $SSAOPostFx::sNormalPow = $PostFXManager::Settings::SSAO::sNormalPow;
   $SSAOPostFx::sNormalTol = $PostFXManager::Settings::SSAO::sNormalTol;
   $SSAOPostFx::sRadius = $PostFXManager::Settings::SSAO::sRadius;
   $SSAOPostFx::sStrength = $PostFXManager::Settings::SSAO::sStrength;

   //HDR settings
   $HDRPostFX::adaptRate = $PostFXManager::Settings::HDR::adaptRate;
   $HDRPostFX::blueShiftColor = $PostFXManager::Settings::HDR::blueShiftColor;
   $HDRPostFX::brightPassThreshold = $PostFXManager::Settings::HDR::brightPassThreshold;
   $HDRPostFX::enableBloom = $PostFXManager::Settings::HDR::enableBloom;
   $HDRPostFX::enableBlueShift = $PostFXManager::Settings::HDR::enableBlueShift;
   $HDRPostFX::enableToneMapping = $PostFXManager::Settings::HDR::enableToneMapping;
   $HDRPostFX::gaussMean = $PostFXManager::Settings::HDR::gaussMean;
   $HDRPostFX::gaussMultiplier = $PostFXManager::Settings::HDR::gaussMultiplier;
   $HDRPostFX::gaussStdDev = $PostFXManager::Settings::HDR::gaussStdDev;
   $HDRPostFX::keyValue = $PostFXManager::Settings::HDR::keyValue;
   $HDRPostFX::minLuminace = $PostFXManager::Settings::HDR::minLuminace;
   $HDRPostFX::whiteCutoff = $PostFXManager::Settings::HDR::whiteCutoff;
   $HDRPostFX::colorCorrectionRamp = $PostFXManager::Settings::ColorCorrectionRamp;

   //Light rays settings
   $LightRayPostFX::brightScalar = $PostFXManager::Settings::LightRays::brightScalar;

   //DOF settings
   $DOFPostFx::EnableAutoFocus = $PostFXManager::Settings::DOF::EnableAutoFocus;
   $DOFPostFx::BlurMin = $PostFXManager::Settings::DOF::BlurMin;
   $DOFPostFx::BlurMax = $PostFXManager::Settings::DOF::BlurMax;
   $DOFPostFx::FocusRangeMin = $PostFXManager::Settings::DOF::FocusRangeMin;
   $DOFPostFx::FocusRangeMax = $PostFXManager::Settings::DOF::FocusRangeMax;
   $DOFPostFx::BlurCurveNear = $PostFXManager::Settings::DOF::BlurCurveNear;
   $DOFPostFx::BlurCurveFar = $PostFXManager::Settings::DOF::BlurCurveFar;

   if ( $PostFXManager::forceEnableFromPresets )
   {
      $PostFXManager::PostFX::Enabled = $PostFXManager::Settings::EnablePostFX;
      $PostFXManager::PostFX::EnableDOF = $PostFXManager::Settings::EnableDOF;
      $PostFXManager::PostFX::EnableLightRays = $PostFXManager::Settings::EnableLightRays;
      $PostFXManager::PostFX::EnableHDR = $PostFXManager::Settings::EnableHDR;
      // NOTE: "EnabledSSAO" (not "EnableSSAO") is the key used consistently by
      // settingsApplyAll below — intentional, if inconsistently named.
      $PostFXManager::PostFX::EnableSSAO = $PostFXManager::Settings::EnabledSSAO;

      %this.settingsSetEnabled( true );
   }

   //make sure we apply the correct settings to the DOF
   ppOptionsUpdateDOFSettings();

   // Update the actual GUI controls if its awake ( otherwise it will when opened ).
   if ( PostFXManager.isAwake() )
      %this.settingsRefreshAll();
}

// Snapshots the live $SSAOPostFx::* values into the preset structure.
function PostFXManager::settingsApplySSAO(%this)
{
   $PostFXManager::Settings::SSAO::blurDepthTol = $SSAOPostFx::blurDepthTol;
   $PostFXManager::Settings::SSAO::blurNormalTol = $SSAOPostFx::blurNormalTol;
   $PostFXManager::Settings::SSAO::lDepthMax = $SSAOPostFx::lDepthMax;
   $PostFXManager::Settings::SSAO::lDepthMin = $SSAOPostFx::lDepthMin;
   $PostFXManager::Settings::SSAO::lDepthPow = $SSAOPostFx::lDepthPow;
   $PostFXManager::Settings::SSAO::lNormalPow = $SSAOPostFx::lNormalPow;
   $PostFXManager::Settings::SSAO::lNormalTol = $SSAOPostFx::lNormalTol;
   $PostFXManager::Settings::SSAO::lRadius = $SSAOPostFx::lRadius;
   $PostFXManager::Settings::SSAO::lStrength = $SSAOPostFx::lStrength;
   $PostFXManager::Settings::SSAO::overallStrength = $SSAOPostFx::overallStrength;
   $PostFXManager::Settings::SSAO::quality = $SSAOPostFx::quality;
   $PostFXManager::Settings::SSAO::sDepthMax = $SSAOPostFx::sDepthMax;
   $PostFXManager::Settings::SSAO::sDepthMin = $SSAOPostFx::sDepthMin;
   $PostFXManager::Settings::SSAO::sDepthPow = $SSAOPostFx::sDepthPow;
   $PostFXManager::Settings::SSAO::sNormalPow = $SSAOPostFx::sNormalPow;
   $PostFXManager::Settings::SSAO::sNormalTol = $SSAOPostFx::sNormalTol;
   $PostFXManager::Settings::SSAO::sRadius = $SSAOPostFx::sRadius;
   $PostFXManager::Settings::SSAO::sStrength = $SSAOPostFx::sStrength;

   postVerbose("% - PostFX Manager - Settings Saved - SSAO");
}

// Snapshots the live $HDRPostFX::* values into the preset structure.
function PostFXManager::settingsApplyHDR(%this)
{
   $PostFXManager::Settings::HDR::adaptRate = $HDRPostFX::adaptRate;
   $PostFXManager::Settings::HDR::blueShiftColor = $HDRPostFX::blueShiftColor;
   $PostFXManager::Settings::HDR::brightPassThreshold = $HDRPostFX::brightPassThreshold;
   $PostFXManager::Settings::HDR::enableBloom = $HDRPostFX::enableBloom;
   $PostFXManager::Settings::HDR::enableBlueShift = $HDRPostFX::enableBlueShift;
   $PostFXManager::Settings::HDR::enableToneMapping = $HDRPostFX::enableToneMapping;
   $PostFXManager::Settings::HDR::gaussMean = $HDRPostFX::gaussMean;
   $PostFXManager::Settings::HDR::gaussMultiplier = $HDRPostFX::gaussMultiplier;
   $PostFXManager::Settings::HDR::gaussStdDev = $HDRPostFX::gaussStdDev;
   $PostFXManager::Settings::HDR::keyValue = $HDRPostFX::keyValue;
   $PostFXManager::Settings::HDR::minLuminace = $HDRPostFX::minLuminace;
   $PostFXManager::Settings::HDR::whiteCutoff = $HDRPostFX::whiteCutoff;
   $PostFXManager::Settings::ColorCorrectionRamp = $HDRPostFX::colorCorrectionRamp;

   postVerbose("% - PostFX Manager - Settings Saved - HDR");
}

// Snapshots the live $LightRayPostFX::* values into the preset structure.
function PostFXManager::settingsApplyLightRays(%this)
{
   $PostFXManager::Settings::LightRays::brightScalar = $LightRayPostFX::brightScalar;

   postVerbose("% - PostFX Manager - Settings Saved - Light Rays");
}

// Snapshots the live $DOFPostFx::* values into the preset structure.
function PostFXManager::settingsApplyDOF(%this)
{
   $PostFXManager::Settings::DOF::EnableAutoFocus = $DOFPostFx::EnableAutoFocus;
   $PostFXManager::Settings::DOF::BlurMin = $DOFPostFx::BlurMin;
   $PostFXManager::Settings::DOF::BlurMax = $DOFPostFx::BlurMax;
   $PostFXManager::Settings::DOF::FocusRangeMin = $DOFPostFx::FocusRangeMin;
   $PostFXManager::Settings::DOF::FocusRangeMax = $DOFPostFx::FocusRangeMax;
   $PostFXManager::Settings::DOF::BlurCurveNear = $DOFPostFx::BlurCurveNear;
   $PostFXManager::Settings::DOF::BlurCurveFar = $DOFPostFx::BlurCurveFar;

   postVerbose("% - PostFX Manager - Settings Saved - DOF");
}

// Snapshots every effect's live state (enable flags + parameters) into the preset structure.
function PostFXManager::settingsApplyAll(%this, %sFrom)
{
   // Apply settings which control if effects are on/off altogether.
   $PostFXManager::Settings::EnablePostFX = $PostFXManager::PostFX::Enabled;
   $PostFXManager::Settings::EnableDOF = $PostFXManager::PostFX::EnableDOF;
   $PostFXManager::Settings::EnableLightRays = $PostFXManager::PostFX::EnableLightRays;
   $PostFXManager::Settings::EnableHDR = $PostFXManager::PostFX::EnableHDR;
   $PostFXManager::Settings::EnabledSSAO = $PostFXManager::PostFX::EnableSSAO;

   // Apply settings should save the values in the system to the
   // the preset structure ($PostFXManager::Settings::*)

   // SSAO Settings
   %this.settingsApplySSAO();
   // HDR settings
   %this.settingsApplyHDR();
   // Light rays settings
   %this.settingsApplyLightRays();
   // DOF
   %this.settingsApplyDOF();

   postVerbose("% - PostFX Manager - All Settings applied to $PostFXManager::Settings");
}

// Loads and applies the default preset file.
function PostFXManager::settingsApplyDefaultPreset(%this)
{
   PostFXManager::loadPresetHandler($PostFXManager::defaultPreset);
}
// Copyright (c) 2010-2013 SharpDoc - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Xml;
using System.Xml.Serialization;
using HtmlAgilityPack;
using SharpDoc.Logging;

namespace SharpDoc.Model
{
    /// <summary>
    /// A delegate to create an HTML string from a template name used by <see cref="NTopic"/>
    /// </summary>
    /// <param name="filePath">Name of the template (currently supported are an html file or markdown file).</param>
    /// <returns>A HTML content</returns>
    public delegate string TopicContentLoaderDelegate(string filePath);

    /// <summary>
    /// Documentation topic store in an external file.
    /// </summary>
    [XmlType("topic")]
    public class NTopic : IModelReference
    {
        /// <summary>
        /// Id for the default class library topic
        /// </summary>
        public const string ClassLibraryTopicId = "X:ClassLibraryReference";

        /// <summary>
        /// Id for the default search results topic
        /// </summary>
        public const string SearchResultsTopicId = "X:SearchResults";

        /// <summary>
        /// Initializes a new instance of the <see cref="NTopic"/> class.
        /// </summary>
        public NTopic()
        {
            SubTopics = new List<NTopic>();
            Resources = new List<string>();
            Excludes = new List<string>();
            Category = "Article";
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="NTopic"/> class,
        /// copying identity fields from an existing model reference.
        /// </summary>
        /// <param name="reference">The reference.</param>
        public NTopic(IModelReference reference)
        {
            SubTopics = new List<NTopic>();
            Resources = new List<string>();
            Excludes = new List<string>();
            Id = reference.Id;
            Index = reference.Index;
            PageId = reference.PageId;
            PageTitle = reference.PageTitle;
            Name = reference.Name;
            FullName = reference.FullName;
            Category = reference.Category;
        }

        /// <summary>
        /// Gets or sets the XML generated comment ID.
        /// See http://msdn.microsoft.com/en-us/library/fsbx0t7x.aspx for more information.
        /// </summary>
        /// <value>The id.</value>
        [XmlAttribute("id")]
        public string Id { get; set; }

        /// <summary>
        /// Gets or sets the unique index of this node.
        /// </summary>
        /// <value>
        /// The unique index.
        /// </value>
        [XmlAttribute("index")]
        public int Index { get; set; }

        /// <summary>
        /// Gets or sets the normalized id. This is a normalized version of the <see cref="IModelReference.Id"/> that
        /// can be used for filename.
        /// </summary>
        /// <value>The file id.</value>
        [XmlAttribute("page-id")]
        public string PageId { get; set; }

        /// <summary>
        /// Gets or sets the page title.
        /// </summary>
        /// <value>
        /// The page title.
        /// </value>
        [XmlAttribute("page-title")]
        public string PageTitle { get; set; }

        /// <summary>
        /// Gets or sets the name of this instance.
        /// </summary>
        /// <value>The name.</value>
        [XmlAttribute("name")]
        public string Name { get; set; }

        /// <summary>
        /// Gets or sets the full name of this instance.
        /// </summary>
        /// <value>The full name.</value>
        [XmlAttribute("fullname")]
        public string FullName { get; set; }

        /// <summary>
        /// Gets or sets the category.
        /// </summary>
        /// <value>
        /// The category.
        /// </value>
        [XmlAttribute("category")]
        public string Category { get; set; }

        /// <summary>
        /// Gets or sets the assembly this topic is associated with. Not serialized.
        /// </summary>
        [XmlIgnore]
        public IModelReference Assembly { get; set; }

        /// <summary>
        /// Gets or sets the name of the file that contains the documentation.
        /// </summary>
        /// <value>The name of the file.</value>
        [XmlAttribute("filename")]
        public string FileName { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether [use page id URL].
        /// </summary>
        /// <value>
        ///   <c>true</c> if [use page id URL]; otherwise, <c>false</c>.
        /// </value>
        [XmlAttribute("on-url")]
        public bool IsPageIdOnUrl { get; set; }

        /// <summary>
        /// Gets or sets the sub topics.
        /// </summary>
        /// <value>The sub topics.</value>
        [XmlElement("topic")]
        public List<NTopic> SubTopics { get; set; }

        /// <summary>
        /// Gets or sets the parameters.
        /// </summary>
        /// <value>
        /// The parameters.
        /// </value>
        [XmlElement("param")]
        public List<ConfigParam> Parameters { get; set; }

        /// <summary>
        /// Gets or sets the excludes.
        /// </summary>
        /// <value>
        /// The excludes.
        /// </value>
        [XmlElement("exclude")]
        public List<string> Excludes { get; set; }

        /// <summary>
        /// Gets or sets the attached resources.
        /// </summary>
        /// <value>
        /// The attached resources.
        /// </value>
        [XmlElement("resource")]
        public List<string> Resources { get; set; }

        /// <summary>
        /// Gets or sets the web document.
        /// </summary>
        /// <value>The web document.</value>
        [XmlAttribute("webdoc")]
        public string WebDoc { get; set; }

        /// <summary>
        /// Gets or sets the html content. This is loaded from the filename.
        /// </summary>
        /// <value>The content.</value>
        [XmlIgnore]
        public string Content { get; set; }

        /// <summary>
        /// Gets or sets the parent topic.
        /// </summary>
        /// <value>The parent topic.</value>
        [XmlIgnore]
        public NTopic Parent { get; set; }

        /// <summary>
        /// Gets or sets the class node.
        /// </summary>
        /// <value>
        /// The class node.
        /// </value>
        [XmlIgnore]
        public NModelBase AttachedClassNode { get; set; }

        /// <inheritdoc/>
        [XmlIgnore]
        public XmlNode DocNode { get; set; }

        /// <inheritdoc/>
        [XmlIgnore]
        public string Description { get; set; }

        /// <inheritdoc/>
        [XmlIgnore]
        public string Remarks { get; set; }

        /// <inheritdoc/>
        [XmlIgnore]
        public XmlNode WebDocPage { get; set; }

        /// <inheritdoc/>
        [XmlIgnore]
        public XmlNode InheritDoc { get; set; }

        /// <summary>
        /// Gets or sets the configuration used to resolve paths and templates. Not serialized.
        /// </summary>
        [XmlIgnore]
        public Config Config { get; set; }

        /// <summary>
        /// Gets a value indicating whether this instance is class library.
        /// </summary>
        /// <value>
        ///   <c>true</c> if this instance is class library; otherwise, <c>false</c>.
        /// </value>
        [XmlIgnore]
        public bool IsClassLibrary
        {
            get { return Id == ClassLibraryTopicId; }
        }

        /// <summary>
        /// Gets a value indicating whether this instance is search result.
        /// </summary>
        /// <value>
        ///   <c>true</c> if this instance is search result; otherwise, <c>false</c>.
        /// </value>
        [XmlIgnore]
        public bool IsSearchResult
        {
            get { return Id == SearchResultsTopicId; }
        }

        /// <summary>
        /// Finds the topic by id, searching this topic and all sub topics recursively.
        /// </summary>
        /// <param name="topicId">The topic id.</param>
        /// <returns>The matching topic, or null if not found.</returns>
        public NTopic FindTopicById(string topicId)
        {
            if (Id == topicId)
                return this;
            return FindTopicById(SubTopics, topicId);
        }

        /// <summary>
        /// Performs an action on each topic, depth-first, starting with this instance.
        /// </summary>
        /// <param name="topicFunction">The topic function.</param>
        public void ForEachTopic(Action<NTopic> topicFunction)
        {
            topicFunction(this);
            foreach (var subTopic in SubTopics)
            {
                subTopic.ForEachTopic(topicFunction);
            }
        }

        /// <summary>
        /// Finds the topic by id.
        /// </summary>
        /// <param name="topics">The topics.</param>
        /// <param name="topicId">The topic id.</param>
        /// <returns>The first matching topic, or null if none of the trees contain it.</returns>
        public static NTopic FindTopicById(IEnumerable<NTopic> topics, string topicId)
        {
            NTopic topicFound = null;
            foreach (var topic in topics)
            {
                topicFound = topic.FindTopicById(topicId);
                if (topicFound != null)
                    break;
            }
            return topicFound;
        }

        /// <summary>
        /// Associate topics with their parent
        /// </summary>
        public void BuildParents(NTopic parentTopic = null)
        {
            Parent = parentTopic;
            foreach (var subTopic in SubTopics)
                subTopic.BuildParents(this);
        }

        /// <summary>
        /// Gets the parents of this instance.
        /// </summary>
        /// <returns>Parents of this instance</returns>
        /// <remarks>
        /// The parents is ordered from the root level to this instance (excluding this instance)
        /// </remarks>
        public List<NTopic> GetParents()
        {
            var topics = new List<NTopic>();
            var topic = Parent;
            while (topic != null)
            {
                topics.Insert(0, topic);
                topic = topic.Parent;
            }
            return topics;
        }

        /// <summary>
        /// Validates this topic, initializes its sub topics, instantiates missing topic
        /// files from the configured template, and loads the content of this topic.
        /// </summary>
        /// <param name="contentLoader">Delegate used to load and convert the topic file (html or markdown) to HTML.</param>
        /// <exception cref="System.ArgumentNullException">contentLoader</exception>
        public void Init(TopicContentLoaderDelegate contentLoader)
        {
            if (contentLoader == null) throw new ArgumentNullException("contentLoader");

            // Check that id is valid
            if (string.IsNullOrEmpty(Id))
                Logger.Error("Missing id for topic [{0}]", this);

            // Check that name is valid
            if (string.IsNullOrEmpty(Name))
            {
                // The class library root topic gets a default name.
                if (Id == ClassLibraryTopicId)
                {
                    Name = "Class Library";
                }
                else
                {
                    Logger.Error("Missing name for topic [{0}]", this);
                }
            }

            // Copy Name to Fullname if empty
            if (string.IsNullOrEmpty(FullName))
                FullName = Name;

            if (string.IsNullOrEmpty(PageTitle))
                PageTitle = Name;

            // Topic file paths are resolved relative to the config file's directory.
            var rootPath = Path.GetDirectoryName(Config.FilePath);
            rootPath = rootPath ?? "";

            // Initialize sub topics
            foreach(var topic in SubTopics)
                topic.Init(contentLoader);

            // Create non existing topic files based on template
            if (Config.TopicTemplate != null && File.Exists(Config.TopicTemplate))
            {
                // $PropertyName placeholders in the template are substituted via reflection.
                var regex = new Regex(@"(\$\w+)");

                if (FileName != null && !File.Exists(FileName))
                {
                    var content = File.ReadAllText(Config.TopicTemplate);
                    content = regex.Replace(
                        content,
                        match =>
                        {
                            var propertyName = match.Groups[1].Value.Substring(1);
                            // NOTE(review): GetProperty returns null for an unknown placeholder,
                            // which would throw NullReferenceException here — confirm templates
                            // only ever reference real NTopic properties.
                            var value = this.GetType().GetProperty(propertyName).GetValue(this, null);
                            return value == null ? string.Empty : value.ToString();
                        });

                    File.WriteAllText(FileName, content);
                }
            }

            if (Id != ClassLibraryTopicId)
            {
                // Load content file
                if (!string.IsNullOrEmpty(FileName))
                {
                    string filePath = null;
                    try
                    {
                        filePath = Path.Combine(rootPath, FileName);
                        var rawContent = contentLoader(filePath);

                        if (rawContent == null)
                        {
                            Logger.Warning("Cannot use template documentation [{0}] not supported", FileName);
                            return;
                        }

                        var htmlDocument = new HtmlDocument();
                        htmlDocument.LoadHtml(rawContent);

                        // Override title from the document's <title> element when present.
                        var titleNode = htmlDocument.DocumentNode.SelectSingleNode("html/head/title");
                        if (titleNode != null)
                        {
                            PageTitle = titleNode.InnerText;
                            Name = titleNode.InnerText;
                        }

                        // Get body; fall back to the raw content when the document has no <body>.
                        var bodyNode = htmlDocument.DocumentNode.Descendants("body").FirstOrDefault();
                        Content = bodyNode == null ? rawContent : bodyNode.InnerHtml;
                        Content = Content.Trim();
                    }
                    catch (Exception ex)
                    {
                        Logger.Error("Cannot load content for topic [{0}] from path [{1}]. Reason: {2}", this, filePath, ex.Message);
                    }
                }
                else if (!string.IsNullOrEmpty(WebDoc))
                {
                    Content = "<webdoc>" + WebDoc + "</webdoc>";
                }
                else
                {
                    // Check that filename is valid
                    Logger.Error("Filename or WebDoc for topic [{0}] cannot be empty", this);
                }
            }
        }

        /// <summary>
        /// Gets the default class library topic.
/// </summary> /// <value>The default class library topic.</value> public static NTopic DefaultClassLibraryTopic { get { return new NTopic() { Index = 0, Id = ClassLibraryTopicId, PageId = "api", Name = "Class Library Reference", PageTitle = "Class Library Reference", }; } } /// <summary> /// Gets the default search results topic. /// </summary> /// <value>The default search results topic.</value> public static NTopic DefaultSearchResultsTopic { get { return new NTopic() { Id = SearchResultsTopicId, PageId = "search-results", Name = "Search results", PageTitle = "Search results", }; } } /// <summary> /// Returns a <see cref="System.String"/> that represents this instance. /// </summary> /// <returns> /// A <see cref="System.String"/> that represents this instance. /// </returns> public override string ToString() { return string.Format(System.Globalization.CultureInfo.InvariantCulture, "Id: {0}, PageId: {1}, Name: {2}, FullName: {3}, FileName: {4}, SubTopics.Count: {5}", Id, PageId, Name, FullName, FileName, SubTopics.Count); } } }
using System;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Text;
using Microsoft.Extensions.Logging;
using Orleans.Messaging;
using Orleans.Serialization;

namespace Orleans.Runtime.Messaging
{
    /// <summary>
    /// Outgoing-message pump for silo-to-silo traffic. Prepares and validates each
    /// message before sending, remembers recent connection failures per target silo
    /// so sends to a just-unreachable silo are dropped quickly, and converts send or
    /// serialization failures into retries, rejections or drops.
    /// </summary>
    internal class SiloMessageSender : OutgoingMessageSender
    {
        private readonly MessageCenter messageCenter;

        // Retry budget used when a message carries no MaxRetries of its own (see RetryMessage).
        private const int DEFAULT_MAX_RETRIES = 0;

        // Timestamp of the most recent failure to obtain a sending socket, per target silo.
        // Consulted by PrepareMessageForSend to fail fast; written by GetSendingSocket.
        private readonly Dictionary<SiloAddress, DateTime> lastConnectionFailure;

        internal const string RETRY_COUNT_TAG = "RetryCount";

        // Window during which messages to a silo that just failed are dropped without a send attempt.
        internal static readonly TimeSpan CONNECTION_RETRY_DELAY = TimeSpan.FromMilliseconds(1000);

        internal SiloMessageSender(string nameSuffix, MessageCenter msgCtr, SerializationManager serializationManager, ExecutorService executorService, ILoggerFactory loggerFactory)
            : base(nameSuffix, serializationManager, executorService, loggerFactory)
        {
            messageCenter = msgCtr;
            lastConnectionFailure = new Dictionary<SiloAddress, DateTime>();
            OnFault = FaultBehavior.RestartOnFault;
        }

        protected override SocketDirection GetSocketDirection()
        {
            return SocketDirection.SiloToSilo;
        }

        /// <summary>
        /// Validates and stamps a message prior to sending.
        /// Returns false (after dropping or failing the message) when it must not be sent:
        /// expired, missing target, target known dead, or target failed very recently.
        /// </summary>
        protected override bool PrepareMessageForSend(Message msg)
        {
            // Don't send messages that have already timed out
            if (msg.IsExpired)
            {
                msg.DropExpiredMessage(MessagingStatisticsGroup.Phase.Send);
                return false;
            }

            // Fill in the outbound message with our silo address, if it's not already set
            if (msg.SendingSilo == null)
                msg.SendingSilo = messageCenter.MyAddress;

            // If there's no target silo set, then we shouldn't see this message; send it back
            if (msg.TargetSilo == null)
            {
                FailMessage(msg, "No target silo provided -- internal error");
                return false;
            }

            // If we know this silo is dead, don't bother
            if ((messageCenter.SiloDeadOracle != null) && messageCenter.SiloDeadOracle(msg.TargetSilo))
            {
                FailMessage(msg, String.Format("Target {0} silo is known to be dead", msg.TargetSilo.ToLongString()));
                return false;
            }

            // If we had a bad connection to this address recently, don't even try
            DateTime failure;
            if (lastConnectionFailure.TryGetValue(msg.TargetSilo, out failure))
            {
                var since = DateTime.UtcNow.Subtract(failure);
                if (since < CONNECTION_RETRY_DELAY)
                {
                    FailMessage(msg, String.Format("Recent ({0} ago, at {1}) connection failure trying to reach target silo {2}. Going to drop {3} msg {4} without sending. CONNECTION_RETRY_DELAY = {5}.",
                        since, LogFormatter.PrintDate(failure), msg.TargetSilo.ToLongString(), msg.Direction, msg.Id, CONNECTION_RETRY_DELAY));
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Obtains (or re-establishes) a socket to the message's target silo.
        /// On any exception the cache entry is invalidated and the failure time is
        /// recorded in <see cref="lastConnectionFailure"/> so later sends fail fast.
        /// </summary>
        protected override bool GetSendingSocket(Message msg, out Socket socket, out SiloAddress targetSilo, out string error)
        {
            socket = null;
            targetSilo = msg.TargetSilo;
            error = null;
            try
            {
                socket = messageCenter.SocketManager.GetSendingSocket(targetSilo.Endpoint);
                if (socket.Connected) return true;

                // Cached socket was disconnected; drop it and fetch a fresh one.
                messageCenter.SocketManager.InvalidateEntry(targetSilo.Endpoint);
                socket = messageCenter.SocketManager.GetSendingSocket(targetSilo.Endpoint);
                return true;
            }
            catch (Exception ex)
            {
                error = "Exception getting a sending socket to endpoint " + targetSilo.ToString();
                Log.Warn(ErrorCode.Messaging_UnableToGetSendingSocket, error, ex);
                messageCenter.SocketManager.InvalidateEntry(targetSilo.Endpoint);
                lastConnectionFailure[targetSilo] = DateTime.UtcNow;
                return false;
            }
        }

        protected override void OnGetSendingSocketFailure(Message msg, string error)
        {
            FailMessage(msg, error);
        }

        /// <summary>
        /// Handles a message that could not be serialized: requests are rejected back
        /// to the sender, responses are (once) converted to an error response, and
        /// anything else is dropped with accounting.
        /// </summary>
        protected override void OnMessageSerializationFailure(Message msg, Exception exc)
        {
            // we only get here if we failed to serialize the msg (or any other catastrophic failure).
            // Request msg fails to serialize on the sending silo, so we just enqueue a rejection msg.
            // Response msg fails to serialize on the responding silo, so we try to send an error response back.
            this.Log.LogWarning(
                (int)ErrorCode.MessagingUnexpectedSendError,
                "Unexpected error serializing message {Message}: {Exception}",
                msg,
                exc);

            msg.ReleaseBodyAndHeaderBuffers();
            MessagingStatisticsGroup.OnFailedSentMessage(msg);

            var retryCount = msg.RetryCount ?? 0;

            if (msg.Direction == Message.Directions.Request)
            {
                messageCenter.SendRejection(msg, Message.RejectionTypes.Unrecoverable, exc.ToString());
            }
            else if (msg.Direction == Message.Directions.Response && retryCount < 1)
            {
                // if we failed sending an original response, turn the response body into an error and reply with it.
                // unless we have already tried sending the response multiple times.
                msg.Result = Message.ResponseTypes.Error;
                msg.BodyObject = Response.ExceptionResponse(exc);
                msg.RetryCount = retryCount + 1;
                this.messageCenter.SendMessage(msg);
            }
            else
            {
                this.Log.LogWarning(
                    (int)ErrorCode.Messaging_OutgoingMS_DroppingMessage,
                    "Silo {SiloAddress} is dropping message which failed during serialization: {Message}. Exception = {Exception}",
                    this.messageCenter.MyAddress,
                    msg,
                    exc);

                MessagingStatisticsGroup.OnDroppedSentMessage(msg);
            }
        }

        protected override void OnSendFailure(Socket socket, SiloAddress targetSilo)
        {
            // Drop the cached socket so the next send re-establishes the connection.
            messageCenter.SocketManager.InvalidateEntry(targetSilo.Endpoint);
        }

        protected override void ProcessMessageAfterSend(Message msg, bool sendError, string sendErrorStr)
        {
            if (sendError)
            {
                // Keep the body buffers: the message will be re-sent by RetryMessage.
                msg.ReleaseHeadersOnly();
                RetryMessage(msg);
            }
            else
            {
                msg.ReleaseBodyAndHeaderBuffers();
                if (Log.IsEnabled(LogLevel.Trace)) Log.Trace("Sending queue delay time for: {0} is {1}", msg, DateTime.UtcNow.Subtract(msg.QueuedTime ?? DateTime.UtcNow));
            }
        }

        /// <summary>
        /// Terminally fails a message: requests get a transient rejection sent back to
        /// the caller; all other directions are dropped (with statistics recorded).
        /// </summary>
        protected override void FailMessage(Message msg, string reason)
        {
            msg.ReleaseBodyAndHeaderBuffers();
            MessagingStatisticsGroup.OnFailedSentMessage(msg);
            if (msg.Direction == Message.Directions.Request)
            {
                if (Log.IsEnabled(LogLevel.Debug)) Log.Debug(ErrorCode.MessagingSendingRejection, "Silo {siloAddress} is rejecting message: {message}. Reason = {reason}", messageCenter.MyAddress, msg, reason);

                // Done retrying, send back an error instead
                messageCenter.SendRejection(msg, Message.RejectionTypes.Transient, String.Format("Silo {0} is rejecting message: {1}. Reason = {2}", messageCenter.MyAddress, msg, reason));
            }
            else
            {
                Log.Info(ErrorCode.Messaging_OutgoingMS_DroppingMessage, "Silo {siloAddress} is dropping message: {message}. Reason = {reason}", messageCenter.MyAddress, msg, reason);
                MessagingStatisticsGroup.OnDroppedSentMessage(msg);
            }
        }

        /// <summary>
        /// Re-queues the message if its retry budget (MaxRetries, defaulting to
        /// DEFAULT_MAX_RETRIES, i.e. no retries) is not exhausted; otherwise fails it
        /// with a "Retry count exceeded" reason.
        /// </summary>
        private void RetryMessage(Message msg, Exception ex = null)
        {
            if (msg == null) return;

            int maxRetries = msg.MaxRetries ?? DEFAULT_MAX_RETRIES;
            int retryCount = msg.RetryCount ?? 0;
            if (retryCount < maxRetries)
            {
                msg.RetryCount = retryCount + 1;
                messageCenter.OutboundQueue.SendMessage(msg);
            }
            else
            {
                var reason = new StringBuilder("Retry count exceeded. ");
                if (ex != null)
                {
                    reason.Append("Original exception is: ").Append(ex.ToString());
                }
                reason.Append("Msg is: ").Append(msg);
                FailMessage(msg, reason.ToString());
            }
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenMetaverse;
using OpenSim.Region.Physics.Manager;

/*
 * Steps to add a new prioritization policy:
 *
 *  - Add a new value to the UpdatePrioritizationSchemes enum.
 *  - Specify this new value in the [InterestManagement] section of your
 *    OpenSim.ini. The name in the config file must match the enum value name
 *    (although it is not case sensitive).
 *  - Write a new GetPriorityBy*() method in this class.
 *  - Add a new entry to the switch statement in GetUpdatePriority() that calls
 *    your method.
 */

namespace OpenSim.Region.Framework.Scenes
{
    // Selects which GetPriorityBy*() method GetUpdatePriority() dispatches to.
    public enum UpdatePrioritizationSchemes
    {
        Time = 0,
        Distance = 1,
        SimpleAngularDistance = 2,
        FrontBack = 3,
        BestAvatarResponsiveness = 4,
    }

    /// <summary>
    /// Computes the priority queue index for entity updates sent to a client,
    /// according to the scene's configured <see cref="UpdatePrioritizationSchemes"/>.
    /// Lower queue numbers are higher priority; queue 0 is the client's own avatar.
    /// </summary>
    public class Prioritizer
    {
        private static readonly ILog m_log = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

        private Scene m_scene;

        public Prioritizer(Scene scene)
        {
            m_scene = scene;
        }

        /// <summary>
        /// Returns the priority queue into which the update should be placed. Updates within a
        /// queue will be processed in arrival order. There are currently 12 priority queues
        /// implemented in PriorityQueue class in LLClientView. Queue 0 is generally retained
        /// for avatar updates. The fair queuing discipline for processing the priority queues
        /// assumes that the number of entities in each priority queues increases exponentially.
        /// So for example... if queue 1 contains all updates within 10m of the avatar or camera
        /// then queue 2 at 20m is about 3X bigger in space &amp; about 3X bigger in total number
        /// of updates.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when <paramref name="entity"/> is null or the configured scheme is unknown.
        /// </exception>
        public uint GetUpdatePriority(IClientAPI client, ISceneEntity entity)
        {
            // If entity is null we have a serious problem
            if (entity == null)
            {
                m_log.WarnFormat("[PRIORITIZER] attempt to prioritize null entity");
                throw new InvalidOperationException("Prioritization entity not defined");
            }

            // If this is an update for our own avatar give it the highest priority
            if (client.AgentId == entity.UUID)
                return 0;

            uint priority;

            switch (m_scene.UpdatePrioritizationScheme)
            {
                case UpdatePrioritizationSchemes.Time:
                    priority = GetPriorityByTime(client, entity);
                    break;
                case UpdatePrioritizationSchemes.Distance:
                    priority = GetPriorityByDistance(client, entity);
                    break;
                case UpdatePrioritizationSchemes.SimpleAngularDistance:
                    priority = GetPriorityByDistance(client, entity); // TODO: Reimplement SimpleAngularDistance
                    break;
                case UpdatePrioritizationSchemes.FrontBack:
                    priority = GetPriorityByFrontBack(client, entity);
                    break;
                case UpdatePrioritizationSchemes.BestAvatarResponsiveness:
                    priority = GetPriorityByBestAvatarResponsiveness(client, entity);
                    break;
                default:
                    throw new InvalidOperationException("UpdatePrioritizationScheme not defined.");
            }

            return priority;
        }

        // Time scheme: everything goes into the first non-immediate queue, except
        // the client's own attachments which get queue 1.
        private uint GetPriorityByTime(IClientAPI client, ISceneEntity entity)
        {
            // And anything attached to this avatar gets top priority as well
            if (entity is SceneObjectPart)
            {
                SceneObjectPart sop = (SceneObjectPart)entity;
                if (sop.ParentGroup.RootPart.IsAttachment && client.AgentId == sop.ParentGroup.RootPart.AttachedAvatar)
                    return 1;
            }

            return PriorityQueue.NumberOfImmediateQueues; // first queue past the immediate queues
        }

        // Distance scheme: queue chosen by distance only (no front/back adjustment).
        private uint GetPriorityByDistance(IClientAPI client, ISceneEntity entity)
        {
            // And anything attached to this avatar gets top priority as well
            if (entity is SceneObjectPart)
            {
                SceneObjectPart sop = (SceneObjectPart)entity;
                if (sop.ParentGroup.RootPart.IsAttachment && client.AgentId == sop.ParentGroup.RootPart.AttachedAvatar)
                    return 1;
            }

            return ComputeDistancePriority(client,entity,false);
        }

        // FrontBack scheme: distance plus a penalty for objects behind the camera.
        private uint GetPriorityByFrontBack(IClientAPI client, ISceneEntity entity)
        {
            // And anything attached to this avatar gets top priority as well
            if (entity is SceneObjectPart)
            {
                SceneObjectPart sop = (SceneObjectPart)entity;
                if (sop.ParentGroup.RootPart.IsAttachment && client.AgentId == sop.ParentGroup.RootPart.AttachedAvatar)
                    return 1;
            }

            return ComputeDistancePriority(client,entity,true);
        }

        // BestAvatarResponsiveness: front/back distance as a base, then boost other
        // avatars and attachments to queue 1, and demote non-physical prims one queue.
        private uint GetPriorityByBestAvatarResponsiveness(IClientAPI client, ISceneEntity entity)
        {
            uint pqueue = ComputeDistancePriority(client,entity,true);

            ScenePresence presence = m_scene.GetScenePresence(client.AgentId);
            if (presence != null)
            {
                if (!presence.IsChildAgent)
                {
                    // All avatars other than our own go into pqueue 1
                    if (entity is ScenePresence)
                        return 1;

                    if (entity is SceneObjectPart)
                    {
                        // Attachments are high priority,
                        if (((SceneObjectPart)entity).ParentGroup.RootPart.IsAttachment)
                            return 1;

                        // Non physical prims are lower priority than physical prims
                        PhysicsActor physActor = ((SceneObjectPart)entity).ParentGroup.RootPart.PhysActor;
                        if (physActor == null || !physActor.IsPhysical)
                            pqueue++;
                    }
                }
            }

            return pqueue;
        }

        /// <summary>
        /// Maps the entity's distance from the viewer into a queue index, doubling the
        /// distance band per queue (10, 20, 40, 80, ... metres). When
        /// <paramref name="useFrontBack"/> is set, objects behind the camera plane are
        /// pushed one queue further back.
        /// </summary>
        private uint ComputeDistancePriority(IClientAPI client, ISceneEntity entity, bool useFrontBack)
        {
            // Get this agent's position
            ScenePresence presence = m_scene.GetScenePresence(client.AgentId);
            if (presence == null)
            {
                // this shouldn't happen, it basically means that we are prioritizing
                // updates to send to a client that doesn't have a presence in the scene
                // seems like there's race condition here...

                // m_log.WarnFormat("[PRIORITIZER] attempt to use agent {0} not in the scene",client.AgentId);
                // throw new InvalidOperationException("Prioritization agent not defined");
                return PriorityQueue.NumberOfQueues - 1;
            }

            // Use group position for child prims, since we are putting child prims in
            // the same queue with the root of the group, the root prim (which goes into
            // the queue first) should always be sent first, no need to adjust child prim
            // priorities
            Vector3 entityPos = entity.AbsolutePosition;
            if (entity is SceneObjectPart)
            {
                SceneObjectGroup group = (entity as SceneObjectPart).ParentGroup;
                if (group != null)
                    entityPos = group.AbsolutePosition;
            }

            // Use the camera position for local agents and avatar position for remote agents
            Vector3 presencePos = (presence.IsChildAgent) ?
                presence.AbsolutePosition :
                presence.CameraPosition;

            // Compute the distance...
            double distance = Vector3.Distance(presencePos, entityPos);

            // And convert the distance to a priority queue, this computation gives queues
            // at 10, 20, 40, 80, 160, 320, 640, and 1280m
            uint pqueue = PriorityQueue.NumberOfImmediateQueues;
            uint queues = PriorityQueue.NumberOfQueues - PriorityQueue.NumberOfImmediateQueues;

            for (int i = 0; i < queues - 1; i++)
            {
                if (distance < 10 * Math.Pow(2.0,i))
                    break;
                pqueue++;
            }

            // If this is a root agent, then determine front & back
            // Bump up the priority queue (drop the priority) for any objects behind the avatar
            if (useFrontBack && ! presence.IsChildAgent)
            {
                // Root agent, decrease priority for objects behind us
                Vector3 camPosition = presence.CameraPosition;
                Vector3 camAtAxis = presence.CameraAtAxis;

                // Plane equation
                float d = -Vector3.Dot(camPosition, camAtAxis);

                // Signed distance of the entity from the camera plane; negative means "behind".
                float p = Vector3.Dot(camAtAxis, entityPos) + d;
                if (p < 0.0f)
                    pqueue++;
            }

            return pqueue;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Security;
using System.Threading;
using System.Threading.Tasks;

namespace System.IO.Pipes
{
    /// <summary>
    /// Unix-specific portion of <see cref="PipeStream"/>. Pipes are backed by FIFOs
    /// in a well-known temp directory; reads and writes go through raw read(2)/write(2)
    /// system calls wrapped by <see cref="SysCall"/>.
    /// </summary>
    public abstract partial class PipeStream : Stream
    {
        // The Windows implementation of PipeStream sets the stream's handle during
        // creation, and as such should always have a handle, but the Unix implementation
        // sometimes sets the handle not during creation but later during connection.
        // As such, validation during member access needs to verify a valid handle on
        // Windows, but can't assume a valid handle on Unix.
        internal const bool CheckOperationsRequiresSetHandle = false;

        /// <summary>
        /// Maps a (serverName, pipeName) pair to the file-system path backing the pipe.
        /// Only local pipes are supported, and the pipe name must be a valid file name.
        /// </summary>
        internal static string GetPipePath(string serverName, string pipeName)
        {
            if (serverName != "." && serverName != Interop.libc.gethostname())
            {
                // Cross-machine pipes are not supported.
                throw new PlatformNotSupportedException();
            }

            if (pipeName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
            {
                // Since pipes are stored as files in the file system, we don't support
                // pipe names that are actually paths or that otherwise have invalid
                // filename characters in them.
                throw new PlatformNotSupportedException();
            }

            // Return the pipe path
            return Path.Combine(EnsurePipeDirectoryPath(), pipeName);
        }

        /// <summary>Throws an exception if the supplied handle does not represent a valid pipe.</summary>
        /// <param name="safePipeHandle">The handle to validate.</param>
        internal void ValidateHandleIsPipe(SafePipeHandle safePipeHandle)
        {
            // fstat the descriptor and verify its file type is FIFO.
            SysCall(safePipeHandle, (fd, _, __) =>
            {
                Interop.Sys.FileStatus status;
                int result = Interop.Sys.FStat(fd, out status);
                if (result == 0)
                {
                    if ((status.Mode & Interop.Sys.FileTypes.S_IFMT) != Interop.Sys.FileTypes.S_IFIFO)
                    {
                        throw new IOException(SR.IO_InvalidPipeHandle);
                    }
                }
                return result;
            });
        }

        /// <summary>Initializes the handle to be used asynchronously.</summary>
        /// <param name="handle">The handle.</param>
        [SecurityCritical]
        private void InitializeAsyncHandle(SafePipeHandle handle)
        {
            // nop
        }

        private void UninitializeAsyncHandle()
        {
            // nop
        }

        // Synchronous read via read(2). Returns the number of bytes read (may be less
        // than count); EINTR is retried inside SysCall.
        [SecurityCritical]
        private unsafe int ReadCore(byte[] buffer, int offset, int count)
        {
            Debug.Assert(_handle != null, "_handle is null");
            Debug.Assert(!_handle.IsClosed, "_handle is closed");
            Debug.Assert(CanRead, "can't read");
            Debug.Assert(buffer != null, "buffer is null");
            Debug.Assert(offset >= 0, "offset is negative");
            Debug.Assert(count >= 0, "count is negative");

            fixed (byte* bufPtr = buffer)
            {
                return (int)SysCall(_handle, (fd, ptr, len) =>
                {
                    long result = (long)Interop.libc.read(fd, (byte*)ptr, (IntPtr)len);
                    Debug.Assert(result <= len);
                    return result;
                }, (IntPtr)(bufPtr + offset), count);
            }
        }

        [SecuritySafeCritical]
        private Task<int> ReadAsyncCore(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            // Delegate to the base Stream's ReadAsync, which will just invoke Read asynchronously.
            return base.ReadAsync(buffer, offset, count, cancellationToken);
        }

        // Synchronous write via write(2), looping until the full count has been
        // written (a single write may be partial).
        [SecurityCritical]
        private unsafe void WriteCore(byte[] buffer, int offset, int count)
        {
            Debug.Assert(_handle != null, "_handle is null");
            Debug.Assert(!_handle.IsClosed, "_handle is closed");
            Debug.Assert(CanWrite, "can't write");
            Debug.Assert(buffer != null, "buffer is null");
            Debug.Assert(offset >= 0, "offset is negative");
            Debug.Assert(count >= 0, "count is negative");

            fixed (byte* bufPtr = buffer)
            {
                while (count > 0)
                {
                    int bytesWritten = (int)SysCall(_handle, (fd, ptr, len) =>
                    {
                        long result = (long)Interop.libc.write(fd, (byte*)ptr, (IntPtr)len);
                        Debug.Assert(result <= len);
                        return result;
                    }, (IntPtr)(bufPtr + offset), count);
                    count -= bytesWritten;
                    offset += bytesWritten;
                }
            }
        }

        [SecuritySafeCritical]
        private Task WriteAsyncCore(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            // Delegate to the base Stream's WriteAsync, which will just invoke Write asynchronously.
            return base.WriteAsync(buffer, offset, count, cancellationToken);
        }

        // Blocks until the other end of the pipe has read in all written buffer.
        [SecurityCritical]
        public void WaitForPipeDrain()
        {
            CheckWriteOperations();
            if (!CanWrite)
            {
                throw __Error.GetWriteNotSupported();
            }

            throw new PlatformNotSupportedException(); // no mechanism for this on Unix
        }

        // Gets the transmission mode for the pipe.  This is virtual so that subclassing types can
        // override this in cases where only one mode is legal (such as anonymous pipes)
        public virtual PipeTransmissionMode TransmissionMode
        {
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
            get
            {
                CheckPipePropertyOperations();
                return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
            }
        }

        // Gets the buffer size in the inbound direction for the pipe. This checks if pipe has read
        // access. If that passes, call to GetNamedPipeInfo will succeed.
        public virtual int InBufferSize
        {
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
            get
            {
                CheckPipePropertyOperations();
                if (!CanRead)
                {
                    throw new NotSupportedException(SR.NotSupported_UnreadableStream);
                }
                return InBufferSizeCore;
            }
        }

        // Gets the buffer size in the outbound direction for the pipe. This uses cached version
        // if it's an outbound only pipe because GetNamedPipeInfo requires read access to the pipe.
        // However, returning cached is good fallback, especially if user specified a value in
        // the ctor.
        public virtual int OutBufferSize
        {
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
            get
            {
                CheckPipePropertyOperations();
                if (!CanWrite)
                {
                    throw new NotSupportedException(SR.NotSupported_UnwritableStream);
                }
                return OutBufferSizeCore;
            }
        }

        // Read mode is always Byte on Unix; the setter validates the value but only
        // accepts Byte (Message mode is not supported).
        public virtual PipeTransmissionMode ReadMode
        {
            [SecurityCritical]
            get
            {
                CheckPipePropertyOperations();
                return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
            }
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
            set
            {
                CheckPipePropertyOperations();
                if (value < PipeTransmissionMode.Byte || value > PipeTransmissionMode.Message)
                {
                    throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_TransmissionModeByteOrMsg);
                }

                if (value != PipeTransmissionMode.Byte) // Unix pipes are only byte-based, not message-based
                {
                    throw new PlatformNotSupportedException();
                }

                // nop, since it's already the only valid value
            }
        }

        // -----------------------------
        // ---- PAL layer ends here ----
        // -----------------------------

        // Cached full path of the directory that holds pipe FIFOs (e.g. /tmp/.dotnet/corefx/pipes/).
        private static string s_pipeDirectoryPath;

        /// <summary>
        /// Returns the directory in which pipe files live, creating the directory
        /// structure on first use. The result is cached in <see cref="s_pipeDirectoryPath"/>.
        /// </summary>
        private static string EnsurePipeDirectoryPath()
        {
            const string PipesFeatureName = "pipes";

            // Ideally this would simply use PersistedFiles.GetTempFeatureDirectory(PipesFeatureName) and then
            // Directory.CreateDirectory to ensure it exists.  But this assembly doesn't reference System.IO.FileSystem.
            // As such, we'd be calling GetTempFeatureDirectory, only to then need to parse it in order
            // to create each of the individual directories as part of the path.  We instead access the named portions
            // of the path directly and do the building of the path and directory structure manually.

            // First ensure we know what the full path should be, e.g. /tmp/.dotnet/corefx/pipes/
            string fullPath = s_pipeDirectoryPath;
            string tempPath = null;
            if (fullPath == null)
            {
                tempPath = Path.GetTempPath();
                fullPath = Path.Combine(tempPath, PersistedFiles.TopLevelHiddenDirectory, PersistedFiles.SecondLevelDirectory, PipesFeatureName);
                s_pipeDirectoryPath = fullPath;
            }

            // Then create the directory if it doesn't already exist.  If we get any error back from stat,
            // just proceed to build up the directory, failing in the CreateDirectory calls if there's some
            // problem.  Similarly, it's possible stat succeeds but the path is a file rather than directory; we'll
            // call that success for now and let this fail later when the code tries to create a file in this "directory"
            // (we don't want to overwrite/delete whatever that unknown file may be, and this is similar to other cases
            // we can't control where the file system is manipulated concurrently with and separately from this code).
            Interop.Sys.FileStatus ignored;
            bool pathExists = Interop.Sys.Stat(fullPath, out ignored) == 0;
            if (!pathExists)
            {
                // We need to build up the directory manually.  Ensure we have the temp directory in which
                // we'll create the structure, e.g. /tmp/
                if (tempPath == null)
                {
                    tempPath = Path.GetTempPath();
                }
                Debug.Assert(Interop.Sys.Stat(tempPath, out ignored) == 0, "Path.GetTempPath() directory could not be accessed");

                // Create /tmp/.dotnet/ if it doesn't exist.
                string partialPath = Path.Combine(tempPath, PersistedFiles.TopLevelHiddenDirectory);
                CreateDirectory(partialPath);

                // Create /tmp/.dotnet/corefx/ if it doesn't exist
                partialPath = Path.Combine(partialPath, PersistedFiles.SecondLevelDirectory);
                CreateDirectory(partialPath);

                // Create /tmp/.dotnet/corefx/pipes/ if it doesn't exist
                CreateDirectory(fullPath);
            }

            return fullPath;
        }

        /// <summary>
        /// Creates a single directory via mkdir(2), treating EEXIST as success and
        /// retrying on EINTR. Any other errno is surfaced as an exception.
        /// </summary>
        private static void CreateDirectory(string directoryPath)
        {
            while (true)
            {
                int result = Interop.libc.mkdir(directoryPath, (int)Interop.libc.Permissions.S_IRWXU);

                // If successful created, we're done.
                if (result >= 0)
                    return;

                // If the directory already exists, consider it a success.
                Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
                if (errorInfo.Error == Interop.Error.EEXIST)
                    return;

                // If the I/O was interrupted, try again.
                if (errorInfo.Error == Interop.Error.EINTR)
                    continue;

                // Otherwise, fail.
                throw Interop.GetExceptionForIoErrno(errorInfo, directoryPath, isDirectory: true);
            }
        }

        /// <summary>
        /// Translates managed pipe options into open(2) flags: direction to access
        /// mode, WriteThrough to O_SYNC, non-inheritable handles to O_CLOEXEC.
        /// </summary>
        internal static Interop.libc.OpenFlags TranslateFlags(PipeDirection direction, PipeOptions options, HandleInheritability inheritability)
        {
            // Translate direction
            Interop.libc.OpenFlags flags =
                direction == PipeDirection.InOut ? Interop.libc.OpenFlags.O_RDWR :
                direction == PipeDirection.Out ? Interop.libc.OpenFlags.O_WRONLY :
                Interop.libc.OpenFlags.O_RDONLY;

            // Translate options
            if ((options & PipeOptions.WriteThrough) != 0)
            {
                flags |= Interop.libc.OpenFlags.O_SYNC;
            }

            // Translate inheritability.
            if ((inheritability & HandleInheritability.Inheritable) == 0)
            {
                flags |= Interop.libc.OpenFlags.O_CLOEXEC;
            }

            // PipeOptions.Asynchronous is ignored, at least for now.  Asynchronous processing
            // is handling just by queueing a work item to do the work synchronously on a pool thread.

            return flags;
        }

        /// <summary>
        /// Helper for making system calls that involve the stream's file descriptor.
        /// System calls are expected to return greater than or equal to zero on success,
        /// and less than zero on failure.  In the case of failure, errno is expected to
        /// be set to the relevant error code.
        /// </summary>
        /// <param name="handle">The pipe handle whose descriptor the call operates on.</param>
        /// <param name="sysCall">A delegate that invokes the system call.</param>
        /// <param name="arg1">The first argument to be passed to the system call, after the file descriptor.</param>
        /// <param name="arg2">The second argument to be passed to the system call.</param>
        /// <returns>The return value of the system call.</returns>
        /// <remarks>
        /// Arguments are expected to be passed via <paramref name="arg1"/> and <paramref name="arg2"/>
        /// so as to avoid delegate and closure allocations at the call sites.
        /// EINTR failures are retried transparently; EPIPE marks the pipe broken before throwing.
        /// </remarks>
        private long SysCall(
            SafePipeHandle handle,
            Func<int, IntPtr, int, long> sysCall,
            IntPtr arg1 = default(IntPtr), int arg2 = default(int))
        {
            bool gotRefOnHandle = false;
            try
            {
                // Get the file descriptor from the handle.  We increment the ref count to help
                // ensure it's not closed out from under us.
                handle.DangerousAddRef(ref gotRefOnHandle);
                Debug.Assert(gotRefOnHandle);
                int fd = (int)handle.DangerousGetHandle();
                Debug.Assert(fd >= 0);

                while (true)
                {
                    long result = sysCall(fd, arg1, arg2);
                    if (result < 0)
                    {
                        Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();

                        if (errorInfo.Error == Interop.Error.EINTR)
                            continue;

                        if (errorInfo.Error == Interop.Error.EPIPE)
                            State = PipeState.Broken;

                        throw Interop.GetExceptionForIoErrno(errorInfo);
                    }
                    return result;
                }
            }
            finally
            {
                if (gotRefOnHandle)
                {
                    handle.DangerousRelease();
                }
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using SharpGL.SceneGraph.Collections;
using SharpGL.SceneGraph.Core;
using SharpGL.SceneGraph.Lighting;
using SharpGL.SceneGraph.Raytracing;
using SharpGL.SceneGraph.Helpers;
using System.Xml.Serialization;
using SharpGL.SceneGraph.Transformations;
using SharpGL.SceneGraph.Assets;

namespace SharpGL.SceneGraph.Primitives
{
    /// <summary>
    /// A polygon contains a set of 'faces' which are indexes into a single list
    /// of vertices. The main thing about polygons is that they are easily editable
    /// by the user, depending on the Context they're in.
    /// </summary>
    [Serializable]
    public class Polygon : SceneElement, IHasObjectSpace, IRenderable, IRayTracable,
        IFreezable, IVolumeBound, IDeepCloneable<Polygon>, IHasMaterial
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="Polygon"/> class.
        /// </summary>
        public Polygon()
        {
            Name = "Polygon";
        }

        /// <summary>
        /// This function is cool, just stick in a set of points, it'll add them to the
        /// array, and create a face. It will take account of duplicate vertices too!
        /// </summary>
        /// <param name="vertexData">A set of vertices to make into a face.</param>
        public virtual void AddFaceFromVertexData(Vertex[] vertexData)
        {
            // Create a face.
            Face newFace = new Face();

            // Go through the vertices...
            foreach (Vertex v in vertexData)
            {
                // Do we have this vertex already (within a small tolerance)?
                int at = VertexSearch.Search(vertices, 0, v, 0.01f);

                // Add the vertex if it's new, and index it either way.
                if (at == -1)
                {
                    newFace.Indices.Add(new Index(vertices.Count));
                    vertices.Add(v);
                }
                else
                {
                    newFace.Indices.Add(new Index(at));
                }
            }

            // Add the face.
            faces.Add(newFace);
        }

        /// <summary>
        /// Triangulate this polygon: each face with more than three vertices is
        /// replaced by a fan of triangles anchored at the face's first vertex.
        /// </summary>
        public void Triangulate()
        {
            List<Face> newFaces = new List<Face>();

            // Go through each face...
            foreach (Face face in faces)
            {
                // Number of triangles = vertices - 2.
                int triangles = face.Indices.Count - 2;

                // Is it a triangle already?...
                if (triangles == 1)
                {
                    newFaces.Add(face);
                    continue;
                }

                // Add a fan of triangles.
                // BUGFIX: the previous version added FIVE indices per "triangle"
                // (the second and third were duplicated), producing degenerate
                // polygons. A triangle has exactly three indices.
                for (int i = 0; i < triangles; i++)
                {
                    Face triangle = new Face();
                    triangle.Indices.Add(new Index(face.Indices[0]));
                    triangle.Indices.Add(new Index(face.Indices[i + 1]));
                    triangle.Indices.Add(new Index(face.Indices[i + 2]));
                    newFaces.Add(triangle);
                }
            }

            faces.Clear();
            faces = newFaces;
        }

        /// <summary>
        /// Render to the provided instance of OpenGL.
        /// </summary>
        /// <param name="gl">The OpenGL instance.</param>
        /// <param name="renderMode">The render mode.</param>
        public virtual void Render(OpenGL gl, RenderMode renderMode)
        {
            // If we're frozen, use the helper.
            if (freezableHelper.IsFrozen)
            {
                freezableHelper.Render(gl);
                return;
            }

            // Go through each face.
            foreach (Face face in faces)
            {
                // If the face has its own material, push it.
                if (face.Material != null)
                    face.Material.Push(gl);

                // Two indices draw as a line, anything else as a polygon.
                if (face.Indices.Count == 2)
                    gl.Begin(OpenGL.GL_LINES);
                else
                    gl.Begin(OpenGL.GL_POLYGON);

                foreach (Index index in face.Indices)
                {
                    // Set a texture coord (if any).
                    if (index.UV != -1)
                        gl.TexCoord(uvs[index.UV]);

                    // Set a normal, or generate one.
                    if (index.Normal != -1)
                        gl.Normal(normals[index.Normal]);
                    else
                    {
                        // Do we have enough vertices for a normal?
                        if (face.Indices.Count >= 3)
                        {
                            // Create a normal.
                            Vertex vNormal = face.GetSurfaceNormal(this);
                            vNormal.UnitLength();

                            // todo use auto smoothing instead

                            // Add it to the normals, setting the index for next time.
                            normals.Add(vNormal);
                            index.Normal = normals.Count - 1;

                            gl.Normal(vNormal);
                        }
                    }

                    // Set the vertex.
                    gl.Vertex(vertices[index.Vertex]);
                }

                gl.End();

                // If the face has its own material, pop it.
                if (face.Material != null)
                    face.Material.Pop(gl);
            }

            // Draw normals if we have to.
            if (drawNormals)
            {
                // Set the colour to red, disable lighting while drawing the normals.
                gl.PushAttrib(OpenGL.GL_ALL_ATTRIB_BITS);
                gl.Color(1, 0, 0, 1);
                gl.Disable(OpenGL.GL_LIGHTING);

                // Go through each face.
                foreach (Face face in faces)
                {
                    // Go though each index.
                    foreach (Index index in face.Indices)
                    {
                        // Make sure it's got a normal, and a vertex.
                        if (index.Normal != -1 && index.Vertex != -1)
                        {
                            // Get the vertex.
                            Vertex vertex = vertices[index.Vertex];

                            // Get the normal vertex.
                            Vertex normal = normals[index.Normal];
                            Vertex vertex2 = vertex + normal;

                            gl.Begin(OpenGL.GL_LINES);
                            gl.Vertex(vertex);
                            gl.Vertex(vertex2);
                            gl.End();
                        }
                    }
                }

                // Restore the attributes.
                gl.PopAttrib();
            }
        }

        /// <summary>
        /// This creates a polygon from a height map (any picture). Black is low,
        /// and the colors are high (the lighter the color, the higher the surface).
        /// </summary>
        /// <param name="filename">Path of the image file.</param>
        /// <param name="xPoints">Number of points along X.</param>
        /// <param name="yPoints">Number of points along Y.</param>
        /// <returns>True if sucessful, false otherwise.</returns>
        public virtual bool CreateFromMap(string filename, int xPoints, int yPoints)
        {
            // Try and load the image file.
            System.Drawing.Bitmap map = new System.Drawing.Bitmap(filename);
            if (map.Size.IsEmpty)
                return false;

            // Set the descriptive name.
            Name = "Map created from '" + filename + "'";

            // Get points.
            for (int y = 0; y < yPoints; y++)
            {
                int yValue = (map.Height / yPoints) * y;

                for (int x = 0; x < xPoints; x++)
                {
                    int xValue = (map.Width / xPoints) * x;

                    // Get the pixel.
                    System.Drawing.Color col = map.GetPixel(xValue, yValue);

                    float xPos = (float)x / (float)xPoints;
                    float yPos = (float)y / (float)yPoints;

                    // Create a control point from it.
                    Vertex v = new Vertex(xPos, 0, yPos);

                    // Add the 'height', based on color (sum of channels, 0..3).
                    v.Y = (float)col.R / 255.0f + (float)col.G / 255.0f + (float)col.B / 255.0f;

                    // Add this vertex to the vertices array.
                    Vertices.Add(v);
                }
            }

            // Create faces for the polygon: one quad per grid cell.
            for (int y = 0; y < (yPoints - 1); y++)
            {
                for (int x = 0; x < (xPoints - 1); x++)
                {
                    // Create the face.
                    Face face = new Face();

                    // Create vertex indicies.
                    int nTopLeft = (y * xPoints) + x;
                    int nBottomLeft = ((y + 1) * xPoints) + x;

                    face.Indices.Add(new Index(nTopLeft));
                    face.Indices.Add(new Index(nTopLeft + 1));
                    face.Indices.Add(new Index(nBottomLeft + 1));
                    face.Indices.Add(new Index(nBottomLeft));

                    // Add the face.
                    Faces.Add(face);
                }
            }

            return true;
        }

        /// <summary>
        /// This function performs lossless optimisation on the polygon, it should be
        /// called when the geometry changes, and the polygon goes into static mode.
        /// </summary>
        /// <returns>The amount of optimisation (as a %).</returns>
        protected virtual float OptimisePolygon()
        {
            // Check for any null (empty) faces and remove them.
            float facesBefore = faces.Count;
            for (int i = 0; i < faces.Count; i++)
            {
                if (faces[i].Count == 0)
                    faces.RemoveAt(i--);
            }
            float facesAfter = faces.Count;

            // BUGFIX: guard the division — an empty polygon previously yielded NaN (0/0).
            if (facesBefore == 0)
                return 100;

            return (facesAfter / facesBefore) * 100;
        }

        /// <summary>
        /// Call this function as soon as you change the polygons geometry, it will
        /// re-generate normals, etc.
        /// </summary>
        /// <param name="regenerateNormals">Regenerate Normals.</param>
        public virtual void Validate(bool regenerateNormals)
        {
            if (regenerateNormals)
                normals.Clear();

            // Go through each face.
            foreach (Face face in faces)
            {
                if (regenerateNormals)
                {
                    // Find a normal for the face.
                    Vertex normal = face.GetSurfaceNormal(this);

                    // Does this normal already exist (within a small tolerance)?
                    int index = VertexSearch.Search(normals, 0, normal, 0.001f);
                    if (index == -1)
                    {
                        index = normals.Count;
                        normals.Add(normal);
                    }

                    // Set the index normal on every index of the face.
                    foreach (Index i in face.Indices)
                        i.Normal = index;
                }
            }
        }

        /// <summary>
        /// This function tests to see if a ray interesects the polygon.
        /// Only triangular faces are considered; others are skipped.
        /// </summary>
        /// <param name="ray">The ray you want to test.</param>
        /// <returns>
        /// The closest intersection found; <c>intersected</c> is false when the ray
        /// misses every triangle.
        /// </returns>
        private Intersection TestIntersection(Ray ray)
        {
            Intersection intersect = new Intersection();

            foreach (Face face in faces)
            {
                // Assert that it's a triangle.
                if (face.Count != 3)
                    continue;

                // Find the point of intersection upon the plane, as a point 't' along
                // the ray.
                Vertex point1OnPlane = vertices[face.Indices[0].Vertex];
                Vertex point2OnPlane = vertices[face.Indices[1].Vertex];
                Vertex point3OnPlane = vertices[face.Indices[2].Vertex];

                // Midpoints of the edge opposite each corner (used for the containment test below).
                Vertex midpointOpp1 = (point2OnPlane + point3OnPlane) / 2;
                Vertex midpointOpp2 = (point1OnPlane + point3OnPlane) / 2;
                Vertex midpointOpp3 = (point1OnPlane + point2OnPlane) / 2;

                Vertex planeNormal = face.GetSurfaceNormal(this);

                Vertex diff = point1OnPlane - ray.origin;
                float s1 = diff.ScalarProduct(planeNormal);
                float s2 = ray.direction.ScalarProduct(planeNormal);

                // Ray parallel to the plane, or intersection behind the origin.
                if (s2 == 0)
                    continue;
                float t = s1 / s2;
                if (t < 0)
                    continue;

                float denomintor = planeNormal.ScalarProduct(ray.direction);
                if (denomintor < 0.00001f && denomintor > -0.00001f)
                    continue; // doesn't intersect the plane.

                // Now we can get the point of intersection.
                Vertex vIntersect = ray.origin + (ray.direction * t);

                // Containment test: the hit point must be no further from each corner
                // than the midpoint of the opposite edge is. NOTE(review): this is an
                // approximation, not an exact point-in-triangle test.
                Vertex vectorTo1 = vIntersect - point1OnPlane;
                Vertex vectorTo2 = vIntersect - point2OnPlane;
                Vertex vectorTo3 = vIntersect - point3OnPlane;
                Vertex vectorMidTo1 = midpointOpp1 - point1OnPlane;
                Vertex vectorMidTo2 = midpointOpp2 - point2OnPlane;
                Vertex vectorMidTo3 = midpointOpp3 - point3OnPlane;

                if (vectorTo1.Magnitude() > vectorMidTo1.Magnitude())
                    continue;
                if (vectorTo2.Magnitude() > vectorMidTo2.Magnitude())
                    continue;
                if (vectorTo3.Magnitude() > vectorMidTo3.Magnitude())
                    continue;

                // Keep the closest hit so far.
                if (intersect.closeness == -1 || t < intersect.closeness)
                {
                    intersect.point = vIntersect;
                    intersect.intersected = true;
                    intersect.normal = planeNormal;
                    intersect.closeness = t;
                }
            }

            return intersect;
        }

        /// <summary>
        /// Raytraces the specified ray. If an intersection is found, it is returned,
        /// otherwise null is returned.
        /// </summary>
        /// <param name="ray">The ray.</param>
        /// <param name="scene">The scene.</param>
        /// <returns>
        /// The intersection with the object, or null.
        /// </returns>
        public Intersection Raytrace(Ray ray, Scene scene)
        {
            // First we see if this ray intersects this polygon.
            Intersection intersect = TestIntersection(ray);

            // If there wasn't an intersection, return.
            if (intersect.intersected == false)
                return intersect;

            // There was an intersection, find the color of this point on the
            // polygon.
            var lights = from se in scene.SceneContainer.Traverse()
                         where se is Light
                         select se;

            foreach (Light light in lights)
            {
                if (light.On)
                {
                    // Can we see this light? Cast a shadow ray.
                    Ray shadowRay = new Ray();
                    bool shadow = false;
                    shadowRay.origin = intersect.point;
                    shadowRay.direction = light.Position - shadowRay.origin;

                    // Test it with every polygon.
                    foreach (Polygon p in scene.SceneContainer.Traverse<Polygon>())
                    {
                        if (p == this)
                            continue;
                        Intersection shadowIntersect = p.TestIntersection(shadowRay);
                        if (shadowIntersect.intersected)
                        {
                            shadow = true;
                            break;
                        }
                    }

                    if (shadow == false)
                    {
                        // Now find out what this light complements to our color.
                        //todofloat angle = light.Direction.ScalarProduct(intersect.normal);
                        //todo ray.light += material.CalculateLighting(light, angle);
                        ray.light.Clamp();
                    }
                }
            }

            return intersect;
        }

        /// <summary>
        /// This function subdivides the faces of this polygon: each triangle is split
        /// into three triangles around its centroid.
        /// </summary>
        /// <returns>The number of faces in the new subdivided polygon.</returns>
        public int Subdivide()
        {
            List<Face> newFaces = new List<Face>();

            foreach (Face face in Faces)
            {
                // Make sure the face is a triangle.
                if (face.Count != 3)
                    continue;

                // Now get the vertices of the face.
                Vertex v1 = Vertices[face.Indices[0].Vertex];
                Vertex v2 = Vertices[face.Indices[1].Vertex];
                Vertex v3 = Vertices[face.Indices[2].Vertex];

                // Average the vertices to get the centroid of the triangle.
                Vertex vMidpoint = (v1 + v2 + v3) / 3;
                Index iMidpoint = new Index(Vertices.Count);
                Vertices.Add(vMidpoint);

                // Now make three new faces from the old vertices and the midpoint.
                Face newFace = new Face();
                newFace.Indices.Add(new Index(face.Indices[0]));
                newFace.Indices.Add(new Index(face.Indices[1]));
                newFace.Indices.Add(iMidpoint);
                newFaces.Add(newFace);

                newFace = new Face();
                newFace.Indices.Add(new Index(face.Indices[1]));
                newFace.Indices.Add(new Index(face.Indices[2]));
                newFace.Indices.Add(iMidpoint);
                newFaces.Add(newFace);

                newFace = new Face();
                newFace.Indices.Add(new Index(face.Indices[2]));
                newFace.Indices.Add(new Index(face.Indices[0]));
                newFace.Indices.Add(iMidpoint);
                newFaces.Add(newFace);
            }

            faces = newFaces;

            return faces.Count;
        }

        /// <summary>
        /// Freezes this instance using the provided OpenGL instance.
        /// </summary>
        /// <param name="gl">The OpenGL instance.</param>
        public void Freeze(OpenGL gl)
        {
            // Freeze using the helper.
            freezableHelper.Freeze(gl, this);
        }

        /// <summary>
        /// Unfreezes this instance using the provided OpenGL instance.
        /// </summary>
        /// <param name="gl">The OpenGL instance.</param>
        public void Unfreeze(OpenGL gl)
        {
            // Unfreeze using the helper.
            freezableHelper.Unfreeze(gl);
        }

        /// <summary>
        /// Pushes us into Object Space using the transformation into the specified OpenGL instance.
        /// </summary>
        /// <param name="gl">The OpenGL instance.</param>
        public void PushObjectSpace(OpenGL gl)
        {
            // Use the helper to push us into object space.
            hasObjectSpaceHelper.PushObjectSpace(gl);
        }

        /// <summary>
        /// Pops us from Object Space using the transformation into the specified OpenGL instance.
        /// </summary>
        /// <param name="gl">The gl.</param>
        public void PopObjectSpace(OpenGL gl)
        {
            // Use the helper to pop us from object space.
            hasObjectSpaceHelper.PopObjectSpace(gl);
        }

        /// <summary>
        /// Creates a new object that is a copy of the current instance.
        /// </summary>
        /// <returns>
        /// A new object that is a copy of this instance.
        /// </returns>
        public Polygon DeepClone()
        {
            // Create a new polygon.
            Polygon polygon = new Polygon();

            // Clone the data.
            polygon.hasObjectSpaceHelper = hasObjectSpaceHelper.DeepClone();
            polygon.freezableHelper = new FreezableHelper();

            // TODO clone lists.

            return polygon;
        }

        /// <summary>
        /// The IHasObjectSpace helper.
        /// </summary>
        private HasObjectSpaceHelper hasObjectSpaceHelper = new HasObjectSpaceHelper();

        /// <summary>
        /// The freezable helper.
        /// </summary>
        private FreezableHelper freezableHelper = new FreezableHelper();

        /// <summary>
        /// The faces that make up the polygon.
        /// </summary>
        private List<Face> faces = new List<Face>();

        /// <summary>
        /// The vertices that make up the polygon.
        /// </summary>
        private List<Vertex> vertices = new List<Vertex>();

        /// <summary>
        /// The UV coordinates (texture coodinates) for the polygon.
        /// </summary>
        private List<UV> uvs = new List<UV>();

        /// <summary>
        /// The normals of the polygon object.
        /// </summary>
        private List<Vertex> normals = new List<Vertex>();

        /// <summary>
        /// Should the normals be drawn?
        /// </summary>
        private bool drawNormals = false;

        /// <summary>
        /// The bounding volume helper - used to ease implementation of IBoundVolume.
        /// </summary>
        private BoundingVolumeHelper boundingVolumeHelper = new BoundingVolumeHelper();

        /// <summary>
        /// Gets or sets the faces.
        /// </summary>
        /// <value>
        /// The faces.
        /// </value>
        [Description("The faces that make up the polygon."), Category("Polygon")]
        public List<Face> Faces
        {
            get { return faces; }
            set { faces = value; }
        }

        /// <summary>
        /// Gets or sets the vertices.
        /// </summary>
        /// <value>
        /// The vertices.
        /// </value>
        [Description("The vertices that make up the polygon."), Category("Polygon")]
        public List<Vertex> Vertices
        {
            get { return vertices; }
            set { vertices = value; }
        }

        /// <summary>
        /// Gets or sets the UVs.
        /// </summary>
        /// <value>
        /// The UVs.
        /// </value>
        [Description("The material coordinates."), Category("Polygon")]
        public List<UV> UVs
        {
            get { return uvs; }
            set { uvs = value; }
        }

        /// <summary>
        /// Gets or sets the normals.
        /// </summary>
        /// <value>
        /// The normals.
        /// </value>
        [Description("The normals."), Category("Normals")]
        public List<Vertex> Normals
        {
            get { return normals; }
            set { normals = value; }
        }

        /// <summary>
        /// Gets or sets a value indicating whether [draw normals].
        /// </summary>
        /// <value>
        /// <c>true</c> if [draw normals]; otherwise, <c>false</c>.
        /// </value>
        [Description("Should normals be drawn for each face?"), Category("Polygon")]
        public bool DrawNormals
        {
            get { return drawNormals; }
            set { drawNormals = value; }
        }

        /// <summary>
        /// Gets the transformation that pushes us into object space.
        /// </summary>
        [Description("The Polygon Object Space Transformation"), Category("Polygon")]
        public LinearTransformation Transformation
        {
            get { return hasObjectSpaceHelper.Transformation; }
            set { hasObjectSpaceHelper.Transformation = value; }
        }

        /// <summary>
        /// Gets the bounding volume.
        /// </summary>
        [Browsable(false)]
        [XmlIgnore]
        public BoundingVolume BoundingVolume
        {
            get
            {
                // todo; only create bv when vertices changed.
                boundingVolumeHelper.BoundingVolume.FromVertices(vertices);
                boundingVolumeHelper.BoundingVolume.Pad(0.1f);
                return boundingVolumeHelper.BoundingVolume;
            }
        }

        /// <summary>
        /// Gets a value indicating whether this instance is frozen.
        /// </summary>
        /// <value>
        /// <c>true</c> if this instance is frozen; otherwise, <c>false</c>.
        /// </value>
        [Browsable(false)]
        [XmlIgnore]
        public bool IsFrozen
        {
            get { return freezableHelper.IsFrozen; }
        }

        /// <summary>
        /// Material to be used when rendering the polygon in lighted mode.
        /// This material may be overriden on a per-face basis.
        /// </summary>
        /// <value>
        /// The material.
        /// </value>
        [XmlIgnore]
        public Material Material
        {
            get;
            set;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using GUIOdyssey.Areas.HelpPage.ModelDescriptions;
using GUIOdyssey.Areas.HelpPage.Models;

namespace GUIOdyssey.Areas.HelpPage
{
    /// <summary>
    /// Extension methods on <see cref="HttpConfiguration"/> used to configure and
    /// build the API help page (documentation provider, samples, generated models).
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Prefix for the per-API cache keys stored in HttpConfiguration.Properties.
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }

            // NOTE: returns null (cast of a null object) when the id matches no API.
            return (HelpPageApiModel)model;
        }

        // Builds the full help-page model for one API: URI parameters, request/response
        // model descriptions, and request/response samples.
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };

            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);

            return apiModel;
        }

        // Populates apiModel.UriParameters from the action's FromUri parameters,
        // flattening complex types into their properties unless the type is bindable
        // via a TypeConverter.
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }

                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType))
                    {
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter =
                            AddParameterDescription(apiModel, apiParameter, typeDescription);

                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }

                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);

                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }

        // True when the type can be created from a string via its TypeConverter.
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }

            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }

        // Appends a new ParameterDescription to apiModel.UriParameters and returns it.
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
            ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };

            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }

        // Determines the request body model: either the FromBody parameter's type,
        // or the resolved actual type for an HttpRequestMessage parameter.
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);

                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }

        // Describes the response resource type (if the action returns anything).
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                // BUGFIX: the message string was previously broken across a physical
                // line break inside the literal, which is not valid C#; restored to a
                // single-line format string.
                apiModel.ErrorMessages.Add(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while generating the sample. Exception message: {0}",
                    HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }

        // Finds the parameter that represents the request body resource (FromBody or
        // HttpRequestMessage) and resolves its concrete type; false when there is none.
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }

            resourceType = parameterDescription.ParameterDescriptor.ParameterType;

            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }

            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }

            return true;
        }

        // Pre-warms the model description cache with every API's resource type.
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }

        // Records the error message of an InvalidSample (if the sample is one).
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Threading;
using ManagedBass;
using NUnit.Framework;
using osu.Framework.Audio;
using osu.Framework.Audio.Track;
using osu.Framework.Bindables;
using osu.Framework.Development;
using osu.Framework.IO.Stores;
using osu.Framework.Platform.Linux.Native;
using osu.Framework.Threading;

#pragma warning disable 4014

namespace osu.Framework.Tests.Audio
{
    /// <summary>
    /// Tests <see cref="TrackBass"/> playback behaviour (start/stop/seek/restart/looping and
    /// adjustments) against BASS initialised with the "no sound" device, driving track updates
    /// manually on a simulated audio thread.
    /// </summary>
    [TestFixture]
    public class TrackBassTest
    {
        private DllResourceStore resources;
        private TrackBass track;

        [SetUp]
        public void Setup()
        {
            if (RuntimeInfo.OS == RuntimeInfo.Platform.Linux)
            {
                // required for the time being to address libbass_fx.so load failures (see https://github.com/ppy/osu/issues/2852)
                Library.Load("libbass.so", Library.LoadFlags.RTLD_LAZY | Library.LoadFlags.RTLD_GLOBAL);
            }

            // Initialize bass with no audio to make sure the test remains consistent even if there is no audio device.
            Bass.Init(0);

            resources = new DllResourceStore(typeof(TrackBassTest).Assembly);
            track = new TrackBass(resources.GetStream("Resources.Tracks.sample-track.mp3"));
            updateTrack();
        }

        [TearDown]
        public void Teardown()
        {
            // See AudioThread.freeDevice().
            if (RuntimeInfo.OS != RuntimeInfo.Platform.Linux)
                Bass.Free();
        }

        [Test]
        public void TestStart()
        {
            track.StartAsync();
            updateTrack();

            Thread.Sleep(50);

            updateTrack();

            Assert.IsTrue(track.IsRunning);
            Assert.Greater(track.CurrentTime, 0);
        }

        [Test]
        public void TestStop()
        {
            track.StartAsync();
            updateTrack();

            track.StopAsync();
            updateTrack();

            Assert.IsFalse(track.IsRunning);

            // Time must not advance once the track has been stopped.
            double expectedTime = track.CurrentTime;
            Thread.Sleep(50);

            Assert.AreEqual(expectedTime, track.CurrentTime);
        }

        [Test]
        public void TestStopAtEnd()
        {
            startPlaybackAt(track.Length - 1);

            Thread.Sleep(50);

            updateTrack();
            track.StopAsync();
            updateTrack();

            Assert.IsFalse(track.IsRunning);
            Assert.AreEqual(track.Length, track.CurrentTime);
        }

        [Test]
        public void TestSeek()
        {
            track.SeekAsync(1000);
            updateTrack();

            // Seeking alone must not start playback.
            Assert.IsFalse(track.IsRunning);
            Assert.AreEqual(1000, track.CurrentTime);
        }

        [Test]
        public void TestSeekWhileRunning()
        {
            track.StartAsync();
            updateTrack();

            track.SeekAsync(1000);
            updateTrack();

            Thread.Sleep(50);
            updateTrack();

            Assert.IsTrue(track.IsRunning);
            Assert.GreaterOrEqual(track.CurrentTime, 1000);
        }

        /// <summary>
        /// Bass does not allow seeking to the end of the track. It should fail and the current time should not change.
        /// </summary>
        [Test]
        public void TestSeekToEndFails()
        {
            bool? success = null;

            runOnAudioThread(() => { success = track.Seek(track.Length); });
            updateTrack();

            Assert.AreEqual(0, track.CurrentTime);
            Assert.IsFalse(success);
        }

        [Test]
        public void TestSeekBackToSamePosition()
        {
            track.SeekAsync(1000);
            track.SeekAsync(0);
            updateTrack();

            Thread.Sleep(50);
            updateTrack();

            // The later seek (back to 0) must win over the queued seek to 1000.
            Assert.GreaterOrEqual(track.CurrentTime, 0);
            Assert.Less(track.CurrentTime, 1000);
        }

        [Test]
        public void TestPlaybackToEnd()
        {
            startPlaybackAt(track.Length - 1);

            Thread.Sleep(50);
            updateTrack();

            Assert.IsFalse(track.IsRunning);
            Assert.AreEqual(track.Length, track.CurrentTime);
        }

        /// <summary>
        /// Bass restarts the track from the beginning if Start is called when the track has been completed.
        /// This is blocked locally in <see cref="TrackBass"/>, so this test expects the track to not restart.
        /// </summary>
        [Test]
        public void TestStartFromEndDoesNotRestart()
        {
            startPlaybackAt(track.Length - 1);

            Thread.Sleep(50);

            updateTrack();
            track.StartAsync();
            updateTrack();

            Assert.AreEqual(track.Length, track.CurrentTime);
        }

        [Test]
        public void TestRestart()
        {
            startPlaybackAt(1000);

            Thread.Sleep(50);

            updateTrack();
            restartTrack();

            Assert.IsTrue(track.IsRunning);
            Assert.Less(track.CurrentTime, 1000);
        }

        [Test]
        public void TestRestartAtEnd()
        {
            startPlaybackAt(track.Length - 1);

            Thread.Sleep(50);

            updateTrack();
            restartTrack();

            Assert.IsTrue(track.IsRunning);
            Assert.LessOrEqual(track.CurrentTime, 1000);
        }

        [Test]
        public void TestRestartFromRestartPoint()
        {
            track.RestartPoint = 1000;

            startPlaybackAt(3000);
            restartTrack();

            Assert.IsTrue(track.IsRunning);
            Assert.GreaterOrEqual(track.CurrentTime, 1000);
            Assert.Less(track.CurrentTime, 3000);
        }

        [TestCase(0)]
        [TestCase(1000)]
        public void TestLoopingRestart(double restartPoint)
        {
            track.Looping = true;
            track.RestartPoint = restartPoint;

            startPlaybackAt(track.Length - 1);

            Thread.Sleep(50);

            // The first update brings the track to its end time and restarts it
            updateTrack();

            // The second update updates the IsRunning state
            updateTrack();

            // In a perfect world the track will be running after the update above, but during testing it's possible that the track is in
            // a stalled state due to updates running on Bass' own thread, so we'll loop until the track starts running again
            // Todo: This should be fixed in the future if/when we invoke Bass.Update() ourselves
            int loopCount = 0;

            while (++loopCount < 50 && !track.IsRunning)
            {
                updateTrack();
                Thread.Sleep(10);
            }

            if (loopCount == 50)
                throw new TimeoutException("Track failed to start in time.");

            Assert.GreaterOrEqual(track.CurrentTime, restartPoint);
            Assert.LessOrEqual(track.CurrentTime, restartPoint + 1000);
        }

        [Test]
        public void TestSetTempoNegative()
        {
            // Tempo below the 0.05 minimum must be rejected; a valid value must not reverse playback.
            Assert.Throws<ArgumentException>(() => track.Tempo.Value = -1);
            Assert.Throws<ArgumentException>(() => track.Tempo.Value = 0.04f);

            Assert.IsFalse(track.IsReversed);

            track.Tempo.Value = 0.05f;

            Assert.IsFalse(track.IsReversed);
            Assert.AreEqual(0.05f, track.Tempo.Value);
        }

        [Test]
        public void TestRateWithAggregateAdjustments()
        {
            track.AddAdjustment(AdjustableProperty.Frequency, new BindableDouble(1.5f));
            Assert.AreEqual(1.5, track.Rate);
        }

        [Test]
        public void TestLoopingTrackDoesntSetCompleted()
        {
            bool completedEvent = false;

            track.Completed += () => completedEvent = true;
            track.Looping = true;
            startPlaybackAt(track.Length - 1);
            takeEffectsAndUpdateAfter(50);

            Assert.IsFalse(track.HasCompleted);
            Assert.IsFalse(completedEvent);

            updateTrack();

            Assert.IsTrue(track.IsRunning);
        }

        [Test]
        public void TestHasCompletedResetsOnSeekBack()
        {
            // start playback and wait for completion.
            startPlaybackAt(track.Length - 1);
            takeEffectsAndUpdateAfter(50);

            Assert.IsTrue(track.HasCompleted);

            // ensure seeking to end doesn't reset completed state.
            track.SeekAsync(track.Length);
            updateTrack();

            Assert.IsTrue(track.HasCompleted);

            // seeking back reset completed state.
            track.SeekAsync(track.Length - 1);
            updateTrack();

            Assert.IsFalse(track.HasCompleted);
        }

        [Test]
        public void TestZeroFrequencyHandling()
        {
            // start track.
            track.StartAsync();
            takeEffectsAndUpdateAfter(50);

            // ensure running and has progressed.
            Assert.IsTrue(track.IsRunning);
            Assert.Greater(track.CurrentTime, 0);

            // now set to zero frequency and update track to take effects.
            track.Frequency.Value = 0;
            updateTrack();

            var currentTime = track.CurrentTime;

            // assert time is frozen after 50ms sleep and didn't change with full precision, but "IsRunning" is still true.
            Thread.Sleep(50);
            updateTrack();

            Assert.IsTrue(track.IsRunning);
            Assert.AreEqual(currentTime, track.CurrentTime);

            // set back to one and update track.
            track.Frequency.Value = 1;
            takeEffectsAndUpdateAfter(50);

            // ensure time didn't jump away, and is progressing normally.
            Assert.IsTrue(track.IsRunning);
            Assert.Greater(track.CurrentTime, currentTime);
            Assert.Less(track.CurrentTime, currentTime + 1000.0);
        }

        /// <summary>
        /// Ensure setting a paused (or not yet played) track's frequency from zero to one doesn't resume / play it.
        /// </summary>
        [Test]
        public void TestZeroFrequencyDoesntResumeTrack()
        {
            // start at zero frequency and wait a bit.
            track.Frequency.Value = 0;
            track.StartAsync();
            takeEffectsAndUpdateAfter(50);

            // ensure started but not progressing.
            Assert.IsTrue(track.IsRunning);
            Assert.AreEqual(0, track.CurrentTime);

            // stop track and update.
            track.StopAsync();
            updateTrack();

            Assert.IsFalse(track.IsRunning);

            // set back to 1 frequency.
            track.Frequency.Value = 1;
            takeEffectsAndUpdateAfter(50);

            // assert track channel still paused regardless of frequency because it's stopped via Stop() above.
            Assert.IsFalse(track.IsRunning);
            Assert.AreEqual(0, track.CurrentTime);
        }

        [Test]
        public void TestBitrate()
        {
            Assert.Greater(track.Bitrate, 0);
        }

        [Test]
        public void TestCurrentTimeUpdatedAfterInlineSeek()
        {
            track.StartAsync();
            updateTrack();

            // A synchronous Seek on the audio thread must reflect in CurrentTime immediately,
            // without requiring a subsequent Update().
            runOnAudioThread(() => track.Seek(20000));
            Assert.That(track.CurrentTime, Is.EqualTo(20000).Within(100));
        }

        // Runs one update so pending adjustments take effect, waits, then updates again.
        private void takeEffectsAndUpdateAfter(int after)
        {
            updateTrack();
            Thread.Sleep(after);
            updateTrack();
        }

        // Seeks to the given time and starts playback, then flushes via one update.
        private void startPlaybackAt(double time)
        {
            track.SeekAsync(time);
            track.StartAsync();
            updateTrack();
        }

        // Processes queued track commands on the simulated audio thread.
        private void updateTrack() => runOnAudioThread(() => track.Update());

        private void restartTrack()
        {
            runOnAudioThread(() =>
            {
                track.Restart();
                track.Update();
            });
        }

        /// <summary>
        /// Certain actions are invoked on the audio thread.
        /// Here we simulate this process on a correctly named thread to avoid endless blocking.
        /// </summary>
        /// <param name="action">The action to perform.</param>
        private void runOnAudioThread(Action action)
        {
            var resetEvent = new ManualResetEvent(false);

            new Thread(() =>
            {
                ThreadSafety.IsAudioThread = true;

                action();

                resetEvent.Set();
            })
            {
                Name = GameThread.PrefixedThreadNameFor("Audio")
            }.Start();

            // Guard against a hung audio action deadlocking the test run.
            if (!resetEvent.WaitOne(TimeSpan.FromSeconds(10)))
                throw new TimeoutException();
        }
    }
}

#pragma warning restore 4014
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass001.genclass001;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text;
    using System.Linq.Expressions;
    using System.Reflection;

    // Simple reference type used as a generic argument / dynamic payload by the tests below.
    public class MyClass
    {
        public int Field = 0;
    }

    // Simple value type used as a generic argument by the tests below.
    public struct MyStruct
    {
        public int Number;
    }

    public enum MyEnum
    {
        First = 1,
        Second = 2,
        Third = 3
    }

    // Shared test subject: a generic class exposing regular (non-indexed) properties of various
    // kinds (generic T, array, dynamic, static) which the conformance tests access via `dynamic`.
    public class MemberClass<T>
    {
        /// <summary>
        /// We use this to get the values we cannot get directly
        /// </summary>
        /// <param name = "target"></param>
        /// <param name = "name"></param>
        /// <returns></returns>
        public object GetPrivateValue(object target, string name)
        {
            var tip = target.GetType();
            var prop = tip.GetTypeInfo().GetDeclaredField(name);
            return prop.GetValue(target);
        }

        /// <summary>
        /// We use this to set the value we cannot set directly
        /// </summary>
        /// <param name = "target"></param>
        /// <param name = "name"></param>
        /// <param name = "value"></param>
        public void SetPrivateValue(object target, string name, object value)
        {
            var tip = target.GetType();
            var prop = tip.GetTypeInfo().GetDeclaredField(name);
            prop.SetValue(target, value);
        }

        public decimal[] myDecimalArr = new decimal[2];
        public dynamic myDynamic = new object();
        public T myT;

        public T Property_T
        {
            set { myT = value; }
            get { return myT; }
        }

        // Note: the setter is intentionally protected internal; the tests exercise it via dynamic
        // from within the same assembly.
        public decimal[] Property_decimalArr
        {
            protected internal set { myDecimalArr = value; }
            get { return myDecimalArr; }
        }

        public dynamic Property_dynamic
        {
            get { return myDynamic; }
            set { myDynamic = value; }
        }

        public static float myFloatStatic;
        public static T myTStatic;

        public static T Property_TStatic
        {
            set { myTStatic = value; }
            get { return myTStatic; }
        }
    }
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass001.genclass001
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass001.genclass001;

    // <Title> Tests generic class regular property used in regular method body.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            Test t1 = new Test();
            // Both sub-tests return 0 on success; any failure yields 1.
            return t1.TestGetMethod(new MemberClass<bool>()) + t1.TestSetMethod(new MemberClass<bool>()) == 0 ? 0 : 1;
        }

        public int TestGetMethod(MemberClass<bool> mc)
        {
            mc.Property_T = true;
            dynamic dy = mc;
            if (!(bool)dy.Property_T)
                return 1;
            else
                return 0;
        }

        public int TestSetMethod(MemberClass<bool> mc)
        {
            dynamic dy = mc;
            dy.Property_T = true;
            mc = dy; //mc might be a boxed struct
            if (!mc.Property_T)
                return 1;
            else
                return 0;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass002.genclass002
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass002.genclass002;

    // <Title> Tests generic class regular property used in regular method body with conditional attribute.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;

    public class Test
    {
        private static int s_count = 0;

        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            Test t1 = new Test();
            // Both calls are conditionally compiled out ("c1"/"c2" are undefined), so s_count stays 0.
            t1.TestGetMethod(new MemberClass<bool>());
            t1.TestSetMethod(new MemberClass<bool>());
            return s_count;
        }

        [System.Diagnostics.Conditional("c1")]
        public void TestGetMethod(MemberClass<bool> mc)
        {
            dynamic dy = mc;
            mc.Property_decimalArr = new decimal[1];
            if ((int)dy.Property_decimalArr.Length != 1)
                s_count++;
        }

        [System.Diagnostics.Conditional("c2")]
        public void TestSetMethod(MemberClass<bool> mc)
        {
            dynamic dy = mc;
            dy.Property_decimalArr = new decimal[]
            {
                0M,
                1M
            }

            ;
            if (!((int)mc.Property_decimalArr.Length != 2))
                s_count++;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass003.genclass003
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass003.genclass003;

    // <Title> Tests generic class regular property used in member initializer of anonymous type.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            MemberClass<string> mc = new MemberClass<string>();
            mc.myT = "Test";
            mc.myDecimalArr = new decimal[]
            {
                0M,
                1M
            }

            ;
            dynamic dy = mc;
            // Dynamic property reads inside an anonymous-type member initializer.
            var tc = new
            {
                A1 = (string)dy.Property_T,
                A2 = (decimal[])dy.Property_decimalArr,
                A3 = (object)dy.Property_dynamic
            }

            ;
            if (tc != null && mc.myT == tc.A1 && tc.A2[0] == 0M && tc.A2[1] == 1M && tc.A3 == mc.myDynamic)
                return 0;
            return 1;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass005.genclass005
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass005.genclass005;

    // <Title> Tests generic class regular property used in query expression.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;
    using System.Linq;
    using System.Collections.Generic;

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            var list = new List<string>()
            {
                null,
                "b",
                null,
                "a"
            }

            ;
            MemberClass<string> mc = new MemberClass<string>();
            mc.myT = "a";
            dynamic dy = mc;
            // Dynamic property read inside a LINQ predicate.
            var result = list.Where(p => p == (string)dy.Property_T).ToList();
            if (result.Count == 1 && result[0] == "a")
                return 0;
            return 1;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass006.genclass006
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass006.genclass006;

    // <Title> Tests generic class regular property used in member initialzier of object initializer.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;
    using System.Linq;
    using System.Collections.Generic;

    public class Test
    {
        private int _field1;
        private string _field2 = string.Empty;
        private MyEnum _field3;

        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            MemberClass<string> mc1 = new MemberClass<string>();
            MemberClass<Test> mc2 = new MemberClass<Test>();
            MemberClass<MyStruct> mc3 = new MemberClass<MyStruct>();
            mc1.Property_dynamic = 10;
            mc3.Property_dynamic = MyEnum.Second;
            dynamic dy1 = mc1;
            dynamic dy2 = mc2;
            dynamic dy3 = mc3;
            // Dynamic property reads inside an object initializer.
            var test = new Test()
            {
                _field1 = dy1.Property_dynamic,
                _field2 = dy2.Property_dynamic == null ? string.Empty : dy2.Property_dynamic.ToString(),
                _field3 = dy3.Property_dynamic
            }

            ;
            if (test._field1 == 10 && test._field2 != null && test._field3 == MyEnum.Second)
                return 0;
            return 1;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass007.genclass007
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass007.genclass007;

    // <Title> Tests generic class regular property used in explicit operator.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;

    public class Test
    {
        public class InnerTest1
        {
            public int field;

            public static explicit operator InnerTest2(InnerTest1 t1)
            {
                var dy = new MemberClass<InnerTest2>();
                dy.Property_T = new InnerTest2()
                {
                    field = t1.field + 1
                }

                ;
                return dy.Property_T;
            }
        }

        public class InnerTest2
        {
            public int field;
        }

        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            InnerTest1 t = new InnerTest1()
            {
                field = 20
            }

            ;
            InnerTest2 result = (InnerTest2)t; //explicit
            return (result.field == 21) ? 0 : 1;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass008.genclass008
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass008.genclass008;

    // <Title> Tests generic class regular property used in implicit operator.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;

    public class Test
    {
        public class InnerTest1
        {
            public int field;

            public static implicit operator InnerTest2(InnerTest1 t1)
            {
                var dy = new MemberClass<InnerTest2>();
                dy.Property_T = new InnerTest2()
                {
                    field = t1.field + 1
                }

                ;
                return dy.Property_T;
            }
        }

        public class InnerTest2
        {
            public int field;
        }

        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            InnerTest1 t = new InnerTest1()
            {
                field = 20
            }

            ;
            InnerTest2 result1 = (InnerTest2)t; //explicit
            InnerTest2 result2 = t; //implicit
            return (result1.field == 21 && result2.field == 21) ? 0 : 1;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass011.genclass011
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass011.genclass011;

    // <Title> Tests generic class regular property used in volatile field initializer.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System;

    public class Test
    {
        private static MemberClass<MyClass> s_mc;
        private static dynamic s_dy;
        static Test()
        {
            s_mc = new MemberClass<MyClass>();
            s_mc.Property_dynamic = new MyClass()
            {
                Field = 10
            }

            ;
            s_dy = s_mc;
        }

        // Dynamic property read in a volatile field initializer (runs after the static ctor).
        private volatile object _o = s_dy.Property_dynamic;

        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            Test t = new Test();
            if (t._o.GetType() == typeof(MyClass) && ((MyClass)t._o).Field == 10)
                return 0;
            return 1;
        }
    }
    //</Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass012.genclass012
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclassregprop.genclassregprop;
    using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.regproperty.genclass.genclass012.genclass012;

    // <Title> Tests generic class regular property used in volatile field initializer.</Title>
    // <Description>
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    //<Expects Status=warning>\(17,16\).*CS0219</Expects>
    using System;
    using System.Linq;
    using System.Collections.Generic;

    public class Test
    {
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod());
        }

        public static int MainMethod()
        {
            List<int> list = new List<int>()
            {
                0,
                4,
                1,
                6,
                4,
                4,
                5
            }

            ;
            // `s` is intentionally unused: the test expects the CS0219 warning (see <Expects> above).
            string s = "test";
            var mc = new MemberClass<int>();
            mc.Property_T = 4;
            mc.Property_dynamic = "Test";
            dynamic dy = mc;
            var result = list.Where(p => p == (int)dy.Property_T).Select(p => new
            {
                A = dy.Property_T,
                B = dy.Property_dynamic
            }

            ).ToList();
            if (result.Count == 3)
            {
                foreach (var m in result)
                {
                    if ((int)m.A != 4 || m.A.GetType() != typeof(int) || (string)m.B != "Test" || m.B.GetType() != typeof(string))
                        return 1;
                }

                return 0;
            }

            return 1;
        }
    }
    //</Code>
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Common.Tests;
using System.ComponentModel.Composition.Factories;
using System.ComponentModel.Composition.Primitives;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.UnitTesting;
using Microsoft.DotNet.RemoteExecutor;
using Xunit;

namespace System.ComponentModel.Composition
{
    // Exercises every CompositionException constructor overload: Message, InnerException and
    // Errors defaulting/propagation, plus the rendering of errors in Message/ToString.
    [Serializable]
    public class CompositionExceptionTests
    {
        [Fact]
        public void Constructor1_ShouldSetMessagePropertyToDefault()
        {
            var exception = new CompositionException();

            ExceptionAssert.HasDefaultMessage(exception);
        }

        [Fact]
        public void Constructor2_NullAsMessageArgument_ShouldSetMessagePropertyToDefault()
        {
            var exception = new CompositionException((string)null);

            ExceptionAssert.HasDefaultMessage(exception);
        }

        [Fact]
        public void Constructor3_EmptyEnumerableAsErrorsArgument_ShouldSetMessagePropertyToDefault()
        {
            var exception = new CompositionException(Enumerable.Empty<CompositionError>());

            ExceptionAssert.HasDefaultMessage(exception);
        }

        [Fact]
        public void Constructor4_NullAsMessageArgument_ShouldSetMessagePropertyToDefault()
        {
            var exception = new CompositionException((string)null, new Exception());

            ExceptionAssert.HasDefaultMessage(exception);
        }

        [Fact]
        public void Constructor5_NullAsMessageArgument_ShouldSetMessagePropertyToDefault()
        {
            var exception = new CompositionException((string)null, new Exception(), Enumerable.Empty<CompositionError>());

            ExceptionAssert.HasDefaultMessage(exception);
        }

        [Fact]
        public void Constructor2_ValueAsMessageArgument_ShouldSetMessageProperty()
        {
            var expectations = Expectations.GetExceptionMessages();

            foreach (var e in expectations)
            {
                var exception = new CompositionException(e);

                Assert.Equal(e, exception.Message);
            }
        }

        [Fact]
        public void Constructor4_ValueAsMessageArgument_ShouldSetMessageProperty()
        {
            var expectations = Expectations.GetExceptionMessages();

            foreach (var e in expectations)
            {
                var exception = new CompositionException(e, new Exception());

                Assert.Equal(e, exception.Message);
            }
        }

        [Fact]
        public void Constructor5_ValueAsMessageArgument_ShouldSetMessageProperty()
        {
            var expectations = Expectations.GetExceptionMessages();

            foreach (var e in expectations)
            {
                var exception = new CompositionException(e, new Exception(), Enumerable.Empty<CompositionError>());

                Assert.Equal(e, exception.Message);
            }
        }

        [Fact]
        public void Constructor1_ShouldSetInnerExceptionPropertyToNull()
        {
            var exception = new CompositionException();

            Assert.Null(exception.InnerException);
        }

        [Fact]
        public void Constructor2_ShouldSetInnerExceptionPropertyToNull()
        {
            var exception = new CompositionException("Message");

            Assert.Null(exception.InnerException);
        }

        [Fact]
        public void Constructor3_ShouldSetInnerExceptionPropertyToNull()
        {
            var exception = new CompositionException(Enumerable.Empty<CompositionError>());

            Assert.Null(exception.InnerException);
        }

        [Fact]
        public void Constructor4_NullAsInnerExceptionArgument_ShouldSetInnerExceptionPropertyToNull()
        {
            var exception = new CompositionException("Message", (Exception)null);

            Assert.Null(exception.InnerException);
        }

        [Fact]
        public void Constructor5_NullAsInnerExceptionArgument_ShouldSetInnerExceptionPropertyToNull()
        {
            var exception = new CompositionException("Message", (Exception)null, Enumerable.Empty<CompositionError>());

            Assert.Null(exception.InnerException);
        }

        [Fact]
        public void Constructor4_ValueAsInnerExceptionArgument_ShouldSetInnerExceptionProperty()
        {
            var expectations = Expectations.GetInnerExceptions();

            foreach (var e in expectations)
            {
                var exception = new CompositionException("Message", e);

                Assert.Same(e, exception.InnerException);
            }
        }

        [Fact]
        public void Constructor5_ValueAsInnerExceptionArgument_ShouldSetInnerExceptionProperty()
        {
            var expectations = Expectations.GetInnerExceptions();

            foreach (var e in expectations)
            {
                var exception = new CompositionException("Message", e, Enumerable.Empty<CompositionError>());

                Assert.Same(e, exception.InnerException);
            }
        }

        [Fact]
        public void Constructor2_ArrayWithNullAsErrorsArgument_ShouldThrowArgument()
        {
            var errors = new CompositionError[] { null };

            Assert.Throws<ArgumentException>("errors", () =>
            {
                new CompositionException(errors);
            });
        }

        [Fact]
        public void Constructor5_ArrayWithNullAsErrorsArgument_ShouldThrowArgument()
        {
            var errors = new CompositionError[] { null };

            Assert.Throws<ArgumentException>("errors", () =>
            {
                new CompositionException("Message", new Exception(), errors);
            });
        }

        [Fact]
        public void Constructor1_ShouldSetErrorsPropertyToEmpty()
        {
            var exception = new CompositionException();

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor2_NullAsErrorsArgument_ShouldSetErrorsPropertyToEmptyEnumerable()
        {
            var exception = new CompositionException((IEnumerable<CompositionError>)null);

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor2_EmptyEnumerableAsErrorsArgument_ShouldSetErrorsPropertyToEmptyEnumerable()
        {
            var exception = new CompositionException(Enumerable.Empty<CompositionError>());

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor2_ValueAsErrorsArgument_ShouldSetErrorsProperty()
        {
            var expectations = Expectations.GetCompositionErrors();

            foreach (var e in expectations)
            {
                var exception = new CompositionException(e);

                EqualityExtensions.CheckEquals(e, exception.Errors);
            }
        }

        [Fact]
        public void Constructor2_ArrayAsAsErrorsArgument_ShouldNotAllowModificationAfterConstruction()
        {
            var error = CreateCompositionError();
            var errors = new CompositionError[] { error };

            var exception = new CompositionException(errors);

            // Mutating the source array must not affect the already-constructed exception.
            errors[0] = null;

            EnumerableAssert.AreEqual(exception.Errors, error);
        }

        [Fact]
        public void Constructor3_ShouldSetErrorsPropertyToEmpty()
        {
            var exception = new CompositionException();

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor4_ShouldSetErrorsPropertyToEmptyEnumerable()
        {
            var exception = new CompositionException("Message", new Exception());

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor5_NullAsErrorsArgument_ShouldSetErrorsPropertyToEmptyEnumerable()
        {
            var exception = new CompositionException("Message", new Exception(), (IEnumerable<CompositionError>)null);

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor5_EmptyEnumerableAsErrorsArgument_ShouldSetErrorsPropertyToEmptyEnumerable()
        {
            var exception = new CompositionException("Message", new Exception(), Enumerable.Empty<CompositionError>());

            Assert.Empty(exception.Errors);
        }

        [Fact]
        public void Constructor5_ValueAsErrorsArgument_ShouldSetErrorsProperty()
        {
            var expectations = Expectations.GetCompositionErrors();

            foreach (var e in expectations)
            {
                var exception = new CompositionException("Message", new Exception(), e);

                EqualityExtensions.CheckEquals(e, exception.Errors);
            }
        }

        [Fact]
        public void Constructor5_ArrayAsAsErrorsArgument_ShouldNotAllowModificationAfterConstruction()
        {
            var error = CreateCompositionError();
            var errors = new CompositionError[] { error };

            var exception = new CompositionException("Message", new Exception(), errors);

            // Mutating the source array must not affect the already-constructed exception.
            errors[0] = null;

            EnumerableAssert.AreEqual(exception.Errors, error);
        }

        [Fact]
        public void Message_ShouldIncludeElementGraph()
        {
            var expectations = new ExpectationCollection<CompositionError, string>();
            CompositionError error = null;

            error = CreateCompositionErrorWithElementChain(1);
            expectations.Add(error, GetElementGraphString(error));

            error = CreateCompositionErrorWithElementChain(2);
            expectations.Add(error, GetElementGraphString(error));

            error = CreateCompositionErrorWithElementChain(3);
            expectations.Add(error, GetElementGraphString(error));

            error = CreateCompositionErrorWithElementChain(10);
            expectations.Add(error, GetElementGraphString(error));

            foreach (var e in expectations)
            {
                var exception = CreateCompositionException(new CompositionError[] { e.Input });
                string result = exception.ToString();
                string expected = FixMessage(e.Output);
                AssertExtensions.Contains(result, expected);
            }
        }

        [Fact]
        public void Message_ShouldIncludeErrors()
        {
            // DSL: "A(B|C)" means error A with nested causes B and C; expected output uses
            // "<Separator>" / "<Prefix>" placeholders resolved by AssertMessage.
            var expectations = new ExpectationCollection<IEnumerable<CompositionError>, string>();
            expectations.Add(ErrorFactory.CreateFromDsl("Error"), "1<Separator> Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error|Error"), "1<Separator> Error|2<Separator> Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error|Error|Error"), "1<Separator> Error|2<Separator> Error|3<Separator> Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Error)"), "1<Separator> Error|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Error|Error)"), "1<Separator> Error|<Prefix>Error|2<Separator> Error|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Error|Error|Error)"), "1<Separator> Error|<Prefix>Error|2<Separator> Error|<Prefix>Error|3<Separator> Error|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Error(Exception))"), "1<Separator> Exception|<Prefix>Error|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Error|Exception)"), "1<Separator> Error|<Prefix>Error|2<Separator> Exception|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Exception)"), "1<Separator> Exception|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Exception(Exception))"), "1<Separator> Exception|<Prefix>Exception|<Prefix>Error");
            expectations.Add(ErrorFactory.CreateFromDsl("Error(Error(Exception)|Error)"), "1<Separator> Exception|<Prefix>Error|<Prefix>Error|2<Separator> Error|<Prefix>Error");

            foreach (var e in expectations)
            {
                var exception = CreateCompositionException(e.Input);

                AssertMessage(exception, e.Output.Split('|'));
            }
        }

        // NOTE(review): "Messsage" (triple 's') is a long-standing typo in the upstream test name;
        // kept as-is to preserve the public test identity.
        [Fact]
        public void Messsage_ShouldIncludeCountOfRootCauses()
        {
            var expectations = new ExpectationCollection<IEnumerable<CompositionError>, int>();
            expectations.Add(ErrorFactory.CreateFromDsl("Error"),
1); expectations.Add(ErrorFactory.CreateFromDsl("Error|Error"), 2); expectations.Add(ErrorFactory.CreateFromDsl("Error|Error|Error"), 3); expectations.Add(ErrorFactory.CreateFromDsl("Error(Error)"), 1); expectations.Add(ErrorFactory.CreateFromDsl("Error(Error)|Error(Error)"), 2); expectations.Add(ErrorFactory.CreateFromDsl("Error(Error|Error)"), 2); expectations.Add(ErrorFactory.CreateFromDsl("Error(Error|Error|Exception)"), 3); foreach (var e in expectations) { var exception = CreateCompositionException(e.Input); AssertMessage(exception, e.Output, CultureInfo.CurrentCulture); } } [Fact] public void Message_ShouldFormatCountOfRootCausesUsingTheCurrentCulture() { RemoteExecutor.Invoke(() => { IEnumerable<CultureInfo> cultures = Expectations.GetCulturesForFormatting(); foreach (CultureInfo culture in cultures) { // Save old culture and set a fixed culture for object instantiation using (new ThreadCultureChange(culture)) { CompositionError[] errors = CreateCompositionErrors(1000); CompositionException exception = CreateCompositionException(errors); AssertMessage(exception, 1000, culture); errors = CreateCompositionErrors(1); exception = CreateCompositionException(errors); AssertMessage(exception, 1, culture); } } return RemoteExecutor.SuccessExitCode; }).Dispose(); } private string GetElementGraphString(CompositionError error) { StringBuilder writer = new StringBuilder(); ICompositionElement element = error.Element; writer.AppendFormat(CultureInfo.CurrentCulture, SR.CompositionException_ElementPrefix, element.DisplayName); while ((element = element.Origin) != null) { writer.AppendFormat(CultureInfo.CurrentCulture, SR.CompositionException_OriginFormat, SR.CompositionException_OriginSeparator, element.DisplayName); } return writer.ToString(); } private void AssertMessage(CompositionException exception, int rootCauseCount, CultureInfo culture) { using (StringReader reader = new StringReader(exception.Message)) { string line = reader.ReadLine(); if (rootCauseCount == 1) { 
Assert.True(line.Contains(SR.CompositionException_SingleErrorWithSinglePath)); } else { Assert.True( line.Contains(string.Format(CultureInfo.CurrentCulture, SR.CompositionException_SingleErrorWithMultiplePaths, rootCauseCount)) || line.Contains(string.Format(CultureInfo.CurrentCulture, SR.CompositionException_MultipleErrorsWithMultiplePaths, rootCauseCount)) ); } } } private void AssertMessage(CompositionException exception, string[] expected) { using (StringReader reader = new StringReader(exception.Message)) { // Skip header reader.ReadLine(); foreach (string expect in expected) { // Skip blank line reader.ReadLine(); Assert.Equal(FixMessage(expect), reader.ReadLine()); } } } private string FixMessage(string expect) { string fixedPrefix = expect.Replace("<Prefix>", SR.CompositionException_ErrorPrefix + " "); string fixedSeparator = fixedPrefix.Replace("<Separator>", SR.CompositionException_PathsCountSeparator); return fixedSeparator.Replace("<OriginSeparator>", SR.CompositionException_OriginSeparator); } private static CompositionError CreateCompositionError() { return CreateCompositionError("Description"); } private static CompositionError CreateCompositionError(string message) { return new CompositionError(message); } private static CompositionError CreateCompositionErrorWithElementChain(int count) { return new CompositionError("Description", ElementFactory.CreateChain(count)); } private static CompositionError[] CreateCompositionErrors(int count) { CompositionError[] errors = new CompositionError[count]; for (int i = 0; i < count; i++) { errors[i] = CreateCompositionError("Description" + (i + 1)); } return errors; } private static CompositionException CreateCompositionException() { return CreateCompositionException((string)null, (Exception)null, (IEnumerable<CompositionError>)null); } private static CompositionException CreateCompositionException(string message) { return CreateCompositionException(message, (Exception)null, (IEnumerable<CompositionError>)null); 
} private static CompositionException CreateCompositionException(IEnumerable<CompositionError> errors) { return CreateCompositionException((string)null, (Exception)null, errors); } private static CompositionException CreateCompositionException(Exception innerException) { return CreateCompositionException((string)null, innerException, (IEnumerable<CompositionError>)null); } private static CompositionException CreateCompositionException(string message, Exception innerException, IEnumerable<CompositionError> errors) { return new CompositionException(message, innerException, errors); } } }
using System;

namespace Tamir.SharpSsh.jsch
{
    /* -*-mode:java; c-basic-offset:2; -*- */
    /*
    Copyright (c) 2002,2003,2004 ymnk, JCraft,Inc. All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice,
       this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in
       the documentation and/or other materials provided with the distribution.

    3. The names of the authors may not be used to endorse or promote products
       derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
    INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
    FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
    INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
    OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
    EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    */

    /// <summary>
    /// Fixed-capacity read/write packet buffer used by the SSH layer.
    /// Multi-byte values are stored big-endian (network byte order).
    /// <c>index</c> is the write cursor and <c>s</c> the read cursor.
    /// </summary>
    public class Buffer
    {
        // Scratch space for staging multi-byte integers before copying them
        // into the backing array.
        private readonly byte[] tmp = new byte[4];

        internal byte[] buffer; // backing storage
        internal int index;     // write position
        internal int s;         // read position

        /// <summary>Creates a buffer backed by a fresh array of <paramref name="size"/> bytes.</summary>
        public Buffer(int size)
        {
            buffer = new byte[size];
            index = 0;
            s = 0;
        }

        /// <summary>Wraps an existing array without copying it.</summary>
        public Buffer(byte[] buffer)
        {
            this.buffer = buffer;
            index = 0;
            s = 0;
        }

        /// <summary>Creates a buffer with the default 20 KB capacity.</summary>
        public Buffer() : this(1024 * 10 * 2)
        {
        }

        /// <summary>Appends a single byte.</summary>
        public void putByte(byte foo)
        {
            buffer[index++] = foo;
        }

        /// <summary>Appends an entire byte array.</summary>
        public void putByte(byte[] foo)
        {
            putByte(foo, 0, foo.Length);
        }

        /// <summary>Appends <paramref name="length"/> bytes of <paramref name="foo"/> starting at <paramref name="begin"/>.</summary>
        public void putByte(byte[] foo, int begin, int length)
        {
            Array.Copy(foo, begin, buffer, index, length);
            index += length;
        }

        /// <summary>Appends an SSH string: a 4-byte big-endian length followed by the raw bytes.</summary>
        public void putString(byte[] foo)
        {
            putString(foo, 0, foo.Length);
        }

        /// <summary>Appends an SSH string taken from a slice of <paramref name="foo"/>.</summary>
        public void putString(byte[] foo, int begin, int length)
        {
            putInt(length);
            putByte(foo, begin, length);
        }

        /// <summary>Appends a 32-bit integer, most significant byte first.</summary>
        public void putInt(int v)
        {
            uint val = (uint)v;
            for (int i = 0, shiftBits = 24; i < 4; i++, shiftBits -= 8)
            {
                tmp[i] = (byte)(val >> shiftBits);
            }
            Array.Copy(tmp, 0, buffer, index, 4);
            index += 4;
        }

        /// <summary>Appends a 64-bit integer, most significant byte first.</summary>
        public void putLong(long v)
        {
            ulong val = (ulong)v;
            // High word first, then low word, each staged through the scratch array.
            for (int i = 0; i < 4; i++)
            {
                tmp[i] = (byte)(val >> (56 - 8 * i));
            }
            Array.Copy(tmp, 0, buffer, index, 4);
            for (int i = 0; i < 4; i++)
            {
                tmp[i] = (byte)(val >> (24 - 8 * i));
            }
            Array.Copy(tmp, 0, buffer, index + 4, 4);
            index += 8;
        }

        /// <summary>Advances the write cursor without writing anything.</summary>
        internal void skip(int n)
        {
            index += n;
        }

        /// <summary>Appends <paramref name="n"/> zero bytes of padding.</summary>
        internal void putPad(int n)
        {
            for (; n > 0; n--)
            {
                buffer[index++] = (byte)0;
            }
        }

        /// <summary>
        /// Appends a multiple-precision integer. When the top bit of the first
        /// byte is set, a leading zero byte is inserted so the value cannot be
        /// read back as negative.
        /// </summary>
        public void putMPInt(byte[] foo)
        {
            int len = foo.Length;
            bool needsLeadingZero = (foo[0] & 0x80) != 0;
            if (needsLeadingZero)
            {
                putInt(len + 1);
                putByte((byte)0);
            }
            else
            {
                putInt(len);
            }
            putByte(foo);
        }

        /// <summary>Number of bytes written but not yet consumed.</summary>
        public int getLength()
        {
            return index - s;
        }

        /// <summary>Current read offset.</summary>
        public int getOffSet()
        {
            return s;
        }

        /// <summary>Moves the read offset.</summary>
        public void setOffSet(int s)
        {
            this.s = s;
        }

        /// <summary>Reads a big-endian 64-bit integer.</summary>
        public long getLong()
        {
            long high = getInt() & 0xffffffffL;
            long low = getInt() & 0xffffffffL;
            return (high << 32) | low;
        }

        /// <summary>Reads a big-endian 32-bit integer.</summary>
        public int getInt()
        {
            int high = getShort();
            int low = getShort();
            return (high << 16) | low;
        }

        /// <summary>Reads a big-endian 16-bit integer.</summary>
        internal int getShort()
        {
            int high = getByte();
            int low = getByte();
            return (high << 8) | low;
        }

        /// <summary>Reads one byte as an unsigned value (0-255).</summary>
        public int getByte()
        {
            return (buffer[s++] & 0xff);
        }

        /// <summary>Fills <paramref name="foo"/> completely from the buffer.</summary>
        public void getByte(byte[] foo)
        {
            getByte(foo, 0, foo.Length);
        }

        private void getByte(byte[] foo, int start, int len)
        {
            Array.Copy(buffer, s, foo, start, len);
            s += len;
        }

        /// <summary>Skips <paramref name="len"/> bytes and returns the offset where they started.</summary>
        public int getByte(int len)
        {
            int startOffset = s;
            s += len;
            return startOffset;
        }

        /// <summary>Reads a length-prefixed multiple-precision integer.</summary>
        public byte[] getMPInt()
        {
            int i = getInt();
            byte[] result = new byte[i];
            getByte(result, 0, i);
            return result;
        }

        /// <summary>
        /// Reads a bit-count-prefixed multiple-precision integer, prepending a
        /// zero byte when the value would otherwise read back as negative.
        /// </summary>
        public byte[] getMPIntBits()
        {
            int bits = getInt();
            int byteCount = (bits + 7) / 8;
            byte[] data = new byte[byteCount];
            getByte(data, 0, byteCount);
            if ((data[0] & 0x80) == 0)
            {
                return data;
            }
            byte[] padded = new byte[data.Length + 1];
            Array.Copy(data, 0, padded, 1, data.Length); // padded[0] is already 0
            return padded;
        }

        /// <summary>Reads an SSH string (4-byte length followed by the raw bytes).</summary>
        public byte[] getString()
        {
            int i = getInt();
            byte[] result = new byte[i];
            getByte(result, 0, i);
            return result;
        }

        /// <summary>
        /// Reads an SSH string without copying: reports its offset and length
        /// inside the backing array and returns that array.
        /// </summary>
        internal byte[] getString(int[] start, int[] len)
        {
            int i = getInt();
            start[0] = getByte(i);
            len[0] = i;
            return buffer;
        }

        /// <summary>Discards all content by rewinding both cursors.</summary>
        public void reset()
        {
            index = 0;
            s = 0;
        }

        /// <summary>Moves the unread bytes to the front of the backing array.</summary>
        public void shift()
        {
            if (s == 0)
            {
                return;
            }
            Array.Copy(buffer, s, buffer, 0, index - s);
            index -= s;
            s = 0;
        }

        /// <summary>Rewinds only the read cursor.</summary>
        internal void rewind()
        {
            s = 0;
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Reflection;
using System.Text;
using System.Threading;
using System.Xml;
using Orleans.Configuration;
using Orleans.Providers;

namespace Orleans.Runtime.Configuration
{
    /// <summary>
    /// Orleans client configuration parameters.
    /// </summary>
    [Serializable]
    public class ClientConfiguration : MessagingConfiguration, IStatisticsConfiguration
    {
        internal const string DEPRECATE_DEPLOYMENT_ID_MESSAGE = "DeploymentId is the same as ClusterId. Please use ClusterId instead of DeploymentId.";

        /// <summary>
        /// Specifies the type of the gateway provider.
        /// </summary>
        public enum GatewayProviderType
        {
            /// <summary>No provider specified</summary>
            None,

            /// <summary>use Azure, requires SystemStore element</summary>
            AzureTable,

            /// <summary>use ADO.NET, requires SystemStore element</summary>
            AdoNet,

            /// <summary>use ZooKeeper, requires SystemStore element</summary>
            ZooKeeper,

            /// <summary>use Config based static list, requires Config element(s)</summary>
            Config,

            /// <summary>use provider from third-party assembly</summary>
            Custom
        }

        /// <summary>
        /// The name of this client.
        /// </summary>
        public string ClientName { get; set; } = "Client";

        /// <summary>Gets the configuration source file path</summary>
        public string SourceFile { get; private set; }

        /// <summary>
        /// The list of the gateways to use.
        /// Each GatewayNode element specifies an outside grain client gateway node.
        /// If outside (non-Orleans) clients are to connect to the Orleans system, then at least one gateway node must be specified.
        /// Additional gateway nodes may be specified if desired, and will add some failure resilience and scalability.
        /// If multiple gateways are specified, then each client will select one from the list at random.
        /// </summary>
        public IList<IPEndPoint> Gateways { get; set; }

        /// <summary>Index into <see cref="Gateways"/> of the gateway this client prefers; negative means no preference.</summary>
        public int PreferedGatewayIndex { get; set; }

        /// <summary>The mechanism used by this client to discover gateways.</summary>
        public GatewayProviderType GatewayProvider { get; set; }

        /// <summary>
        /// Specifies a unique identifier for this cluster.
        /// If the silos are deployed on Azure (run as workers roles), deployment id is set automatically by Azure runtime,
        /// accessible to the role via RoleEnvironment.DeploymentId static variable and is passed to the silo automatically by the role via config.
        /// So if the silos are run as Azure roles this variable should not be specified in the OrleansConfiguration.xml (it will be overwritten if specified).
        /// If the silos are deployed on the cluster and not as Azure roles, this variable should be set by a deployment script in the OrleansConfiguration.xml file.
        /// </summary>
        public string ClusterId { get; set; }

        /// <summary>
        /// Deployment Id. This is the same as ClusterId and has been deprecated in favor of it.
        /// </summary>
        [Obsolete(DEPRECATE_DEPLOYMENT_ID_MESSAGE)]
        public string DeploymentId
        {
            get => this.ClusterId;
            set => this.ClusterId = value;
        }

        /// <summary>
        /// Specifies the connection string for the gateway provider.
        /// If the silos are deployed on Azure (run as workers roles), DataConnectionString may be specified via RoleEnvironment.GetConfigurationSettingValue("DataConnectionString");
        /// In such a case it is taken from there and passed to the silo automatically by the role via config.
        /// So if the silos are run as Azure roles and this config is specified via RoleEnvironment,
        /// this variable should not be specified in the OrleansConfiguration.xml (it will be overwritten if specified).
        /// If the silos are deployed on the cluster and not as Azure roles, this variable should be set in the OrleansConfiguration.xml file.
        /// If not set at all, DevelopmentStorageAccount will be used.
        /// </summary>
        public string DataConnectionString { get; set; }

        /// <summary>
        /// When using ADO, identifies the underlying data provider for the gateway provider. This three-part naming syntax is also used when creating a new factory
        /// and for identifying the provider in an application configuration file so that the provider name, along with its associated
        /// connection string, can be retrieved at run time. https://msdn.microsoft.com/en-us/library/dd0w4a2z%28v=vs.110%29.aspx
        /// </summary>
        public string AdoInvariant { get; set; }

        /// <summary>Fully qualified name of the assembly hosting a custom gateway provider (used with <see cref="GatewayProviderType.Custom"/>).</summary>
        public string CustomGatewayProviderAssemblyName { get; set; }

        /// <summary>
        /// Whether Trace.CorrelationManager.ActivityId settings should be propagated into grain calls.
        /// </summary>
        public bool PropagateActivityId { get; set; }

        /// <summary>Preferred address family (IPv4/IPv6) when resolving the local address.</summary>
        public AddressFamily PreferredFamily { get; set; }

        /// <summary>
        /// The Interface attribute specifies the name of the network interface to use to work out an IP address for this machine.
        /// </summary>
        public string NetInterface { get; private set; }

        /// <summary>
        /// The Port attribute specifies the specific listen port for this client machine.
        /// If value is zero, then a random machine-assigned port number will be used.
        /// </summary>
        public int Port { get; private set; }

        /// <summary>Gets the true host name, no IP address. It equals Dns.GetHostName()</summary>
        public string DNSHostName { get; private set; }

        /// <summary>How often the client refreshes its view of the available gateways.</summary>
        public TimeSpan GatewayListRefreshPeriod { get; set; }

        public string StatisticsProviderName { get; set; }
        public TimeSpan StatisticsPerfCountersWriteInterval { get; set; }
        public TimeSpan StatisticsLogWriteInterval { get; set; }
        [Obsolete("Statistics table is no longer supported.")]
        public bool StatisticsWriteLogStatisticsToTable { get; set; }
        public StatisticsLevel StatisticsCollectionLevel { get; set; }

        public TelemetryConfiguration TelemetryConfiguration { get; } = new TelemetryConfiguration();

        public LimitManager LimitManager { get; private set; }

        private static readonly TimeSpan DEFAULT_STATS_PERF_COUNTERS_WRITE_PERIOD = Constants.INFINITE_TIMESPAN;

        /// <summary>True when the Azure table gateway provider is selected and fully configured.</summary>
        public bool UseAzureSystemStore
        {
            get
            {
                return GatewayProvider == GatewayProviderType.AzureTable
                       && !String.IsNullOrWhiteSpace(this.ClusterId)
                       && !String.IsNullOrWhiteSpace(DataConnectionString);
            }
        }

        /// <summary>True when the ADO.NET gateway provider is selected and fully configured.</summary>
        public bool UseAdoNetSystemStore
        {
            get
            {
                return GatewayProvider == GatewayProviderType.AdoNet
                       && !String.IsNullOrWhiteSpace(this.ClusterId)
                       && !String.IsNullOrWhiteSpace(DataConnectionString);
            }
        }

        private bool HasStaticGateways { get { return Gateways != null && Gateways.Count > 0; } }

        /// <summary>Provider configurations keyed by provider category name.</summary>
        public IDictionary<string, ProviderCategoryConfiguration> ProviderConfigurations { get; set; }

        /// <summary>Initializes a new instance of <see cref="ClientConfiguration"/>.</summary>
        public ClientConfiguration()
            : base(false)
        {
            SourceFile = null;
            PreferedGatewayIndex = GatewayOptions.DEFAULT_PREFERED_GATEWAY_INDEX;
            Gateways = new List<IPEndPoint>();
            GatewayProvider = GatewayProviderType.None;
            PreferredFamily = ClientMessagingOptions.DEFAULT_PREFERRED_FAMILY;
            NetInterface = null;
            Port = 0;
            DNSHostName = Dns.GetHostName();
            this.ClusterId = "";
            DataConnectionString = "";
            // Assume the ado invariant is for sql server storage if not explicitly specified
            AdoInvariant = Constants.INVARIANT_NAME_SQL_SERVER;
            PropagateActivityId = MessagingOptions.DEFAULT_PROPAGATE_E2E_ACTIVITY_ID;
            GatewayListRefreshPeriod = GatewayOptions.DEFAULT_GATEWAY_LIST_REFRESH_PERIOD;
            StatisticsProviderName = null;
            StatisticsPerfCountersWriteInterval = DEFAULT_STATS_PERF_COUNTERS_WRITE_PERIOD;
            StatisticsLogWriteInterval = StatisticsOptions.DEFAULT_LOG_WRITE_PERIOD;
            StatisticsCollectionLevel = StatisticsOptions.DEFAULT_COLLECTION_LEVEL;
            LimitManager = new LimitManager();
            ProviderConfigurations = new Dictionary<string, ProviderCategoryConfiguration>();
        }

        /// <summary>Loads the configuration from the XML document supplied by <paramref name="input"/>.</summary>
        public void Load(TextReader input)
        {
            var xml = new XmlDocument();
            var xmlReader = XmlReader.Create(input);
            xml.Load(xmlReader);
            XmlElement root = xml.DocumentElement;
            LoadFromXml(root);
        }

        /// <summary>Populates this instance from the child elements of the given configuration root element.</summary>
        internal void LoadFromXml(XmlElement root)
        {
            foreach (XmlNode node in root.ChildNodes)
            {
                var child = node as XmlElement;
                if (child != null)
                {
                    switch (child.LocalName)
                    {
                        case "Gateway":
                            Gateways.Add(ConfigUtilities.ParseIPEndPoint(child).GetResult());
                            // A static gateway list implies the Config provider unless one was chosen already.
                            if (GatewayProvider == GatewayProviderType.None)
                            {
                                GatewayProvider = GatewayProviderType.Config;
                            }
                            break;
                        case "Azure":
                            // Throw exception with explicit deprecation error message
                            throw new OrleansException(
                                "The Azure element has been deprecated -- use SystemStore element instead.");
                        case "SystemStore":
                            if (child.HasAttribute("SystemStoreType"))
                            {
                                var sst = child.GetAttribute("SystemStoreType");
                                GatewayProvider = (GatewayProviderType)Enum.Parse(typeof(GatewayProviderType), sst);
                            }
                            if (child.HasAttribute("CustomGatewayProviderAssemblyName"))
                            {
                                CustomGatewayProviderAssemblyName = child.GetAttribute("CustomGatewayProviderAssemblyName");
                                if (CustomGatewayProviderAssemblyName.EndsWith(".dll"))
                                    throw new FormatException("Use fully qualified assembly name for \"CustomGatewayProviderAssemblyName\"");
                                if (GatewayProvider != GatewayProviderType.Custom)
                                    throw new FormatException("SystemStoreType should be \"Custom\" when CustomGatewayProviderAssemblyName is specified");
                            }
                            if (child.HasAttribute("DeploymentId"))
                            {
                                this.ClusterId = child.GetAttribute("DeploymentId");
                            }
                            if (child.HasAttribute(Constants.DATA_CONNECTION_STRING_NAME))
                            {
                                DataConnectionString = child.GetAttribute(Constants.DATA_CONNECTION_STRING_NAME);
                                if (String.IsNullOrWhiteSpace(DataConnectionString))
                                {
                                    throw new FormatException("SystemStore.DataConnectionString cannot be blank");
                                }
                                if (GatewayProvider == GatewayProviderType.None)
                                {
                                    // Assume the connection string is for Azure storage if not explicitly specified
                                    GatewayProvider = GatewayProviderType.AzureTable;
                                }
                            }
                            if (child.HasAttribute(Constants.ADO_INVARIANT_NAME))
                            {
                                AdoInvariant = child.GetAttribute(Constants.ADO_INVARIANT_NAME);
                                if (String.IsNullOrWhiteSpace(AdoInvariant))
                                {
                                    throw new FormatException("SystemStore.AdoInvariant cannot be blank");
                                }
                            }
                            break;
                        case "Tracing":
                            if (ConfigUtilities.TryParsePropagateActivityId(child, ClientName, out var propagateActivityId))
                                this.PropagateActivityId = propagateActivityId;
                            break;
                        case "Statistics":
                            ConfigUtilities.ParseStatistics(this, child, ClientName);
                            break;
                        case "Limits":
                            ConfigUtilities.ParseLimitValues(LimitManager, child, ClientName);
                            break;
                        case "Debug":
                            break;
                        case "Messaging":
                            base.Load(child);
                            break;
                        case "LocalAddress":
                            if (child.HasAttribute("PreferredFamily"))
                            {
                                PreferredFamily = ConfigUtilities.ParseEnum<AddressFamily>(child.GetAttribute("PreferredFamily"),
                                    "Invalid address family for the PreferredFamily attribute on the LocalAddress element");
                            }
                            else
                            {
                                throw new FormatException("Missing PreferredFamily attribute on the LocalAddress element");
                            }
                            if (child.HasAttribute("Interface"))
                            {
                                NetInterface = child.GetAttribute("Interface");
                            }
                            if (child.HasAttribute("Port"))
                            {
                                Port = ConfigUtilities.ParseInt(child.GetAttribute("Port"),
                                    "Invalid integer value for the Port attribute on the LocalAddress element");
                            }
                            break;
                        case "Telemetry":
                            ConfigUtilities.ParseTelemetry(child, this.TelemetryConfiguration);
                            break;
                        default:
                            // Any "...Providers" element is a provider category; merge if seen before.
                            if (child.LocalName.EndsWith("Providers", StringComparison.Ordinal))
                            {
                                var providerCategory = ProviderCategoryConfiguration.Load(child);

                                if (ProviderConfigurations.ContainsKey(providerCategory.Name))
                                {
                                    var existingCategory = ProviderConfigurations[providerCategory.Name];
                                    existingCategory.Merge(providerCategory);
                                }
                                else
                                {
                                    ProviderConfigurations.Add(providerCategory.Name, providerCategory);
                                }
                            }
                            break;
                    }
                }
            }
        }

        /// <summary>Loads a <see cref="ClientConfiguration"/> from the given file, or returns null when <paramref name="fileName"/> is null.</summary>
        public static ClientConfiguration LoadFromFile(string fileName)
        {
            if (fileName == null)
            {
                return null;
            }

            using (TextReader input = File.OpenText(fileName))
            {
                var config = new ClientConfiguration();
                config.Load(input);
                config.SourceFile = fileName;
                return config;
            }
        }

        /// <summary>
        /// Registers a given type of <typeparamref name="T"/> where <typeparamref name="T"/> is stream provider
        /// </summary>
        /// <typeparam name="T">Non-abstract type which implements <see cref="Orleans.Streams.IStreamProvider"/> stream</typeparam>
        /// <param name="providerName">Name of the stream provider</param>
        /// <param name="properties">Properties that will be passed to stream provider upon initialization</param>
        public void RegisterStreamProvider<T>(string providerName, IDictionary<string, string> properties = null) where T : Orleans.Streams.IStreamProvider
        {
            TypeInfo providerTypeInfo = typeof(T).GetTypeInfo();
            if (providerTypeInfo.IsAbstract ||
                providerTypeInfo.IsGenericType ||
                !typeof(Orleans.Streams.IStreamProvider).IsAssignableFrom(typeof(T)))
                throw new ArgumentException("Expected non-generic, non-abstract type which implements IStreamProvider interface", "typeof(T)");

            ProviderConfigurationUtility.RegisterProvider(ProviderConfigurations, ProviderCategoryConfiguration.STREAM_PROVIDER_CATEGORY_NAME, providerTypeInfo.FullName, providerName, properties);
        }

        /// <summary>
        /// Registers a given stream provider.
        /// </summary>
        /// <param name="providerTypeFullName">Full name of the stream provider type</param>
        /// <param name="providerName">Name of the stream provider</param>
        /// <param name="properties">Properties that will be passed to the stream provider upon initialization </param>
        public void RegisterStreamProvider(string providerTypeFullName, string providerName, IDictionary<string, string> properties = null)
        {
            ProviderConfigurationUtility.RegisterProvider(ProviderConfigurations, ProviderCategoryConfiguration.STREAM_PROVIDER_CATEGORY_NAME, providerTypeFullName, providerName, properties);
        }

        /// <summary>
        /// Retrieves an existing provider configuration
        /// </summary>
        /// <param name="providerTypeFullName">Full name of the stream provider type</param>
        /// <param name="providerName">Name of the stream provider</param>
        /// <param name="config">The provider configuration, if exists</param>
        /// <returns>True if a configuration for this provider already exists, false otherwise.</returns>
        public bool TryGetProviderConfiguration(string providerTypeFullName, string providerName, out IProviderConfiguration config)
        {
            return ProviderConfigurationUtility.TryGetProviderConfiguration(ProviderConfigurations, providerTypeFullName, providerName, out config);
        }

        /// <summary>
        /// Retrieves an enumeration of all currently configured provider configurations.
        /// </summary>
        /// <returns>An enumeration of all currently configured provider configurations.</returns>
        public IEnumerable<IProviderConfiguration> GetAllProviderConfigurations()
        {
            return ProviderConfigurationUtility.GetAllProviderConfigurations(ProviderConfigurations);
        }

        /// <summary>
        /// Loads the configuration from the standard paths, looking up the directory hierarchy
        /// </summary>
        /// <returns>Client configuration data if a configuration file was found.</returns>
        /// <exception cref="FileNotFoundException">Thrown if no configuration file could be found in any of the standard locations</exception>
        public static ClientConfiguration StandardLoad()
        {
            var fileName = ConfigUtilities.FindConfigFile(false); // Throws FileNotFoundException
            return LoadFromFile(fileName);
        }

        /// <summary>Returns a detailed human readable string that represents the current configuration. It does not contain every single configuration knob.</summary>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.AppendLine("Platform version info:").Append(ConfigUtilities.RuntimeVersionInfo());
            sb.Append(" Host: ").AppendLine(Dns.GetHostName());
            sb.Append(" Processor Count: ").Append(System.Environment.ProcessorCount).AppendLine();

            sb.AppendLine("Client Configuration:");
            sb.Append(" Config File Name: ").AppendLine(string.IsNullOrEmpty(SourceFile) ? "" : Path.GetFullPath(SourceFile));
            sb.Append(" Start time: ").AppendLine(LogFormatter.PrintDate(DateTime.UtcNow));
            sb.Append(" Gateway Provider: ").Append(GatewayProvider);
            if (GatewayProvider == GatewayProviderType.None)
            {
                sb.Append(". Gateway Provider that will be used instead: ").Append(GatewayProviderToUse);
            }
            sb.AppendLine();
            if (Gateways != null && Gateways.Count > 0)
            {
                sb.AppendFormat(" Gateways[{0}]:", Gateways.Count).AppendLine();
                foreach (var endpoint in Gateways)
                {
                    sb.Append(" ").AppendLine(endpoint.ToString());
                }
            }
            else
            {
                sb.Append(" Gateways: ").AppendLine("Unspecified");
            }
            sb.Append(" Preferred Gateway Index: ").AppendLine(PreferedGatewayIndex.ToString());
            if (Gateways != null && PreferedGatewayIndex >= 0 && PreferedGatewayIndex < Gateways.Count)
            {
                sb.Append(" Preferred Gateway Address: ").AppendLine(Gateways[PreferedGatewayIndex].ToString());
            }
            sb.Append(" GatewayListRefreshPeriod: ").Append(GatewayListRefreshPeriod).AppendLine();
            if (!String.IsNullOrEmpty(this.ClusterId) || !String.IsNullOrEmpty(DataConnectionString))
            {
                sb.Append(" Azure:").AppendLine();
                sb.Append(" ClusterId: ").Append(this.ClusterId).AppendLine();
                string dataConnectionInfo = ConfigUtilities.RedactConnectionStringInfo(DataConnectionString); // Don't print Azure account keys in log files
                sb.Append(" DataConnectionString: ").Append(dataConnectionInfo).AppendLine();
            }
            if (!string.IsNullOrWhiteSpace(NetInterface))
            {
                sb.Append(" Network Interface: ").AppendLine(NetInterface);
            }
            if (Port != 0)
            {
                sb.Append(" Network Port: ").Append(Port).AppendLine();
            }
            sb.Append(" Preferred Address Family: ").AppendLine(PreferredFamily.ToString());
            sb.Append(" DNS Host Name: ").AppendLine(DNSHostName);
            sb.Append(" Client Name: ").AppendLine(ClientName);
            sb.Append(ConfigUtilities.IStatisticsConfigurationToString(this));
            sb.Append(LimitManager);
            // BUGFIX: base.ToString() is arbitrary text, not a composite format string.
            // Passing it to AppendFormat would throw FormatException if it ever contained '{' or '}'.
            sb.Append(base.ToString());
            sb.Append(" .NET: ").AppendLine();
            int workerThreads;
            int completionPortThreads;
            ThreadPool.GetMinThreads(out workerThreads, out completionPortThreads);
            sb.AppendFormat(" .NET thread pool sizes - Min: Worker Threads={0} Completion Port Threads={1}", workerThreads, completionPortThreads).AppendLine();
            ThreadPool.GetMaxThreads(out workerThreads, out completionPortThreads);
            sb.AppendFormat(" .NET thread pool sizes - Max: Worker Threads={0} Completion Port Threads={1}", workerThreads, completionPortThreads).AppendLine();
            // BUGFIX: constant text — no format arguments — so use Append, not AppendFormat.
            sb.Append(" Providers:").AppendLine();
            sb.Append(ProviderConfigurationUtility.PrintProviderConfigurations(ProviderConfigurations));

            return sb.ToString();
        }

        /// <summary>The gateway provider that will actually be used, after applying defaults.</summary>
        internal GatewayProviderType GatewayProviderToUse
        {
            get
            {
                // order is important here for establishing defaults.
                if (GatewayProvider != GatewayProviderType.None) return GatewayProvider;
                if (UseAzureSystemStore) return GatewayProviderType.AzureTable;
                return HasStaticGateways ? GatewayProviderType.Config : GatewayProviderType.None;
            }
        }

        /// <summary>Validates that the selected gateway provider has all the settings it requires.</summary>
        internal void CheckGatewayProviderSettings()
        {
            switch (GatewayProvider)
            {
                case GatewayProviderType.AzureTable:
                    if (!UseAzureSystemStore)
                        throw new ArgumentException("Config specifies Azure based GatewayProviderType, but Azure element is not specified or not complete.", "GatewayProvider");
                    break;
                case GatewayProviderType.Config:
                    if (!HasStaticGateways)
                        throw new ArgumentException("Config specifies Config based GatewayProviderType, but Gateway element(s) is/are not specified.", "GatewayProvider");
                    break;
                case GatewayProviderType.Custom:
                    if (String.IsNullOrEmpty(CustomGatewayProviderAssemblyName))
                        throw new ArgumentException("Config specifies Custom GatewayProviderType, but CustomGatewayProviderAssemblyName attribute is not specified", "GatewayProvider");
                    break;
                case GatewayProviderType.None:
                    if (!UseAzureSystemStore && !HasStaticGateways)
                        throw new ArgumentException("Config does not specify GatewayProviderType, and also does not have the adequate defaults: no Azure and or Gateway element(s) are specified.", "GatewayProvider");
                    break;
                case GatewayProviderType.AdoNet:
                    if (!UseAdoNetSystemStore)
                        throw new ArgumentException("Config specifies SqlServer based GatewayProviderType, but ClusterId or DataConnectionString are not specified or not complete.", "GatewayProvider");
                    break;
                case GatewayProviderType.ZooKeeper:
                    break;
            }
        }

        /// <summary>
        /// Returns a ClientConfiguration object for connecting to a local silo (for testing).
        /// </summary>
        /// <param name="gatewayPort">Client gateway TCP port</param>
        /// <returns>ClientConfiguration object that can be passed to GrainClient class for initialization</returns>
        public static ClientConfiguration LocalhostSilo(int gatewayPort = 40000)
        {
            var config = new ClientConfiguration { GatewayProvider = GatewayProviderType.Config };
            config.Gateways.Add(new IPEndPoint(IPAddress.Loopback, gatewayPort));

            return config;
        }
    }
}
/*
Copyright 2006 - 2010 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

using System;
using System.Text;
using System.Collections;
using System.Text.RegularExpressions;
using OpenSource.UPnP;
using OpenSource.UPnP.AV;
using OpenSource.UPnP.AV.CdsMetadata;

namespace UPnPValidator
{
    /// <summary>
    /// Result bag for the Browse-with-SortCriteria subtest. Extends the shared
    /// browse statistics with references to the prerequisite test results and the
    /// list of sort fields actually exercised.
    /// </summary>
    public class CdsResult_BrowseSortCriteria: CdsResult_BrowseStats
    {
        // Results of the prerequisite "browse all" subtest (supplies the container tree).
        public CdsResult_BrowseAll BrowseAllResults;
        // Results of the prerequisite GetSortCapabilities subtest.
        public CdsResult_GetSortCapabilities SortCapsResults;
        // Sort fields derived from the advertised sort capabilities.
        public ArrayList SortFields;
    }

    /// <summary>
    /// Summary description for Cds_BrowseSortCriteria.
/// </summary> public class Cds_BrowseSortCriteria: Cds_BrowseTest { private CdsResult_BrowseSortCriteria _Details; public override object Details { get { return _Details; } } public override CdsResult_BrowseStats BrowseStats { get { return _Details; } } public override void CalculateExpectedTestingTime(ICollection otherSubTests, ISubTestArgument arg) { // get the results from the prerequisite tests CdsResult_BrowseAll BROWSE_RESULTS = null; CdsResult_GetSortCapabilities SORTCAPS = null; foreach (ISubTest preTest in otherSubTests) { if (preTest.Name == this.PRE_BROWSEALL.Name) { BROWSE_RESULTS = preTest.Details as CdsResult_BrowseAll; } else if (preTest.Name == this.PRE_SORTCAPS.Name) { SORTCAPS = preTest.Details as CdsResult_GetSortCapabilities; } } if (BROWSE_RESULTS == null) { return; } if (SORTCAPS == null) { return; } if (BROWSE_RESULTS.LargestContainer == null) { return; } MediaContainer MC = BROWSE_RESULTS.LargestContainer as MediaContainer; if (MC == null) { return; } ArrayList sortFields = new ArrayList(); if (SORTCAPS.SortCapabilities == "") { } else if (SORTCAPS.SortCapabilities == "*") { sortFields = (ArrayList) BROWSE_RESULTS.PropertyNames.Clone(); } else { sortFields.AddRange ( GetSortFields(SORTCAPS.SortCapabilities) ); } int fieldCount = sortFields.Count; IList childList = BROWSE_RESULTS.LargestContainer.CompleteList; uint inc = (uint) (childList.Count / 3); int firstInc = (fieldCount / 3); if (firstInc == 0) { firstInc = 1; } int totalBrowses = 0; for (int numFields = 0; numFields < fieldCount; numFields++) { for (int first = 0; first < fieldCount; first+=firstInc) { //for (uint i=0; i < childList.Count; i+=inc) { totalBrowses++; } } } //add one for an unsorted browse totalBrowses++; //multiply by 2 because we have 2 rounds to check for consistency in ordered results totalBrowses *= 2; //calculate expected time this._ExpectedTestingTime = totalBrowses * 900; } private struct Round2 { public BrowseInput Input; public CdsBrowseSearchResults 
PreviousResult; } public override UPnPTestStates Run (ICollection otherSubTests, CdsSubTestArgument arg) { CpContentDirectory CDS = this.GetCDS(arg._Device); _Details = new CdsResult_BrowseSortCriteria(); this._TestState = UPnPTestStates.Running; arg._TestGroup.AddEvent(LogImportance.Remark, this.Name, "\""+this.Name + "\" started."); // get the results from the prerequisite tests CdsResult_BrowseAll BROWSE_RESULTS = null; CdsResult_GetSortCapabilities SORTCAPS = null; try { foreach (ISubTest preTest in otherSubTests) { if (preTest.Name == this.PRE_BROWSEALL.Name) { BROWSE_RESULTS = preTest.Details as CdsResult_BrowseAll; } else if (preTest.Name == this.PRE_SORTCAPS.Name) { SORTCAPS = preTest.Details as CdsResult_GetSortCapabilities; } } if (BROWSE_RESULTS == null) { throw new TestException(this._Name + " requires that the \"" + this.PRE_BROWSEALL.Name + "\" test be run as a prerequisite. The results from that test cannot be obtained.", otherSubTests); } if (SORTCAPS == null) { throw new TestException(this._Name + " requires that the \"" + this.PRE_SORTCAPS.Name + "\" test be run as a prerequisite. The results from that test cannot be obtained.", otherSubTests); } } catch (Exception e) { throw new TestException(this._Name + " requires that the \"" + this.PRE_BROWSEALL.Name + "\" and \"" + this.PRE_SORTCAPS+ "\" tests be run before. An error occurred when attempting to obtain the results of those prerequisites.", otherSubTests, e); } _Details.BrowseAllResults = BROWSE_RESULTS; _Details.SortCapsResults = SORTCAPS; UPnPTestStates state = this._TestState; if (BROWSE_RESULTS.LargestContainer == null) { throw new TestException(this.PRE_BROWSEALL.Name + " failed to find the container with the most child objects. 
" +this._Name+ " requires this value.", BROWSE_RESULTS); } MediaContainer MC = BROWSE_RESULTS.LargestContainer as MediaContainer; if (MC == null) { throw new TestException(this.PRE_BROWSEALL.Name + " has the largest container as type \"" +BROWSE_RESULTS.LargestContainer.GetType().ToString() +"\" when \"" +this.Name+ "\" requires \"" +typeof(MediaContainer).ToString()+ "\".", BROWSE_RESULTS); } ArrayList sortFields = new ArrayList(); if (SORTCAPS.SortCapabilities == "") { //arg.TestGroup.AddEvent(LogImportance.Remark, this.Name, "\""+this.Name+"\" has no sorting capabilities."); } else if (SORTCAPS.SortCapabilities == "*") { sortFields = (ArrayList) BROWSE_RESULTS.PropertyNames.Clone(); } else { sortFields.AddRange ( GetSortFields(SORTCAPS.SortCapabilities) ); } _Details.ExpectedTotalBrowseRequests = 0; _Details.SortFields = sortFields; int fieldCount = sortFields.Count; IList childList = BROWSE_RESULTS.LargestContainer.CompleteList; _Details.ExpectedTotalBrowseRequests = 0;//fieldCount * fieldCount * fieldCount; uint inc = (uint) (childList.Count / 3); int firstInc = (fieldCount / 3); if (firstInc == 0) { firstInc = 1; } for (int numFields = 0; numFields < fieldCount; numFields++) { for (int first = 0; first < fieldCount; first+=firstInc) { //for (uint i=0; i < childList.Count; i+=inc) { _Details.ExpectedTotalBrowseRequests++; } } } // add 1 for an unsorted browse _Details.ExpectedTotalBrowseRequests++; //multiply by 2 because we have 2 rounds to check for consistency in ordered results _Details.ExpectedTotalBrowseRequests *= 2; //calculate time this._ExpectedTestingTime = _Details.ExpectedTotalBrowseRequests * 900; arg.ActiveTests.UpdateTimeAndProgress(0); if (state <= UPnPTestStates.Running) { state = UPnPTestStates.Pass; try { ArrayList round2 = new ArrayList(); //perform the standard unsorted browse BrowseInput input = new BrowseInput(); input.BrowseFlag = CpContentDirectory.Enum_A_ARG_TYPE_BrowseFlag.BROWSEDIRECTCHILDREN; input.StartingIndex = 0; 
input.ObjectID = MC.ID; input.RequestedCount = 0; input.Filter = "*"; input.SortCriteria = ""; CdsBrowseSearchResults br = Browse(input, this, arg, CDS, _Details); Round2 r2 = new Round2(); r2.Input = (BrowseInput) input.Clone(); r2.PreviousResult = br; round2.Add(r2); for (int numFields = 0; numFields < fieldCount; numFields++) { for (int first = 0; first < fieldCount; first+=firstInc) { ArrayList sortSettings = GetSortSettings(sortFields, first, first); input.SortCriteria = GetSortCriteriaString(sortSettings, numFields+first); arg.ActiveTests.UpdateTimeAndProgress(_Details.TotalBrowseRequests * 900); uint ignored; //use this sorter for to determine the expected order of the media objects IMediaSorter sorter = new MediaSorter(true, input.SortCriteria); IList expectedSorted = MC.BrowseSorted(0, 0, sorter, out ignored); br = Browse(input, this, arg, CDS, _Details); arg.ActiveTests.UpdateTimeAndProgress(_Details.TotalBrowseRequests * 900); this.CompareResultsAgainstExpected(br, expectedSorted, ref state, arg, input, false); r2 = new Round2(); r2.Input = (BrowseInput) input.Clone(); r2.PreviousResult = br; round2.Add(r2); } } //do round2 - check for consistency in results foreach (Round2 r in round2) { br = Browse(r.Input, this, arg, CDS, _Details); arg.ActiveTests.UpdateTimeAndProgress(_Details.TotalBrowseRequests * 900); this.CompareResultsAgainstExpected(br, r.PreviousResult.MediaObjects, ref state, arg, r.Input, true); } } catch (TerminateEarly te) { string reason = "\"" +this.Name+ "\" terminating early. 
Reason => " + te.Message; arg._TestGroup.AddEvent(LogImportance.Critical, this.Name, reason); state = UPnPTestStates.Failed; } } // finish up logging this._TestState = state; StringBuilder sb = new StringBuilder(); sb.AppendFormat("\"{0}\" completed", this.Name); if (this._TestState <= UPnPTestStates.Running) { throw new TestException("\"" +this.Name+ "\" must have a pass/warn/fail result.", this._TestState); } switch (this._TestState) { case UPnPTestStates.Pass: sb.Append(" successfully."); break; case UPnPTestStates.Warn: sb.Append(" with warnings."); break; case UPnPTestStates.Failed: sb.Append(" with a failed result."); break; } arg._TestGroup.AddResult(sb.ToString()); if (this._TestState <= UPnPTestStates.Warn) { if (_Details.TotalBrowseRequests != _Details.ExpectedTotalBrowseRequests) { throw new TestException("TotalBrowseRequests="+_Details.TotalBrowseRequests.ToString()+" ExpectedTotal="+_Details.ExpectedTotalBrowseRequests.ToString(), _Details); } } arg._TestGroup.AddEvent(LogImportance.Remark, this.Name, sb.ToString()); return this._TestState; } public void CompareResultsAgainstExpected(CdsBrowseSearchResults br, IList expectedResults, ref UPnPTestStates state, CdsSubTestArgument arg, BrowseInput input, bool strictOrder) { if (br.WorstError >= UPnPTestStates.Failed) { throw new TerminateEarly("\"" + this.Name + "\" is terminating early because " +input.PrintBrowseParams()+ " returned with an error or had problems with the DIDL-Lite."); } else { if (br.MediaObjects.Count != expectedResults.Count) { throw new TerminateEarly("\""+this.Name+"\" did a " +input.PrintBrowseParams()+ " and it should have returned "+expectedResults.Count+ " media objects. DIDL-Lite contained " +br.MediaObjects.Count+ " media objects. 
DIDL-Lite => " + br.Result); } bool warnResults = false; for (int i=0; i < br.MediaObjects.Count; i++) { IUPnPMedia gotThis = (IUPnPMedia) br.MediaObjects[i]; IUPnPMedia expectedMedia = (IUPnPMedia) expectedResults[i]; if (gotThis.ID == expectedMedia.ID) { //arg.TestGroup.AddEvent(LogImportance.Remark, this.Name, "\""+this.Name+"\" did a " +input.PrintBrowseParams()+ " and encountered no errors in the results."); } else { bool failed = false; if ((input.SortCriteria == null) || (input.SortCriteria == "")) { failed = true; } else { // Use this sorter to test for value-equality in situations where the expected order didn't match. // We need to do this because two media objects may be value-equivalent according to a sorting // algorithm, in which case there's no way to really distinguish what order they should be in. IMediaSorter sorter2 = new MediaSorter(false, input.SortCriteria); int cmp = sorter2.Compare(gotThis, expectedMedia); if (cmp != 0) { arg.TestGroup.AddEvent(LogImportance.Medium, this.Name, "\""+this.Name+"\" found media object ID=\""+gotThis.ID+"\" when it expected to find \""+expectedMedia.ID+"\" and they are not equal in their sorted order."); warnResults = true; } else { if (strictOrder == false) { arg.TestGroup.AddEvent(LogImportance.Low, this.Name, "\""+this.Name+"\" found media object ID=\""+gotThis.ID+"\" when it expected to find \""+expectedMedia.ID+"\" but since they are effectively value-equivalent, the ordering is OK."); } else { failed = true; } } } if (failed) { StringBuilder msg = new StringBuilder(); msg.AppendFormat("\"{0}\" did a {1} and the order of object ID's in the result conflicts with previous browse requests."); msg.AppendFormat("\r\n\r\nReceived objects in order by ID: "); int z = 0; foreach (IUPnPMedia em in br.MediaObjects) { if (z > 0) { msg.Append(","); } msg.AppendFormat("\"{0}\"", em.ID); z++; } msg.Append("\r\n\r\nThe expected order by ID is: "); z = 0; foreach (IUPnPMedia em in expectedResults) { if (z > 0) { 
msg.Append(","); } msg.AppendFormat("\"{0}\"", em.ID); z++; } msg.AppendFormat(".\r\n\r\nDIDL-Lite ==> {0}", br.Result); throw new TerminateEarly(msg.ToString()); } } } if (warnResults == false) { arg.TestGroup.AddEvent(LogImportance.Remark, this.Name, "\""+this.Name+"\" did a " +input.PrintBrowseParams()+ " and encountered no errors or warnings in the results."); } else { StringBuilder msg = new StringBuilder(); msg.AppendFormat("WARNING: \"{0}\" did a {1} and \r\nreceived results in the following order by ID: ", this.Name, input.PrintBrowseParams()); int z = 0; foreach (IUPnPMedia em in br.MediaObjects) { if (z > 0) { msg.Append(","); } msg.AppendFormat("\"{0}\"", em.ID); z++; } msg.Append("\r\n\r\nThe expected order by ID is: "); z = 0; foreach (IUPnPMedia em in expectedResults) { if (z > 0) { msg.Append(","); } msg.AppendFormat("\"{0}\"", em.ID); z++; } msg.AppendFormat(".\r\n\r\nDIDL-Lite ==> {0}", br.Result); // warn state = UPnPTestStates.Warn; arg._TestGroup.AddEvent(LogImportance.Medium, this.Name, msg.ToString()); } } } public static bool DoZeroOneBitCountsMatch (int bitLength, int zeroOneBits) { int one = 0; int zero = 0; for (int i=0; i < bitLength; i++) { int mask = 1 << i; bool isOne = ((zeroOneBits & mask) != 0); if (isOne) { one++; } else { zero++; } } return (one == zero); } public static string GetSortCriteriaString (IList fields, int ascendingDescendingBits) { StringBuilder sb = new StringBuilder(); int sbi = 0; foreach (string val in fields) { if (sbi > 0) { sb.Append(","); } int mask = 1 << sbi; bool isAscending = ((ascendingDescendingBits & mask) != 0); if (isAscending) { sb.Append("+"); } else { sb.Append("-"); } sb.Append(val); sbi++; } return sb.ToString(); } private ArrayList GetSortSettings(IList fields, int first, int from) { ArrayList sortSettings = new ArrayList(); sortSettings.Add( fields[first] ); for (int i=0; i < fields.Count; i++) { if (from >= fields.Count) { from = from - fields.Count; } bool added = false; string str = 
(string)fields[from]; int posAmp = str.IndexOf("@"); if (posAmp >= 0) { str = str.Remove(0, posAmp); } foreach (string sortField in sortSettings) { if (sortField.IndexOf(str) >= 0) { added = true; break; } } if (added == false) { sortSettings.Add( fields[from] ); } from++; } return sortSettings; } /// <summary> /// Parses the sort capabilities /// </summary> /// <param name="sortCaps"></param> /// <returns></returns> private static string[] GetSortFields(string sortCaps) { return sortCaps.Split(','); } protected override void SetTestInfo() { this._Name = "Browse SortCriteria"; this._Description = "Finds the container with the most children calls BrowseDirectChildren on it with various SortCriteria strings."; this._ExpectedTestingTime = 900; this._Prerequisites.Add(this.PRE_BROWSEALL); this._Prerequisites.Add(this.PRE_SORTCAPS); } private Cds_BrowseAll PRE_BROWSEALL = new Cds_BrowseAll(); private Cds_GetSortCapabilities PRE_SORTCAPS = new Cds_GetSortCapabilities(); private static Tags T = Tags.GetInstance(); } }
using System;
using MonoBrickFirmware.Native;
using MonoBrickFirmware.Tools;
using System.Text;

namespace MonoBrickFirmware.Sensors
{
    /// <summary>
    /// Sensor modes
    /// </summary>
    public enum I2CMode
    {
        #pragma warning disable
        LowSpeed = AnalogMode.Set,
        LowSpeed9V = AnalogMode.Set | AnalogMode.Pin1
        #pragma warning restore
    };

    /// <summary>
    /// Base class for all I2C sensors. This should be used when implementing a new I2C sensor
    /// </summary>
    public abstract class I2CSensor : ISensor
    {
        // Handle to the kernel I2C device; obtained from SensorManager in the constructor.
        private UnixDevice I2CDevice;
        //private MemoryArea I2CMemory;

        // Delay (ms) between the reset/mode steps in Initialise.
        private const int InitDelay = 100;
        // Maximum payload for a single I2C read or write transaction.
        private const int BufferSize = 30;

        //I2C control
        // ioctl request code used for I2C setup transactions (device-specific value).
        private const UInt32 I2CIOSetup = 0xc04c6905;

        // Well-known register offsets shared by LEGO-compatible I2C sensors.
        private enum I2cRegister : byte
        {
            FirmwareVersion = 0x00,
            VendorId = 0x08,
            DeviceId = 0x10
        };

        // Decodes a register reply as text, truncating at the first NUL terminator.
        private string ConvertByteArrayToString(byte[] bytes)
        {
            string s = System.Text.Encoding.Default.GetString(bytes);
            int pos = s.IndexOf('\0');
            if (pos >= 0)
                s = s.Substring(0, pos);
            return s;
        }

        // 8-bit I2C address as supplied by the subclass (halved before transmission; see WriteAndRead).
        protected byte I2CAddress = 0x00;
        protected const int NumberOfSensorPorts = SensorManager.NumberOfSensorPorts;
        // Physical port this sensor is attached to.
        protected SensorPort port;
        // NOTE(review): declared but never assigned in this class — presumably used by subclasses; verify.
        protected UARTMode uartMode { get; private set; }
        // Analog/power mode the port was configured with.
        protected I2CMode mode;

        /// <summary>
        /// Binds the sensor to a port, records its I2C address and configures the
        /// port's analog mode via the SensorManager singleton.
        /// </summary>
        public I2CSensor(SensorPort port, byte address, I2CMode mode)
        {
            this.port = port;
            this.I2CAddress = address;
            I2CDevice = SensorManager.Instance.I2CDevice;
            this.mode = mode;
            SensorManager.Instance.SetAnalogMode((AnalogMode)mode, port);
        }

        /// <summary>Reads and decodes the device-ID register (0x10).</summary>
        protected string GetDeviceId()
        {
            return ConvertByteArrayToString(ReadRegister((byte)I2cRegister.DeviceId));
        }

        /// <summary>Reads and decodes the vendor-ID register (0x08).</summary>
        protected string GetVendorId()
        {
            return ConvertByteArrayToString(ReadRegister((byte)I2cRegister.VendorId));
        }

        /// <summary>Reads and decodes the firmware-version register (0x00).</summary>
        protected string GetFirmwareVersion()
        {
            return ConvertByteArrayToString(ReadRegister((byte)I2cRegister.FirmwareVersion));
        }

        /// <summary>Resets the I2C state of this sensor's port.</summary>
        protected void Reset()
        {
            SensorManager.Instance.ResetI2C(this.port);
        }

        /// <summary>Switches this sensor's port into I2C operating mode.</summary>
        protected void SetMode()
        {
            SensorManager.Instance.SetI2COperatingMode(this.port);
        }

        /// <summary>
        /// Brings the port up for I2C use: pin setup, reset, then the operating mode
        /// is applied twice with delays in between. Always returns true.
        /// </summary>
        protected bool Initialise()
        {
            SensorManager.Instance.SetAnalogMode(AnalogMode.Pin5, this.port);
            Reset();
            System.Threading.Thread.Sleep(InitDelay);
            SetMode();
            System.Threading.Thread.Sleep(InitDelay);
            SetMode();
            System.Threading.Thread.Sleep(InitDelay);
            return true;
        }

        /// <summary>
        /// Reads a 8 byte register from the sensor
        /// </summary>
        /// <returns>
        /// The bytes that was read
        /// </returns>
        /// <param name='register'>
        /// Register to read
        /// </param>
        protected byte[] ReadRegister(byte register)
        {
            return ReadRegister(register, 8);
        }

        /// <summary>
        /// Reads a register from the sensor
        /// </summary>
        /// <returns>
        /// The bytes that was read
        /// </returns>
        /// <param name='register'>
        /// Register to read
        /// </param>
        /// <param name='rxLength'>
        /// The number of bytes to read
        /// </param>
        protected byte[] ReadRegister(byte register, byte rxLength)
        {
            byte[] command = { }; // no payload: a pure read
            return WriteAndRead(register, command, rxLength);
        }

        /// <summary>
        /// Writes a byte to a register.
        /// </summary>
        /// <param name='register'>
        /// Register to write to
        /// </param>
        /// <param name='data'>
        /// Data byte to write
        /// </param>
        protected void WriteRegister(byte register, byte data)
        {
            //byte[] command = { I2CAddress, register, data};
            byte[] command = { data };
            WriteAndRead(register, command, 0);
        }

        /// <summary>Writes a byte array to a register (no reply expected).</summary>
        protected void WriteRegister(byte register, byte[] data)
        {
            WriteAndRead(register, data, 0);
        }

        /// <summary>
        /// Write and read an array of bytes to the sensor
        /// </summary>
        /// <returns>The bytes that was read</returns>
        /// <param name="register">Register to write to.</param>
        /// <param name="data">Byte array to write</param>
        /// <param name="rxLength">Length of the expected reply</param>
        protected byte[] WriteAndRead(byte register, byte[] data, int rxLength)
        {
            if (rxLength > BufferSize)
                throw new ArgumentOutOfRangeException("I2C Receive Buffer only holds " + BufferSize + " bytes");
            if (data.Length > BufferSize)
            {
                throw new ArgumentOutOfRangeException("I2C Write Buffer only holds " + BufferSize + " bytes");
            }
            // NOTE(review): dataReady is never set to true, so the loop below only exits
            // via the return or the I/O-error exception; the TimeoutException at the end
            // is unreachable as written — confirm intent before changing.
            bool dataReady = false;
            int replyIndex = 0;
            byte[] writeData = new byte[BufferSize];//30
            Array.Copy(data, 0, writeData, 0, data.Length);
            // Build the ioctl message. The layout below must match what the kernel
            // driver expects byte-for-byte; do not reorder these appends.
            ByteArrayCreator command = new ByteArrayCreator();
            command.Append((int)-1);
            command.Append((byte)this.port);
            command.Append((byte)1);//repeat
            command.Append((short)0);//time
            command.Append((byte)(data.Length + 2));//length of write data
            command.Append((byte)((byte)I2CAddress >> 1)); // convert 8-bit address to 7-bit wire address
            command.Append(register);
            command.Append(writeData);
            command.Append((byte)-rxLength); // negative length signals a read request to the driver
            replyIndex = command.Data.Length; // reply bytes start here in the ioctl buffer
            command.Append(new byte[BufferSize]);//make room for reply
            byte[] i2cData = command.Data;
            // Poll the driver until the transaction completes (status field hits 0).
            while (!dataReady)
            {
                unchecked
                {
                    I2CDevice.IoCtl((Int32)I2CIOSetup, i2cData);
                }
                // The first 4 bytes of the buffer hold the transaction status:
                // negative = error, 0 = done, positive = still in progress.
                int status = BitConverter.ToInt32(i2cData, 0);
                if (status < 0)
                {
                    throw new Exception("I2C I/O error");
                }
                if (status == 0)
                {
                    byte[] reply = new byte[rxLength];
                    if (rxLength > 0)
                    {
                        Array.Copy(i2cData, replyIndex, reply, 0, rxLength);
                    }
                    return reply;
                }
            }
            throw new TimeoutException("I2C timeout");
        }

        // Mode/identification surface each concrete sensor must implement.
        public abstract string ReadAsString();
        public abstract void SelectNextMode();
        public abstract string GetSensorName();
        public abstract void SelectPreviousMode();
        public abstract int NumberOfModes();
        public abstract string SelectedMode();

        /// <summary>The port this sensor is attached to.</summary>
        public SensorPort Port { get { return port; } }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Xunit;

namespace System.IO.FileSystem.DriveInfoTests
{
    /// <summary>
    /// Tests the DriveInfo(string) constructor: valid drive letters in several path
    /// spellings ("C", "C:", "C:\", "C:/"), arbitrary sub-paths, letters with no
    /// drive attached, and invalid-argument cases.
    /// </summary>
    public class ctor_str
    {
        // Win32 bitmask of currently available drives; bit 0 = A:, bit 1 = B:, ...
        [DllImport("kernel32.dll", SetLastError = true)]
        internal static extern int GetLogicalDrives();

        [Fact]
        [ActiveIssue(514)]
        public void Test01()
        {
            // Suffixes to append to a drive letter: "", ":", ":\" and ":/".
            String[] driveLetterCombinaions = { String.Empty, ":", String.Format("{0}{1}", ":", "\\"), String.Format("{0}{1}", ":", "/") };
            DriveInfo drive;
            String path;
            String tempPath;
            int win32Result;
            uint mask;
            BitArray bits;
            List<Char> driveLetters;
            List<Char> nonDriveLetters;
            Char letter;

            //We will test this API by first getting the drives ourselves. There are 3 ways to get the drives
            //1) Directory.GetLogicalDrives which returns a String[]
            //2) DriveInfo.GetDrives() which returns a DriveInfo[]. This API calls 1)
            //3) Win32 GetLogicalDrives, which 1) calls to get the values
            //We will use the 3rd one here

            //Win32 call. If the function succeeds, the return value is a bitmask representing the currently available disk drives.
            //Bit position 0 (the least-significant bit) is drive A, bit position 1 is drive B, bit position 2 is drive C, and so on.
            win32Result = GetLogicalDrives();
            mask = (uint)win32Result;
            if (mask == 0)
            {
                Assert.False(true, "Test Failed, No drives in this machine or error calling Win32!");
            }

            //There are many ways to read this bit mask. Old style C way would be to shift the bits and check for 1. But we go the modern FX way!
            bits = new BitArray(new Int32[] { (int)mask });
            driveLetters = new List<Char>();
            nonDriveLetters = new List<Char>();
            for (int i = 0; i < bits.Length; i++)
            {
                // 65 is 'A'; bit i maps to the i-th letter of the alphabet.
                letter = (Char)(65 + i);
                if (bits[i])
                    driveLetters.Add(letter);
                else
                {
                    // Only letters can name drives; skip non-letter codepoints past 'Z'.
                    if (Char.IsLetter(letter))
                        nonDriveLetters.Add(letter);
                }
            }

            //Scenario 1: Vanilla - valid drive letters and valid paths to the root
            //Scenario 2: Variation of the drive letters: c, c:, c:\
            //Scenario 3: DriveAltCharacters like / in Windows
            foreach (char c in driveLetters)
            {
                //compromising clarity for brevity since the new testcase guidelines encourages brevity. Combining the following testcases in a loop
                //DriveInfo ctor with path with only the letter - "C"
                //DriveInfo ctor with path with the letter and the : - "C:"
                //DriveInfo ctor with path with the letter and the :\ - "C:\"
                //DriveInfo ctor with path with the letter and the :/ - "C:/"
                foreach (String suffix in driveLetterCombinaions)
                {
                    //Uppercase letters
                    path = c.ToString() + suffix;
                    drive = new DriveInfo(path);
                    //If we pass the AltDirectorySeparatorChar in the ctor, the library will change this to DirectorySeparatorChar
                    if (!drive.Name.StartsWith(path.Replace("/", "\\")))
                    {
                        Assert.False(true, string.Format("Error, Wrong value returned. Expected: {1}, Returned: {2}", suffix, path, drive.Name));
                    }
                    if (!EnsureOtherPrepertiesWork(drive, path))
                    {
                        Assert.False(true, string.Format("Error, Other properties didn't work. Path: {1}", suffix, path));
                    }

                    //Lowercase
                    path = c.ToString().ToLower() + suffix;
                    drive = new DriveInfo(path);
                    //If we pass the AltDirectorySeparatorChar in the ctor, the library will change this to DirectorySeparatorChar
                    if (!drive.Name.ToLower().StartsWith(path.Replace("/", "\\")))
                    {
                        Assert.False(true, string.Format("Error, Wrong value returned. Expected: {1}, Returned: {2}", suffix, path, drive.Name));
                    }
                    if (!EnsureOtherPrepertiesWork(drive, path))
                    {
                        Assert.False(true, string.Format("Error, Other properties didn't work. Path: {1}", suffix, path));
                    }
                }

                //We allow any path to use including invalid paths
                tempPath = c.ToString().ToUpper() + String.Format("{0}{1}", ":", "\\");
                path = tempPath + @"bar1\bar2";
                drive = new DriveInfo(path);
                if (!path.StartsWith(drive.Name))
                {
                    Assert.False(true, string.Format("Error, Wrong value returned. Expected: {0}, Returned: {1}", path, drive.Name));
                }
                if (!EnsureOtherPrepertiesWork(drive, path))
                {
                    Assert.False(true, string.Format("Error, Wrong value returned. Path: {0}", path));
                }

                // Relative components ("..") are accepted too — the ctor only cares about the root.
                path = tempPath + @"bar1\bar2\..\bar1bar2";
                drive = new DriveInfo(path);
                if (!path.StartsWith(drive.Name))
                {
                    Assert.False(true, string.Format("Error, Wrong value returned. Expected: {0}, Returned: {1}", path, drive.Name));
                }
                if (!EnsureOtherPrepertiesWork(drive, path))
                {
                    Assert.False(true, string.Format("Error, Wrong value returned. Path: {0}", path));
                }

                //And invalid characters too
                foreach (Char invalidCh in Path.GetInvalidPathChars())
                {
                    path = tempPath + invalidCh.ToString();
                    Assert.Throws<ArgumentException>(() => { drive = new DriveInfo(path); });
                }
            }

            //invalid drive letter
            // A letter with no drive attached still constructs (readiness is checked lazily).
            foreach (char c in nonDriveLetters)
            {
                drive = new DriveInfo(c.ToString());
                if (!drive.Name.StartsWith(c.ToString()))
                {
                    Assert.False(true, string.Format("Error, Wrong value returned. Expected: {1}, Returned: {2}", c, c, drive.Name));
                }
                if (!EnsureOtherPrepertiesWork(drive, c.ToString()))
                {
                    Assert.False(true, string.Format("Error, Wrong value returned. Path: {0}", c.ToString()));
                }
            }

            //4) Parm validation: null, empty, UNC shares
            Assert.Throws<ArgumentNullException>(() => { drive = new DriveInfo(null); });
            Assert.Throws<ArgumentException>(() => { drive = new DriveInfo(String.Empty); });
            Assert.Throws<ArgumentException>(() => { drive = new DriveInfo(@"\\user\public"); });
            String[] invalidRootPaths = { "cc", "c?", "california", @"C\", "c ", " c" };
            foreach (String inR in invalidRootPaths)
            {
                Assert.Throws<ArgumentException>(() => { drive = new DriveInfo(inR); });
            }
        }

        /// <summary>
        /// Returns true when both the drive under test and a freshly constructed
        /// DriveInfo for the same letter report not-ready — i.e. a property threw
        /// because the drive genuinely isn't ready, not because of a product bug.
        /// </summary>
        private Boolean CheckDrive(System.IO.DriveInfo drive, String path)
        {
            Boolean retValue = false;
            if (!drive.IsReady)
            {
                DriveInfo checkDrive = new DriveInfo(path[0].ToString());
                if (!checkDrive.IsReady)
                    retValue = true;
                ;
            }
            return retValue;
        }

        /// <summary>
        /// Verifies that every DriveInfo property on <paramref name="drive"/> (built
        /// from an 'exotic' path spelling) matches a reference DriveInfo built from
        /// just the drive letter. Properties that can throw on not-ready drives are
        /// excused via <see cref="CheckDrive"/>.
        /// </summary>
        private Boolean EnsureOtherPrepertiesWork(System.IO.DriveInfo drive, String path)
        {
            //The DriveInfo was created using different path patterns. Here, we check to make sure that other properties work for these 'exotic' path patterns
            Boolean retValue = true;
            DriveInfo checkDrive = new DriveInfo(path[0].ToString());
            try
            {
                if (checkDrive.AvailableFreeSpace != drive.AvailableFreeSpace)
                    retValue = false;
            }
            catch
            {
                if (!CheckDrive(drive, path))
                    retValue = false;
            }
            try
            {
                if (checkDrive.DriveFormat != drive.DriveFormat)
                    retValue = false;
            }
            catch
            {
                if (!CheckDrive(drive, path))
                    retValue = false;
            }
            if (checkDrive.DriveType != drive.DriveType)
                retValue = false;
            if (checkDrive.IsReady != drive.IsReady)
                retValue = false;
            if (checkDrive.Name != drive.Name)
                retValue = false;
            if (checkDrive.RootDirectory.Name != drive.RootDirectory.Name)
                retValue = false;
            try
            {
                if (checkDrive.TotalFreeSpace != drive.TotalFreeSpace)
                    retValue = false;
            }
            catch
            {
                if (!CheckDrive(drive, path))
                    retValue = false;
            }
            try
            {
                if (checkDrive.TotalSize != drive.TotalSize)
                    retValue = false;
            }
            catch
            {
                if (!CheckDrive(drive, path))
                    retValue = false;
            }
            try
            {
                if (checkDrive.VolumeLabel != drive.VolumeLabel)
                    retValue = false;
            }
            catch
            {
                if (!CheckDrive(drive, path))
                    retValue = false;
            }
            return retValue;
        }
    }
}
using Sparrow.Binary; using System; using System.Collections.Generic; using System.Linq; using System.Runtime.CompilerServices; using System.Text; using System.Threading.Tasks; namespace Sparrow { partial class Hashing { public static class Streamed { #region XXHash32 & XXHash64 public class XXHash32Context { internal uint Seed = 0; internal XXHash32Values Current; internal readonly byte[] Leftover = new byte[XXHash32.Alignment]; internal int LeftoverCount = 0; internal int BufferSize = 0; } public static unsafe class XXHash32 { public const int Alignment = 16; [MethodImpl(MethodImplOptions.AggressiveInlining)] public static XXHash32Context BeginProcessInline(uint seed = 0) { var context = new XXHash32Context { Seed = seed }; context.Current.V1 = seed + XXHash32Constants.PRIME32_1 + XXHash32Constants.PRIME32_2; context.Current.V2 = seed + XXHash32Constants.PRIME32_2; context.Current.V3 = seed + 0; context.Current.V4 = seed - XXHash32Constants.PRIME32_1; return context; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static XXHash32Context ProcessInline(XXHash32Context context, byte* buffer, int size) { if (context.LeftoverCount != 0) throw new NotSupportedException("Streaming process does not support resuming with buffers whose size is not 16 bytes aligned. 
Supporting it would impact performance."); byte* bEnd = buffer + size; byte* limit = bEnd - Alignment; context.LeftoverCount = (int)(bEnd - buffer) % Alignment; if (context.BufferSize + size >= Alignment) { uint v1 = context.Current.V1; uint v2 = context.Current.V2; uint v3 = context.Current.V3; uint v4 = context.Current.V4; while (buffer <= limit) { v1 += ((uint*)buffer)[0] * XXHash32Constants.PRIME32_2; v2 += ((uint*)buffer)[1] * XXHash32Constants.PRIME32_2; v3 += ((uint*)buffer)[2] * XXHash32Constants.PRIME32_2; v4 += ((uint*)buffer)[3] * XXHash32Constants.PRIME32_2; buffer += 4 * sizeof(uint); v1 = Bits.RotateLeft32(v1, 13); v2 = Bits.RotateLeft32(v2, 13); v3 = Bits.RotateLeft32(v3, 13); v4 = Bits.RotateLeft32(v4, 13); v1 *= XXHash32Constants.PRIME32_1; v2 *= XXHash32Constants.PRIME32_1; v3 *= XXHash32Constants.PRIME32_1; v4 *= XXHash32Constants.PRIME32_1; context.BufferSize += Alignment; } context.Current.V1 = v1; context.Current.V2 = v2; context.Current.V3 = v3; context.Current.V4 = v4; } for(int i = 0; i < context.LeftoverCount; i++ ) { context.Leftover[i] = *buffer; buffer++; } return context; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static uint EndProcessInline(XXHash32Context context) { uint h32; if (context.BufferSize >= Alignment) { uint v1 = context.Current.V1; uint v2 = context.Current.V2; uint v3 = context.Current.V3; uint v4 = context.Current.V4; h32 = Bits.RotateLeft32(v1, 1) + Bits.RotateLeft32(v2, 7) + Bits.RotateLeft32(v3, 12) + Bits.RotateLeft32(v4, 18); } else { h32 = context.Seed + XXHash32Constants.PRIME32_5; } h32 += (uint)(context.BufferSize + context.LeftoverCount); if ( context.LeftoverCount > 0 ) { fixed (byte* b = context.Leftover) { byte* buffer = b; byte* bEnd = b + context.LeftoverCount; while (buffer + 4 <= bEnd) { h32 += *((uint*)buffer) * XXHash32Constants.PRIME32_3; h32 = Bits.RotateLeft32(h32, 17) * XXHash32Constants.PRIME32_4; buffer += 4; } while (buffer < bEnd) { h32 += (uint)(*buffer) * 
XXHash32Constants.PRIME32_5; h32 = Bits.RotateLeft32(h32, 11) * XXHash32Constants.PRIME32_1; buffer++; } } } h32 ^= h32 >> 15; h32 *= XXHash32Constants.PRIME32_2; h32 ^= h32 >> 13; h32 *= XXHash32Constants.PRIME32_3; h32 ^= h32 >> 16; return h32; } public static XXHash32Context BeginProcess(uint seed = 0) { return BeginProcessInline(seed); } public static uint EndProcess(XXHash32Context context) { return EndProcessInline(context); } public static XXHash32Context Process(XXHash32Context context, byte* buffer, int size) { return ProcessInline(context, buffer, size); } public static XXHash32Context Process(XXHash32Context context, byte[] value, int size = -1) { if (size == -1) size = value.Length; fixed (byte* buffer = value) { return ProcessInline(context, buffer, size); } } } public class XXHash64Context { internal ulong Seed = 0; internal XXHash64Values Current; internal readonly byte[] Leftover = new byte[XXHash64.Alignment]; internal int LeftoverCount; internal int BufferSize = 0; } public static unsafe class XXHash64 { public const int Alignment = 32; public static XXHash64Context BeginProcessInline(uint seed = 0) { var context = new XXHash64Context { Seed = seed }; context.Current.V1 = seed + XXHash64Constants.PRIME64_1 + XXHash64Constants.PRIME64_2; context.Current.V2 = seed + XXHash64Constants.PRIME64_2; context.Current.V3 = seed + 0; context.Current.V4 = seed - XXHash64Constants.PRIME64_1; return context; } public static XXHash64Context ProcessInline(XXHash64Context context, byte* buffer, int size) { if (context.LeftoverCount != 0) throw new NotSupportedException("Streaming process does not support resuming with buffers whose size is not 16 bytes aligned. 
Supporting it would impact performance."); byte* bEnd = buffer + size; byte* limit = bEnd - Alignment; context.LeftoverCount = (int)(bEnd - buffer) % Alignment; if (context.BufferSize + size >= Alignment) { ulong v1 = context.Current.V1; ulong v2 = context.Current.V2; ulong v3 = context.Current.V3; ulong v4 = context.Current.V4; while (buffer <= limit) { v1 += ((ulong*)buffer)[0] * XXHash64Constants.PRIME64_2; v2 += ((ulong*)buffer)[1] * XXHash64Constants.PRIME64_2; v3 += ((ulong*)buffer)[2] * XXHash64Constants.PRIME64_2; v4 += ((ulong*)buffer)[3] * XXHash64Constants.PRIME64_2; buffer += 4 * sizeof(ulong); v1 = Bits.RotateLeft64(v1, 31); v2 = Bits.RotateLeft64(v2, 31); v3 = Bits.RotateLeft64(v3, 31); v4 = Bits.RotateLeft64(v4, 31); v1 *= XXHash64Constants.PRIME64_1; v2 *= XXHash64Constants.PRIME64_1; v3 *= XXHash64Constants.PRIME64_1; v4 *= XXHash64Constants.PRIME64_1; context.BufferSize += Alignment; } context.Current.V1 = v1; context.Current.V2 = v2; context.Current.V3 = v3; context.Current.V4 = v4; } for (int i = 0; i < context.LeftoverCount; i++) { context.Leftover[i] = *buffer; buffer++; } return context; } public static ulong EndProcessInline(XXHash64Context context) { ulong h64; if (context.BufferSize >= Alignment) { ulong v1 = context.Current.V1; ulong v2 = context.Current.V2; ulong v3 = context.Current.V3; ulong v4 = context.Current.V4; h64 = Bits.RotateLeft64(v1, 1) + Bits.RotateLeft64(v2, 7) + Bits.RotateLeft64(v3, 12) + Bits.RotateLeft64(v4, 18); v1 *= XXHash64Constants.PRIME64_2; v2 *= XXHash64Constants.PRIME64_2; v3 *= XXHash64Constants.PRIME64_2; v4 *= XXHash64Constants.PRIME64_2; v1 = Bits.RotateLeft64(v1, 31); v2 = Bits.RotateLeft64(v2, 31); v3 = Bits.RotateLeft64(v3, 31); v4 = Bits.RotateLeft64(v4, 31); v1 *= XXHash64Constants.PRIME64_1; v2 *= XXHash64Constants.PRIME64_1; v3 *= XXHash64Constants.PRIME64_1; v4 *= XXHash64Constants.PRIME64_1; h64 ^= v1; h64 = h64 * XXHash64Constants.PRIME64_1 + XXHash64Constants.PRIME64_4; h64 ^= v2; h64 = h64 * 
// --- Tail of XXHash64.EndProcessInline (the method opens before this view): ---
// --- fold the remaining accumulator lanes into h64, then finalize.          ---
XXHash64Constants.PRIME64_1 + XXHash64Constants.PRIME64_4;
h64 ^= v3;
h64 = h64 * XXHash64Constants.PRIME64_1 + XXHash64Constants.PRIME64_4;
h64 ^= v4;
h64 = h64 * XXHash64Constants.PRIME64_1 + XXHash64Constants.PRIME64_4;
}
else
{
    // Fewer than one full block was ever accumulated, so no lanes exist; start from the seed.
    h64 = context.Seed + XXHash64Constants.PRIME64_5;
}

// Mix in the total number of bytes hashed (bulk blocks + leftover tail).
h64 += (uint)(context.BufferSize + context.LeftoverCount);

if (context.LeftoverCount > 0)
{
    fixed (byte* b = context.Leftover)
    {
        byte* buffer = b;
        byte* bEnd = b + context.LeftoverCount;

        // Consume remaining 8-byte words.
        while (buffer + 8 <= bEnd)
        {
            ulong k1 = *((ulong*)buffer);
            k1 *= XXHash64Constants.PRIME64_2;
            k1 = Bits.RotateLeft64(k1, 31);
            k1 *= XXHash64Constants.PRIME64_1;
            h64 ^= k1;
            h64 = Bits.RotateLeft64(h64, 27) * XXHash64Constants.PRIME64_1 + XXHash64Constants.PRIME64_4;
            buffer += 8;
        }

        // At most one remaining 4-byte word.
        if (buffer + 4 <= bEnd)
        {
            h64 ^= *(uint*)buffer * XXHash64Constants.PRIME64_1;
            h64 = Bits.RotateLeft64(h64, 23) * XXHash64Constants.PRIME64_2 + XXHash64Constants.PRIME64_3;
            buffer += 4;
        }

        // Trailing bytes one at a time.
        while (buffer < bEnd)
        {
            h64 ^= ((ulong)*buffer) * XXHash64Constants.PRIME64_5;
            h64 = Bits.RotateLeft64(h64, 11) * XXHash64Constants.PRIME64_1;
            buffer++;
        }
    }
}

// Final avalanche: xor-shift / multiply mixing so every input bit affects every output bit.
h64 ^= h64 >> 33;
h64 *= XXHash64Constants.PRIME64_2;
h64 ^= h64 >> 29;
h64 *= XXHash64Constants.PRIME64_3;
h64 ^= h64 >> 32;

return h64;
}

/// <summary>Starts a new streaming xxHash64 computation with the given seed.</summary>
public static XXHash64Context BeginProcess(uint seed = 0)
{
    return BeginProcessInline(seed);
}

/// <summary>Finishes the streaming computation and returns the 64-bit hash.</summary>
public static ulong EndProcess(XXHash64Context context)
{
    return EndProcessInline(context);
}

/// <summary>Feeds <paramref name="size"/> bytes from an unmanaged buffer into the hash state.</summary>
public static XXHash64Context Process(XXHash64Context context, byte* buffer, int size)
{
    return ProcessInline(context, buffer, size);
}

/// <summary>
/// Feeds a managed byte array into the hash state. When <paramref name="size"/> is -1
/// (the default), the whole array is consumed.
/// </summary>
public static XXHash64Context Process(XXHash64Context context, byte[] value, int size = -1)
{
    if (size == -1)
        size = value.Length;

    fixed (byte* buffer = value)
    {
        return ProcessInline(context, buffer, size);
    }
}
}

#endregion

#region Metro128

/// <summary>
/// Mutable state for a streaming 128-bit MetroHash computation.
/// Create with <see cref="Metro128.BeginProcess"/>, feed with <see cref="Metro128.Process(Metro128Context, byte[], int)"/>,
/// finish with <see cref="Metro128.EndProcess"/>.
/// </summary>
public class Metro128Context
{
    internal uint Seed = 0;
    // The four running 64-bit accumulator lanes.
    internal Metro128Values Current;
    // Bytes from the last Process call that did not fill a complete 32-byte block.
    internal readonly byte[] Leftover = new byte[Metro128.Alignment];
    internal int LeftoverCount = 0;
    // Total number of bytes consumed in full 32-byte blocks so far.
    internal int BufferSize = 0;
}

/// <summary>
/// Streaming implementation of the 128-bit MetroHash.
/// </summary>
public static unsafe class Metro128
{
    // Block size in bytes; bulk input is consumed in 32-byte chunks.
    public const int Alignment = 32;

    /// <summary>Initializes the four accumulator lanes from the seed and K0..K3 constants.</summary>
    public static Metro128Context BeginProcessInline(uint seed = 0)
    {
        var context = new Metro128Context
        {
            Seed = seed
        };
        context.Current.V0 = (seed - Metro128Constants.K0) * Metro128Constants.K3;
        context.Current.V1 = (seed + Metro128Constants.K1) * Metro128Constants.K2;
        context.Current.V2 = (seed + Metro128Constants.K0) * Metro128Constants.K2;
        context.Current.V3 = (seed - Metro128Constants.K1) * Metro128Constants.K3;
        return context;
    }

    /// <summary>
    /// Consumes <paramref name="length"/> bytes in 32-byte blocks; any trailing partial
    /// block is stashed in <see cref="Metro128Context.Leftover"/> for <see cref="EndProcessInline"/>.
    /// </summary>
    public static Metro128Context ProcessInline(Metro128Context context, byte* buffer, int length)
    {
        // Once a partial block has been stashed, the stream cannot be resumed (see message below).
        if (context.LeftoverCount != 0)
            throw new NotSupportedException("Streaming process does not support resuming with buffers whose size is not 32 bytes aligned. Supporting it would impact performance.");

        byte* ptr = buffer;
        byte* end = ptr + length;

        context.LeftoverCount = (int)(end - ptr) % Alignment;

        if (context.BufferSize + length >= Alignment)
        {
            // Work on local copies of the lanes; write back once after the loop.
            ulong v0 = context.Current.V0;
            ulong v1 = context.Current.V1;
            ulong v2 = context.Current.V2;
            ulong v3 = context.Current.V3;

            while (ptr <= (end - 32))
            {
                v0 += ((ulong*)ptr)[0] * Metro128Constants.K0;
                v1 += ((ulong*)ptr)[1] * Metro128Constants.K1;
                v0 = Bits.RotateRight64(v0, 29) + v2;
                v1 = Bits.RotateRight64(v1, 29) + v3;
                v2 += ((ulong*)ptr)[2] * Metro128Constants.K2;
                v3 += ((ulong*)ptr)[3] * Metro128Constants.K3;
                v2 = Bits.RotateRight64(v2, 29) + v0;
                v3 = Bits.RotateRight64(v3, 29) + v1;
                ptr += Alignment;
                context.BufferSize += Alignment;
            }

            context.Current.V0 = v0;
            context.Current.V1 = v1;
            context.Current.V2 = v2;
            context.Current.V3 = v3;
        }

        // Stash trailing bytes until EndProcessInline.
        for (int i = 0; i < context.LeftoverCount; i++)
        {
            context.Leftover[i] = *ptr;
            ptr++;
        }

        return context;
    }

    /// <summary>
    /// Finalizes the hash: cross-mixes the lanes (when at least one full block was seen),
    /// folds in any leftover bytes in 16/8/4/2/1-byte steps, then mixes the two result halves.
    /// </summary>
    public static Metro128Hash EndProcessInline(Metro128Context context)
    {
        ulong v0 = context.Current.V0;
        ulong v1 = context.Current.V1;
        ulong v2 = context.Current.V2;
        ulong v3 = context.Current.V3;

        if (context.BufferSize >= Alignment)
        {
            v2 ^= Bits.RotateRight64(((v0 + v3) * Metro128Constants.K0) + v1, 21) * Metro128Constants.K1;
            v3 ^= Bits.RotateRight64(((v1 + v2) * Metro128Constants.K1) + v0, 21) * Metro128Constants.K0;
            v0 ^= Bits.RotateRight64(((v0 + v2) * Metro128Constants.K0) + v3, 21) * Metro128Constants.K1;
            v1 ^= Bits.RotateRight64(((v1 + v3) * Metro128Constants.K1) + v2, 21) * Metro128Constants.K0;
        }

        if (context.LeftoverCount > 0)
        {
            fixed (byte* b = context.Leftover)
            {
                byte* ptr = b;
                byte* end = b + context.LeftoverCount;

                if ((end - ptr) >= 16)
                {
                    v0 += ((ulong*)ptr)[0] * Metro128Constants.K2;
                    v1 += ((ulong*)ptr)[1] * Metro128Constants.K2;
                    v0 = Bits.RotateRight64(v0, 33) * Metro128Constants.K3;
                    v1 = Bits.RotateRight64(v1, 33) * Metro128Constants.K3;
                    ptr += 2 * sizeof(ulong);
                    v0 ^= Bits.RotateRight64((v0 * Metro128Constants.K2) + v1, 45) * Metro128Constants.K1;
                    v1 ^= Bits.RotateRight64((v1 * Metro128Constants.K3) + v0, 45) * Metro128Constants.K0;
                }

                if ((end - ptr) >= 8)
                {
                    v0 += *((ulong*)ptr) * Metro128Constants.K2;
                    ptr += sizeof(ulong);
                    v0 = Bits.RotateRight64(v0, 33) * Metro128Constants.K3;
                    v0 ^= Bits.RotateRight64((v0 * Metro128Constants.K2) + v1, 27) * Metro128Constants.K1;
                }

                if ((end - ptr) >= 4)
                {
                    v1 += *((uint*)ptr) * Metro128Constants.K2;
                    ptr += sizeof(uint);
                    v1 = Bits.RotateRight64(v1, 33) * Metro128Constants.K3;
                    v1 ^= Bits.RotateRight64((v1 * Metro128Constants.K3) + v0, 46) * Metro128Constants.K0;
                }

                if ((end - ptr) >= 2)
                {
                    v0 += *((ushort*)ptr) * Metro128Constants.K2;
                    ptr += sizeof(ushort);
                    v0 = Bits.RotateRight64(v0, 33) * Metro128Constants.K3;
                    v0 ^= Bits.RotateRight64((v0 * Metro128Constants.K2) + v1, 22) * Metro128Constants.K1;
                }

                if ((end - ptr) >= 1)
                {
                    v1 += *((byte*)ptr) * Metro128Constants.K2;
                    v1 = Bits.RotateRight64(v1, 33) * Metro128Constants.K3;
                    v1 ^= Bits.RotateRight64((v1 * Metro128Constants.K3) + v0, 58) * Metro128Constants.K0;
                }
            }
        }

        // Final cross-mixing of the two 64-bit result halves.
        v0 += Bits.RotateRight64((v0 * Metro128Constants.K0) + v1, 13);
        v1 += Bits.RotateRight64((v1 * Metro128Constants.K1) + v0, 37);
        v0 += Bits.RotateRight64((v0 * Metro128Constants.K2) + v1, 13);
        v1 += Bits.RotateRight64((v1 * Metro128Constants.K3) + v0, 37);

        return new Metro128Hash
        {
            H1 = v0,
            H2 = v1
        };
    }

    /// <summary>Starts a new streaming Metro128 computation with the given seed.</summary>
    public static Metro128Context BeginProcess(uint seed = 0)
    {
        return BeginProcessInline(seed);
    }

    /// <summary>Finishes the streaming computation and returns the 128-bit hash.</summary>
    public static Metro128Hash EndProcess(Metro128Context context)
    {
        return EndProcessInline(context);
    }

    /// <summary>Feeds <paramref name="size"/> bytes from an unmanaged buffer into the hash state.</summary>
    public static Metro128Context Process(Metro128Context context, byte* buffer, int size)
    {
        return ProcessInline(context, buffer, size);
    }

    /// <summary>
    /// Feeds a managed byte array into the hash state. When <paramref name="size"/> is -1
    /// (the default), the whole array is consumed.
    /// </summary>
    public static Metro128Context Process(Metro128Context context, byte[] value, int size = -1)
    {
        if (size == -1)
            size = value.Length;

        fixed (byte* buffer = value)
        {
            return ProcessInline(context, buffer, size);
        }
    }
}

#endregion
}
}
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using osu.Framework.Allocation;
using osu.Framework.Audio;
using osu.Framework.Audio.Sample;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Input.Events;
using osu.Framework.Logging;
using osu.Framework.Screens;
using osu.Framework.Threading;
using osu.Game.Beatmaps;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Input.Bindings;
using osu.Game.Overlays;
using osu.Game.Overlays.Mods;
using osu.Game.Rulesets;
using osu.Game.Rulesets.Mods;
using osu.Game.Screens.Edit;
using osu.Game.Screens.Menu;
using osu.Game.Screens.Select.Options;
using osuTK;
using osuTK.Graphics;
using osuTK.Input;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using osu.Framework.Audio.Track;
using osu.Framework.Graphics.Sprites;
using osu.Framework.Input.Bindings;
using osu.Game.Collections;
using osu.Game.Graphics.UserInterface;
using System.Diagnostics;
using osu.Game.Screens.Play;
using osu.Game.Database;

namespace osu.Game.Screens.Select
{
    /// <summary>
    /// Base screen for choosing a beatmap prior to an action (play, edit, ...).
    /// Hosts the beatmap carousel, filter control, info wedge, details area and footer.
    /// Subclasses decide what happens on selection via <see cref="OnStart"/>.
    /// </summary>
    public abstract class SongSelect : ScreenWithBeatmapBackground, IKeyBindingHandler<GlobalAction>
    {
        public static readonly float WEDGE_HEIGHT = 245;

        protected const float BACKGROUND_BLUR = 20;
        private const float left_area_padding = 20;

        public FilterControl FilterControl { get; private set; }

        protected virtual bool ShowFooter => true;

        protected virtual bool DisplayStableImportPrompt => stableImportManager?.SupportsImportFromStable == true;

        /// <summary>
        /// Can be null if <see cref="ShowFooter"/> is false.
        /// </summary>
        protected BeatmapOptionsOverlay BeatmapOptions { get; private set; }

        /// <summary>
        /// Can be null if <see cref="ShowFooter"/> is false.
        /// </summary>
        protected Footer Footer { get; private set; }

        /// <summary>
        /// Contains any panel which is triggered by a footer button.
        /// Helps keep them located beneath the footer itself.
        /// </summary>
        protected Container FooterPanels { get; private set; }

        /// <summary>
        /// Whether entering editor mode should be allowed.
        /// </summary>
        public virtual bool AllowEditing => true;

        [Resolved]
        private Bindable<IReadOnlyList<Mod>> selectedMods { get; set; }

        protected BeatmapCarousel Carousel { get; private set; }

        protected Container LeftArea { get; private set; }

        private BeatmapInfoWedge beatmapInfoWedge;
        private DialogOverlay dialogOverlay;

        [Resolved]
        private BeatmapManager beatmaps { get; set; }

        [Resolved(CanBeNull = true)]
        private StableImportManager stableImportManager { get; set; }

        protected ModSelectOverlay ModSelect { get; private set; }

        protected Sample SampleConfirm { get; private set; }

        private Sample sampleChangeDifficulty;
        private Sample sampleChangeBeatmap;

        private Container carouselContainer;

        protected BeatmapDetailArea BeatmapDetails { get; private set; }

        // Screen-local ruleset bindable, decoupled from the game-wide Ruleset so transfers
        // can happen at controlled points (see transferRulesetValue()).
        private readonly Bindable<RulesetInfo> decoupledRuleset = new Bindable<RulesetInfo>();

        [Resolved]
        private MusicController music { get; set; }

        [BackgroundDependencyLoader(true)]
        private void load(AudioManager audio, DialogOverlay dialog, OsuColour colours, ManageCollectionsDialog manageCollectionsDialog, DifficultyRecommender recommender)
        {
            // initial value transfer is required for FilterControl (it uses our re-cached bindables in its async load for the initial filter).
            transferRulesetValue();

            LoadComponentAsync(Carousel = new BeatmapCarousel
            {
                AllowSelection = false, // delay any selection until our bindables are ready to make a good choice.
                Anchor = Anchor.CentreRight,
                Origin = Anchor.CentreRight,
                RelativeSizeAxes = Axes.Both,
                BleedTop = FilterControl.HEIGHT,
                BleedBottom = Footer.HEIGHT,
                SelectionChanged = updateSelectedBeatmap,
                BeatmapSetsChanged = carouselBeatmapsLoaded,
                GetRecommendedBeatmap = s => recommender?.GetRecommendedBeatmap(s),
            }, c => carouselContainer.Child = c);

            AddRangeInternal(new Drawable[]
            {
                new ResetScrollContainer(() => Carousel.ScrollToSelected())
                {
                    RelativeSizeAxes = Axes.Y,
                    Width = 250,
                },
                new VerticalMaskingContainer
                {
                    Children = new Drawable[]
                    {
                        new GridContainer // used for max width implementation
                        {
                            RelativeSizeAxes = Axes.Both,
                            ColumnDimensions = new[]
                            {
                                new Dimension(),
                                new Dimension(GridSizeMode.Relative, 0.5f, maxSize: 850),
                            },
                            Content = new[]
                            {
                                new Drawable[]
                                {
                                    new ParallaxContainer
                                    {
                                        ParallaxAmount = 0.005f,
                                        RelativeSizeAxes = Axes.Both,
                                        Child = new WedgeBackground
                                        {
                                            RelativeSizeAxes = Axes.Both,
                                            Padding = new MarginPadding { Right = -150 },
                                        },
                                    },
                                    carouselContainer = new Container
                                    {
                                        RelativeSizeAxes = Axes.Both,
                                        Padding = new MarginPadding
                                        {
                                            Top = FilterControl.HEIGHT,
                                            Bottom = Footer.HEIGHT
                                        },
                                        // placeholder shown until the async carousel load replaces it.
                                        Child = new LoadingSpinner(true) { State = { Value = Visibility.Visible } }
                                    }
                                },
                            }
                        },
                        FilterControl = new FilterControl
                        {
                            RelativeSizeAxes = Axes.X,
                            Height = FilterControl.HEIGHT,
                            FilterChanged = ApplyFilterToCarousel,
                        },
                        new GridContainer // used for max width implementation
                        {
                            RelativeSizeAxes = Axes.Both,
                            ColumnDimensions = new[]
                            {
                                new Dimension(GridSizeMode.Relative, 0.5f, maxSize: 650),
                            },
                            Content = new[]
                            {
                                new Drawable[]
                                {
                                    LeftArea = new Container
                                    {
                                        Origin = Anchor.BottomLeft,
                                        Anchor = Anchor.BottomLeft,
                                        RelativeSizeAxes = Axes.Both,
                                        Padding = new MarginPadding { Top = left_area_padding },
                                        Children = new Drawable[]
                                        {
                                            beatmapInfoWedge = new BeatmapInfoWedge
                                            {
                                                Height = WEDGE_HEIGHT,
                                                RelativeSizeAxes = Axes.X,
                                                Margin = new MarginPadding
                                                {
                                                    Right = left_area_padding,
                                                    Left = -BeatmapInfoWedge.BORDER_THICKNESS, // Hide the left border
                                                },
                                            },
                                            new Container
                                            {
                                                RelativeSizeAxes = Axes.Both,
                                                Padding = new MarginPadding
                                                {
                                                    Bottom = Footer.HEIGHT,
                                                    Top = WEDGE_HEIGHT,
                                                    Left = left_area_padding,
                                                    Right = left_area_padding * 2,
                                                },
                                                Child = BeatmapDetails = CreateBeatmapDetailArea().With(d =>
                                                {
                                                    d.RelativeSizeAxes = Axes.Both;
                                                    d.Padding = new MarginPadding { Top = 10, Right = 5 };
                                                })
                                            },
                                        }
                                    },
                                },
                            }
                        }
                    }
                },
            });

            if (ShowFooter)
            {
                AddRangeInternal(new Drawable[]
                {
                    new GridContainer // used for max height implementation
                    {
                        RelativeSizeAxes = Axes.Both,
                        RowDimensions = new[]
                        {
                            new Dimension(),
                            new Dimension(GridSizeMode.Relative, 1f, maxSize: ModSelectOverlay.HEIGHT + Footer.HEIGHT),
                        },
                        Content = new[]
                        {
                            null,
                            new Drawable[]
                            {
                                FooterPanels = new Container
                                {
                                    Anchor = Anchor.BottomLeft,
                                    Origin = Anchor.BottomLeft,
                                    RelativeSizeAxes = Axes.Both,
                                    Padding = new MarginPadding { Bottom = Footer.HEIGHT },
                                    Children = new Drawable[]
                                    {
                                        BeatmapOptions = new BeatmapOptionsOverlay(),
                                        ModSelect = CreateModSelectOverlay()
                                    }
                                }
                            }
                        }
                    },
                    Footer = new Footer()
                });
            }

            if (Footer != null)
            {
                foreach (var (button, overlay) in CreateFooterButtons())
                    Footer.AddButton(button, overlay);

                BeatmapOptions.AddButton(@"Manage", @"collections", FontAwesome.Solid.Book, colours.Green, () => manageCollectionsDialog?.Show());
                BeatmapOptions.AddButton(@"Delete", @"all difficulties", FontAwesome.Solid.Trash, colours.Pink, () => delete(Beatmap.Value.BeatmapSetInfo));
                BeatmapOptions.AddButton(@"Remove", @"from unplayed", FontAwesome.Regular.TimesCircle, colours.Purple, null);
                BeatmapOptions.AddButton(@"Clear", @"local scores", FontAwesome.Solid.Eraser, colours.Purple, () => clearScores(Beatmap.Value.BeatmapInfo));
            }

            dialogOverlay = dialog;

            sampleChangeDifficulty = audio.Samples.Get(@"SongSelect/select-difficulty");
            sampleChangeBeatmap = audio.Samples.Get(@"SongSelect/select-expand");
            SampleConfirm = audio.Samples.Get(@"SongSelect/confirm-selection");

            if (dialogOverlay != null)
            {
                Schedule(() =>
                {
                    // if we have no beatmaps, let's prompt the user to import from over a stable install if he has one.
                    if (!beatmaps.GetAllUsableBeatmapSetsEnumerable(IncludedDetails.Minimal).Any() && DisplayStableImportPrompt)
                    {
                        dialogOverlay.Push(new ImportFromStablePopup(() =>
                        {
                            Task.Run(() => stableImportManager.ImportFromStableAsync(StableContent.All));
                        }));
                    }
                });
            }
        }

        /// <summary>
        /// Creates the buttons to be displayed in the footer.
        /// </summary>
        /// <returns>A set of <see cref="FooterButton"/> and an optional <see cref="OverlayContainer"/> which the button opens when pressed.</returns>
        protected virtual IEnumerable<(FooterButton, OverlayContainer)> CreateFooterButtons() => new (FooterButton, OverlayContainer)[]
        {
            (new FooterButtonMods { Current = Mods }, ModSelect),
            (new FooterButtonRandom
            {
                NextRandom = () => Carousel.SelectNextRandom(),
                PreviousRandom = Carousel.SelectPreviousRandom
            }, null),
            (new FooterButtonOptions(), BeatmapOptions)
        };

        protected virtual ModSelectOverlay CreateModSelectOverlay() => new LocalPlayerModSelectOverlay();

        protected virtual void ApplyFilterToCarousel(FilterCriteria criteria)
        {
            // if not the current screen, we want to get carousel in a good presentation state before displaying (resume or enter).
            bool shouldDebounce = this.IsCurrentScreen();

            Carousel.Filter(criteria, shouldDebounce);
        }

        private DependencyContainer dependencies;

        protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent)
        {
            dependencies = new DependencyContainer(base.CreateChildDependencies(parent));

            // expose this screen and the decoupled ruleset to child components.
            dependencies.CacheAs(this);
            dependencies.CacheAs(decoupledRuleset);
            dependencies.CacheAs<IBindable<RulesetInfo>>(decoupledRuleset);

            return dependencies;
        }

        /// <summary>
        /// Creates the beatmap details to be displayed underneath the wedge.
        /// </summary>
        protected abstract BeatmapDetailArea CreateBeatmapDetailArea();

        /// <summary>
        /// Pushes the editor for the given beatmap (or the last non-debounced selection).
        /// </summary>
        /// <exception cref="InvalidOperationException">If <see cref="AllowEditing"/> is false.</exception>
        public void Edit(BeatmapInfo beatmap = null)
        {
            if (!AllowEditing)
                throw new InvalidOperationException($"Attempted to edit when {nameof(AllowEditing)} is disabled");

            Beatmap.Value = beatmaps.GetWorkingBeatmap(beatmap ?? beatmapNoDebounce);
            this.Push(new Editor());
        }

        /// <summary>
        /// Call to make a selection and perform the default action for this SongSelect.
        /// </summary>
        /// <param name="beatmap">An optional beatmap to override the current carousel selection.</param>
        /// <param name="ruleset">An optional ruleset to override the current carousel selection.</param>
        /// <param name="customStartAction">An optional custom action to perform instead of <see cref="OnStart"/>.</param>
        public void FinaliseSelection(BeatmapInfo beatmap = null, RulesetInfo ruleset = null, Action customStartAction = null)
        {
            // This is very important as we have not yet bound to screen-level bindables before the carousel load is completed.
            if (!Carousel.BeatmapSetsLoaded)
                return;

            if (ruleset != null)
                Ruleset.Value = ruleset;

            transferRulesetValue();

            // while transferRulesetValue will flush, it only does so if the ruleset changes.
            // the user could have changed a filter, and we want to ensure we are 100% up-to-date and consistent here.
            Carousel.FlushPendingFilterOperations();

            // avoid attempting to continue before a selection has been obtained.
            // this could happen via a user interaction while the carousel is still in a loading state.
            if (Carousel.SelectedBeatmap == null) return;

            if (beatmap != null)
                Carousel.SelectBeatmap(beatmap);

            if (selectionChangedDebounce?.Completed == false)
            {
                // run the pending selection update immediately rather than waiting for the debounce.
                selectionChangedDebounce.RunTask();
                selectionChangedDebounce?.Cancel(); // cancel the already scheduled task.
                selectionChangedDebounce = null;
            }

            if (customStartAction != null)
            {
                customStartAction();
                Carousel.AllowSelection = false;
            }
            else if (OnStart())
                Carousel.AllowSelection = false;
        }

        /// <summary>
        /// Called when a selection is made.
        /// </summary>
        /// <returns>If a resultant action occurred that takes the user away from SongSelect.</returns>
        protected abstract bool OnStart();

        private ScheduledDelegate selectionChangedDebounce;

        // Reacts to the game-wide WorkingBeatmap bindable changing (possibly from outside this screen).
        private void workingBeatmapChanged(ValueChangedEvent<WorkingBeatmap> e)
        {
            if (e.NewValue is DummyWorkingBeatmap || !this.IsCurrentScreen()) return;

            Logger.Log($"working beatmap updated to {e.NewValue}");

            if (!Carousel.SelectBeatmap(e.NewValue.BeatmapInfo, false))
            {
                // A selection may not have been possible with filters applied.

                // There was possibly a ruleset mismatch. This is a case we can help things along by updating the game-wide ruleset to match.
                if (e.NewValue.BeatmapInfo.Ruleset != null && !e.NewValue.BeatmapInfo.Ruleset.Equals(decoupledRuleset.Value))
                {
                    Ruleset.Value = e.NewValue.BeatmapInfo.Ruleset;
                    transferRulesetValue();
                }

                // Even if a ruleset mismatch was not the cause (ie. a text filter is applied),
                // we still want to temporarily show the new beatmap, bypassing filters.
                // This will be undone the next time the user changes the filter.
                var criteria = FilterControl.CreateCriteria();
                criteria.SelectedBeatmapSet = e.NewValue.BeatmapInfo.BeatmapSet;
                Carousel.Filter(criteria);

                Carousel.SelectBeatmap(e.NewValue.BeatmapInfo);
            }
        }

        // We need to keep track of the last selected beatmap ignoring debounce to play the correct selection sounds.
        private BeatmapInfo beatmapNoDebounce;
        private RulesetInfo rulesetNoDebounce;

        private void updateSelectedBeatmap(BeatmapInfo beatmap)
        {
            if (beatmap == null && beatmapNoDebounce == null)
                return;

            if (beatmap?.Equals(beatmapNoDebounce) == true)
                return;

            beatmapNoDebounce = beatmap;
            performUpdateSelected();
        }

        private void updateSelectedRuleset(RulesetInfo ruleset)
        {
            if (ruleset == null && rulesetNoDebounce == null)
                return;

            if (ruleset?.Equals(rulesetNoDebounce) == true)
                return;

            rulesetNoDebounce = ruleset;
            performUpdateSelected();
        }

        /// <summary>
        /// Selection has been changed as the result of a user interaction.
        /// </summary>
        private void performUpdateSelected()
        {
            var beatmap = beatmapNoDebounce;
            var ruleset = rulesetNoDebounce;

            selectionChangedDebounce?.Cancel();

            // a null beatmap (deselection) is applied immediately; otherwise debounce by 200ms.
            if (beatmapNoDebounce == null)
                run();
            else
                selectionChangedDebounce = Scheduler.AddDelayed(run, 200);

            void run()
            {
                // clear pending task immediately to track any potential nested debounce operation.
                selectionChangedDebounce = null;

                Logger.Log($"updating selection with beatmap:{beatmap?.ID.ToString() ?? "null"} ruleset:{ruleset?.ID.ToString() ?? "null"}");

                if (transferRulesetValue())
                {
                    Mods.Value = Array.Empty<Mod>();

                    // transferRulesetValue() may trigger a re-filter. If the current selection does not match the new ruleset, we want to switch away from it.
                    // The default logic on WorkingBeatmap change is to switch to a matching ruleset (see workingBeatmapChanged()), but we don't want that here.
                    // We perform an early selection attempt and clear out the beatmap selection to avoid a second ruleset change (revert).
                    if (beatmap != null && !Carousel.SelectBeatmap(beatmap, false))
                        beatmap = null;
                }

                if (selectionChangedDebounce != null)
                {
                    // a new nested operation was started; switch to it for further selection.
                    // this avoids having two separate debounces trigger from the same source.
                    selectionChangedDebounce.RunTask();
                    return;
                }

                // We may be arriving here due to another component changing the bindable Beatmap.
                // In these cases, the other component has already loaded the beatmap, so we don't need to do so again.
                if (!EqualityComparer<BeatmapInfo>.Default.Equals(beatmap, Beatmap.Value.BeatmapInfo))
                {
                    Logger.Log($"beatmap changed from \"{Beatmap.Value.BeatmapInfo}\" to \"{beatmap}\"");

                    int? lastSetID = Beatmap.Value?.BeatmapInfo.BeatmapSetInfoID;

                    Beatmap.Value = beatmaps.GetWorkingBeatmap(beatmap);

                    if (beatmap != null)
                    {
                        // same set => difficulty change sound; different set => beatmap change sound.
                        if (beatmap.BeatmapSetInfoID == lastSetID)
                            sampleChangeDifficulty.Play();
                        else
                            sampleChangeBeatmap.Play();
                    }
                }

                if (this.IsCurrentScreen())
                    ensurePlayingSelected();

                updateComponentFromBeatmap(Beatmap.Value);
            }
        }

        public override void OnEntering(IScreen last)
        {
            base.OnEntering(last);

            this.FadeInFromZero(250);
            FilterControl.Activate();

            ModSelect.SelectedMods.BindTo(selectedMods);

            beginLooping();
        }

        private const double logo_transition = 250;

        protected override void LogoArriving(OsuLogo logo, bool resuming)
        {
            base.LogoArriving(logo, resuming);

            Vector2 position = new Vector2(0.95f, 0.96f);

            if (logo.Alpha > 0.8f)
            {
                logo.MoveTo(position, 500, Easing.OutQuint);
            }
            else
            {
                logo.Hide();
                logo.ScaleTo(0.2f);
                logo.MoveTo(position);
            }

            logo.FadeIn(logo_transition, Easing.OutQuint);
            logo.ScaleTo(0.4f, logo_transition, Easing.OutQuint);

            // clicking the logo confirms the current selection.
            logo.Action = () =>
            {
                FinaliseSelection();
                return false;
            };
        }

        protected override void LogoExiting(OsuLogo logo)
        {
            base.LogoExiting(logo);
            logo.ScaleTo(0.2f, logo_transition / 2, Easing.Out);
            logo.FadeOut(logo_transition / 2, Easing.Out);
        }

        public override void OnResuming(IScreen last)
        {
            base.OnResuming(last);

            // required due to https://github.com/ppy/osu-framework/issues/3218
            ModSelect.SelectedMods.Disabled = false;
            ModSelect.SelectedMods.BindTo(selectedMods);

            Carousel.AllowSelection = true;

            BeatmapDetails.Refresh();

            beginLooping();
            music.ResetTrackAdjustments();

            if (Beatmap != null && !Beatmap.Value.BeatmapSetInfo.DeletePending)
            {
                updateComponentFromBeatmap(Beatmap.Value);

                // restart playback on returning to song select, regardless.
                // not sure this should be a permanent thing (we may want to leave a user pause paused even on returning)
                music.Play(requestedByUser: true);
            }

            this.FadeIn(250);

            this.ScaleTo(1, 250, Easing.OutSine);

            FilterControl.Activate();
        }

        public override void OnSuspending(IScreen next)
        {
            ModSelect.SelectedMods.UnbindFrom(selectedMods);
            ModSelect.Hide();

            BeatmapOptions.Hide();

            endLooping();

            this.ScaleTo(1.1f, 250, Easing.InSine);

            this.FadeOut(250);

            FilterControl.Deactivate();
            base.OnSuspending(next);
        }

        public override bool OnExiting(IScreen next)
        {
            if (base.OnExiting(next))
                return true;

            beatmapInfoWedge.Hide();

            this.FadeOut(100);

            FilterControl.Deactivate();

            endLooping();

            return false;
        }

        private bool isHandlingLooping;

        // Enables preview looping for the current track and subscribes to track changes.
        private void beginLooping()
        {
            Debug.Assert(!isHandlingLooping);

            isHandlingLooping = true;

            ensureTrackLooping(Beatmap.Value, TrackChangeDirection.None);

            music.TrackChanged += ensureTrackLooping;
        }

        private void endLooping()
        {
            // may be called multiple times during screen exit process.
            if (!isHandlingLooping)
                return;

            music.CurrentTrack.Looping = isHandlingLooping = false;

            music.TrackChanged -= ensureTrackLooping;
        }

        private void ensureTrackLooping(WorkingBeatmap beatmap, TrackChangeDirection changeDirection)
            => beatmap.PrepareTrackForPreviewLooping();

        public override bool OnBackButton()
        {
            // back first dismisses the mod select overlay if it is open.
            if (ModSelect.State.Value == Visibility.Visible)
            {
                ModSelect.Hide();
                return true;
            }

            return false;
        }

        protected override void Dispose(bool isDisposing)
        {
            base.Dispose(isDisposing);

            decoupledRuleset.UnbindAll();

            // unsubscribe to avoid leaking this screen via the music controller's event.
            if (music != null)
                music.TrackChanged -= ensureTrackLooping;
        }

        /// <summary>
        /// Allow components in SongSelect to update their loaded beatmap details.
        /// This is a debounced call (unlike directly binding to WorkingBeatmap.ValueChanged).
        /// </summary>
        /// <param name="beatmap">The working beatmap.</param>
        private void updateComponentFromBeatmap(WorkingBeatmap beatmap)
        {
            ApplyToBackground(backgroundModeBeatmap =>
            {
                backgroundModeBeatmap.Beatmap = beatmap;
                backgroundModeBeatmap.BlurAmount.Value = BACKGROUND_BLUR;
                backgroundModeBeatmap.FadeColour(Color4.White, 250);
            });

            beatmapInfoWedge.Beatmap = beatmap;

            BeatmapDetails.Beatmap = beatmap;
        }

        // weak reference so we don't keep an old track alive just for comparison.
        private readonly WeakReference<ITrack> lastTrack = new WeakReference<ITrack>(null);

        /// <summary>
        /// Ensures some music is playing for the current track.
        /// Will resume playback from a manual user pause if the track has changed.
        /// </summary>
        private void ensurePlayingSelected()
        {
            ITrack track = music.CurrentTrack;

            bool isNewTrack = !lastTrack.TryGetTarget(out var last) || last != track;

            if (!track.IsRunning && (music.UserPauseRequested != true || isNewTrack))
                music.Play(true);

            lastTrack.SetTarget(track);
        }

        // Invoked once the carousel has finished loading its beatmap sets; performs initial selection.
        private void carouselBeatmapsLoaded()
        {
            bindBindables();

            Carousel.AllowSelection = true;

            // If a selection was already obtained, do not attempt to update the selected beatmap.
            if (Carousel.SelectedBeatmapSet != null)
                return;

            // Attempt to select the current beatmap on the carousel, if it is valid to be selected.
            if (!Beatmap.IsDefault && Beatmap.Value.BeatmapSetInfo?.DeletePending == false && Beatmap.Value.BeatmapSetInfo?.Protected == false)
            {
                if (Carousel.SelectBeatmap(Beatmap.Value.BeatmapInfo, false))
                    return;

                // prefer not changing ruleset at this point, so look for another difficulty in the currently playing beatmap
                var found = Beatmap.Value.BeatmapSetInfo.Beatmaps.FirstOrDefault(b => b.Ruleset.Equals(decoupledRuleset.Value));

                if (found != null && Carousel.SelectBeatmap(found, false))
                    return;
            }

            // If the current active beatmap could not be selected, select a new random beatmap.
            if (!Carousel.SelectNextRandom())
            {
                // in the case random selection failed, we want to trigger selectionChanged
                // to show the dummy beatmap (we have nothing else to display).
                performUpdateSelected();
            }
        }

        private bool boundLocalBindables;

        private void bindBindables()
        {
            if (boundLocalBindables)
                return;

            // manual binding to parent ruleset to allow for delayed load in the incoming direction.
            transferRulesetValue();

            Ruleset.ValueChanged += r => updateSelectedRuleset(r.NewValue);

            decoupledRuleset.ValueChanged += r => Ruleset.Value = r.NewValue;
            decoupledRuleset.DisabledChanged += r => Ruleset.Disabled = r;

            Beatmap.BindValueChanged(workingBeatmapChanged);

            boundLocalBindables = true;
        }

        /// <summary>
        /// Transfer the game-wide ruleset to the local decoupled ruleset.
        /// Will immediately run filter operations if required.
        /// </summary>
        /// <returns>Whether a transfer occurred.</returns>
        private bool transferRulesetValue()
        {
            if (decoupledRuleset.Value?.Equals(Ruleset.Value) == true)
                return false;

            Logger.Log($"decoupled ruleset transferred (\"{decoupledRuleset.Value}\" -> \"{Ruleset.Value}\")");
            rulesetNoDebounce = decoupledRuleset.Value = Ruleset.Value;

            // if we have a pending filter operation, we want to run it now.
            // it could change selection (ie. if the ruleset has been changed).
            Carousel?.FlushPendingFilterOperations();

            return true;
        }

        // Shows the delete confirmation dialog for a beatmap set (no-op for null/unsaved sets).
        private void delete(BeatmapSetInfo beatmap)
        {
            if (beatmap == null || beatmap.ID <= 0) return;

            dialogOverlay?.Push(new BeatmapDeleteDialog(beatmap));
        }

        // Shows the clear-scores confirmation dialog for a beatmap (no-op for null/unsaved beatmaps).
        private void clearScores(BeatmapInfo beatmap)
        {
            if (beatmap == null || beatmap.ID <= 0) return;

            dialogOverlay?.Push(new BeatmapClearScoresDialog(beatmap, () =>
                // schedule done here rather than inside the dialog as the dialog may fade out and never callback.
                Schedule(() => BeatmapDetails.Refresh())));
        }

        public virtual bool OnPressed(GlobalAction action)
        {
            if (!this.IsCurrentScreen()) return false;

            switch (action)
            {
                case GlobalAction.Select:
                    FinaliseSelection();
                    return true;
            }

            return false;
        }

        public void OnReleased(GlobalAction action)
        {
        }

        protected override bool OnKeyDown(KeyDownEvent e)
        {
            if (e.Repeat) return false;

            switch (e.Key)
            {
                case Key.Delete:
                    // shift+delete prompts deletion of the currently selected beatmap set.
                    if (e.ShiftPressed)
                    {
                        if (!Beatmap.IsDefault)
                            delete(Beatmap.Value.BeatmapSetInfo);
                        return true;
                    }

                    break;
            }

            return base.OnKeyDown(e);
        }

        // Masks children vertically while allowing horizontal overflow (see Width comment below).
        private class VerticalMaskingContainer : Container
        {
            private const float panel_overflow = 1.2f;

            protected override Container<Drawable> Content { get; }

            public VerticalMaskingContainer()
            {
                RelativeSizeAxes = Axes.Both;
                Masking = true;
                Anchor = Anchor.Centre;
                Origin = Anchor.Centre;
                Width = panel_overflow; // avoid horizontal masking so the panels don't clip when screen stack is pushed.
                InternalChild = Content = new Container
                {
                    RelativeSizeAxes = Axes.Both,
                    Anchor = Anchor.Centre,
                    Origin = Anchor.Centre,
                    Width = 1 / panel_overflow,
                };
            }
        }

        // Invisible strip that triggers an action (scroll carousel to selection) on hover.
        private class ResetScrollContainer : Container
        {
            private readonly Action onHoverAction;

            public ResetScrollContainer(Action onHoverAction)
            {
                this.onHoverAction = onHoverAction;
            }

            protected override bool OnHover(HoverEvent e)
            {
                onHoverAction?.Invoke();
                return base.OnHover(e);
            }
        }
    }
}
using System.Globalization;
using System.Reflection;

namespace System
{
    /// <summary>
    /// Base class for all <see cref="Type"/> that are exposed in SharpLang runtime.
    /// Most reflection members are not yet implemented and throw <see cref="NotImplementedException"/>.
    /// </summary>
    abstract class SharpLangType : RuntimeType, ISharpLangGenericContext
    {
        // Native SharpLangEEType descriptor this managed Type wraps (used e.g. by GetBaseType).
        unsafe internal protected SharpLangEEType* EEType;

        unsafe public SharpLangType(SharpLangEEType* eeType)
        {
            this.EEType = eeType;
        }

        public override object InvokeMember(string name, BindingFlags invokeAttr, Binder binder, object target, object[] args, ParameterModifier[] modifiers, CultureInfo culture, string[] namedParameters)
        {
            throw new NotImplementedException();
        }

        public override ConstructorInfo[] GetConstructors(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        protected override bool IsPrimitiveImpl()
        {
            throw new NotImplementedException();
        }

        // This base class models a plain (non pointer/byref/array) type, so the
        // element-type predicates below all answer false; element types are created
        // via the Make*Type methods at the bottom.
        protected override bool IsPointerImpl()
        {
            return false;
        }

        protected override bool IsCOMObjectImpl()
        {
            throw new NotImplementedException();
        }

        protected override bool IsByRefImpl()
        {
            return false;
        }

        protected override bool IsArrayImpl()
        {
            return false;
        }

        protected override bool HasElementTypeImpl()
        {
            return false;
        }

        protected override TypeAttributes GetAttributeFlagsImpl()
        {
            throw new NotImplementedException();
        }

        protected override ConstructorInfo GetConstructorImpl(BindingFlags bindingAttr, Binder binder, CallingConventions callConvention, Type[] types, ParameterModifier[] modifiers)
        {
            throw new NotImplementedException();
        }

        protected override PropertyInfo GetPropertyImpl(string name, BindingFlags bindingAttr, Binder binder, Type returnType, Type[] types, ParameterModifier[] modifiers)
        {
            throw new NotImplementedException();
        }

        public override PropertyInfo[] GetProperties(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override Type[] GetNestedTypes(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override Type GetNestedType(string name, BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override MethodInfo[] GetMethods(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        protected override MethodInfo GetMethodImpl(string name, BindingFlags bindingAttr, Binder binder, CallingConventions callConvention, Type[] types, ParameterModifier[] modifiers)
        {
            throw new NotImplementedException();
        }

        public override MemberInfo[] GetMembers(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override FieldInfo[] GetFields(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override FieldInfo GetField(string name, BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override EventInfo[] GetEvents(BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override EventInfo GetEvent(string name, BindingFlags bindingAttr)
        {
            throw new NotImplementedException();
        }

        public override Type GetElementType()
        {
            // Plain types have no element type (consistent with HasElementTypeImpl above).
            return null;
        }

        public override Type[] GetInterfaces()
        {
            throw new NotImplementedException();
        }

        public override Type GetInterface(string name, bool ignoreCase)
        {
            throw new NotImplementedException();
        }

        public override Type UnderlyingSystemType
        {
            get { return this; }
        }

        public override Module Module
        {
            get { throw new NotImplementedException(); }
        }

        public override Guid GUID
        {
            get { throw new NotImplementedException(); }
        }

        public override string FullName
        {
            get
            {
                var name = Name;

                // Is it embedded in another type?
                var declaringType = DeclaringType;
                if (declaringType != null)
                    return declaringType.FullName + "+" + name;

                // Append namespace
                var @namespace = Namespace;
                if (@namespace != null)
                    return @namespace + "." + name;

                return name;
            }
        }

        public override Type BaseType
        {
            get { return GetBaseType(); }
        }

        public unsafe virtual SharpLangType GetBaseType()
        {
            // TODO: Interface should return null too
            if (EEType->Base == null)
                return null;

            return SharpLangModule.ResolveType(EEType->Base);
        }

        public override bool IsGenericParameter
        {
            get { return false; }
        }

        public override bool IsGenericType
        {
            get { return false; }
        }

        public override bool IsGenericTypeDefinition
        {
            get { return false; }
        }

        public override string AssemblyQualifiedName
        {
            get
            {
                var fullName = FullName;
                if (fullName == null)
                    return null;

                return fullName + ", " + InternalAssemblyName;
            }
        }

        // Assembly name used by AssemblyQualifiedName; overridden by derived types that know it.
        internal virtual string InternalAssemblyName
        {
            get { return null; }
        }

        public override Assembly Assembly
        {
            get { throw new NotImplementedException(); }
        }

        public override object[] GetCustomAttributes(Type attributeType, bool inherit)
        {
            throw new NotImplementedException();
        }

        public override object[] GetCustomAttributes(bool inherit)
        {
            throw new NotImplementedException();
        }

        public override bool IsDefined(Type attributeType, bool inherit)
        {
            throw new NotImplementedException();
        }

        public unsafe override Type MakePointerType()
        {
            return SharpLangModule.ResolveElementType(null, this, SharpLangEEType.Kind.Pointer);
        }

        public unsafe override Type MakeByRefType()
        {
            return SharpLangModule.ResolveElementType(null, this, SharpLangEEType.Kind.ByRef);
        }

        public unsafe override Type MakeArrayType()
        {
            return SharpLangModule.ResolveElementType(null, this, SharpLangEEType.Kind.Array);
        }
    }
}
// // System.Web.UI.WebControls.ObjectDataSource // // Authors: // Lluis Sanchez Gual (lluis@novell.com) // // (C) 2005 Novell, Inc. (http://www.novell.com) // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
#if NET_2_0
using System.Collections;
using System.Collections.Specialized;
using System.ComponentModel;
using System.IO;
using System.Security.Permissions;

namespace System.Web.UI.WebControls
{
	/// <summary>
	/// Data source control that represents a middle-tier business object with
	/// data retrieval and update methods. The control exposes exactly one
	/// <see cref="ObjectDataSourceView"/> (named "DefaultView"); every event,
	/// parameter collection and data operation below simply delegates to it.
	/// </summary>
	[DefaultEventAttribute ("Selecting")]
	[DefaultPropertyAttribute ("TypeName")]
	[DesignerAttribute ("System.Web.UI.Design.WebControls.ObjectDataSourceDesigner, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.ComponentModel.Design.IDesigner")]
	[ParseChildrenAttribute (true)]
	[PersistChildrenAttribute (false)]
	[AspNetHostingPermissionAttribute (SecurityAction.LinkDemand, Level = AspNetHostingPermissionLevel.Minimal)]
	[AspNetHostingPermissionAttribute (SecurityAction.InheritanceDemand, Level = AspNetHostingPermissionLevel.Minimal)]
	public class ObjectDataSource : DataSourceControl
	{
		// The single backing view; created lazily on first access.
		ObjectDataSourceView defaultView;

		public ObjectDataSource ()
		{
		}

		public ObjectDataSource (string typeName, string selectMethod)
		{
			SelectMethod = selectMethod;
			TypeName = typeName;
		}

		// Lazily creates the one and only view of this data source.
		ObjectDataSourceView DefaultView {
			get {
				if (defaultView == null)
					defaultView = new ObjectDataSourceView (this, "DefaultView", Context);
				return defaultView;
			}
		}

		// All events are forwarded straight to the default view, so that
		// subscribers attached to the control observe the view's lifecycle.

		public event ObjectDataSourceStatusEventHandler Deleted {
			add { DefaultView.Deleted += value; }
			remove { DefaultView.Deleted -= value; }
		}

		public event ObjectDataSourceMethodEventHandler Deleting {
			add { DefaultView.Deleting += value; }
			remove { DefaultView.Deleting -= value; }
		}

		public event ObjectDataSourceFilteringEventHandler Filtering {
			add { DefaultView.Filtering += value; }
			remove { DefaultView.Filtering -= value; }
		}

		public event ObjectDataSourceStatusEventHandler Inserted {
			add { DefaultView.Inserted += value; }
			remove { DefaultView.Inserted -= value; }
		}

		public event ObjectDataSourceMethodEventHandler Inserting {
			add { DefaultView.Inserting += value; }
			remove { DefaultView.Inserting -= value; }
		}

		public event ObjectDataSourceObjectEventHandler ObjectCreated {
			add { DefaultView.ObjectCreated += value; }
			remove { DefaultView.ObjectCreated -= value; }
		}

		public event ObjectDataSourceObjectEventHandler ObjectCreating {
			add { DefaultView.ObjectCreating += value; }
			remove { DefaultView.ObjectCreating -= value; }
		}

		public event ObjectDataSourceDisposingEventHandler ObjectDisposing {
			add { DefaultView.ObjectDisposing += value; }
			remove { DefaultView.ObjectDisposing -= value; }
		}

		/*
		public event ObjectDataSourceResolvingMethodEventHandler ResolvingMethod {
			add { DefaultView.ResolvingMethod += value; }
			remove { DefaultView.ResolvingMethod -= value; }
		}
		*/

		public event ObjectDataSourceStatusEventHandler Selected {
			add { DefaultView.Selected += value; }
			remove { DefaultView.Selected -= value; }
		}

		public event ObjectDataSourceSelectingEventHandler Selecting {
			add { DefaultView.Selecting += value; }
			remove { DefaultView.Selecting -= value; }
		}

		public event ObjectDataSourceStatusEventHandler Updated {
			add { DefaultView.Updated += value; }
			remove { DefaultView.Updated -= value; }
		}

		public event ObjectDataSourceMethodEventHandler Updating {
			add { DefaultView.Updating += value; }
			remove { DefaultView.Updating -= value; }
		}

		// Likewise, all properties are thin pass-throughs to the default view.

		[WebCategoryAttribute ("Data")]
		[DefaultValueAttribute (ConflictOptions.OverwriteChanges)]
		public ConflictOptions ConflictDetection {
			get { return DefaultView.ConflictDetection; }
			set { DefaultView.ConflictDetection = value; }
		}

		[WebCategoryAttribute ("Data")]
		[DefaultValueAttribute ("")]
		public string DataObjectTypeName {
			get { return DefaultView.DataObjectTypeName; }
			set { DefaultView.DataObjectTypeName = value; }
		}

		[WebCategoryAttribute ("Data")]
		[DefaultValueAttribute ("")]
		public string DeleteMethod {
			get { return DefaultView.DeleteMethod; }
			set { DefaultView.DeleteMethod = value; }
		}

		[WebCategoryAttribute ("Data")]
		[MergablePropertyAttribute (false)]
		[EditorAttribute ("System.Web.UI.Design.WebControls.ParameterCollectionEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
		[DefaultValueAttribute (null)]
		[PersistenceModeAttribute (PersistenceMode.InnerProperty)]
		public ParameterCollection DeleteParameters {
			get { return DefaultView.DeleteParameters; }
		}

		[WebCategoryAttribute ("Paging")]
		[DefaultValueAttribute (false)]
		public virtual bool EnablePaging {
			get { return DefaultView.EnablePaging; }
			set { DefaultView.EnablePaging = value; }
		}

		[WebCategoryAttribute ("Data")]
		[DefaultValueAttribute ("")]
		public string FilterExpression {
			get { return DefaultView.FilterExpression; }
			set { DefaultView.FilterExpression = value; }
		}

		[WebCategoryAttribute ("Data")]
		[MergablePropertyAttribute (false)]
		[EditorAttribute ("System.Web.UI.Design.WebControls.ParameterCollectionEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
		[DefaultValueAttribute (null)]
		[PersistenceModeAttribute (PersistenceMode.InnerProperty)]
		public ParameterCollection FilterParameters {
			get { return DefaultView.FilterParameters; }
		}

		[DefaultValueAttribute ("")]
		[WebCategoryAttribute ("Data")]
		public virtual string InsertMethod {
			get { return DefaultView.InsertMethod; }
			set { DefaultView.InsertMethod = value; }
		}

		[WebCategoryAttribute ("Data")]
		[MergablePropertyAttribute (false)]
		[EditorAttribute ("System.Web.UI.Design.WebControls.ParameterCollectionEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
		[DefaultValueAttribute (null)]
		[PersistenceModeAttribute (PersistenceMode.InnerProperty)]
		public ParameterCollection InsertParameters {
			get { return DefaultView.InsertParameters; }
		}

		[WebCategoryAttribute ("Paging")]
		[DefaultValueAttribute ("maximumRows")]
		public string MaximumRowsParameterName {
			get { return DefaultView.MaximumRowsParameterName; }
			set { DefaultView.MaximumRowsParameterName = value; }
		}

		[WebCategoryAttribute ("Data")]
		[DefaultValueAttribute ("original_{0}")]
		public string OldValuesParameterFormatString {
			get { return DefaultView.OldValuesParameterFormatString; }
			set { DefaultView.OldValuesParameterFormatString = value; }
		}

		[WebCategoryAttribute ("Paging")]
		[DefaultValueAttribute ("")]
		public virtual string SelectCountMethod {
			get { return DefaultView.SelectCountMethod; }
			set { DefaultView.SelectCountMethod = value; }
		}

		[DefaultValueAttribute ("")]
		[WebCategoryAttribute ("Data")]
		public virtual string SelectMethod {
			get { return DefaultView.SelectMethod; }
			set { DefaultView.SelectMethod = value; }
		}

		[WebCategoryAttribute ("Data")]
		[MergablePropertyAttribute (false)]
		[EditorAttribute ("System.Web.UI.Design.WebControls.ParameterCollectionEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
		[DefaultValueAttribute (null)]
		[PersistenceModeAttribute (PersistenceMode.InnerProperty)]
		public ParameterCollection SelectParameters {
			get { return DefaultView.SelectParameters; }
		}

		[DefaultValueAttribute ("")]
		[WebCategoryAttribute ("Data")]
		public string SortParameterName {
			get { return DefaultView.SortParameterName; }
			set { DefaultView.SortParameterName = value; }
		}

		[WebCategoryAttribute ("Paging")]
		[DefaultValueAttribute ("startRowIndex")]
		public string StartRowIndexParameterName {
			get { return DefaultView.StartRowIndexParameterName; }
			set { DefaultView.StartRowIndexParameterName = value; }
		}

		[DefaultValueAttribute ("")]
		[WebCategoryAttribute ("Data")]
		public virtual string TypeName {
			get { return DefaultView.TypeName; }
			set { DefaultView.TypeName = value; }
		}

		[DefaultValueAttribute ("")]
		[WebCategoryAttribute ("Data")]
		public virtual string UpdateMethod {
			get { return DefaultView.UpdateMethod; }
			set { DefaultView.UpdateMethod = value; }
		}

		[WebCategoryAttribute ("Data")]
		[MergablePropertyAttribute (false)]
		[EditorAttribute ("System.Web.UI.Design.WebControls.ParameterCollectionEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
		[DefaultValueAttribute (null)]
		[PersistenceModeAttribute (PersistenceMode.InnerProperty)]
		public ParameterCollection UpdateParameters {
			get { return DefaultView.UpdateParameters; }
		}

		/// <summary>
		/// Returns the default view regardless of the requested name.
		/// </summary>
		protected override DataSourceView GetView (string viewName)
		{
			return DefaultView;
		}

		/// <summary>
		/// The control only ever exposes the single "DefaultView".
		/// </summary>
		protected override ICollection GetViewNames ()
		{
			return new string [] { "DefaultView" };
		}

		/// <summary>
		/// Executes the Select operation with empty select arguments.
		/// </summary>
		public IEnumerable Select ()
		{
			return DefaultView.Select (DataSourceSelectArguments.Empty);
		}

		/// <summary>
		/// Executes the Update operation with no explicit values or keys.
		/// </summary>
		public int Update ()
		{
			Hashtable empty = new Hashtable ();
			return DefaultView.Update (empty, empty, null);
		}

		/// <summary>
		/// Executes the Delete operation with no explicit keys.
		/// </summary>
		public int Delete ()
		{
			Hashtable empty = new Hashtable ();
			return DefaultView.Delete (empty, null);
		}

		/// <summary>
		/// Executes the Insert operation with no explicit values.
		/// </summary>
		public int Insert ()
		{
			Hashtable empty = new Hashtable ();
			return DefaultView.Insert (empty);
		}

		protected override void OnInit (EventArgs e)
		{
			// FIX: the previous implementation skipped base.OnInit, which meant
			// Control.Init was never raised and Init subscribers never ran.
			base.OnInit (e);
			// Parameter values can depend on controls that only exist once the
			// whole page has loaded, so defer evaluation to LoadComplete.
			Page.LoadComplete += OnPageLoadComplete;
		}

		// Re-evaluates every parameter collection against the current HTTP
		// context once the page has finished loading.
		void OnPageLoadComplete (object sender, EventArgs e)
		{
			DeleteParameters.UpdateValues (Context, this);
			FilterParameters.UpdateValues (Context, this);
			InsertParameters.UpdateValues (Context, this);
			SelectParameters.UpdateValues (Context, this);
			UpdateParameters.UpdateValues (Context, this);
		}

		// View state is a Pair: First = base control state, Second = view state.
		protected override void LoadViewState (object savedState)
		{
			if (savedState == null) {
				base.LoadViewState (null);
				((IStateManager)DefaultView).LoadViewState (null);
			} else {
				Pair p = (Pair) savedState;
				base.LoadViewState (p.First);
				((IStateManager)DefaultView).LoadViewState (p.Second);
			}
		}

		protected override object SaveViewState()
		{
			object baseState = base.SaveViewState ();
			object viewState = ((IStateManager)DefaultView).SaveViewState ();
			if (baseState != null || viewState != null)
				return new Pair (baseState, viewState);
			else
				return null;
		}

		protected override void TrackViewState()
		{
			((IStateManager)DefaultView).TrackViewState ();
		}
	}
}
#endif
using XenAdmin.Commands;
using XenAPI;

namespace XenAdmin.TabPages
{
    // Designer-generated half of the SrStoragePage partial class: builds the
    // VDI grid, its context menu and the action-button row for a storage
    // repository tab page.
    partial class SrStoragePage
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        // NOTE(review): no Dispose(bool) override appears in this part of the
        // partial class; presumably the other part disposes `components` —
        // verify, otherwise the container (and contextMenuStrip1) leaks.

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(SrStoragePage));
            this.contextMenuStrip1 = new System.Windows.Forms.ContextMenuStrip(this.components);
            this.rescanToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            this.addToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            this.moveVirtualDiskToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            this.deleteVirtualDiskToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator();
            this.editVirtualDiskToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            this.TitleLabel = new System.Windows.Forms.Label();
            this.RemoveButtonContainer = new XenAdmin.Controls.ToolTipContainer();
            this.RemoveButton = new System.Windows.Forms.Button();
            this.EditButtonContainer = new XenAdmin.Controls.ToolTipContainer();
            this.EditButton = new System.Windows.Forms.Button();
            this.addVirtualDiskButton = new System.Windows.Forms.Button();
            this.label1 = new System.Windows.Forms.Label();
            this.flowLayoutPanel1 = new System.Windows.Forms.FlowLayoutPanel();
            this.toolTipContainerRescan = new XenAdmin.Controls.ToolTipContainer();
            this.buttonRescan = new System.Windows.Forms.Button();
            this.toolTipContainerMove = new XenAdmin.Controls.ToolTipContainer();
            this.buttonMove = new System.Windows.Forms.Button();
            this.dataGridViewVDIs = new XenAdmin.Controls.DataGridViewEx.DataGridViewEx();
            this.ColumnName = new System.Windows.Forms.DataGridViewTextBoxColumn();
            this.ColumnVolume = new System.Windows.Forms.DataGridViewTextBoxColumn();
            this.ColumnDesc = new System.Windows.Forms.DataGridViewTextBoxColumn();
            this.ColumnSize = new System.Windows.Forms.DataGridViewTextBoxColumn();
            this.ColumnVM = new System.Windows.Forms.DataGridViewTextBoxColumn();
            this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
            this.pageContainerPanel.SuspendLayout();
            this.contextMenuStrip1.SuspendLayout();
            this.RemoveButtonContainer.SuspendLayout();
            this.EditButtonContainer.SuspendLayout();
            this.flowLayoutPanel1.SuspendLayout();
            this.toolTipContainerRescan.SuspendLayout();
            this.toolTipContainerMove.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.dataGridViewVDIs)).BeginInit();
            this.tableLayoutPanel1.SuspendLayout();
            this.SuspendLayout();
            // 
            // pageContainerPanel
            // 
            this.pageContainerPanel.Controls.Add(this.tableLayoutPanel1);
            resources.ApplyResources(this.pageContainerPanel, "pageContainerPanel");
            // 
            // contextMenuStrip1
            // 
            this.contextMenuStrip1.ImageScalingSize = new System.Drawing.Size(20, 20);
            this.contextMenuStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
            this.rescanToolStripMenuItem,
            this.addToolStripMenuItem,
            this.moveVirtualDiskToolStripMenuItem,
            this.deleteVirtualDiskToolStripMenuItem,
            this.toolStripSeparator1,
            this.editVirtualDiskToolStripMenuItem});
            this.contextMenuStrip1.Name = "contextMenuStrip1";
            resources.ApplyResources(this.contextMenuStrip1, "contextMenuStrip1");
            this.contextMenuStrip1.Opening += new System.ComponentModel.CancelEventHandler(this.contextMenuStrip_Opening);
            // 
            // rescanToolStripMenuItem
            // 
            this.rescanToolStripMenuItem.Name = "rescanToolStripMenuItem";
            resources.ApplyResources(this.rescanToolStripMenuItem, "rescanToolStripMenuItem");
            this.rescanToolStripMenuItem.Click += new System.EventHandler(this.rescanToolStripMenuItem_Click);
            // 
            // addToolStripMenuItem
            // 
            this.addToolStripMenuItem.Name = "addToolStripMenuItem";
            resources.ApplyResources(this.addToolStripMenuItem, "addToolStripMenuItem");
            this.addToolStripMenuItem.Click += new System.EventHandler(this.addToolStripMenuItem_Click);
            // 
            // moveVirtualDiskToolStripMenuItem
            // 
            this.moveVirtualDiskToolStripMenuItem.Name = "moveVirtualDiskToolStripMenuItem";
            resources.ApplyResources(this.moveVirtualDiskToolStripMenuItem, "moveVirtualDiskToolStripMenuItem");
            this.moveVirtualDiskToolStripMenuItem.Click += new System.EventHandler(this.moveVirtualDiskToolStripMenuItem_Click);
            // 
            // deleteVirtualDiskToolStripMenuItem
            // 
            this.deleteVirtualDiskToolStripMenuItem.Name = "deleteVirtualDiskToolStripMenuItem";
            resources.ApplyResources(this.deleteVirtualDiskToolStripMenuItem, "deleteVirtualDiskToolStripMenuItem");
            this.deleteVirtualDiskToolStripMenuItem.Click += new System.EventHandler(this.deleteVirtualDiskToolStripMenuItem_Click);
            // 
            // toolStripSeparator1
            // 
            this.toolStripSeparator1.Name = "toolStripSeparator1";
            resources.ApplyResources(this.toolStripSeparator1, "toolStripSeparator1");
            // 
            // editVirtualDiskToolStripMenuItem
            // 
            this.editVirtualDiskToolStripMenuItem.Image = global::XenAdmin.Properties.Resources.edit_16;
            resources.ApplyResources(this.editVirtualDiskToolStripMenuItem, "editVirtualDiskToolStripMenuItem");
            this.editVirtualDiskToolStripMenuItem.Name = "editVirtualDiskToolStripMenuItem";
            this.editVirtualDiskToolStripMenuItem.Click += new System.EventHandler(this.editVirtualDiskToolStripMenuItem_Click);
            // 
            // TitleLabel
            // 
            resources.ApplyResources(this.TitleLabel, "TitleLabel");
            this.TitleLabel.ForeColor = System.Drawing.Color.White;
            this.TitleLabel.Name = "TitleLabel";
            // 
            // RemoveButtonContainer
            // 
            this.RemoveButtonContainer.Controls.Add(this.RemoveButton);
            resources.ApplyResources(this.RemoveButtonContainer, "RemoveButtonContainer");
            this.RemoveButtonContainer.Name = "RemoveButtonContainer";
            // 
            // RemoveButton
            // 
            resources.ApplyResources(this.RemoveButton, "RemoveButton");
            this.RemoveButton.Name = "RemoveButton";
            this.RemoveButton.UseVisualStyleBackColor = true;
            this.RemoveButton.Click += new System.EventHandler(this.RemoveButton_Click);
            // 
            // EditButtonContainer
            // 
            resources.ApplyResources(this.EditButtonContainer, "EditButtonContainer");
            this.EditButtonContainer.Controls.Add(this.EditButton);
            this.EditButtonContainer.Name = "EditButtonContainer";
            // 
            // EditButton
            // 
            resources.ApplyResources(this.EditButton, "EditButton");
            this.EditButton.Name = "EditButton";
            this.EditButton.UseVisualStyleBackColor = true;
            this.EditButton.Click += new System.EventHandler(this.EditButton_Click);
            // 
            // addVirtualDiskButton
            // 
            resources.ApplyResources(this.addVirtualDiskButton, "addVirtualDiskButton");
            this.addVirtualDiskButton.Name = "addVirtualDiskButton";
            this.addVirtualDiskButton.UseVisualStyleBackColor = true;
            this.addVirtualDiskButton.Click += new System.EventHandler(this.addVirtualDiskButton_Click);
            // 
            // label1
            // 
            resources.ApplyResources(this.label1, "label1");
            this.label1.Name = "label1";
            // 
            // flowLayoutPanel1
            // 
            this.flowLayoutPanel1.Controls.Add(this.toolTipContainerRescan);
            this.flowLayoutPanel1.Controls.Add(this.addVirtualDiskButton);
            this.flowLayoutPanel1.Controls.Add(this.EditButtonContainer);
            this.flowLayoutPanel1.Controls.Add(this.toolTipContainerMove);
            this.flowLayoutPanel1.Controls.Add(this.RemoveButtonContainer);
            resources.ApplyResources(this.flowLayoutPanel1, "flowLayoutPanel1");
            this.flowLayoutPanel1.Name = "flowLayoutPanel1";
            // 
            // toolTipContainerRescan
            // 
            this.toolTipContainerRescan.Controls.Add(this.buttonRescan);
            resources.ApplyResources(this.toolTipContainerRescan, "toolTipContainerRescan");
            this.toolTipContainerRescan.Name = "toolTipContainerRescan";
            // 
            // buttonRescan
            // 
            resources.ApplyResources(this.buttonRescan, "buttonRescan");
            this.buttonRescan.Name = "buttonRescan";
            this.buttonRescan.UseVisualStyleBackColor = true;
            this.buttonRescan.Click += new System.EventHandler(this.buttonRescan_Click);
            // 
            // toolTipContainerMove
            // 
            this.toolTipContainerMove.Controls.Add(this.buttonMove);
            resources.ApplyResources(this.toolTipContainerMove, "toolTipContainerMove");
            this.toolTipContainerMove.Name = "toolTipContainerMove";
            // 
            // buttonMove
            // 
            resources.ApplyResources(this.buttonMove, "buttonMove");
            this.buttonMove.Name = "buttonMove";
            this.buttonMove.UseVisualStyleBackColor = true;
            this.buttonMove.Click += new System.EventHandler(this.buttonMove_Click);
            // 
            // dataGridViewVDIs
            // 
            this.dataGridViewVDIs.AutoSizeColumnsMode = System.Windows.Forms.DataGridViewAutoSizeColumnsMode.AllCells;
            this.dataGridViewVDIs.BackgroundColor = System.Drawing.SystemColors.Window;
            this.dataGridViewVDIs.CellBorderStyle = System.Windows.Forms.DataGridViewCellBorderStyle.None;
            this.dataGridViewVDIs.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
            this.dataGridViewVDIs.Columns.AddRange(new System.Windows.Forms.DataGridViewColumn[] {
            this.ColumnName,
            this.ColumnVolume,
            this.ColumnDesc,
            this.ColumnSize,
            this.ColumnVM});
            resources.ApplyResources(this.dataGridViewVDIs, "dataGridViewVDIs");
            this.dataGridViewVDIs.MultiSelect = true;
            this.dataGridViewVDIs.Name = "dataGridViewVDIs";
            this.dataGridViewVDIs.ReadOnly = true;
            this.dataGridViewVDIs.SelectionChanged += new System.EventHandler(this.dataGridViewVDIs_SelectedIndexChanged);
            this.dataGridViewVDIs.SortCompare += new System.Windows.Forms.DataGridViewSortCompareEventHandler(this.DataGridViewObject_SortCompare);
            this.dataGridViewVDIs.KeyUp += new System.Windows.Forms.KeyEventHandler(this.dataGridViewVDIs_KeyUp);
            this.dataGridViewVDIs.MouseUp += new System.Windows.Forms.MouseEventHandler(this.dataGridViewVDIs_MouseUp);
            // 
            // ColumnName
            // 
            this.ColumnName.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.Fill;
            resources.ApplyResources(this.ColumnName, "ColumnName");
            this.ColumnName.Name = "ColumnName";
            this.ColumnName.ReadOnly = true;
            // 
            // ColumnVolume
            // 
            this.ColumnVolume.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.Fill;
            resources.ApplyResources(this.ColumnVolume, "ColumnVolume");
            this.ColumnVolume.Name = "ColumnVolume";
            this.ColumnVolume.ReadOnly = true;
            // 
            // ColumnDesc
            // 
            this.ColumnDesc.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.Fill;
            resources.ApplyResources(this.ColumnDesc, "ColumnDesc");
            this.ColumnDesc.Name = "ColumnDesc";
            this.ColumnDesc.ReadOnly = true;
            // 
            // ColumnSize
            // 
            this.ColumnSize.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.AllCells;
            resources.ApplyResources(this.ColumnSize, "ColumnSize");
            this.ColumnSize.Name = "ColumnSize";
            this.ColumnSize.ReadOnly = true;
            // 
            // ColumnVM
            // 
            this.ColumnVM.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.Fill;
            resources.ApplyResources(this.ColumnVM, "ColumnVM");
            this.ColumnVM.Name = "ColumnVM";
            this.ColumnVM.ReadOnly = true;
            // 
            // tableLayoutPanel1
            // 
            resources.ApplyResources(this.tableLayoutPanel1, "tableLayoutPanel1");
            this.tableLayoutPanel1.Controls.Add(this.flowLayoutPanel1, 0, 2);
            this.tableLayoutPanel1.Controls.Add(this.dataGridViewVDIs, 0, 1);
            this.tableLayoutPanel1.Controls.Add(this.label1, 0, 0);
            this.tableLayoutPanel1.Name = "tableLayoutPanel1";
            // 
            // SrStoragePage
            // 
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
            this.BackColor = System.Drawing.Color.Transparent;
            this.DoubleBuffered = true;
            this.Name = "SrStoragePage";
            this.Controls.SetChildIndex(this.pageContainerPanel, 0);
            this.pageContainerPanel.ResumeLayout(false);
            this.contextMenuStrip1.ResumeLayout(false);
            this.RemoveButtonContainer.ResumeLayout(false);
            this.EditButtonContainer.ResumeLayout(false);
            this.flowLayoutPanel1.ResumeLayout(false);
            this.toolTipContainerRescan.ResumeLayout(false);
            this.toolTipContainerMove.ResumeLayout(false);
            ((System.ComponentModel.ISupportInitialize)(this.dataGridViewVDIs)).EndInit();
            this.tableLayoutPanel1.ResumeLayout(false);
            this.tableLayoutPanel1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Designer-managed control fields.
        private System.Windows.Forms.ContextMenuStrip contextMenuStrip1;
        private System.Windows.Forms.ToolStripMenuItem editVirtualDiskToolStripMenuItem;
        private System.Windows.Forms.ToolStripMenuItem deleteVirtualDiskToolStripMenuItem;
        private System.Windows.Forms.Button addVirtualDiskButton;
        private System.Windows.Forms.Label TitleLabel;
        private XenAdmin.Controls.ToolTipContainer EditButtonContainer;
        private System.Windows.Forms.Button EditButton;
        private XenAdmin.Controls.ToolTipContainer RemoveButtonContainer;
        private System.Windows.Forms.Button RemoveButton;
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel1;
        private System.Windows.Forms.Button buttonRescan;
        private XenAdmin.Controls.ToolTipContainer toolTipContainerRescan;
        private XenAdmin.Controls.ToolTipContainer toolTipContainerMove;
        private System.Windows.Forms.Button buttonMove;
        private System.Windows.Forms.ToolStripMenuItem moveVirtualDiskToolStripMenuItem;
        private System.Windows.Forms.ToolStripSeparator toolStripSeparator1;
        private XenAdmin.Controls.DataGridViewEx.DataGridViewEx dataGridViewVDIs;
        private System.Windows.Forms.DataGridViewTextBoxColumn ColumnName;
        private System.Windows.Forms.DataGridViewTextBoxColumn ColumnVolume;
        private System.Windows.Forms.DataGridViewTextBoxColumn ColumnDesc;
        private System.Windows.Forms.DataGridViewTextBoxColumn ColumnSize;
        private System.Windows.Forms.DataGridViewTextBoxColumn ColumnVM;
        private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
        private System.Windows.Forms.ToolStripMenuItem rescanToolStripMenuItem;
        private System.Windows.Forms.ToolStripMenuItem addToolStripMenuItem;
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Runtime.CompilerServices;
using System.IO;
using System.Collections;
using System.Globalization;
using System.Text;
using System.Threading;
using Xunit;

// Legacy-style test for FileInfo.Open(FileMode, FileAccess, FileShare):
// exercises FileShare.None/Read/Write/ReadWrite/Delete semantics. The
// "Error_xxxxx!" codes identify individual assertion sites in the output.
public class FileInfo_Open_fm_fa_fs
{
    public static String s_strDtTmVer = "2000/05/08 12:18";
    public static String s_strClassMethod = "File.OpenText(String)";
    public static String s_strTFName = "Open_fm_fa_fs.cs";
    public static String s_strTFPath = "FileInfo";

    private delegate void ExceptionCode();
    // Set to false by Eval/CheckException on any failure in the Delete scenario.
    private static bool s_pass = true;

    [Fact]
    public static void runTest()
    {
        String strLoc = "Loc_000oo";
        String strValue = String.Empty;
        int iCountErrors = 0;
        int iCountTestcases = 0;
        try
        {
            ///////////////////////// START TESTS ////////////////////////////
            ///////////////////////////////////////////////////////////////////

            String filName = Path.Combine(TestInfo.CurrentDirectory, Path.GetRandomFileName());
            FileInfo fil2;
            Stream fs2, fs3;
            if (File.Exists(filName))
                File.Delete(filName);

            // [] FileSharing.None -- Should not be able to access the file
            //-----------------------------------------------------------------
            strLoc = "Loc_2498V";

            fil2 = new FileInfo(filName);
            fs2 = fil2.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
            iCountTestcases++;
            try
            {
                // A second open must fail while the file is held with FileShare.None.
                fs3 = fil2.Open(FileMode.Open);
                iCountErrors++;
                printerr("Error_209uv! Shouldn't be able to open an open file");
                fs3.Dispose();
#if TEST_WINRT
            }
            catch (UnauthorizedAccessException)
            {
#endif
            }
            catch (IOException)
            {
            }
            catch (Exception exc)
            {
                iCountErrors++;
                printerr("Error_287gv! Incorrect exception thrown, exc==" + exc.ToString());
            }
            fs2.Dispose();
            fil2.Delete();
            //-----------------------------------------------------------------

            // [] FileSharing.Read
            //-----------------------------------------------------------------
            strLoc = "Loc_f5498";

            fil2 = new FileInfo(filName);
            fs2 = fil2.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.Read);
            fs2.Write(new Byte[] { 10 }, 0, 1);
            fs2.Flush();
            fs2.Dispose();
            // FileShare is not supported by WINRT, sharing is controlled by access.
            // It allows concurrent readers, and write after read, but not vice-versa.
            // Reopen file as only Read to test concurrent read behavior.
            fs2 = fil2.Open(FileMode.Open, FileAccess.Read, FileShare.Read);
            iCountTestcases++;
            fs3 = fil2.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            fs3.Read(new Byte[1], 0, 1);
            try
            {
                // Writing through a read-only stream must throw.
                fs3.Write(new Byte[] { 10 }, 0, 1);
                iCountErrors++;
                printerr("Error_958vc! Expected exception not thrown");
            }
            catch (NotSupportedException)
            {
            }
            catch (Exception exc)
            {
                iCountErrors++;
                printerr("Error_20939! Incorrect exception thrown, exc==" + exc.ToString());
            }
            fs2.Dispose();
            fs3.Dispose();
            fil2.Delete();
            //-----------------------------------------------------------------

            // [] FileSharing.Write
            //-----------------------------------------------------------------
            strLoc = "Loc_2498x";

            fil2 = new FileInfo(filName);
            fs2 = fil2.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.Write);
            iCountTestcases++;
            try
            {
                // Concurrent writers are allowed on the desktop sharing model.
                fs3 = fil2.Open(FileMode.Open, FileAccess.Write, FileShare.ReadWrite);
                fs3.Write(new Byte[] { 1, 2 }, 0, 2);
#if TEST_WINRT
                // WinRT's sharing model does not support concurrent write
                printerr( "Error_209uv! Shouldn't be able to open concurrent writers");
            }
            catch (UnauthorizedAccessException)
            {
#endif
            }
            catch (Exception exc)
            {
                iCountErrors++;
                printerr("Error_2980x! Unexpected exception thrown, exc==" + exc.ToString());
            }
            fs2.Dispose();
            fs3.Dispose();
            fil2.Delete();
            //-----------------------------------------------------------------

            // [] FileSharing.ReadWrite
            //-----------------------------------------------------------------
            strLoc = "Loc_4897y";

            fil2 = new FileInfo(filName);
            fs2 = fil2.Open(FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite);
            iCountTestcases++;
            try
            {
                fs3 = fil2.Open(FileMode.Open, FileAccess.Write, FileShare.ReadWrite);
#if TEST_WINRT
                // WinRT's sharing model does not support concurrent write
                printerr( "Error_209uv! Shouldn't be able to open concurrent writers");
#endif
                // Interleave writes on fs2 with re-opening fs3 in every access mode.
                fs2.Write(new Byte[] { 1 }, 0, 1);
                fs3.Dispose();
                fs3 = fil2.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                fs2.Write(new Byte[] { 2 }, 0, 1);
                fs3.Dispose();
                fs3 = fil2.Open(FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
                fs2.Write(new Byte[] { 3 }, 0, 1);
                fs3.Dispose();
#if TEST_WINRT
            }
            catch (UnauthorizedAccessException)
            {
#endif
            }
            catch (Exception exc)
            {
                iCountErrors++;
                printerr("Error_287g9! Unexpected exception thrown, exc==" + exc.ToString());
            }
            fs2.Dispose();
            fs3.Dispose();
            fil2.Delete();
            //-----------------------------------------------------------------

            const String sourceString = "This is the source File";
            //First we look at the drives of this machine to be sure that we can proceed
            String sourceFileName = Path.Combine(TestInfo.CurrentDirectory, Path.GetRandomFileName());

            //Scenario 1: Vanilla - create a filestream with this flag in FileShare parameter and then delete the file (using File.Delete or FileInfo.Delete) before closing the
            //FileStream. Ensure that the delete operation succeeds but the file is not deleted till the FileStream is closed
            try
            {
                Byte[] outbits = Encoding.Unicode.GetBytes(sourceString);
                FileInfo file = new FileInfo(sourceFileName);
                FileStream stream = file.Open(FileMode.Create, FileAccess.Write, FileShare.Delete);
                stream.Write(outbits, 0, outbits.Length);

                //This should succeed
                File.Delete(sourceFileName);

                if (Interop.IsWindows) // Unix allows files to be deleted while in-use
                {
                    //But we should still be able to call the file
                    Eval(File.Exists(sourceFileName), "Err_3947sg! File doesn't exists");
                }

                stream.Write(outbits, 0, outbits.Length);
                stream.Dispose();

                //Now it shouldn't exist - is there any OS delay
                Eval(!File.Exists(sourceFileName), "Err_2397g! File doesn't exists");
            }
            catch (Exception ex)
            {
                s_pass = false;
                Console.WriteLine("Err_349t7g! Exception caught in scenario: {0}", ex);
            }
            if (!s_pass)
                iCountErrors++;

            ///////////////////////////////////////////////////////////////////
            /////////////////////////// END TESTS /////////////////////////////

            if (File.Exists(filName))
                File.Delete(filName);
        }
        catch (Exception exc_general)
        {
            ++iCountErrors;
            Console.WriteLine("Error Err_8888yyy! strLoc==" + strLoc + ", exc_general==" + exc_general.ToString());
        }

        //// Finish Diagnostics
        if (iCountErrors != 0)
        {
            Console.WriteLine("FAiL! " + s_strTFName + " ,iCountErrors==" + iCountErrors.ToString());
        }

        Assert.Equal(0, iCountErrors);
    }

    //Checks for error
    private static void Eval(bool expression, String msg, params Object[] values)
    {
        Eval(expression, String.Format(msg, values));
    }

    private static void Eval(bool expression, String msg)
    {
        if (!expression)
        {
            s_pass = false;
            Console.WriteLine(msg);
        }
    }

    //Checks for a particular type of exception
    private static void CheckException<E>(ExceptionCode test, string error)
    {
        CheckException<E>(test, error, null);
    }

    //Checks for a particular type of exception and an Exception msg in the English locale
    private static void CheckException<E>(ExceptionCode test, string error, String msgExpected)
    {
        bool exception = false;
        try
        {
            test();
            error = String.Format("{0} Exception NOT thrown ", error);
        }
        catch (Exception e)
        {
            if (e.GetType() == typeof(E))
            {
                exception = true;
                if (msgExpected != null && System.Globalization.CultureInfo.CurrentUICulture.Name == "en-US" && e.Message != msgExpected)
                {
                    exception = false;
                    error = String.Format("{0} Message Different: <{1}>", error, e.Message);
                }
            }
            else
                error = String.Format("{0} Exception type: {1}", error, e.GetType().Name);
        }
        Eval(exception, error);
    }

    // Logs an error with caller information to aid locating the failing site.
    public static void printerr(String err, [CallerMemberName] string memberName = "", [CallerFilePath] string filePath = "", [CallerLineNumber] int lineNumber = 0)
    {
        Console.WriteLine("ERROR: ({0}, {1}, {2}) {3}", memberName, filePath, lineNumber, err);
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// This RegexRunner class is a base class for compiled regex code.

// Implementation notes:
// It provides the driver code that calls the subclass's Go()
// method for either scanning or direct execution.
//
// It also maintains memory allocation for the backtracking stack,
// the grouping stack and the longjump crawlstack, and provides
// methods to push new subpattern match results into (or remove
// backtracked results from) the Match instance.

using System.Diagnostics;
using System.Globalization;

namespace System.Text.RegularExpressions
{
    abstract internal class RegexRunner
    {
        protected int _runtextbeg;          // beginning of text to search
        protected int _runtextend;          // end of text to search
        protected int _runtextstart;        // starting point for search

        protected String _runtext;          // text to search
        protected int _runtextpos;          // current position in text

        protected int[] _runtrack;          // The backtracking stack.  Opcodes use this to store data regarding
        protected int _runtrackpos;         // what they have matched and where to backtrack to.  Each "frame" on
                                            // the stack takes the form of [CodePosition Data1 Data2...], where
                                            // CodePosition is the position of the current opcode and
                                            // the data values are all optional.  The CodePosition can be negative, and
                                            // these values (also called "back2") are used by the BranchMark family of opcodes
                                            // to indicate whether they are backtracking after a successful or failed
                                            // match.
                                            // When we backtrack, we pop the CodePosition off the stack, set the current
                                            // instruction pointer to that code position, and mark the opcode
                                            // with a backtracking flag ("Back").  Each opcode then knows how to
                                            // handle its own data.

        protected int[] _runstack;          // This stack is used to track text positions across different opcodes.
        protected int _runstackpos;         // For example, in /(a*b)+/, the parentheses result in a SetMark/CaptureMark
                                            // pair. SetMark records the text position before we match a*b.  Then
                                            // CaptureMark uses that position to figure out where the capture starts.
                                            // Opcodes which push onto this stack are always paired with other opcodes
                                            // which will pop the value from it later.  A successful match should mean
                                            // that this stack is empty.

        protected int[] _runcrawl;          // The crawl stack is used to keep track of captures.  Every time a group
        protected int _runcrawlpos;         // has a capture, we push its group number onto the runcrawl stack.  In
                                            // the case of a balanced match, we push BOTH groups onto the stack.

        protected int _runtrackcount;       // count of states that may do backtracking

        protected Match _runmatch;          // result object
        protected Regex _runregex;          // regex object

        private Int32 _timeout;             // timeout in milliseconds (needed for actual)
        private bool _ignoreTimeout;
        private Int32 _timeoutOccursAt;

        // We have determined this value in a series of experiments where x86 retail
        // builds (ono-lab-optimized) were run on different pattern/input pairs. Larger values
        // of TimeoutCheckFrequency did not tend to increase performance; smaller values
        // of TimeoutCheckFrequency tended to slow down the execution.
        private const int TimeoutCheckFrequency = 1000;
        private int _timeoutChecksToSkip;

        protected internal RegexRunner() { }

        /// <summary>
        /// Scans the string to find the first match. Uses the Match object
        /// both to feed text in and as a place to store matches that come out.
        ///
        /// All the action is in the abstract Go() method defined by subclasses. Our
        /// responsibility is to load up the class members (as done here) before
        /// calling Go.
        ///
        /// The optimizer can compute a set of candidate starting characters,
        /// and we could use a separate method Skip() that will quickly scan past
        /// any characters that we know can't match.
        /// </summary>
        protected internal Match Scan(Regex regex, String text, int textbeg, int textend, int textstart, int prevlen, bool quick)
        {
            // Delegates to the timeout-aware overload using the timeout configured on the Regex instance.
            return Scan(regex, text, textbeg, textend, textstart, prevlen, quick, regex.MatchTimeout);
        }

        /// <summary>
        /// Timeout-aware driver loop: repeatedly advances to the next candidate position
        /// (FindFirstChar) and runs the engine (Go) until a match is found or the search
        /// range is exhausted.
        /// </summary>
        internal Match Scan(Regex regex, String text, int textbeg, int textend, int textstart, int prevlen, bool quick, TimeSpan timeout)
        {
            int bump;
            int stoppos;
            bool initted = false;

            // We need to re-validate timeout here because Scan is historically protected and
            // thus there is a possibility it is called from user code:
            Regex.ValidateMatchTimeout(timeout);

            _ignoreTimeout = (Regex.InfiniteMatchTimeout == timeout);
            _timeout = _ignoreTimeout
                                    ? (Int32)Regex.InfiniteMatchTimeout.TotalMilliseconds
                                    : (Int32)(timeout.TotalMilliseconds + 0.5); // Round

            _runregex = regex;
            _runtext = text;
            _runtextbeg = textbeg;
            _runtextend = textend;
            _runtextstart = textstart;

            // Direction of travel and the position at which scanning must stop
            // depend on whether this is a right-to-left regex.
            bump = _runregex.RightToLeft ? -1 : 1;
            stoppos = _runregex.RightToLeft ? _runtextbeg : _runtextend;

            _runtextpos = textstart;

            // If previous match was empty or failed, advance by one before matching
            if (prevlen == 0)
            {
                if (_runtextpos == stoppos)
                    return Match.Empty;

                _runtextpos += bump;
            }

            StartTimeoutWatch();

            for (; ;)
            {
#if DEBUG
                if (_runregex.Debug)
                {
                    Debug.WriteLine("");
                    Debug.WriteLine("Search range: from " + _runtextbeg.ToString(CultureInfo.InvariantCulture) + " to " + _runtextend.ToString(CultureInfo.InvariantCulture));
                    Debug.WriteLine("Firstchar search starting at " + _runtextpos.ToString(CultureInfo.InvariantCulture) + " stopping at " + stoppos.ToString(CultureInfo.InvariantCulture));
                }
#endif
                if (FindFirstChar())
                {
                    CheckTimeout();

                    // Lazily allocate the track/stack/crawl arrays on the first candidate position.
                    if (!initted)
                    {
                        InitMatch();
                        initted = true;
                    }
#if DEBUG
                    if (_runregex.Debug)
                    {
                        Debug.WriteLine("Executing engine starting at " + _runtextpos.ToString(CultureInfo.InvariantCulture));
                        Debug.WriteLine("");
                    }
#endif
                    Go();

                    if (_runmatch._matchcount[0] > 0)
                    {
                        // We'll return a match even if it touches a previous empty match
                        return TidyMatch(quick);
                    }

                    // reset state for another go: empty all three stacks (they grow downward,
                    // so "empty" means position == length)
                    _runtrackpos = _runtrack.Length;
                    _runstackpos = _runstack.Length;
                    _runcrawlpos = _runcrawl.Length;
                }

                // failure!

                if (_runtextpos == stoppos)
                {
                    TidyMatch(true);
                    return Match.Empty;
                }

                // Recognize leading []* and various anchors, and bump on failure accordingly

                // Bump by one and start again

                _runtextpos += bump;
            }
            // We never get here
        }

        /// <summary>
        /// Arms the timeout bookkeeping before a scan begins. No-op when the
        /// timeout is infinite.
        /// </summary>
        private void StartTimeoutWatch()
        {
            if (_ignoreTimeout)
                return;

            _timeoutChecksToSkip = TimeoutCheckFrequency;

            // We are using Environment.TickCount and not Stopwatch for performance reasons.
            // Environment.TickCount is an int that cycles. We intentionally let timeoutOccursAt
            // overflow; it will still stay ahead of Environment.TickCount for comparisons made
            // in DoCheckTimeout():
            unchecked
            {
                _timeoutOccursAt = Environment.TickCount + _timeout;
            }
        }

        /// <summary>
        /// Cheap timeout probe called from hot paths; only every
        /// TimeoutCheckFrequency-th call performs the real tick comparison.
        /// </summary>
        internal void CheckTimeout()
        {
            if (_ignoreTimeout)
                return;

            if (--_timeoutChecksToSkip != 0)
                return;

            _timeoutChecksToSkip = TimeoutCheckFrequency;
            DoCheckTimeout();
        }

        /// <summary>
        /// Performs the actual tick-count comparison and throws
        /// <see cref="RegexMatchTimeoutException"/> when the deadline has passed.
        /// </summary>
        private void DoCheckTimeout()
        {
            // Note that both Environment.TickCount and timeoutOccursAt are ints and can overflow and become negative.
            // See the comment in StartTimeoutWatch().
            int currentMillis = Environment.TickCount;

            if (currentMillis < _timeoutOccursAt)
                return;

            // The deadline wrapped past int.MaxValue but the clock has not wrapped yet:
            // not a timeout.
            if (0 > _timeoutOccursAt && 0 < currentMillis)
                return;
#if DEBUG
            if (_runregex.Debug)
            {
                Debug.WriteLine("");
                Debug.WriteLine("RegEx match timeout occurred!");
                Debug.WriteLine("Specified timeout:       " + TimeSpan.FromMilliseconds(_timeout).ToString());
                Debug.WriteLine("Timeout check frequency: " + TimeoutCheckFrequency);
                Debug.WriteLine("Search pattern:          " + _runregex._pattern);
                Debug.WriteLine("Input:                   " + _runtext);
                Debug.WriteLine("About to throw RegexMatchTimeoutException.");
            }
#endif
            throw new RegexMatchTimeoutException(_runtext, _runregex._pattern, TimeSpan.FromMilliseconds(_timeout));
        }

        /// <summary>
        /// The responsibility of Go() is to run the regular expression at
        /// runtextpos and call Capture() on all the captured subexpressions,
        /// then to leave runtextpos at the ending position. It should leave
        /// runtextpos where it started if there was no match.
        /// </summary>
        protected abstract void Go();

        /// <summary>
        /// The responsibility of FindFirstChar() is to advance runtextpos
        /// until it is at the next position which is a candidate for the
        /// beginning of a successful match.
        /// </summary>
        protected abstract bool FindFirstChar();

        /// <summary>
        /// InitTrackCount must initialize the runtrackcount field; this is
        /// used to know how large the initial runtrack and runstack arrays
        /// must be.
        /// </summary>
        protected abstract void InitTrackCount();

        /// <summary>
        /// Initializes all the data members that are used by Go()
        /// </summary>
        private void InitMatch()
        {
            // Use a hashtable'ed Match object if the capture numbers are sparse
            if (_runmatch == null)
            {
                if (_runregex._caps != null)
                    _runmatch = new MatchSparse(_runregex, _runregex._caps, _runregex._capsize, _runtext, _runtextbeg, _runtextend - _runtextbeg, _runtextstart);
                else
                    _runmatch = new Match(_runregex, _runregex._capsize, _runtext, _runtextbeg, _runtextend - _runtextbeg, _runtextstart);
            }
            else
            {
                _runmatch.Reset(_runregex, _runtext, _runtextbeg, _runtextend, _runtextstart);
            }

            // note we test runcrawl, because it is the last one to be allocated
            // If there is an alloc failure in the middle of the three allocations,
            // we may still return to reuse this instance, and we want to behave
            // as if the allocations didn't occur. (we used to test _trackcount != 0)
            if (_runcrawl != null)
            {
                _runtrackpos = _runtrack.Length;
                _runstackpos = _runstack.Length;
                _runcrawlpos = _runcrawl.Length;
                return;
            }

            InitTrackCount();

            int tracksize = _runtrackcount * 8;
            int stacksize = _runtrackcount * 8;

            // Enforce minimum capacities so tiny patterns still get usable stacks.
            if (tracksize < 32)
                tracksize = 32;
            if (stacksize < 16)
                stacksize = 16;

            _runtrack = new int[tracksize];
            _runtrackpos = tracksize;

            _runstack = new int[stacksize];
            _runstackpos = stacksize;

            _runcrawl = new int[32];
            _runcrawlpos = 32;
        }

        /// <summary>
        /// Put match in its canonical form before returning it.
        /// </summary>
        private Match TidyMatch(bool quick)
        {
            if (!quick)
            {
                Match match = _runmatch;

                _runmatch = null;

                match.Tidy(_runtextpos);
                return match;
            }
            else
            {
                // in quick mode, a successful match returns null, and
                // the allocated match object is left in the cache
                return null;
            }
        }

        /// <summary>
        /// Called by the implementation of Go() to increase the size of storage
        /// </summary>
        protected void EnsureStorage()
        {
            if (_runstackpos < _runtrackcount * 4)
                DoubleStack();
            if (_runtrackpos < _runtrackcount * 4)
                DoubleTrack();
        }

        /// <summary>
        /// Called by the implementation of Go() to decide whether the pos
        /// at the specified index is a boundary or not. It's just not worth
        /// emitting inline code for this logic.
        /// </summary>
        protected bool IsBoundary(int index, int startpos, int endpos)
        {
            return (index > startpos && RegexCharClass.IsWordChar(_runtext[index - 1])) !=
                   (index < endpos && RegexCharClass.IsWordChar(_runtext[index]));
        }

        // Same as IsBoundary but using the ECMAScript definition of a word character.
        protected bool IsECMABoundary(int index, int startpos, int endpos)
        {
            return (index > startpos && RegexCharClass.IsECMAWordChar(_runtext[index - 1])) !=
                   (index < endpos && RegexCharClass.IsECMAWordChar(_runtext[index]));
        }

        /// <summary>
        /// Tests membership using the legacy set/category string representation by
        /// converting it to the current char-class format first.
        /// </summary>
        protected static bool CharInSet(char ch, String set, String category)
        {
            string charClass = RegexCharClass.ConvertOldStringsToClass(set, category);
            return RegexCharClass.CharInClass(ch, charClass);
        }

        /// <summary>
        /// Tests whether <paramref name="ch"/> belongs to the given character class.
        /// </summary>
        protected static bool CharInClass(char ch, String charClass)
        {
            return RegexCharClass.CharInClass(ch, charClass);
        }

        /// <summary>
        /// Called by the implementation of Go() to increase the size of the
        /// backtracking stack.
        /// </summary>
        protected void DoubleTrack()
        {
            int[] newtrack;

            newtrack = new int[_runtrack.Length * 2];

            // The stack grows downward, so existing data is copied into the TOP half
            // of the new array and the position is shifted by the old length.
            System.Array.Copy(_runtrack, 0, newtrack, _runtrack.Length, _runtrack.Length);
            _runtrackpos += _runtrack.Length;
            _runtrack = newtrack;
        }

        /// <summary>
        /// Called by the implementation of Go() to increase the size of the
        /// grouping stack.
        /// </summary>
        protected void DoubleStack()
        {
            int[] newstack;

            newstack = new int[_runstack.Length * 2];

            // Downward-growing stack: copy into the top half (see DoubleTrack).
            System.Array.Copy(_runstack, 0, newstack, _runstack.Length, _runstack.Length);
            _runstackpos += _runstack.Length;
            _runstack = newstack;
        }

        /// <summary>
        /// Increases the size of the longjump unrolling stack.
        /// </summary>
        protected void DoubleCrawl()
        {
            int[] newcrawl;

            newcrawl = new int[_runcrawl.Length * 2];

            // Downward-growing stack: copy into the top half (see DoubleTrack).
            System.Array.Copy(_runcrawl, 0, newcrawl, _runcrawl.Length, _runcrawl.Length);
            _runcrawlpos += _runcrawl.Length;
            _runcrawl = newcrawl;
        }

        /// <summary>
        /// Save a number on the longjump unrolling stack
        /// </summary>
        protected void Crawl(int i)
        {
            if (_runcrawlpos == 0)
                DoubleCrawl();

            _runcrawl[--_runcrawlpos] = i;
        }

        /// <summary>
        /// Remove a number from the longjump unrolling stack
        /// </summary>
        protected int Popcrawl()
        {
            return _runcrawl[_runcrawlpos++];
        }

        /// <summary>
        /// Get the height of the stack
        /// </summary>
        protected int Crawlpos()
        {
            return _runcrawl.Length - _runcrawlpos;
        }

        /// <summary>
        /// Called by Go() to capture a subexpression. Note that the
        /// capnum used here has already been mapped to a non-sparse
        /// index (by the code generator RegexWriter).
        /// </summary>
        protected void Capture(int capnum, int start, int end)
        {
            // Normalize so start <= end (right-to-left matches can report them reversed).
            if (end < start)
            {
                int T;

                T = end;
                end = start;
                start = T;
            }

            Crawl(capnum);
            _runmatch.AddMatch(capnum, start, end - start);
        }

        /// <summary>
        /// Called by Go() to capture a subexpression. Note that the
        /// capnum used here has already been mapped to a non-sparse
        /// index (by the code generator RegexWriter).
        /// </summary>
        protected void TransferCapture(int capnum, int uncapnum, int start, int end)
        {
            int start2;
            int end2;

            // these are the two intervals that are cancelling each other

            if (end < start)
            {
                int T;

                T = end;
                end = start;
                start = T;
            }

            start2 = MatchIndex(uncapnum);
            end2 = start2 + MatchLength(uncapnum);

            // The new capture gets the innermost defined interval

            if (start >= end2)
            {
                end = start;
                start = end2;
            }
            else if (end <= start2)
            {
                start = start2;
            }
            else
            {
                if (end > end2)
                    end = end2;
                if (start2 > start)
                    start = start2;
            }

            Crawl(uncapnum);
            _runmatch.BalanceMatch(uncapnum);

            if (capnum != -1)
            {
                Crawl(capnum);
                _runmatch.AddMatch(capnum, start, end - start);
            }
        }

        /*
         * Called by Go() to revert the last capture
         */
        protected void Uncapture()
        {
            int capnum = Popcrawl();
            _runmatch.RemoveMatch(capnum);
        }

        /// <summary>
        /// Call out to runmatch to get around visibility issues
        /// </summary>
        protected bool IsMatched(int cap)
        {
            return _runmatch.IsMatched(cap);
        }

        /// <summary>
        /// Call out to runmatch to get around visibility issues
        /// </summary>
        protected int MatchIndex(int cap)
        {
            return _runmatch.MatchIndex(cap);
        }

        /// <summary>
        /// Call out to runmatch to get around visibility issues
        /// </summary>
        protected int MatchLength(int cap)
        {
            return _runmatch.MatchLength(cap);
        }

#if DEBUG
        /// <summary>
        /// Dump the current state
        /// </summary>
        internal virtual void DumpState()
        {
            Debug.WriteLine("Text: " + TextposDescription());
            Debug.WriteLine("Track: " + StackDescription(_runtrack, _runtrackpos));
            Debug.WriteLine("Stack: " + StackDescription(_runstack, _runstackpos));
        }

        // Formats a downward-growing stack as "used/capacity (elements...)" for debug dumps.
        internal static String StackDescription(int[] a, int index)
        {
            var sb = new StringBuilder();

            sb.Append(a.Length - index);
            sb.Append('/');
            sb.Append(a.Length);

            if (sb.Length < 8)
                sb.Append(' ', 8 - sb.Length);

            sb.Append('(');

            for (int i = index; i < a.Length; i++)
            {
                if (i > index)
                    sb.Append(' ');
                sb.Append(a[i]);
            }

            sb.Append(')');

            return sb.ToString();
        }

        // Formats the current text position and remaining input for debug dumps;
        // output is truncated at 64 characters.
        internal virtual String TextposDescription()
        {
            var sb = new StringBuilder();
            int remaining;

            sb.Append(_runtextpos);

            if (sb.Length < 8)
                sb.Append(' ', 8 - sb.Length);

            if (_runtextpos > _runtextbeg)
                sb.Append(RegexCharClass.CharDescription(_runtext[_runtextpos - 1]));
            else
                sb.Append('^');

            sb.Append('>');

            remaining = _runtextend - _runtextpos;

            for (int i = _runtextpos; i < _runtextend; i++)
            {
                sb.Append(RegexCharClass.CharDescription(_runtext[i]));
            }
            if (sb.Length >= 64)
            {
                sb.Length = 61;
                sb.Append("...");
            }
            else
            {
                sb.Append('$');
            }

            return sb.ToString();
        }
#endif
    }
}
#region License

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Gremlin.Net.Driver;
using Xunit;

namespace Gremlin.Net.UnitTest.Driver
{
    /// <summary>
    /// Tests for CopyOnWriteCollection covering adding, removing, snapshotting,
    /// draining, and parallel mutation.
    /// </summary>
    public class CopyOnWriteCollectionTests
    {
        [Fact]
        public void ShouldStartEmpty()
        {
            var sut = new CopyOnWriteCollection<int>();

            Assert.Empty(sut.Snapshot);
        }

        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(2)]
        [InlineData(8)]
        public void AddRangeShouldResultInExpectedCount(int expectedCount)
        {
            var sut = new CopyOnWriteCollection<int>();

            sut.AddRange(Enumerable.Range(0, expectedCount).ToArray());

            Assert.Equal(expectedCount, sut.Count);
        }

        [Fact]
        public void AddRangeShouldAddNewItemsAfterOldItems()
        {
            var firstBatch = Enumerable.Range(0, 5).ToArray();
            var secondBatch = Enumerable.Range(0, 3).ToArray();
            var sut = new CopyOnWriteCollection<int>();

            sut.AddRange(firstBatch);
            sut.AddRange(secondBatch);

            // Ordering contract: the second batch is appended after the first.
            Assert.Equal(firstBatch.Concat(secondBatch), sut.Snapshot);
        }

        [Fact]
        public void TryRemoveShouldReturnFalseForUnknownItem()
        {
            const int unknownItem = -1;
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(Enumerable.Range(0, 5).ToArray());

            Assert.False(sut.TryRemove(unknownItem));
        }

        [Fact]
        public void TryRemoveShouldNotChangeCountForUnknownItem()
        {
            const int unknownItem = -1;
            var knownItems = Enumerable.Range(0, 5).ToArray();
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(knownItems);

            sut.TryRemove(unknownItem);

            Assert.Equal(knownItems.Length, sut.Count);
        }

        [Fact]
        public void TryRemoveShouldReturnTrueForKnownItem()
        {
            var knownItems = Enumerable.Range(0, 5).ToArray();
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(knownItems);

            Assert.True(sut.TryRemove(knownItems[2]));
        }

        [Fact]
        public void TryRemoveShouldRemoveKnownItem()
        {
            var knownItems = Enumerable.Range(0, 5).ToArray();
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(knownItems);

            sut.TryRemove(knownItems[2]);

            Assert.DoesNotContain(knownItems[2], sut.Snapshot);
        }

        [Fact]
        public void TryRemoveShouldDecrementCountForKnownItem()
        {
            var knownItems = Enumerable.Range(0, 5).ToArray();
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(knownItems);

            sut.TryRemove(knownItems[2]);

            Assert.Equal(knownItems.Length - 1, sut.Count);
        }

        [Fact]
        public void TryRemoveOfLastItemShouldEmptyTheArray()
        {
            const int onlyItem = 3;
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(new[] { onlyItem });

            sut.TryRemove(onlyItem);

            Assert.Empty(sut.Snapshot);
        }

        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(2)]
        [InlineData(8)]
        public void SnapshotShouldReturnAddedItems(int nrItems)
        {
            var addedItems = Enumerable.Range(0, nrItems).ToArray();
            var sut = new CopyOnWriteCollection<int>();

            sut.AddRange(addedItems);

            Assert.Equal(addedItems, sut.Snapshot);
        }

        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(2)]
        [InlineData(8)]
        public void RemoveAndGetAllShouldReturnAllItems(int nrItems)
        {
            var addedItems = Enumerable.Range(0, nrItems).ToArray();
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(addedItems);

            Assert.Equal(addedItems, sut.RemoveAndGetAll());
        }

        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(2)]
        [InlineData(8)]
        public void RemoveAndGetAllShouldEmptyTheArray(int nrItems)
        {
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(Enumerable.Range(0, nrItems).ToArray());

            sut.RemoveAndGetAll();

            Assert.Empty(sut.Snapshot);
        }

        [Fact]
        public void AddRangeShouldAllowParallelCalls()
        {
            const int nrOfParallelOperations = 100;
            var sut = new CopyOnWriteCollection<int>();
            var expectedItems = new List<int>(nrOfParallelOperations * 3);
            var addRangeActions = new Action[nrOfParallelOperations];
            for (var i = 0; i < nrOfParallelOperations; i++)
            {
                var batch = new[] { i, i + 1, i + 2 };
                expectedItems.AddRange(batch);
                addRangeActions[i] = () => sut.AddRange(batch);
            }

            Parallel.Invoke(addRangeActions);

            AssertCollectionContainsExactlyUnordered(sut, expectedItems);
        }

        // Verifies the collection holds exactly the expected items (duplicates included),
        // ignoring order, by draining it one expected item at a time.
        private static void AssertCollectionContainsExactlyUnordered(CopyOnWriteCollection<int> collection,
            IEnumerable<int> expectedItems)
        {
            foreach (var item in expectedItems)
            {
                Assert.True(collection.TryRemove(item));
            }
            Assert.Equal(0, collection.Count);
        }

        [Fact]
        public void TryRemoveShouldAllowParallelCalls()
        {
            const int nrOfParallelOperations = 100;
            var sut = new CopyOnWriteCollection<int>();
            sut.AddRange(Enumerable.Range(0, nrOfParallelOperations).ToArray());
            var tryRemoveActions = new Action[nrOfParallelOperations];
            for (var i = 0; i < nrOfParallelOperations; i++)
            {
                var itemToRemove = i;
                tryRemoveActions[i] = () => sut.TryRemove(itemToRemove);
            }

            Parallel.Invoke(tryRemoveActions);

            Assert.Equal(0, sut.Count);
        }
    }
}
//#define ASTAR_SINGLE_THREAD_OPTIMIZE #define ASTAR_FAST_NO_EXCEPTIONS //Needs to be enabled for the iPhone build setting Fast But No Exceptions to work. using UnityEngine; using System.Collections; using System.Collections.Generic; //using Pathfinding; using Pathfinding.Util; namespace Pathfinding { [System.Serializable] /** Stores the navigation graphs for the A* Pathfinding System. * \ingroup relevant * * An instance of this class is assigned to AstarPath.astarData, from it you can access all graphs loaded through the #graphs variable.\n * This class also handles a lot of the high level serialization. */ public class AstarData { /** Shortcut to AstarPath.active */ public AstarPath active { get { return AstarPath.active; } } #region Fields [System.NonSerialized] public NavMeshGraph navmesh; /**< Shortcut to the first NavMeshGraph. Updated at scanning time. This is the only reference to NavMeshGraph in the core pathfinding scripts */ [System.NonSerialized] public GridGraph gridGraph; /**< Shortcut to the first GridGraph. Updated at scanning time. This is the only reference to GridGraph in the core pathfinding scripts */ [System.NonSerialized] public PointGraph pointGraph; /**< Shortcut to the first PointGraph. Updated at scanning time. This is the only reference to PointGraph in the core pathfinding scripts */ [System.NonSerialized] /** Holds temporary path data for pathfinders. * One array for every thread. * Every array is itself an array with a number of NodeRun object of which there is one per node of. * These objects holds the temporary path data, such as the G and H scores and the parent node. * This is separate from the static path data, e.g connections between nodes. * \see CreateNodeRuns */ public NodeRun[][] nodeRuns; /** All supported graph types. Populated through reflection search */ public System.Type[] graphTypes = null; #if ASTAR_FAST_NO_EXCEPTIONS /** Graph types to use when building with Fast But No Exceptions for iPhone. 
* If you add any custom graph types, you need to add them to this hard-coded list. */ public static readonly System.Type[] DefaultGraphTypes = new System.Type[] { typeof(GridGraph), typeof(PointGraph), typeof(NavMeshGraph) }; #endif [System.NonSerialized] /** All graphs this instance holds. * This will be filled only after deserialization has completed. * May contain null entries if graph have been removed. */ public NavGraph[] graphs = new NavGraph[0]; /** Links placed by the user in the scene view. */ [System.NonSerialized] public UserConnection[] userConnections = new UserConnection[0]; //Serialization Settings /** Has the data been reverted by an undo operation. * Used by the editor's undo logic to check if the AstarData has been reverted by an undo operation and should be deserialized */ public bool hasBeenReverted = false; [SerializeField] /** Serialized data for all graphs and settings. */ private byte[] data; public uint dataChecksum; /** Backup data if deserialization failed. */ public byte[] data_backup; /** Serialized data for cached startup */ public byte[] data_cachedStartup; public byte[] revertData; /** Should graph-data be cached. * Caching the startup means saving the whole graphs, not only the settings to an internal array (#data_cachedStartup) which can * be loaded faster than scanning all graphs at startup. This is setup from the editor. 
*/ public bool cacheStartup = false; public bool compress = false; //End Serialization Settings #endregion public byte[] GetData () { return data; } public void SetData (byte[] data, uint checksum) { this.data = data; dataChecksum = checksum; } /** Loads the graphs from memory, will load cached graphs if any exists */ public void Awake () { /* Set up default values, to not throw null reference errors */ userConnections = new UserConnection[0]; graphs = new NavGraph[0]; /* End default values */ if (cacheStartup && data_cachedStartup != null) { LoadFromCache (); } else { DeserializeGraphs (); } } [System.Obsolete] public void CollectNodes (int numTemporary) { /*int nodeCount = 0; for (int i=0;i<graphs.Length;i++) { if (graphs[i].nodes != null) nodeCount += graphs[i].nodes.Length; } nodes = new Node[nodeCount + numTemporary]; int counter = 0; for (int i=0;i<graphs.Length;i++) { if (graphs[i].nodes != null) { Node[] gNodes = graphs[i].nodes; for (int j=0;j<gNodes.Length;j++, counter++) { nodes[counter] = gNodes[j]; gNodes[j].nodeIndex = counter; } } }*/ } public void AssignNodeIndices () { int counter = 0; for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null || graphs[i].nodes == null) continue; Node[] nodes = graphs[i].nodes; for (int j=0;j<nodes.Length;j++, counter++) { if (nodes[j] != null) nodes[j].SetNodeIndex(counter); } } } /** Creates the structure for holding temporary path data. * The data is for example the G, H and F scores and the search tree. * The number of nodeRuns must be no less than the number of nodes contained in all graphs. * So after adding nodes, this MUST be called.\n * Ideally, I would code an update function which reuses most of the previous ones instead of recreating it every time. * \param numParallel Number of parallel threads which will use the data. 
* \see #nodeRuns * \see AstarPath.UpdatePathThreadInfoNodes */ public void CreateNodeRuns (int numParallel) { if (graphs == null) throw new System.Exception ("Cannot create NodeRuns when no graphs exist. (Scan and or Load graphs first)"); int nodeCount = 0; for (int i=0;i<graphs.Length;i++) { if (graphs[i] != null && graphs[i].nodes != null) nodeCount += graphs[i].nodes.Length; } AssignNodeIndices (); active.UpdatePathThreadInfoNodes (); } /** Updates shortcuts to the first graph of different types. * Hard coding references to some graph types is not really a good thing imo. I want to keep it dynamic and flexible. * But these references ease the use of the system, so I decided to keep them. It is the only reference to specific graph types in the pathfinding core.\n */ public void UpdateShortcuts () { navmesh = (NavMeshGraph)FindGraphOfType (typeof(NavMeshGraph)); gridGraph = (GridGraph)FindGraphOfType (typeof(GridGraph)); pointGraph = (PointGraph)FindGraphOfType (typeof(PointGraph)); } public void LoadFromCache () { if (data_cachedStartup != null && data_cachedStartup.Length > 0) { //AstarSerializer serializer = new AstarSerializer (active); //DeserializeGraphs (serializer,data_cachedStartup); DeserializeGraphs (data_cachedStartup); GraphModifier.TriggerEvent (GraphModifier.EventType.PostCacheLoad); } else { Debug.LogError ("Can't load from cache since the cache is empty"); } } public void SaveCacheData (Pathfinding.Serialization.SerializeSettings settings) { data_cachedStartup = SerializeGraphs (settings); cacheStartup = true; } #region Serialization /** Serializes all graphs settings to a byte array. * \see DeserializeGraphs(byte[]) */ public byte[] SerializeGraphs () { return SerializeGraphs (Pathfinding.Serialization.SerializeSettings.Settings); } /** Main serializer function. */ public byte[] SerializeGraphs (Pathfinding.Serialization.SerializeSettings settings) { uint checksum; return SerializeGraphs (settings, out checksum); } /** Main serializer function. 
* Serializes all graphs to a byte array * A similar function exists in the AstarEditor.cs script to save additional info */ public byte[] SerializeGraphs (Pathfinding.Serialization.SerializeSettings settings, out uint checksum) { Pathfinding.Serialization.AstarSerializer sr = new Pathfinding.Serialization.AstarSerializer(this, settings); sr.OpenSerialize(); SerializeGraphsPart (sr); byte[] bytes = sr.CloseSerialize(); checksum = sr.GetChecksum (); return bytes; } /** Serializes common info to the serializer. * Common info is what is shared between the editor serialization and the runtime serializer. * This is mostly everything except the graph inspectors which serialize some extra data in the editor */ public void SerializeGraphsPart (Pathfinding.Serialization.AstarSerializer sr) { sr.SerializeGraphs(graphs); sr.SerializeUserConnections (userConnections); sr.SerializeNodes(); sr.SerializeExtraInfo(); } /** Deserializes graphs from #data */ public void DeserializeGraphs () { if (data != null) { DeserializeGraphs (data); } } /** Deserializes graphs from the specified byte array. * If an error ocurred, it will try to deserialize using the old deserializer. * A warning will be logged if all deserializers failed. */ public void DeserializeGraphs (byte[] bytes) { try { if (bytes != null) { Pathfinding.Serialization.AstarSerializer sr = new Pathfinding.Serialization.AstarSerializer(this); if (sr.OpenDeserialize(bytes)) { DeserializeGraphsPart (sr); sr.CloseDeserialize(); } else { Debug.Log ("Invalid data file (cannot read zip). 
Trying to load with old deserializer (pre 3.1)..."); AstarSerializer serializer = new AstarSerializer (active); DeserializeGraphs_oldInternal (serializer); } } else { throw new System.ArgumentNullException ("Bytes should not be null when passed to DeserializeGraphs"); } active.DataUpdate (); } catch (System.Exception e) { Debug.LogWarning ("Caught exception while deserializing data.\n"+e); data_backup = bytes; } } /** Deserializes graphs from the specified byte array additively. * If an error ocurred, it will try to deserialize using the old deserializer. * A warning will be logged if all deserializers failed. * This function will add loaded graphs to the current ones */ public void DeserializeGraphsAdditive (byte[] bytes) { try { if (bytes != null) { Pathfinding.Serialization.AstarSerializer sr = new Pathfinding.Serialization.AstarSerializer(this); if (sr.OpenDeserialize(bytes)) { DeserializeGraphsPartAdditive (sr); sr.CloseDeserialize(); } else { Debug.Log ("Invalid data file (cannot read zip)."); } } else { throw new System.ArgumentNullException ("Bytes should not be null when passed to DeserializeGraphs"); } active.DataUpdate (); } catch (System.Exception e) { Debug.LogWarning ("Caught exception while deserializing data.\n"+e); } } /** Deserializes common info. * Common info is what is shared between the editor serialization and the runtime serializer. * This is mostly everything except the graph inspectors which serialize some extra data in the editor */ public void DeserializeGraphsPart (Pathfinding.Serialization.AstarSerializer sr) { graphs = sr.DeserializeGraphs (); userConnections = sr.DeserializeUserConnections(); sr.DeserializeNodes(); sr.DeserializeExtraInfo(); sr.PostDeserialization(); } /** Deserializes common info additively * Common info is what is shared between the editor serialization and the runtime serializer. 
* This is mostly everything except the graph inspectors which serialize some extra data in the editor */
public void DeserializeGraphsPartAdditive (Pathfinding.Serialization.AstarSerializer sr) {
	if (graphs == null) graphs = new NavGraph[0];
	if (userConnections == null) userConnections = new UserConnection[0];

	// Append deserialized graphs and user connections to the existing arrays
	List<NavGraph> mergedGraphs = new List<NavGraph>(graphs);
	mergedGraphs.AddRange (sr.DeserializeGraphs ());
	graphs = mergedGraphs.ToArray();

	List<UserConnection> mergedConnections = new List<UserConnection>(userConnections);
	mergedConnections.AddRange (sr.DeserializeUserConnections());
	userConnections = mergedConnections.ToArray ();

	sr.DeserializeNodes();
	sr.DeserializeExtraInfo();
	sr.PostDeserialization();

	// Imported graphs may share a guid with graphs already loaded.
	// When that happens the earlier graph gets a fresh guid so all guids stay unique.
	for (int i=0;i<graphs.Length;i++) {
		for (int j=i+1;j<graphs.Length;j++) {
			if (graphs[i] != null && graphs[j] != null && graphs[i].guid == graphs[j].guid) {
				Debug.LogWarning ("Guid Conflict when importing graphs additively. Imported graph will get a new Guid.\nThis message is (relatively) harmless.");
				graphs[i].guid = Pathfinding.Util.Guid.NewGuid ();
				break;
			}
		}
	}
}

#region OldSerializer

/** Main deserializer function (old), loads from the #data variable \deprecated */
[System.Obsolete("This function is obsolete. Use DeserializeGraphs () instead")]
public void DeserializeGraphs (AstarSerializer serializer) {
	DeserializeGraphs_oldInternal (serializer);
}

/** Main deserializer function (old), loads from the #data variable \deprecated */
public void DeserializeGraphs_oldInternal (AstarSerializer serializer) {
	DeserializeGraphs_oldInternal (serializer, data);
}

/** Main deserializer function (old). Loads from \a bytes variable \deprecated */
[System.Obsolete("This function is obsolete. Use DeserializeGraphs (bytes) instead")]
public void DeserializeGraphs (AstarSerializer serializer, byte[] bytes) {
	DeserializeGraphs_oldInternal (serializer, bytes);
}

/** Main deserializer function (old).
Loads from \a bytes variable \deprecated */
public void DeserializeGraphs_oldInternal (AstarSerializer serializer, byte[] bytes) {
	System.DateTime startTime = System.DateTime.UtcNow;

	// No stored data at all: just reset to an empty graph set
	if (bytes == null || bytes.Length == 0) {
		Debug.Log ("No previous data, assigning default");
		graphs = new NavGraph[0];
		return;
	}

	Debug.Log ("Deserializing...");

	serializer = serializer.OpenDeserialize (bytes);

	DeserializeGraphsPart (serializer);

	serializer.Close ();

	System.DateTime endTime = System.DateTime.UtcNow;
	Debug.Log ("Deserialization complete - Process took "+((endTime-startTime).Ticks*0.0001F).ToString ("0.00")+" ms");
}

/** Deserializes all graphs and also user connections \deprecated
 * Reads from serializer.readerStream; the read order below must match the order
 * the old serializer wrote in, so do not reorder these calls.
 * On any error the raw stream is copied to #data_backup and #graphs is reset to empty.
 */
public void DeserializeGraphsPart (AstarSerializer serializer) {
	if (serializer.error != AstarSerializer.SerializerError.Nothing) {
		data_backup = (serializer.readerStream.BaseStream as System.IO.MemoryStream).ToArray ();
		Debug.Log ("Error encountered : "+serializer.error+"\nWriting data to AstarData.data_backup");
		graphs = new NavGraph[0];
		return;
	}

	try {
		// The graph count is stored twice as a cheap corruption check
		int count1 = serializer.readerStream.ReadInt32 ();
		int count2 = serializer.readerStream.ReadInt32 ();

		if (count1 != count2) {
			Debug.LogError ("Data is corrupt ("+count1 +" != "+count2+")");
			graphs = new NavGraph[0];
			return;
		}

		NavGraph[] _graphs = new NavGraph[count1];

		// First pass: create (or reuse) each graph and load its settings
		for (int i=0;i<_graphs.Length;i++) {

			if (!serializer.MoveToAnchor ("Graph"+i)) {
				Debug.LogError ("Couldn't find graph "+i+" in the data");
				Debug.Log ("Logging... "+serializer.anchors.Count);
				foreach (KeyValuePair<string,int> value in serializer.anchors) {
					Debug.Log ("KeyValuePair "+value.Key);
				}
				// Leave the slot null; nulls are stripped at the end
				_graphs[i] = null;
				continue;
			}
			string graphType = serializer.readerStream.ReadString ();

			// Old graph naming: ListGraph was renamed to PointGraph
			graphType = graphType.Replace ("ListGraph","PointGraph");

			Guid guid = new Guid (serializer.readerStream.ReadString ());

			// Search for existing graphs with the same GUID. If one is found, that means
			// that we are loading another version of that graph.
			// Use that graph then and just load it with some new settings
			NavGraph existingGraph = GuidToGraph (guid);

			if (existingGraph != null) {
				_graphs[i] = existingGraph; // Replace
			} else {
				_graphs[i] = CreateGraph (graphType);
			}

			NavGraph graph = _graphs[i];

			if (graph == null) {
				Debug.LogError ("One of the graphs saved was of an unknown type, the graph was of type '"+graphType+"'");
				data_backup = data;
				graphs = new NavGraph[0];
				return;
			}

			_graphs[i].guid = guid;

			// Set an unique prefix for all variables in this graph
			serializer.sPrefix = i.ToString ();
			serializer.DeSerializeSettings (graph,active);
		}

		serializer.SetUpGraphRefs (_graphs);

		// Second pass: load the node data for each graph (settings for all graphs must exist first)
		for (int i=0;i<_graphs.Length;i++) {

			NavGraph graph = _graphs[i];

			if (serializer.MoveToAnchor ("GraphNodes_Graph"+i)) {
				serializer.mask = serializer.readerStream.ReadInt32 ();
				serializer.sPrefix = i.ToString ()+"N";
				serializer.DeserializeNodes (graph,_graphs,i,active);
				serializer.sPrefix = "";
			}
		}

		userConnections = serializer.DeserializeUserConnections ();

		// Remove null graphs (slots whose anchor was missing or whose type was unknown)
		List<NavGraph> tmp = new List<NavGraph>(_graphs);
		for (int i=0;i<_graphs.Length;i++) {
			if (_graphs[i] == null) {
				tmp.Remove (_graphs[i]);
			}
		}
		graphs = tmp.ToArray ();
	} catch (System.Exception e) {
		// Preserve the raw data so the user can attempt recovery, then reset to a clean state
		data_backup = (serializer.readerStream.BaseStream as System.IO.MemoryStream).ToArray ();
		Debug.LogWarning ("Deserializing Error Encountered - Writing data to AstarData.data_backup:\n"+e.ToString ());
		graphs = new NavGraph[0];
		return;
	}
}

#endregion

#endregion

/** Find all graph types supported in this build.
 * Using reflection, the assembly is searched for types which inherit from NavGraph.
*/
public void FindGraphTypes () {
#if !ASTAR_FAST_NO_EXCEPTIONS
	System.Reflection.Assembly asm = System.Reflection.Assembly.GetAssembly (typeof(AstarPath));

	System.Type[] types = asm.GetTypes ();

	List<System.Type> graphList = new List<System.Type> ();

	// Walk the inheritance chain of every type in the assembly; any type which
	// derives (directly or indirectly) from NavGraph is a usable graph type
	foreach (System.Type type in types) {
		System.Type baseType = type.BaseType;
		while (baseType != null) {
			if (baseType == typeof(NavGraph)) {
				graphList.Add (type);
				break;
			}
			baseType = baseType.BaseType;
		}
	}

	graphTypes = graphList.ToArray ();
#else
	// Reflection is disabled in this build configuration, use the hardcoded list instead
	graphTypes = DefaultGraphTypes;
#endif
}

#region GraphCreation

/** \returns A System.Type which matches the specified \a type string. If no matching graph type was found, null is returned */
public System.Type GetGraphType (string type) {
	for (int i=0;i<graphTypes.Length;i++) {
		if (graphTypes[i].Name == type) {
			return graphTypes[i];
		}
	}
	return null;
}

/** Creates a new instance of a graph of type \a type. If no matching graph type was found, an error is logged and null is returned
 * \returns The created graph
 * \see CreateGraph(System.Type)
 */
public NavGraph CreateGraph (string type) {
	Debug.Log ("Creating Graph of type '"+type+"'");

	for (int i=0;i<graphTypes.Length;i++) {
		if (graphTypes[i].Name == type) {
			return CreateGraph (graphTypes[i]);
		}
	}
	Debug.LogError ("Graph type ("+type+") wasn't found");
	return null;
}

/** Creates a new graph instance of type \a type
 * \returns The created graph, or null if \a type does not inherit from NavGraph
 * \see CreateGraph(string)
 */
public NavGraph CreateGraph (System.Type type) {
	NavGraph g = System.Activator.CreateInstance (type) as NavGraph;
	// Fix: guard the 'as' cast. Previously a non-NavGraph type caused a NullReferenceException here
	if (g == null) {
		Debug.LogError ("The type ("+type+") does not inherit from NavGraph");
		return null;
	}
	g.active = active;
	return g;
}

/** Adds a graph of type \a type to the #graphs array
 * \returns The created graph, or null if no graph type with that name exists
 */
public NavGraph AddGraph (string type) {
	NavGraph graph = null;

	for (int i=0;i<graphTypes.Length;i++) {
		if (graphTypes[i].Name == type) {
			graph = CreateGraph (graphTypes[i]);
			// Fix: stop at the first match. Previously every matching type spawned a graph
			// and all but the last were silently discarded
			break;
		}
	}

	if (graph == null) {
		Debug.LogError ("No NavGraph of type '"+type+"' could be found");
		return null;
	}

	AddGraph (graph);

	return graph;
}

/** Adds a graph of type \a type to the #graphs array
 * \returns The created graph, or null if \a type is not a known graph type
 */
public NavGraph AddGraph (System.Type type) {
	NavGraph graph = null;

	for (int i=0;i<graphTypes.Length;i++) {
		if (graphTypes[i] == type) {
			graph = CreateGraph (graphTypes[i]);
			// Fix: stop at the first match (see AddGraph(string))
			break;
		}
	}

	if (graph == null) {
		Debug.LogError ("No NavGraph of type '"+type+"' could be found, "+graphTypes.Length+" graph types are avaliable");
		return null;
	}

	AddGraph (graph);

	return graph;
}

/** Adds the specified graph to the #graphs array.
 * Empty (null) slots left behind by RemoveGraph are reused before the array is grown */
public void AddGraph (NavGraph graph) {
	// Try to fill in an empty position
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] == null) {
			graphs[i] = graph;
			return;
		}
	}

	// Add a new entry to the list
	List<NavGraph> ls = new List<NavGraph> (graphs);
	ls.Add (graph);
	graphs = ls.ToArray ();
}

/** Removes the specified graph from the #graphs array and Destroys it in a safe manner.
 * To avoid changing graph indices for the other graphs, the graph is simply nulled in the array instead
 * of actually removing it from the array.
 * The empty position will be reused if a new graph is added.
 *
 * \returns True if the graph was sucessfully removed (i.e it did exist in the #graphs array). False otherwise.
 *
 * \see NavGraph.SafeOnDestroy
 *
 * \version Changed in 3.2.5 to call SafeOnDestroy before removing
 * and nulling it in the array instead of removing the element completely in the #graphs array.
 */
public bool RemoveGraph (NavGraph graph) {

	// Safe OnDestroy is called since there is a risk that the pathfinding is searching through the graph right now,
	// and if we don't wait until the search has completed we could end up with evil NullReferenceExceptions
	graph.SafeOnDestroy ();

	int i=0;
	for (;i<graphs.Length;i++) if (graphs[i] == graph) break;

	if (i == graphs.Length) {
		return false;
	}

	graphs[i] = null;
	return true;
}

#endregion

#region GraphUtility

/** Returns the graph which contains the specified node. The graph must be in the #graphs array.
 * \returns Returns the graph which contains the node.
Null if the graph wasn't found */
public static NavGraph GetGraph (Node node) {

	if (node == null) return null;

	AstarPath script = AstarPath.active;

	if (script == null) return null;

	AstarData data = script.astarData;

	if (data == null) return null;

	if (data.graphs == null) return null;

	int graphIndex = node.graphIndex;

	if (graphIndex < 0 || graphIndex >= data.graphs.Length) {
		return null;
	}

	return data.graphs[graphIndex];
}

/** Returns the node at \a graphs[graphIndex].nodes[nodeIndex]. All kinds of error checking is done to make sure no exceptions are thrown. */
public Node GetNode (int graphIndex, int nodeIndex) {
	return GetNode (graphIndex,nodeIndex, graphs);
}

/** Returns the node at \a graphs[graphIndex].nodes[nodeIndex]. The graphIndex refers to the specified graphs array.\n
 * All kinds of error checking is done to make sure no exceptions are thrown */
public Node GetNode (int graphIndex, int nodeIndex, NavGraph[] graphs) {

	if (graphs == null) {
		return null;
	}

	if (graphIndex < 0 || graphIndex >= graphs.Length) {
		Debug.LogError ("Graph index is out of range"+graphIndex+ " [0-"+(graphs.Length-1)+"]");
		return null;
	}

	NavGraph graph = graphs[graphIndex];

	// Fix: entries in the graphs array may be null (RemoveGraph nulls slots instead of
	// compacting the array), so guard before dereferencing
	if (graph == null) {
		return null;
	}

	if (graph.nodes == null) {
		return null;
	}

	if (nodeIndex < 0 || nodeIndex >= graph.nodes.Length) {
		Debug.LogError ("Node index is out of range : "+nodeIndex+ " [0-"+(graph.nodes.Length-1)+"]"+" (graph "+graphIndex+")");
		return null;
	}

	return graph.nodes[nodeIndex];
}

/** Returns the first graph of type \a type found in the #graphs array. Returns null if none was found */
public NavGraph FindGraphOfType (System.Type type) {

	// Guard added for consistency with FindGraphsOfType, which checks for a null graphs array
	if (graphs == null) {
		return null;
	}

	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && graphs[i].GetType () == type) {
			return graphs[i];
		}
	}
	return null;
}

/** Loop through this function to get all graphs of type 'type'
 * \code foreach (GridGraph graph in AstarPath.astarData.FindGraphsOfType (typeof(GridGraph))) {
 * //Do something with the graph
 * } \endcode
 * \see AstarPath.RegisterSafeNodeUpdate */
public IEnumerable FindGraphsOfType (System.Type type) {
	if (graphs == null) { yield break; }
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && graphs[i].GetType () == type) {
			yield return graphs[i];
		}
	}
}

/** All graphs which implements the UpdateableGraph interface
 * \code foreach (IUpdatableGraph graph in AstarPath.astarData.GetUpdateableGraphs ()) {
 * //Do something with the graph
 * } \endcode
 * \see AstarPath.RegisterSafeNodeUpdate
 * \see Pathfinding.IUpdatableGraph */
public IEnumerable GetUpdateableGraphs () {
	if (graphs == null) { yield break; }
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && graphs[i] is IUpdatableGraph) {
			yield return graphs[i];
		}
	}
}

/** All graphs which implements the IRaycastableGraph interface
 * \code foreach (IRaycastableGraph graph in AstarPath.astarData.GetRaycastableGraphs ()) {
 * //Do something with the graph
 * } \endcode
 * \see Pathfinding.IRaycastableGraph */
public IEnumerable GetRaycastableGraphs () {
	if (graphs == null) { yield break; }
	for (int i=0;i<graphs.Length;i++) {
		if (graphs[i] != null && graphs[i] is IRaycastableGraph) {
			yield return graphs[i];
		}
	}
}

/** Gets the index of the NavGraph in the #graphs array.
 * Logs an error and returns -1 when the graph is not in the array */
public int GetGraphIndex (NavGraph graph) {
	if (graph == null) throw new System.ArgumentNullException ("graph");

	// Guard against a null graphs array instead of throwing a NullReferenceException
	if (graphs != null) {
		for (int i=0;i<graphs.Length;i++) {
			if (graph == graphs[i]) {
				return i;
			}
		}
	}
	Debug.LogError ("Graph doesn't exist");
	return -1;
}

/** Tries to find a graph with the specified GUID in the #graphs array.
* If a graph is found it returns its index, otherwise it returns -1
 * \see GuidToGraph */
public int GuidToIndex (Guid guid) {

	if (graphs == null) {
		return -1;
	}

	// Linear scan; null slots (left by RemoveGraph) are skipped
	for (int graphIndex = 0; graphIndex < graphs.Length; graphIndex++) {
		NavGraph candidate = graphs[graphIndex];
		if (candidate != null && candidate.guid == guid) {
			return graphIndex;
		}
	}
	return -1;
}

/** Tries to find a graph with the specified GUID in the #graphs array. Returns null if none is found
 * \see GuidToIndex */
public NavGraph GuidToGraph (Guid guid) {
	// Delegate to GuidToIndex to avoid duplicating the search logic
	int index = GuidToIndex (guid);
	return index == -1 ? null : graphs[index];
}

#endregion
}
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace Light.WebAPI.Areas.HelpPage
{
    /// <summary>
    /// Creates an instance of a given type and fills it with sample data.
    /// The type needs to be public, have a public default constructor and settable public properties/fields. Supported:
    /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
    /// Complex types: POCO types.
    /// Nullables: <see cref="Nullable{T}"/>.
    /// Arrays: arrays of simple types or complex types.
    /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
    /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
    /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
    /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
    /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
    /// </summary>
    public class ObjectGenerator
    {
        internal const int DefaultCollectionSize = 2;
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type, or null when one could not be created.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> visited)
        {
            // The order of these checks matters: simple types first, then the
            // structural cases, and finally POCO generation as a fallback.
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, visited);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, visited);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, visited);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, visited);
                }

                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, visited);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, visited);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, visited);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, visited);
                }
            }
            catch
            {
                // Best effort contract: swallow everything and return null below.
            }

            return null;
        }

        // Dispatches a closed generic type to the appropriate specialized generator.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> visited)
        {
            Type definition = type.GetGenericTypeDefinition();
            if (definition == typeof(Nullable<>))
            {
                return GenerateNullable(type, visited);
            }
            if (definition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, visited);
            }
            if (IsTuple(definition))
            {
                return GenerateTuple(type, visited);
            }

            Type[] typeArgs = type.GetGenericArguments();
            switch (typeArgs.Length)
            {
                case 1:
                    if (definition == typeof(IList<>) || definition == typeof(IEnumerable<>) || definition == typeof(ICollection<>))
                    {
                        return GenerateCollection(typeof(List<>).MakeGenericType(typeArgs), collectionSize, visited);
                    }
                    if (definition == typeof(IQueryable<>))
                    {
                        return GenerateQueryable(type, collectionSize, visited);
                    }
                    if (typeof(ICollection<>).MakeGenericType(typeArgs[0]).IsAssignableFrom(type))
                    {
                        return GenerateCollection(type, collectionSize, visited);
                    }
                    break;

                case 2:
                    if (definition == typeof(IDictionary<,>))
                    {
                        return GenerateDictionary(typeof(Dictionary<,>).MakeGenericType(typeArgs), collectionSize, visited);
                    }
                    if (typeof(IDictionary<,>).MakeGenericType(typeArgs[0], typeArgs[1]).IsAssignableFrom(type))
                    {
                        return GenerateDictionary(type, collectionSize, visited);
                    }
                    break;
            }

            // Fall back to treating it as a plain complex object when it is public.
            return (type.IsPublic || type.IsNestedPublic) ? GenerateComplexObject(type, visited) : null;
        }

        // Builds a Tuple<...> by generating each item; returns null if no item could be generated.
        private static object GenerateTuple(Type type, Dictionary<Type, object> visited)
        {
            Type[] itemTypes = type.GetGenericArguments();
            object[] items = new object[itemTypes.Length];
            bool allItemsNull = true;
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < itemTypes.Length; i++)
            {
                items[i] = generator.GenerateObject(itemTypes[i], visited);
                allItemsNull &= items[i] == null;
            }
            return allItemsNull ? null : Activator.CreateInstance(type, items);
        }

        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>)
                || genericTypeDefinition == typeof(Tuple<,>)
                || genericTypeDefinition == typeof(Tuple<,,>)
                || genericTypeDefinition == typeof(Tuple<,,,>)
                || genericTypeDefinition == typeof(Tuple<,,,,>)
                || genericTypeDefinition == typeof(Tuple<,,,,,>)
                || genericTypeDefinition == typeof(Tuple<,,,,,,>)
                || genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Builds a KeyValuePair<K,V>; returns null only when both key and value failed.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> visited)
        {
            Type[] args = keyValuePairType.GetGenericArguments();
            ObjectGenerator generator = new ObjectGenerator();
            object key = generator.GenerateObject(args[0], visited);
            object value = generator.GenerateObject(args[1], visited);
            if (key == null && value == null)
            {
                return null;
            }
            return Activator.CreateInstance(keyValuePairType, key, value);
        }

        // Builds an array with `size` generated elements; null when no element could be generated.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> visited)
        {
            Type elementType = arrayType.GetElementType();
            Array array = Array.CreateInstance(elementType, size);
            bool allElementsNull = true;
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = generator.GenerateObject(elementType, visited);
                array.SetValue(element, i);
                allElementsNull &= element == null;
            }
            return allElementsNull ? null : (object)array;
        }

        // Fills a dictionary via its Add/Contains (or TryAdd/ContainsKey) methods through reflection.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> visited)
        {
            Type keyType = typeof(object);
            Type valueType = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] args = dictionaryType.GetGenericArguments();
                keyType = args[0];
                valueType = args[1];
            }

            object dictionary = Activator.CreateInstance(dictionaryType);
            MethodInfo add = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo contains = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object key = generator.GenerateObject(keyType, visited);
                if (key == null)
                {
                    // Cannot generate a valid key.
                    return null;
                }
                if (!(bool)contains.Invoke(dictionary, new object[] { key }))
                {
                    object value = generator.GenerateObject(valueType, visited);
                    add.Invoke(dictionary, new object[] { key, value });
                }
            }
            return dictionary;
        }

        // Returns the first declared enum value, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array values = Enum.GetValues(enumType);
            return values.Length > 0 ? values.GetValue(0) : null;
        }

        // Builds an IQueryable (generic or not) on top of a generated list/array.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> visited)
        {
            bool isGeneric = queryableType.IsGenericType;
            object source = isGeneric
                ? GenerateCollection(typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()), size, visited)
                : GenerateArray(typeof(object[]), size, visited);
            if (source == null)
            {
                return null;
            }
            if (isGeneric)
            {
                Type enumerableType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryable = typeof(Queryable).GetMethod("AsQueryable", new[] { enumerableType });
                return asQueryable.Invoke(null, new[] { source });
            }
            return Queryable.AsQueryable((IEnumerable)source);
        }

        // Fills any collection exposing an Add method; null when no element could be generated.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> visited)
        {
            Type elementType = collectionType.IsGenericType
                ? collectionType.GetGenericArguments()[0]
                : typeof(object);
            object collection = Activator.CreateInstance(collectionType);
            MethodInfo add = collectionType.GetMethod("Add");
            bool allElementsNull = true;
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = generator.GenerateObject(elementType, visited);
                add.Invoke(collection, new object[] { element });
                allElementsNull &= element == null;
            }
            return allElementsNull ? null : collection;
        }

        // Nullable<T> is represented by a boxed T (or null).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> visited)
        {
            Type underlyingType = nullableType.GetGenericArguments()[0];
            return new ObjectGenerator().GenerateObject(underlyingType, visited);
        }

        // Instantiates a POCO and populates its public settable properties and fields.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> visited)
        {
            object instance;
            if (visited.TryGetValue(type, out instance))
            {
                // Already created: reuse the cached instance (handles circular references).
                return instance;
            }

            if (type.IsValueType)
            {
                instance = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor.
                    return null;
                }
                instance = defaultCtor.Invoke(new object[0]);
            }

            // Register before populating members so self-referencing members resolve to this instance.
            visited.Add(type, instance);
            SetPublicProperties(type, instance, visited);
            SetPublicFields(type, instance, visited);
            return instance;
        }

        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> visited)
        {
            ObjectGenerator generator = new ObjectGenerator();
            foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance))
            {
                if (property.CanWrite)
                {
                    property.SetValue(obj, generator.GenerateObject(property.PropertyType, visited), null);
                }
            }
        }

        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> visited)
        {
            ObjectGenerator generator = new ObjectGenerator();
            foreach (FieldInfo field in type.GetFields(BindingFlags.Public | BindingFlags.Instance))
            {
                field.SetValue(obj, generator.GenerateObject(field.FieldType, visited));
            }
        }

        // Table-driven factory for BCL "simple" types; each factory receives a 1-based call counter.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    { typeof(String), index => String.Format(CultureInfo.CurrentCulture, "sample string {0}", index) },
                    { typeof(TimeSpan), index => TimeSpan.FromTicks(1234567) },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    { typeof(Uri), index => new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)) },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
/* ==================================================================== Copyright (C) 2004-2008 fyiReporting Software, LLC Copyright (C) 2011 Peter Gill <peter@majorsilence.com> This file is part of the fyiReporting RDL project. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, email info@fyireporting.com or visit the website www.fyiReporting.com. */ using System; using System.Xml; using System.Text; namespace fyiReporting.RDL { ///<summary> /// The type (dotted, solid, ...) of border. Expressions for all sides as well as default expression. 
///</summary>
[Serializable]
internal class StyleBorderStyle : ReportLink
{
	// Each side holds an optional (Enum BorderStyle) expression.
	// When a side is not specified, _Default applies; when nothing is specified the style is "none".
	Expression _Default;	// (Enum BorderStyle) Style of the border (unless overridden for a specific side). Default: none
	Expression _Left;		// (Enum BorderStyle) Style of the left border
	Expression _Right;		// (Enum BorderStyle) Style of the right border
	Expression _Top;		// (Enum BorderStyle) Style of the top border
	Expression _Bottom;		// (Enum BorderStyle) Style of the bottom border

	// Builds the border style from its RDL XML definition.
	// Unknown child elements are logged and ignored rather than treated as fatal.
	internal StyleBorderStyle(ReportDefn r, ReportLink p, XmlNode xNode) : base(r, p)
	{
		_Default=null;
		_Left=null;
		_Right=null;
		_Top=null;
		_Bottom=null;
		// Loop thru all the child nodes
		foreach(XmlNode xNodeLoop in xNode.ChildNodes)
		{
			if (xNodeLoop.NodeType != XmlNodeType.Element)
				continue;
			switch (xNodeLoop.Name)
			{
				case "Default":
					_Default = new Expression(r, this, xNodeLoop, ExpressionType.Enum);
					break;
				case "Left":
					_Left = new Expression(r, this, xNodeLoop, ExpressionType.Enum);
					break;
				case "Right":
					_Right = new Expression(r, this, xNodeLoop, ExpressionType.Enum);
					break;
				case "Top":
					_Top = new Expression(r, this, xNodeLoop, ExpressionType.Enum);
					break;
				case "Bottom":
					_Bottom = new Expression(r, this, xNodeLoop, ExpressionType.Enum);
					break;
				default:
					// don't know this element - log it
					OwnerReport.rl.LogError(4, "Unknown BorderStyle element '" + xNodeLoop.Name + "' ignored.");
					break;
			}
		}
	}

	// Handle parsing of function in final pass
	override internal void FinalPass()
	{
		if (_Default != null)
			_Default.FinalPass();
		if (_Left != null)
			_Left.FinalPass();
		if (_Right != null)
			_Right.FinalPass();
		if (_Top != null)
			_Top.FinalPass();
		if (_Bottom != null)
			_Bottom.FinalPass();
		return;
	}

	// Generate a CSS string from the specified styles.
	// bDefaults controls whether "border-style:none;" is emitted when no default expression exists.
	internal string GetCSS(Report rpt, Row row, bool bDefaults)
	{
		StringBuilder sb = new StringBuilder();

		if (_Default != null)
			sb.AppendFormat("border-style:{0};",_Default.EvaluateString(rpt, row));
		else if (bDefaults)
			sb.Append("border-style:none;");

		// Side-specific styles are only emitted when explicitly specified
		if (_Left != null)
			sb.AppendFormat("border-left-style:{0};",_Left.EvaluateString(rpt, row));
		if (_Right != null)
			sb.AppendFormat("border-right-style:{0};",_Right.EvaluateString(rpt, row));
		if (_Top != null)
			sb.AppendFormat("border-top-style:{0};",_Top.EvaluateString(rpt, row));
		if (_Bottom != null)
			sb.AppendFormat("border-bottom-style:{0};",_Bottom.EvaluateString(rpt, row));

		return sb.ToString();
	}

	// True only if every specified expression is constant (unspecified sides don't count against it)
	internal bool IsConstant()
	{
		bool rc = true;

		if (_Default != null)
			rc = _Default.IsConstant();
		if (!rc)
			return false;
		if (_Left != null)
			rc = _Left.IsConstant();
		if (!rc)
			return false;
		if (_Right != null)
			rc = _Right.IsConstant();
		if (!rc)
			return false;
		if (_Top != null)
			rc = _Top.IsConstant();
		if (!rc)
			return false;
		if (_Bottom != null)
			rc = _Bottom.IsConstant();

		return rc;
	}

	// CSS emitted when no StyleBorderStyle element is present at all
	static internal string GetCSSDefaults()
	{
		return "border-style:none;";
	}

	internal Expression Default
	{
		get { return _Default; }
		set { _Default = value; }
	}

	// Absent expression means None; an unrecognized evaluated value falls back to Solid
	internal BorderStyleEnum EvalDefault(Report rpt, Row r)
	{
		if (_Default == null)
			return BorderStyleEnum.None;

		string bs = _Default.EvaluateString(rpt, r);
		return GetBorderStyle(bs, BorderStyleEnum.Solid);
	}

	internal Expression Left
	{
		get { return _Left; }
		set { _Left = value; }
	}

	// Falls back to the Default expression when no left-specific style was given
	internal BorderStyleEnum EvalLeft(Report rpt, Row r)
	{
		if (_Left == null)
			return EvalDefault(rpt, r);

		string bs = _Left.EvaluateString(rpt, r);
		return GetBorderStyle(bs, BorderStyleEnum.Solid);
	}

	internal Expression Right
	{
		get { return _Right; }
		set { _Right = value; }
	}

	// Falls back to the Default expression when no right-specific style was given
	internal BorderStyleEnum EvalRight(Report rpt, Row r)
	{
		if (_Right == null)
			return EvalDefault(rpt, r);

		string bs = _Right.EvaluateString(rpt, r);
		return GetBorderStyle(bs, BorderStyleEnum.Solid);
	}

	internal Expression Top
	{
		get { return _Top; }
		set { _Top = value; }
	}

	// Falls back to the Default expression when no top-specific style was given
	internal BorderStyleEnum EvalTop(Report rpt, Row r)
	{
		if (_Top == null)
			return EvalDefault(rpt, r);

		string bs = _Top.EvaluateString(rpt, r);
		return GetBorderStyle(bs, BorderStyleEnum.Solid);
	}

	internal Expression Bottom
	{
		get { return _Bottom; }
		set { _Bottom = value; }
	}

	// Falls back to the Default expression when no bottom-specific style was given
	internal BorderStyleEnum EvalBottom(Report rpt, Row r)
	{
		if (_Bottom == null)
			return EvalDefault(rpt, r);

		string bs = _Bottom.EvaluateString(rpt, r);
		return GetBorderStyle(bs, BorderStyleEnum.Solid);
	}

	// return the BorderStyleEnum given a particular string value; def is returned for unrecognized values
	static internal BorderStyleEnum GetBorderStyle(string v, BorderStyleEnum def)
	{
		BorderStyleEnum bs;

		switch (v)
		{
			case "None":
				bs = BorderStyleEnum.None;
				break;
			case "Dotted":
				bs = BorderStyleEnum.Dotted;
				break;
			case "Dashed":
				bs = BorderStyleEnum.Dashed;
				break;
			case "Solid":
				bs = BorderStyleEnum.Solid;
				break;
			case "Double":
				bs = BorderStyleEnum.Double;
				break;
			case "Groove":
				bs = BorderStyleEnum.Groove;
				break;
			case "Ridge":
				bs = BorderStyleEnum.Ridge;
				break;
			case "Inset":
				bs = BorderStyleEnum.Inset;
				break;
			case "WindowInset":
				bs = BorderStyleEnum.WindowInset;
				break;
			case "Outset":
				bs = BorderStyleEnum.Outset;
				break;
			default:
				bs = def;
				break;
		}
		return bs;
	}
}

/// <summary>
/// Allowed values for border styles. Note: these may not be actually supported depending
/// on the renderer used.
/// </summary>
public enum BorderStyleEnum
{
	/// <summary>
	/// No border
	/// </summary>
	None,
	/// <summary>
	/// Dotted line border
	/// </summary>
	Dotted,
	/// <summary>
	/// Dashed line border
	/// </summary>
	Dashed,
	/// <summary>
	/// Solid line border
	/// </summary>
	Solid,
	/// <summary>
	/// Double line border
	/// </summary>
	Double,
	/// <summary>
	/// Grooved border
	/// </summary>
	Groove,
	/// <summary>
	/// Ridge border
	/// </summary>
	Ridge,
	/// <summary>
	/// Inset border
	/// </summary>
	Inset,
	/// <summary>
	/// Windows Inset border
	/// </summary>
	WindowInset,
	/// <summary>
	/// Outset border
	/// </summary>
	Outset
}
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // <spec>http://webdata/xml/specs/XslCompiledTransform.xml</spec> //------------------------------------------------------------------------------ using System.CodeDom.Compiler; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.IO; using System.Reflection; using System.Reflection.Emit; using System.Security; using System.Xml.XPath; using System.Xml.Xsl.Qil; using System.Xml.Xsl.Runtime; using System.Xml.Xsl.Xslt; using System.Runtime.Versioning; using System.Collections.Generic; using System.Linq; namespace System.Xml.Xsl { #if ! HIDE_XSL //---------------------------------------------------------------------------------------------------- // Clarification on null values in this API: // stylesheet, stylesheetUri - cannot be null // settings - if null, XsltSettings.Default will be used // stylesheetResolver - if null, XmlNullResolver will be used for includes/imports. // However, if the principal stylesheet is given by its URI, that // URI will be resolved using XmlUrlResolver (for compatibility // with XslTransform and XmlReader). 
// typeBuilder - cannot be null
    // scriptAssemblyPath - can be null only if scripts are disabled
    // compiledStylesheet - cannot be null
    // executeMethod, queryData - cannot be null
    // earlyBoundTypes - null means no script types
    // documentResolver - if null, XmlNullResolver will be used
    // input, inputUri - cannot be null
    // arguments - null means no arguments
    // results, resultsFile - cannot be null
    //----------------------------------------------------------------------------------------------------

    /// <summary>
    /// Compiles an XSLT stylesheet (via the Qil intermediate representation) into an
    /// executable <c>XmlILCommand</c> and runs it against XML input. Stylesheets can be
    /// loaded from a reader/navigable/URI, from a pre-compiled stylesheet Type, or from
    /// a raw execute method plus serialized query data. On uap builds the execution
    /// paths throw PlatformNotSupportedException (see the #if !uap regions).
    /// </summary>
    public sealed class XslCompiledTransform
    {
        // Reader settings used when creating XmlReader from inputUri.
        // Declared null and assigned in the static constructor below.
        private static readonly XmlReaderSettings s_readerSettings = null;

        // Version for GeneratedCodeAttribute; compared against the version stamped
        // on pre-compiled stylesheet types in Load(Type).
        private readonly string _version = typeof(XslCompiledTransform).Assembly.GetName().Version.ToString();

        static XslCompiledTransform()
        {
            s_readerSettings = new XmlReaderSettings();
        }

        // Options of compilation
        private bool _enableDebug = false;

        // Results of compilation
        private CompilerErrorCollection _compilerErrorColl = null;
        private XmlWriterSettings _outputSettings = null;
        private QilExpression _qil = null;
#if !uap
        // Executable command for the compiled stylesheet
        private XmlILCommand _command = null;
#endif

        public XslCompiledTransform() { }

        /// <summary>
        /// <paramref name="enableDebug"/> is forwarded to the XSLT compiler in
        /// CompileXsltToQil to produce debuggable output.
        /// </summary>
        public XslCompiledTransform(bool enableDebug)
        {
            _enableDebug = enableDebug;
        }

        /// <summary>
        /// This function is called on every recompilation to discard all previous results
        /// </summary>
        private void Reset()
        {
            _compilerErrorColl = null;
            _outputSettings = null;
            _qil = null;
#if !uap
            _command = null;
#endif
        }

        /// <summary>
        /// Writer settings specified in the stylesheet
        /// </summary>
        public XmlWriterSettings OutputSettings
        {
            get { return _outputSettings; }
        }

        //------------------------------------------------
        // Load methods
        //------------------------------------------------

        // SxS: This method does not take any resource name and does not expose any resources to the caller.
        // It's OK to suppress the SxS warning.
        public void Load(XmlReader stylesheet)
        {
            Reset();
            LoadInternal(stylesheet, XsltSettings.Default, CreateDefaultResolver());
        }

        // SxS: This method does not take any resource name and does not expose any resources to the caller.
        // It's OK to suppress the SxS warning.
        public void Load(XmlReader stylesheet, XsltSettings settings, XmlResolver stylesheetResolver)
        {
            Reset();
            LoadInternal(stylesheet, settings, stylesheetResolver);
        }

        // SxS: This method does not take any resource name and does not expose any resources to the caller.
        // It's OK to suppress the SxS warning.
        public void Load(IXPathNavigable stylesheet)
        {
            Reset();
            LoadInternal(stylesheet, XsltSettings.Default, CreateDefaultResolver());
        }

        // SxS: This method does not take any resource name and does not expose any resources to the caller.
        // It's OK to suppress the SxS warning.
        public void Load(IXPathNavigable stylesheet, XsltSettings settings, XmlResolver stylesheetResolver)
        {
            Reset();
            LoadInternal(stylesheet, settings, stylesheetResolver);
        }

        public void Load(string stylesheetUri)
        {
            Reset();
            if (stylesheetUri == null)
            {
                throw new ArgumentNullException(nameof(stylesheetUri));
            }
            LoadInternal(stylesheetUri, XsltSettings.Default, CreateDefaultResolver());
        }

        public void Load(string stylesheetUri, XsltSettings settings, XmlResolver stylesheetResolver)
        {
            Reset();
            if (stylesheetUri == null)
            {
                throw new ArgumentNullException(nameof(stylesheetUri));
            }
            LoadInternal(stylesheetUri, settings, stylesheetResolver);
        }

        /// <summary>
        /// Shared Load implementation: compiles the stylesheet to Qil, throws an
        /// XslLoadException for the first non-warning compile error, and (unless
        /// settings.CheckOnly) generates MSIL. Returns the full error collection.
        /// </summary>
        private CompilerErrorCollection LoadInternal(object stylesheet, XsltSettings settings, XmlResolver stylesheetResolver)
        {
            if (stylesheet == null)
            {
                throw new ArgumentNullException(nameof(stylesheet));
            }
            if (settings == null)
            {
                settings = XsltSettings.Default;
            }
            CompileXsltToQil(stylesheet, settings, stylesheetResolver);
            CompilerError error = GetFirstError();
            if (error != null)
            {
                throw new XslLoadException(error);
            }
            if (!settings.CheckOnly)
            {
                CompileQilToMsil(settings);
            }
            return _compilerErrorColl;
        }

        // Front end: XSLT source -> Qil tree (errors collected in _compilerErrorColl).
        private void CompileXsltToQil(object stylesheet, XsltSettings settings, XmlResolver stylesheetResolver)
        {
            _compilerErrorColl = new Compiler(settings, _enableDebug, null).Compile(stylesheet, stylesheetResolver, out _qil);
        }

        /// <summary>
        /// Returns the first compiler error except warnings
        /// </summary>
        private CompilerError GetFirstError()
        {
            foreach (CompilerError error in _compilerErrorColl)
            {
                if (!error.IsWarning)
                {
                    return error;
                }
            }
            return null;
        }

        // Back end: Qil tree -> executable command; the Qil tree is released afterwards.
        private void CompileQilToMsil(XsltSettings settings)
        {
#if !uap
            _command = new XmlILGenerator().Generate(_qil, /*typeBuilder:*/null);
            _outputSettings = _command.StaticData.DefaultWriterSettings;
            _qil = null;
#else
            throw new PlatformNotSupportedException(SR.Xslt_NotSupported);
#endif
        }

        //------------------------------------------------
        // Load compiled stylesheet from a Type
        //------------------------------------------------

        public void Load(Type compiledStylesheet)
        {
#if !uap
            Reset();
            if (compiledStylesheet == null)
                throw new ArgumentNullException(nameof(compiledStylesheet));

            object[] customAttrs = compiledStylesheet.GetCustomAttributes(typeof(GeneratedCodeAttribute), /*inherit:*/false);
            GeneratedCodeAttribute generatedCodeAttr = customAttrs.Length > 0 ? (GeneratedCodeAttribute)customAttrs[0] : null;

            // If GeneratedCodeAttribute is not there, it is not a compiled stylesheet class
            if (generatedCodeAttr != null && generatedCodeAttr.Tool == typeof(XslCompiledTransform).FullName)
            {
                if (new Version(_version).CompareTo(new Version(generatedCodeAttr.Version)) < 0)
                {
                    throw new ArgumentException(SR.Format(SR.Xslt_IncompatibleCompiledStylesheetVersion, generatedCodeAttr.Version, _version), nameof(compiledStylesheet));
                }

                FieldInfo fldData = compiledStylesheet.GetField(XmlQueryStaticData.DataFieldName, BindingFlags.Static | BindingFlags.NonPublic);
                FieldInfo fldTypes = compiledStylesheet.GetField(XmlQueryStaticData.TypesFieldName, BindingFlags.Static | BindingFlags.NonPublic);

                // If private fields are not there, it is not a compiled stylesheet class
                if (fldData != null && fldTypes != null)
                {
                    // Retrieve query static data from the type
                    byte[] queryData = fldData.GetValue(/*this:*/null) as byte[];

                    if (queryData != null)
                    {
                        MethodInfo executeMethod = compiledStylesheet.GetMethod("Execute", BindingFlags.Static | BindingFlags.NonPublic);
                        Type[] earlyBoundTypes = (Type[])fldTypes.GetValue(/*this:*/null);

                        // Load the stylesheet
                        Load(executeMethod, queryData, earlyBoundTypes);
                        return;
                    }
                }
            }

            // Throw an exception if the command was not loaded
            if (_command == null)
                throw new ArgumentException(SR.Format(SR.Xslt_NotCompiledStylesheet, compiledStylesheet.FullName), nameof(compiledStylesheet));
#else
            throw new PlatformNotSupportedException(SR.Xslt_NotSupported);
#endif
        }

        public void Load(MethodInfo executeMethod, byte[] queryData, Type[] earlyBoundTypes)
        {
#if !uap
            Reset();

            if (executeMethod == null)
                throw new ArgumentNullException(nameof(executeMethod));

            if (queryData == null)
                throw new ArgumentNullException(nameof(queryData));

            // DynamicMethod needs its own CreateDelegate overload; other MethodInfos
            // go through the general MethodInfo.CreateDelegate path.
            DynamicMethod dm = executeMethod as DynamicMethod;
            Delegate delExec = (dm != null) ? dm.CreateDelegate(typeof(ExecuteDelegate)) : executeMethod.CreateDelegate(typeof(ExecuteDelegate));
            _command = new XmlILCommand((ExecuteDelegate)delExec, new XmlQueryStaticData(queryData, earlyBoundTypes));
            _outputSettings = _command.StaticData.DefaultWriterSettings;
#else
            throw new PlatformNotSupportedException(SR.Xslt_NotSupported);
#endif
        }

        //------------------------------------------------
        // Transform methods which take an IXPathNavigable
        //------------------------------------------------

        public void Transform(IXPathNavigable input, XmlWriter results)
        {
            CheckArguments(input, results);
            Transform(input, (XsltArgumentList)null, results, CreateDefaultResolver());
        }

        public void Transform(IXPathNavigable input, XsltArgumentList arguments, XmlWriter results)
        {
            CheckArguments(input, results);
            Transform(input, arguments, results, CreateDefaultResolver());
        }

        public void Transform(IXPathNavigable input, XsltArgumentList arguments, TextWriter results)
        {
            CheckArguments(input, results);
            using (XmlWriter writer = XmlWriter.Create(results, OutputSettings))
            {
                Transform(input, arguments, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        public void Transform(IXPathNavigable input, XsltArgumentList arguments, Stream results)
        {
            CheckArguments(input, results);
            using (XmlWriter writer = XmlWriter.Create(results, OutputSettings))
            {
                Transform(input, arguments, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        //------------------------------------------------
        // Transform methods which take an XmlReader
        //------------------------------------------------

        public void Transform(XmlReader input, XmlWriter results)
        {
            CheckArguments(input, results);
            Transform(input, (XsltArgumentList)null, results, CreateDefaultResolver());
        }

        public void Transform(XmlReader input, XsltArgumentList arguments, XmlWriter results)
        {
            CheckArguments(input, results);
            Transform(input, arguments, results, CreateDefaultResolver());
        }

        public void Transform(XmlReader input, XsltArgumentList arguments, TextWriter results)
        {
            CheckArguments(input, results);
            using (XmlWriter writer = XmlWriter.Create(results, OutputSettings))
            {
                Transform(input, arguments, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        public void Transform(XmlReader input, XsltArgumentList arguments, Stream results)
        {
            CheckArguments(input, results);
            using (XmlWriter writer = XmlWriter.Create(results, OutputSettings))
            {
                Transform(input, arguments, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        //------------------------------------------------
        // Transform methods which take a uri
        // SxS Note: Annotations should propagate to the caller to have him either check that
        // the passed URIs are SxS safe or decide that they don't have to be SxS safe and
        // suppress the message.
        //------------------------------------------------

        [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings")]
        public void Transform(string inputUri, XmlWriter results)
        {
            CheckArguments(inputUri, results);
            using (XmlReader reader = XmlReader.Create(inputUri, s_readerSettings))
            {
                Transform(reader, (XsltArgumentList)null, results, CreateDefaultResolver());
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings")]
        public void Transform(string inputUri, XsltArgumentList arguments, XmlWriter results)
        {
            CheckArguments(inputUri, results);
            using (XmlReader reader = XmlReader.Create(inputUri, s_readerSettings))
            {
                Transform(reader, arguments, results, CreateDefaultResolver());
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings")]
        public void Transform(string inputUri, XsltArgumentList arguments, TextWriter results)
        {
            CheckArguments(inputUri, results);
            using (XmlReader reader = XmlReader.Create(inputUri, s_readerSettings))
            using (XmlWriter writer = XmlWriter.Create(results, OutputSettings))
            {
                Transform(reader, arguments, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings")]
        public void Transform(string inputUri, XsltArgumentList arguments, Stream results)
        {
            CheckArguments(inputUri, results);
            using (XmlReader reader = XmlReader.Create(inputUri, s_readerSettings))
            using (XmlWriter writer = XmlWriter.Create(results, OutputSettings))
            {
                Transform(reader, arguments, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings")]
        public void Transform(string inputUri, string resultsFile)
        {
            if (inputUri == null)
                throw new ArgumentNullException(nameof(inputUri));

            if (resultsFile == null)
                throw new ArgumentNullException(nameof(resultsFile));

            // SQLBUDT 276415: Prevent wiping out the content of the input file if the output file is the same
            using (XmlReader reader = XmlReader.Create(inputUri, s_readerSettings))
            using (XmlWriter writer = XmlWriter.Create(resultsFile, OutputSettings))
            {
                Transform(reader, (XsltArgumentList)null, writer, CreateDefaultResolver());
                writer.Close();
            }
        }

        //------------------------------------------------
        // Main Transform overloads
        //------------------------------------------------

        // SxS: This method does not take any resource name and does not expose any resources to the caller.
        // It's OK to suppress the SxS warning.
        public void Transform(XmlReader input, XsltArgumentList arguments, XmlWriter results, XmlResolver documentResolver)
        {
#if !uap
            CheckArguments(input, results);
            CheckCommand();
            _command.Execute((object)input, documentResolver, arguments, results);
#else
            throw new PlatformNotSupportedException(SR.Xslt_NotSupported);
#endif
        }

        // SxS: This method does not take any resource name and does not expose any resources to the caller.
        // It's OK to suppress the SxS warning.
        public void Transform(IXPathNavigable input, XsltArgumentList arguments, XmlWriter results, XmlResolver documentResolver)
        {
#if !uap
            CheckArguments(input, results);
            CheckCommand();
            _command.Execute((object)input.CreateNavigator(), documentResolver, arguments, results);
#else
            throw new PlatformNotSupportedException(SR.Xslt_NotSupported);
#endif
        }

        //------------------------------------------------
        // Helper methods
        //------------------------------------------------

        private static void CheckArguments(object input, object results)
        {
            if (input == null)
                throw new ArgumentNullException(nameof(input));

            if (results == null)
                throw new ArgumentNullException(nameof(results));
        }

        private static void CheckArguments(string inputUri, object results)
        {
            if (inputUri == null)
                throw new ArgumentNullException(nameof(inputUri));

            if (results == null)
                throw new ArgumentNullException(nameof(results));
        }

        // Throws unless a stylesheet has been loaded (always throws on uap builds).
        private void CheckCommand()
        {
#if !uap
            if (_command == null)
            {
                throw new InvalidOperationException(SR.Xslt_NoStylesheetLoaded);
            }
#else
            throw new InvalidOperationException(SR.Xslt_NoStylesheetLoaded);
#endif
        }

        // Resolver used for includes/imports and document() when the caller did not
        // supply one; gated by the LocalAppContextSwitches.AllowDefaultResolver switch.
        private static XmlResolver CreateDefaultResolver()
        {
            if (LocalAppContextSwitches.AllowDefaultResolver)
            {
                return new XmlUrlResolver();
            }
            else
            {
                return XmlNullResolver.Singleton;
            }
        }

        //------------------------------------------------
        // Test suites entry points
        //------------------------------------------------

        private QilExpression TestCompile(object stylesheet, XsltSettings settings, XmlResolver stylesheetResolver)
        {
            Reset();
            CompileXsltToQil(stylesheet, settings, stylesheetResolver);
            return _qil;
        }

        private void TestGenerate(XsltSettings settings)
        {
            Debug.Assert(_qil != null, "You must compile to Qil first");
            CompileQilToMsil(settings);
        }

#if !uap
        private void Transform(string inputUri, XsltArgumentList arguments, XmlWriter results, XmlResolver documentResolver)
        {
            _command.Execute(inputUri, documentResolver, arguments, results);
        }
#endif
    }
#endif // ! HIDE_XSL
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.2.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Search.Fluent.Models
{
    using Microsoft.Azure;
    using Microsoft.Azure.Management;
    using Microsoft.Azure.Management.Search;
    using Microsoft.Azure.Management.Search.Fluent;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;

    /// <summary>
    /// Describes an Azure Search service and its current state.
    /// </summary>
    [Rest.Serialization.JsonTransformation]
    public partial class SearchServiceInner : Microsoft.Azure.Management.ResourceManager.Fluent.Resource
    {
        /// <summary>
        /// Initializes a new instance of the SearchServiceInner class.
        /// </summary>
        public SearchServiceInner()
        {
            CustomInit();
        }

        /// <summary>
        /// Initializes a new instance of the SearchServiceInner class.
        /// </summary>
        /// <param name="sku">The SKU of the Search Service, which determines
        /// price tier and capacity limits.</param>
        /// <param name="location">Resource location (forwarded to the base
        /// Resource).</param>
        /// <param name="id">Resource id (forwarded to the base Resource).</param>
        /// <param name="name">Resource name (forwarded to the base Resource).</param>
        /// <param name="type">Resource type (forwarded to the base Resource).</param>
        /// <param name="tags">Resource tags (forwarded to the base Resource).</param>
        /// <param name="replicaCount">The number of replicas in the Search
        /// service. If specified, it must be a value between 1 and 12
        /// inclusive for standard SKUs or between 1 and 3 inclusive for basic
        /// SKU.</param>
        /// <param name="partitionCount">The number of partitions in the Search
        /// service; if specified, it can be 1, 2, 3, 4, 6, or 12. Values
        /// greater than 1 are only valid for standard SKUs. For 'standard3'
        /// services with hostingMode set to 'highDensity', the allowed values
        /// are between 1 and 3.</param>
        /// <param name="hostingMode">Applicable only for the standard3 SKU.
        /// You can set this property to enable up to 3 high density partitions
        /// that allow up to 1000 indexes, which is much higher than the
        /// maximum indexes allowed for any other SKU. For the standard3 SKU,
        /// the value is either 'default' or 'highDensity'. For all other SKUs,
        /// this value must be 'default'. Possible values include: 'default',
        /// 'highDensity'</param>
        /// <param name="status">The status of the Search service. Possible
        /// values include: 'running': The Search service is running and no
        /// provisioning operations are underway. 'provisioning': The Search
        /// service is being provisioned or scaled up or down. 'deleting': The
        /// Search service is being deleted. 'degraded': The Search service is
        /// degraded. This can occur when the underlying search units are not
        /// healthy. The Search service is most likely operational, but
        /// performance might be slow and some requests might be dropped.
        /// 'disabled': The Search service is disabled. In this state, the
        /// service will reject all API requests. 'error': The Search service
        /// is in an error state. If your service is in the degraded, disabled,
        /// or error states, it means the Azure Search team is actively
        /// investigating the underlying issue. Dedicated services in these
        /// states are still chargeable based on the number of search units
        /// provisioned. Possible values include: 'running', 'provisioning',
        /// 'deleting', 'degraded', 'disabled', 'error'</param>
        /// <param name="statusDetails">The details of the Search service
        /// status.</param>
        /// <param name="provisioningState">The state of the last provisioning
        /// operation performed on the Search service. Provisioning is an
        /// intermediate state that occurs while service capacity is being
        /// established. After capacity is set up, provisioningState changes to
        /// either 'succeeded' or 'failed'. Client applications can poll
        /// provisioning status (the recommended polling interval is from 30
        /// seconds to one minute) by using the Get Search Service operation to
        /// see when an operation is completed. If you are using the free
        /// service, this value tends to come back as 'succeeded' directly in
        /// the call to Create Search service. This is because the free service
        /// uses capacity that is already set up. Possible values include:
        /// 'succeeded', 'provisioning', 'failed'</param>
        public SearchServiceInner(Sku sku, string location = default(string), string id = default(string), string name = default(string), string type = default(string), IDictionary<string, string> tags = default(IDictionary<string, string>), int? replicaCount = default(int?), int? partitionCount = default(int?), HostingMode? hostingMode = default(HostingMode?), SearchServiceStatus? status = default(SearchServiceStatus?), string statusDetails = default(string), ProvisioningState? provisioningState = default(ProvisioningState?))
            : base(location, id, name, type, tags)
        {
            ReplicaCount = replicaCount;
            PartitionCount = partitionCount;
            HostingMode = hostingMode;
            Status = status;
            StatusDetails = statusDetails;
            ProvisioningState = provisioningState;
            Sku = sku;
            CustomInit();
        }

        /// <summary>
        /// An initialization method that performs custom operations like setting defaults
        /// </summary>
        partial void CustomInit();

        /// <summary>
        /// Gets or sets the number of replicas in the Search service. If
        /// specified, it must be a value between 1 and 12 inclusive for
        /// standard SKUs or between 1 and 3 inclusive for basic SKU.
        /// </summary>
        [JsonProperty(PropertyName = "properties.replicaCount")]
        public int? ReplicaCount { get; set; }

        /// <summary>
        /// Gets or sets the number of partitions in the Search service; if
        /// specified, it can be 1, 2, 3, 4, 6, or 12. Values greater than 1
        /// are only valid for standard SKUs. For 'standard3' services with
        /// hostingMode set to 'highDensity', the allowed values are between 1
        /// and 3.
        /// </summary>
        [JsonProperty(PropertyName = "properties.partitionCount")]
        public int? PartitionCount { get; set; }

        /// <summary>
        /// Gets or sets applicable only for the standard3 SKU. You can set
        /// this property to enable up to 3 high density partitions that allow
        /// up to 1000 indexes, which is much higher than the maximum indexes
        /// allowed for any other SKU. For the standard3 SKU, the value is
        /// either 'default' or 'highDensity'. For all other SKUs, this value
        /// must be 'default'. Possible values include: 'default',
        /// 'highDensity'
        /// </summary>
        [JsonProperty(PropertyName = "properties.hostingMode")]
        public HostingMode? HostingMode { get; set; }

        /// <summary>
        /// Gets the status of the Search service. Possible values include:
        /// 'running': The Search service is running and no provisioning
        /// operations are underway. 'provisioning': The Search service is
        /// being provisioned or scaled up or down. 'deleting': The Search
        /// service is being deleted. 'degraded': The Search service is
        /// degraded. This can occur when the underlying search units are not
        /// healthy. The Search service is most likely operational, but
        /// performance might be slow and some requests might be dropped.
        /// 'disabled': The Search service is disabled. In this state, the
        /// service will reject all API requests. 'error': The Search service
        /// is in an error state. If your service is in the degraded, disabled,
        /// or error states, it means the Azure Search team is actively
        /// investigating the underlying issue. Dedicated services in these
        /// states are still chargeable based on the number of search units
        /// provisioned. Possible values include: 'running', 'provisioning',
        /// 'deleting', 'degraded', 'disabled', 'error'
        /// </summary>
        [JsonProperty(PropertyName = "properties.status")]
        public SearchServiceStatus? Status { get; private set; }

        /// <summary>
        /// Gets the details of the Search service status.
        /// </summary>
        [JsonProperty(PropertyName = "properties.statusDetails")]
        public string StatusDetails { get; private set; }

        /// <summary>
        /// Gets the state of the last provisioning operation performed on the
        /// Search service. Provisioning is an intermediate state that occurs
        /// while service capacity is being established. After capacity is set
        /// up, provisioningState changes to either 'succeeded' or 'failed'.
        /// Client applications can poll provisioning status (the recommended
        /// polling interval is from 30 seconds to one minute) by using the Get
        /// Search Service operation to see when an operation is completed. If
        /// you are using the free service, this value tends to come back as
        /// 'succeeded' directly in the call to Create Search service. This is
        /// because the free service uses capacity that is already set up.
        /// Possible values include: 'succeeded', 'provisioning', 'failed'
        /// </summary>
        [JsonProperty(PropertyName = "properties.provisioningState")]
        public ProvisioningState? ProvisioningState { get; private set; }

        /// <summary>
        /// Gets or sets the SKU of the Search Service, which determines price
        /// tier and capacity limits.
        /// </summary>
        [JsonProperty(PropertyName = "sku")]
        public Sku Sku { get; set; }

        /// <summary>
        /// Validate the object.
        /// </summary>
        /// <exception cref="ValidationException">
        /// Thrown if validation fails
        /// </exception>
        public override void Validate()
        {
            if (Sku == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "Sku");
            }
            // Lifted nullable comparisons: when ReplicaCount/PartitionCount are null,
            // every check below evaluates to false, so unset values pass validation
            // (the service applies its own defaults).
            if (ReplicaCount > 12)
            {
                throw new ValidationException(ValidationRules.InclusiveMaximum, "ReplicaCount", 12);
            }
            if (ReplicaCount < 1)
            {
                throw new ValidationException(ValidationRules.InclusiveMinimum, "ReplicaCount", 1);
            }
            // NOTE(review): the documented set for PartitionCount is {1,2,3,4,6,12},
            // but only the 1..12 range is enforced here; presumably the service
            // rejects the other values server-side — confirm against the API.
            if (PartitionCount > 12)
            {
                throw new ValidationException(ValidationRules.InclusiveMaximum, "PartitionCount", 12);
            }
            if (PartitionCount < 1)
            {
                throw new ValidationException(ValidationRules.InclusiveMinimum, "PartitionCount", 1);
            }
        }
    }
}
using FluentNHibernate.MappingModel;
using FluentNHibernate.MappingModel.Collections;
using FluentNHibernate.MappingModel.Output;
using FluentNHibernate.Testing.Testing;
using NUnit.Framework;

namespace FluentNHibernate.Testing.MappingModel.Output
{
    /// <summary>
    /// Verifies that the XML writer for CollectionMapping emits each mapped
    /// attribute (access, cascade, table, ...) and each child element
    /// (key, one-to-many, index, cache, element) into the generated hbm XML.
    /// </summary>
    [TestFixture]
    public class XmlMapWriterTester
    {
        private IXmlWriter<CollectionMapping> writer;

        // Fresh writer per test, resolved from the production writer container
        // so the tests exercise the real wiring.
        [SetUp]
        public void GetWriterFromContainer()
        {
            var container = new XmlWriterContainer();
            writer = container.Resolve<IXmlWriter<CollectionMapping>>();
        }

        [Test]
        public void ShouldWriteAccessAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Access, "acc").MapsToAttribute("access");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteBatchSizeAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.BatchSize, 10).MapsToAttribute("batch-size");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteCascadeAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Cascade, "all").MapsToAttribute("cascade");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteCheckAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Check, "ck").MapsToAttribute("check");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteCollectionTypeAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.CollectionType, new TypeReference("type")).MapsToAttribute("collection-type");

            testHelper.VerifyAll(writer);
        }

        // An empty TypeReference must be suppressed rather than written as "".
        [Test]
        public void ShouldNotWriteCollectionTypeWhenEmpty()
        {
            var mapping = CollectionMapping.Map();
            mapping.Set(x => x.CollectionType, Layer.Defaults, TypeReference.Empty);
            writer.VerifyXml(mapping)
                .DoesntHaveAttribute("collection-type");
        }

        [Test]
        public void ShouldWriteFetchAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Fetch, "fetch").MapsToAttribute("fetch");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteGenericAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Generic, true).MapsToAttribute("generic");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteInverseAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Inverse, true).MapsToAttribute("inverse");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteLazyAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Lazy, Lazy.True).MapsToAttribute("lazy");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteNameAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Name, "name").MapsToAttribute("name");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteOptimisticLockAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.OptimisticLock, true).MapsToAttribute("optimistic-lock");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteOrderByAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.OrderBy, "ord").MapsToAttribute("order-by");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWritePersisterAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Persister, new TypeReference(typeof(string))).MapsToAttribute("persister");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteSchemaAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Schema, "dbo").MapsToAttribute("schema");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteTableAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.TableName, "table").MapsToAttribute("table");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteWhereAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Where, "x = 1").MapsToAttribute("where");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteSortAttribute()
        {
            var testHelper = Helper();

            testHelper.Check(x => x.Sort, "asc").MapsToAttribute("sort");

            testHelper.VerifyAll(writer);
        }

        [Test]
        public void ShouldWriteKey()
        {
            var mapping = CollectionMapping.Map();
            mapping.Set(x => x.Key, Layer.Defaults, new KeyMapping());
            writer.VerifyXml(mapping)
                .Element("key").Exists();
        }

        [Test]
        public void ShouldWriteRelationshipElement()
        {
            var mapping = CollectionMapping.Map();
            mapping.Set(x => x.Relationship, Layer.Defaults, new OneToManyMapping());
            writer.VerifyXml(mapping)
                .Element("one-to-many").Exists();
        }

        [Test]
        public void ShouldWriteIndexElement()
        {
            var mapping = CollectionMapping.Map();
            mapping.Set(x => x.Index, Layer.Defaults, new IndexMapping());
            writer.VerifyXml(mapping)
                .Element("index").Exists();
        }

        [Test]
        public void ShouldWriteCacheElement()
        {
            var mapping = CollectionMapping.Map();
            mapping.Set(x => x.Cache, Layer.Defaults, new CacheMapping());
            writer.VerifyXml(mapping)
                .Element("cache").Exists();
        }

        [Test]
        public void ShouldWriteElement()
        {
            var mapping = CollectionMapping.Map();
            mapping.Set(x => x.Element, Layer.Defaults, new ElementMapping());
            writer.VerifyXml(mapping)
                .Element("element").Exists();
        }

        // Builds a fresh attribute-checking helper bound to a <map> collection mapping.
        static XmlWriterTestHelper<CollectionMapping> Helper()
        {
            var helper = new XmlWriterTestHelper<CollectionMapping>();
            helper.CreateInstance(CollectionMapping.Map);
            return helper;
        }
    }
}
namespace MobileApp.Core.Services.HttpService
{
    #region

    using System;
    using System.Collections.Generic;
    using Cirrious.CrossCore;
    using MobileApp.Core.Converters;
    using MobileApp.Core.Models;
    using MobileApp.Core.Properties;
    using MobileApp.Core.Services.UtilityService;
    using MobileApp.Core.Services.UtilityService.Logger;
    using Newtonsoft.Json;

    #endregion

    /// <summary>
    /// The Mediation Server's API provider. Wraps each API call with duration
    /// logging and transparently re-authenticates once on an HTTP 401 response.
    /// </summary>
    public class ApiInteraction
    {
        #region Static Fields

        // Gate used to serialize all outgoing requests (see Request()).
        private static readonly object IsBusy = new object();

        #endregion

        #region Fields

        private readonly Action<Exception> externalError;

        private readonly IDictionary<string, object> externalParams;

        private readonly Action<object> externalSuccess;

        private readonly ApiMethodType externalType;

        // Captured at construction; used to log the call duration.
        private readonly DateTime timer = DateTime.Now;

        #endregion

        #region Constructors and Destructors

        private ApiInteraction(ApiMethodType type, Action<object> success = null, Action<Exception> error = null, IDictionary<string, object> parametrs = null)
        {
            // Both wrappers log first and then forward to the caller's callback.
            // FIX: 'success' and 'error' are optional (default null); the original
            // invoked them unconditionally and crashed with a NullReferenceException
            // whenever a caller omitted one.
            this.externalSuccess = o =>
                {
                    Mvx.Resolve<ILogger>().Debug(string.Format("API {0}: Ok; duration: {2} param: {1}", type, parametrs, (DateTime.Now - timer).TotalMilliseconds), this);
                    if (success != null)
                    {
                        success(o);
                    }
                };
            this.externalError = o =>
                {
                    Mvx.Resolve<ILogger>().Debug(string.Format("API {0}: Error; duration: {2} param: {1}; Error: {3}", type, parametrs, (DateTime.Now - timer).TotalMilliseconds, o), this);
                    if (error != null)
                    {
                        error(o);
                    }
                };
            this.externalParams = parametrs;
            this.externalType = type;
        }

        #endregion

        #region Public Properties

        // Credentials used for the automatic re-authentication on 401.
        public static CredentialsModel Authentication { private get; set; }

        #endregion

        #region Public Methods and Operators

        /// <summary>
        /// Create request to server
        /// </summary>
        /// <param name="type">
        /// Type of operation
        /// </param>
        /// <param name="parameters">
        /// Some parameters needed by the request
        /// </param>
        /// <param name="success">
        /// Success callback (optional)
        /// </param>
        /// <param name="error">
        /// InternalError callback (optional)
        /// </param>
        /// <returns>
        /// The <see cref="ApiInteraction" />.
        /// </returns>
        public static ApiInteraction Create(ApiMethodType type, IDictionary<string, object> parameters, Action<object> success = null, Action<Exception> error = null)
        {
            var item = new ApiInteraction(type, success, error, parameters);
            Request(error, item, parameters);
            return item;
        }

        /// <summary>
        /// Create request to server, deriving the parameter dictionary from
        /// the public properties of <paramref name="data"/>.
        /// </summary>
        /// <param name="type">
        /// Type of operation
        /// </param>
        /// <param name="success">
        /// Success callback (optional)
        /// </param>
        /// <param name="error">
        /// InternalError callback (optional)
        /// </param>
        /// <param name="data">Object whose properties become request parameters.</param>
        /// <returns>
        /// The <see cref="ApiInteraction" />.
        /// </returns>
        public static ApiInteraction Create(ApiMethodType type, Action<object> success = null, Action<Exception> error = null, object data = null)
        {
            // NOTE(review): behavior when 'data' is null depends on the
            // ToAbstractPropertyDictionary extension — confirm it tolerates null.
            var parameters = data.ToAbstractPropertyDictionary();
            var item = new ApiInteraction(type, success, error, parameters);
            Request(error, item, parameters);
            return item;
        }

        /// <summary>
        /// Dispatches a "get collection" request for the model <paramref name="type"/>.
        /// Returns null when the type is unknown or null.
        /// </summary>
        public static ApiInteraction Get(Action<object> success, Action<Exception> error = null, Type type = null)
        {
            return GetApiService(success, error, type);
        }

        #endregion

        #region Methods

        // Maps a model type name to the matching API method and fires the request.
        private static ApiInteraction GetApiService(Action<object> success, Action<Exception> error, Type type, IDictionary<string, object> parameters = null)
        {
            // FIX: 'type' defaults to null in Get(); the original dereferenced
            // type.Name and threw NullReferenceException. Treat null as unknown.
            if (type == null)
            {
                return null;
            }

            ApiInteraction item;
            switch (type.Name)
            {
                case "Contact":
                    item = new ApiInteraction(ApiMethodType.GetContacts, success, error);
                    break;
                case "Device":
                    // With parameters this is a state update, otherwise a plain fetch.
                    item = parameters != null
                               ? new ApiInteraction(ApiMethodType.UpdateDeviceState, success, error, parameters)
                               : new ApiInteraction(ApiMethodType.GetDevices, success, error);
                    break;
                case "Queue":
                    item = new ApiInteraction(ApiMethodType.GetQueues, success, error);
                    break;
                case "Number":
                    item = new ApiInteraction(ApiMethodType.GetNumbers, success, error);
                    break;
                case "Voicemail":
                    item = new ApiInteraction(ApiMethodType.GetVoicemails, success, error);
                    break;
                default:
                    return null;
            }

            Request(error, item);
            return item;
        }

        // Serializes all requests through the IsBusy gate. Media requests skip the
        // 401-retry wrapper (InternalSuccess) and go straight to the caller.
        private static void Request(Action<Exception> error, ApiInteraction item, IDictionary<string, object> parameters = null)
        {
            lock (IsBusy)
            {
                var apiMethod = ApiMethodsCollections.GetMethod(item.externalType);
                // NOTE(review): the raw 'error' (not item.externalError) is passed
                // here, so direct request failures bypass the error logging — confirm
                // whether that is intentional.
                apiMethod.Request(apiMethod.MediaRequest ? item.externalSuccess : item.InternalSuccess, error, parameters);
            }
        }

        // Success hook that inspects the server Responce, re-authenticates once on
        // error code 401 and replays the original request, otherwise forwards.
        private void InternalSuccess(object obj)
        {
            try
            {
                // 'obj' may already be a Responce, or a JSON payload to deserialize.
                // A null obj throws here and is routed to externalError below.
                var responce = obj as Responce ?? JsonConvert.DeserializeObject<Responce>(obj.ToString());
                if (responce.Result)
                {
                    this.externalSuccess(obj);
                }
                else
                {
                    if (responce.ErrorCode == 401 && Authentication.IsValid())
                    {
                        Mvx.Resolve<ILogger>().Debug(string.Format("{0} - {1} - 401 Session Timeout. Reconnecting... ", this.GetType().Name, this.externalType), this);
                        ApiMethodsCollections.GetMethod(ApiMethodType.GetAuth).Request(
                            a =>
                                {
                                    Responce resp;
                                    try
                                    {
                                        resp = JsonConvert.DeserializeObject<Responce>(a.ToString());
                                    }
                                    catch (Exception e)
                                    {
                                        Mvx.Resolve<ILogger>().Error(e, this);
                                        return;
                                    }

                                    if (resp.Result)
                                    {
                                        // Re-authenticated: replay the original call.
                                        ApiMethodsCollections.GetMethod(this.externalType).Request(this.InternalSuccess, this.externalError, this.externalParams);
                                    }
                                    else
                                    {
                                        // FIX: dropped a no-op string.Format(...) with no arguments.
                                        Mvx.Resolve<ILogger>().Debug("Reconnection unsuccessful", this);
                                        Mvx.Resolve<INavigateMyselfService>().Breakout("Error authorization");
                                    }
                                },
                            exception => Mvx.Resolve<INavigateMyselfService>().Breakout("Reconnection unsuccessful"),
                            new Dictionary<string, object>
                                {
                                    { "INSTANCE", Authentication.Instance },
                                    { "PIN", Authentication.Pin },
                                    { "USERID", Authentication.UserId },
                                    { "VERSION", Resources.BuildVersion }
                                });
                    }
                    else
                    {
                        // NOTE(review): non-401 failures are forwarded to the SUCCESS
                        // callback with the raw Responce — confirm this is intentional.
                        this.externalSuccess(responce);
                    }
                }
            }
            catch (Exception exc)
            {
                this.externalError(exc);
            }
        }

        #endregion
    }
}
using System.Collections.Generic;
using Lucene.Net.Documents;
using Lucene.Net.Index;

namespace Lucene.Net.Search.Similarities
{
    using NUnit.Framework;
    using Directory = Lucene.Net.Store.Directory;

    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Document = Documents.Document;
    using Field = Field;
    using FieldType = FieldType;
    using IndexReader = Lucene.Net.Index.IndexReader;
    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
    using SpanOrQuery = Lucene.Net.Search.Spans.SpanOrQuery;
    using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
    using Term = Lucene.Net.Index.Term;
    using TextField = TextField;

    /// <summary>
    /// Smoke tests that run every Similarity implementation we ship against a
    /// set of degenerate indexes (empty index, missing field/term, omitted
    /// norms/TF) to make sure none of them throws or produces invalid scores.
    /// </summary>
    [TestFixture]
    public class TestSimilarity2 : LuceneTestCase
    {
        // Populated in SetUp with one instance of every similarity under test.
        internal IList<Similarity> Sims;

        [SetUp]
        public override void SetUp()
        {
            base.SetUp();
            Sims = new List<Similarity>();
            Sims.Add(new DefaultSimilarity());
            Sims.Add(new BM25Similarity());
            // TODO: not great that we dup this all with TestSimilarityBase
            // Build the full cross-product of DFR components...
            foreach (BasicModel basicModel in TestSimilarityBase.BASIC_MODELS)
            {
                foreach (AfterEffect afterEffect in TestSimilarityBase.AFTER_EFFECTS)
                {
                    foreach (Normalization normalization in TestSimilarityBase.NORMALIZATIONS)
                    {
                        Sims.Add(new DFRSimilarity(basicModel, afterEffect, normalization));
                    }
                }
            }
            // ...and of information-based (IB) components.
            foreach (Distribution distribution in TestSimilarityBase.DISTRIBUTIONS)
            {
                foreach (Lambda lambda in TestSimilarityBase.LAMBDAS)
                {
                    foreach (Normalization normalization in TestSimilarityBase.NORMALIZATIONS)
                    {
                        Sims.Add(new IBSimilarity(distribution, lambda, normalization));
                    }
                }
            }
            Sims.Add(new LMDirichletSimilarity());
            Sims.Add(new LMJelinekMercerSimilarity(0.1f));
            Sims.Add(new LMJelinekMercerSimilarity(0.7f));
        }

        /// <summary>
        /// Because of things like querynorm, it's possible we computeStats on a field
        /// that doesn't exist at all. Test this against a totally empty index, to make
        /// sure sims handle it.
        /// </summary>
        [Test]
        public virtual void TestEmptyIndex()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                // No documents at all: every similarity must return zero hits, not throw.
                Assert.AreEqual(0, @is.Search(new TermQuery(new Term("foo", "bar")), 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }

        /// <summary>
        /// Similar to the above, but ORs the query with a real field. </summary>
        [Test]
        public virtual void TestEmptyField()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                BooleanQuery query = new BooleanQuery(true);
                // "bar" field has no postings at all; must not break scoring of "foo".
                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
                query.Add(new TermQuery(new Term("bar", "baz")), Occur.SHOULD);
                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }

        /// <summary>
        /// Similar to the above; here the field exists, but we query with a term
        /// that doesn't exist. </summary>
        [Test]
        public virtual void TestEmptyTerm()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                BooleanQuery query = new BooleanQuery(true);
                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
                // "baz" never occurs in the field.
                query.Add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD);
                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }

        /// <summary>
        /// Make sure we can retrieve when norms are disabled. </summary>
        [Test]
        public virtual void TestNoNorms()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            ft.OmitNorms = true;
            ft.Freeze();
            doc.Add(NewField("foo", "bar", ft));
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                BooleanQuery query = new BooleanQuery(true);
                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }

        /// <summary>
        /// Make sure all sims work if TF is omitted. </summary>
        [Test]
        public virtual void TestOmitTF()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            // DOCS_ONLY drops term frequencies from the postings.
            ft.IndexOptions = IndexOptions.DOCS_ONLY;
            ft.Freeze();
            Field f = NewField("foo", "bar", ft);
            doc.Add(f);
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                BooleanQuery query = new BooleanQuery(true);
                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }

        /// <summary>
        /// Make sure all sims work if TF and norms are omitted. </summary>
        [Test]
        public virtual void TestOmitTFAndNorms()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            ft.IndexOptions = IndexOptions.DOCS_ONLY;
            ft.OmitNorms = true;
            ft.Freeze();
            Field f = NewField("foo", "bar", ft);
            doc.Add(f);
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                BooleanQuery query = new BooleanQuery(true);
                query.Add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD);
                Assert.AreEqual(1, @is.Search(query, 10).TotalHits);
            }
            ir.Dispose();
            dir.Dispose();
        }

        /// <summary>
        /// Make sure all sims work with spanOR(termX, termY) where termY does not exist. </summary>
        [Test]
        public virtual void TestCrazySpans()
        {
            // The problem: "normal" lucene queries create scorers, returning null if terms dont exist
            // this means they never score a term that does not exist.
            // however with spans, there is only one scorer for the whole hierarchy:
            // inner queries are not real queries, their boosts are ignored, etc.
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            doc.Add(NewField("foo", "bar", ft));
            iw.AddDocument(doc);
            IndexReader ir = iw.Reader;
            iw.Dispose();
            IndexSearcher @is = NewSearcher(ir);

            foreach (Similarity sim in Sims)
            {
                @is.Similarity = sim;
                SpanTermQuery s1 = new SpanTermQuery(new Term("foo", "bar"));
                // "baz" does not exist; the span OR must still score sanely.
                SpanTermQuery s2 = new SpanTermQuery(new Term("foo", "baz"));
                Query query = new SpanOrQuery(s1, s2);
                TopDocs td = @is.Search(query, 10);
                Assert.AreEqual(1, td.TotalHits);
                float score = td.ScoreDocs[0].Score;
                // Scores must be finite and non-negative for every similarity.
                Assert.IsTrue(score >= 0.0f);
                Assert.IsFalse(float.IsInfinity(score), "inf score for " + sim);
            }
            ir.Dispose();
            dir.Dispose();
        }
    }
}
using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Collections.Specialized;

namespace ContactRoute.Client
{
    /// <summary>
    /// IHttpNetworkClient implementation built on HttpWebRequest. Buffers the
    /// whole response body into RawData and retries a failed request up to
    /// 5 times. Not thread-safe across instances' shared statics; per-instance
    /// calls are serialized by locking 'this' inside Send().
    /// </summary>
    internal class HttpNetworkClient : ContactRoute.Client.IHttpNetworkClient
    {
        private ServicePoint m_ServicePoint;
        protected static char[] WhiteSpace = new char[] { ' ', '\t' };
        protected static char[] HeaderSeparator = new char[] { ':' };
        protected static char[] ListSeparator = new char[] { ';' };
        protected static char[] ValueSeparator = new char[] { '=' };
        public static int PreferedBlockSize = 1400;
        public static int DefaultTimeout = 30000;
        private static System.Reflection.AssemblyName LinkAssemblyName;
        private static string DefaultUserAgent;
        private IPAddress[] m_DnsServers;
        private string m_Host;
        private string m_ServerName;
        private int m_ServerPort;
        private Uri m_ServerUri;
        private IPEndPoint m_ServerEndPoint;
        private IPAddress[] m_ServerAddresses;
        private CookieContainer m_Cookies = new CookieContainer(5);
        private string m_UserAgent = DefaultUserAgent;
        private int m_Timeout = DefaultTimeout;
        // Response body buffer; grown on demand, never shrunk.
        private byte[] m_Buffer = new byte[PreferedBlockSize];
        private int m_BufferPos = 0;
        private NameValueCollection m_RequestHeaders = new NameValueCollection(10);
        private NameValueCollection m_ResponseHeaders = new NameValueCollection(50);
        private int m_ContentLength;
        private string m_ContentType;
        private string m_ContentEncoding;
        // -1 until a response has been received.
        private int m_StatusCode = -1;
        // NOTE(review): set by Abort() but never checked in this class, so Abort()
        // is effectively a no-op here — confirm whether in-flight requests should
        // actually be cancelled (HttpWebRequest.Abort).
        private bool m_Aborted = false;

        public bool TraceNetwork { get; set; }
        public IPEndPoint ClientEndpoint { get; private set; }

        static HttpNetworkClient()
        {
            // Default User-Agent derived from the executing assembly's version.
            LinkAssemblyName = System.Reflection.Assembly.GetExecutingAssembly().GetName();
            DefaultUserAgent = string.Format("Nixxis/{0} {1}.{2}.{3}", LinkAssemblyName.Name, LinkAssemblyName.Version.Major, LinkAssemblyName.Version.Minor, LinkAssemblyName.Version.Build);
        }

        public HttpNetworkClient(Uri uri)
        {
            m_ServerPort = uri.Port;
            if (m_ServerPort == 0) m_ServerPort = 80;
            // Host header omits the port for the default HTTP port.
            if (m_ServerPort == 80)
            {
                m_Host = uri.Host;
            }
            else
            {
                m_Host = string.Concat(uri.Host, ':', uri.Port.ToString());
            }
            m_ServerName = uri.Host;
            // NOTE(review): this re-assignment undoes the 0 -> 80 defaulting above —
            // looks unintentional; confirm.
            m_ServerPort = uri.Port;
            m_ServerUri = uri;
        }

        // Shared cookie jar, also attached to every outgoing request.
        public CookieContainer Cookies { get { return m_Cookies; } }
        public string UserAgent { get { return m_UserAgent; } set { m_UserAgent = value; } }
        public int Timeout { get { return m_Timeout; } set { m_Timeout = value; } }
        // Valid after a successful Send(); length of the last response body.
        public int ContentLength { get { return m_ContentLength; } }
        public string ContentType { get { return m_ContentType; } }
        public int StatusCode { get { return m_StatusCode; } }
        // Raw response bytes; only the first ContentLength bytes are meaningful.
        public byte[] RawData { get { return m_Buffer; } }

        public bool Connect()
        {
            //TODO: manage servicepoint here
            return true;
        }

        /// <summary>
        /// Issues one HTTP request, retrying up to 5 times on any exception
        /// (including WebException from non-2xx statuses). Returns true once a
        /// response has been fully read into m_Buffer.
        /// </summary>
        protected bool Send(string method, string query, byte[] postData, int offset, int count)
        {
            int Retry = 5;
            m_Aborted = false;
            lock (this)
            {
                StringBuilder SB = null;
                if (TraceNetwork)
                {
                    SB = new StringBuilder();
                    SB.Append(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"));
                    SB.Append(" ").Append(method).Append(" ");
                    SB.Append(query);
                    SB.AppendLine();
                    if (count > 0) SB.Append("\t").AppendLine(Encoding.UTF8.GetString(postData, offset, count).Replace("\r", "").Replace("\n", "\r\n\t"));
                }
                while (Retry-- > 0)
                {
                    HttpWebRequest Request = WebRequest.Create(new Uri(m_ServerUri, query)) as HttpWebRequest;
                    Request.Method = method;
                    Request.ProtocolVersion = new Version(1, 1);
                    Request.UserAgent = m_UserAgent;
                    Request.Headers.Add(m_RequestHeaders);
                    Request.CookieContainer = m_Cookies;
                    try
                    {
                        if (count > 0)
                        {
                            // m_ContentType is set by the Post() overloads before calling Send().
                            Request.ContentType = m_ContentType;
                            Request.ContentLength = count;
                            using (Stream RS = Request.GetRequestStream())
                            {
                                RS.Write(postData, offset, count);
                            }
                        }
                        using (WebResponse Response = Request.GetResponse())
                        {
                            HttpWebResponse HttpResponse = Response as HttpWebResponse;
                            m_ContentLength = (int)Response.ContentLength;
                            m_ContentType = Response.ContentType;
                            if (HttpResponse != null)
                            {
                                m_StatusCode = (int)HttpResponse.StatusCode;
                            }
                            if (m_ContentLength > 0)
                            {
                                // Grow the buffer if needed, then read the exact body length.
                                if (m_Buffer.Length < m_ContentLength) m_Buffer = new byte[m_ContentLength];
                                m_BufferPos = 0;
                                using (Stream ResponseStream = Response.GetResponseStream())
                                {
                                    while (m_BufferPos < m_ContentLength) m_BufferPos += ResponseStream.Read(m_Buffer, m_BufferPos, m_ContentLength - m_BufferPos);
                                }
                            }
                            if (TraceNetwork)
                            {
                                SB.Append(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff")).Append(" HTTP ").Append(m_StatusCode).Append(" ").Append(m_ContentLength);
                                SB.AppendLine();
                                if (m_ContentLength > 0)
                                {
                                    SB.Append("\t").AppendLine(Encoding.UTF8.GetString(m_Buffer, 0, m_ContentLength).Replace("\r", "").Replace("\n", "\r\n\t"));
                                }
                                Trace.WriteLine(SB.ToString());
                            }
                            return true;
                        }
                    }
                    catch (Exception Ex)
                    {
                        // Log and fall through to the retry loop.
                        Trace.WriteLine(string.Concat(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"), " ", Ex.ToString()), "HttpClientSocket");
                    }
                }
                return false;
            }
        }

        public bool Get(string query)
        {
            m_ContentType = string.Empty;
            return Send("GET", query, null, 0, 0);
        }

        public bool Post(string query, byte[] postData)
        {
            m_ContentType = "application/octet-stream";
            return Send("POST", query, postData, 0, postData.Length);
        }

        public bool Post(string query, byte[] postData, string contentType)
        {
            m_ContentType = contentType;
            return Send("POST", query, postData, 0, postData.Length);
        }

        public bool Post(string query, byte[] postData, int offset, int count)
        {
            m_ContentType = "application/octet-stream";
            return Send("POST", query, postData, offset, count);
        }

        public bool Post(string query, byte[] postData, int offset, int count, string contentType)
        {
            m_ContentType = contentType;
            return Send("POST", query, postData, offset, count);
        }

        public void Abort()
        {
            // NOTE(review): flag is never read in this class; see field comment.
            m_Aborted = true;
        }

        public void ClearCookies()
        {
            // NOTE(review): capacity 10 here vs 5 at field initialization —
            // presumably irrelevant, but confirm.
            m_Cookies = new CookieContainer(10);
        }
    }

    /// <summary>
    /// Raw-socket IHttpNetworkClient with a hand-rolled HTTP/1.1 parser
    /// (continues below).
    /// </summary>
    internal class HttpNetworkSocket : IHttpNetworkClient
    {
        protected static char[] WhiteSpace = new char[] { ' ', '\t' };
        protected static char[] HeaderSeparator = new char[] { ':' };
        protected static char[] ListSeparator = new char[] {
';' };
        protected static char[] ValueSeparator = new char[] { '=' };
        public static int PreferedBlockSize = 1400;
        public static int DefaultTimeout = 30000;
        public static string DefaultUserAgent = "Nixxis/HttpLink.2.0";
        private IPAddress[] m_DnsServers;
        private string m_Host;
        private string m_ServerName;
        private Uri m_Uri;
        private int m_ServerPort;
        private IPEndPoint m_ServerEndPoint;
        private IPAddress[] m_ServerAddresses;
        private CookieContainer m_Cookies = new CookieContainer(5);
        private Socket m_Socket;
        private string m_UserAgent = DefaultUserAgent;
        private int m_Timeout = DefaultTimeout;
        // Shared buffer used both for the outgoing request head and the
        // incoming response; grown on demand.
        private byte[] m_Buffer = new byte[PreferedBlockSize];
        private int m_BufferPos = 0;
        private List<string> m_RequestHeaders = new List<string>();
        private SortedDictionary<string, string> m_ResponseHeaders = new SortedDictionary<string, string>();
        private int m_ContentLength;
        private string m_ContentType;
        private string m_ContentEncoding;
        private int m_StatusCode = -1;
        // Checked by the send/receive loops so Abort() can cancel an in-flight call.
        private bool m_Aborted = false;

        public bool TraceNetwork { get; set; }
        public IPEndPoint ClientEndpoint { get; private set; }

        public HttpNetworkSocket(string host)
            : this(host, null)
        {
        }

        // host may be "name" or "name:port"; dnsServers (optional) are used for
        // custom A-record resolution instead of the system resolver.
        public HttpNetworkSocket(string host, IPAddress[] dnsServers)
        {
            m_Host = host;
            m_DnsServers = dnsServers;
            m_Uri = new Uri("http://" + m_Host);
            int Sep = m_Host.IndexOf(':');
            if (Sep >= 0)
            {
                m_ServerName = m_Host.Substring(0, Sep);
                m_ServerPort = Convert.ToInt32(m_Host.Substring(Sep + 1));
            }
            else
            {
                m_ServerName = m_Host;
                m_ServerPort = 80;
            }
            AddDefaultHeaders();
            ResolveServerName();
        }

        public HttpNetworkSocket(Uri uri)
            : this(uri, null)
        {
        }

        public HttpNetworkSocket(Uri uri, IPAddress[] dnsServers)
        {
            m_Uri = uri;
            m_ServerPort = uri.Port;
            if (m_ServerPort == 0) m_ServerPort = 80;
            // Host header omits the port for the default HTTP port.
            if (m_ServerPort == 80)
            {
                m_Host = uri.Host;
            }
            else
            {
                m_Host = string.Concat(uri.Host, ':', uri.Port.ToString());
            }
            m_DnsServers = dnsServers;
            m_ServerName = uri.Host;
            // FIX: removed a re-assignment of m_ServerPort = uri.Port that undid
            // the 0 -> 80 defaulting above.
            AddDefaultHeaders();
            ResolveServerName();
        }

        // Headers sent with every request.
        private void AddDefaultHeaders()
        {
            m_RequestHeaders.Add(string.Concat("host: ", m_Host, "\r\n"));
            m_RequestHeaders.Add(string.Concat("user-agent: ", m_UserAgent, "\r\n"));
        }

        // Resolves m_ServerName into m_ServerAddresses, either via the supplied
        // DNS servers or the system resolver. Literal IPs are used directly.
        private void ResolveServerName()
        {
            IPAddress Addr;
            if (IPAddress.TryParse(m_ServerName, out Addr))
            {
                m_ServerAddresses = new IPAddress[] { Addr };
            }
            else
            {
                if (m_DnsServers != null && m_DnsServers.Length > 0)
                {
                    Nixxis.Client.DnsClient Client = new Nixxis.Client.DnsClient(m_DnsServers);
                    Nixxis.Client.DnsBase[] Results = Client.ExecuteRequest(new Nixxis.Client.DnsRequest(m_ServerName, Nixxis.Client.DnsResourceType.A, true), 10000);
                    if (Results.Length > 0)
                    {
                        List<IPAddress> Adresses = new List<IPAddress>();
                        for (int i = 0; i < Results.Length; i++)
                        {
                            // FIX: the original tested and added Results[0] on every
                            // iteration, so only the first A record was ever collected
                            // (duplicated Results.Length times).
                            if (Results[i] is Nixxis.Client.DnsA)
                            {
                                Adresses.Add(((Nixxis.Client.DnsA)Results[i]).Address);
                            }
                        }
                        m_ServerAddresses = Adresses.ToArray();
                    }
                }
                else
                {
                    m_ServerAddresses = Dns.GetHostAddresses(m_ServerName);
                }
            }
        }

        public CookieContainer Cookies { get { return m_Cookies; } }

        public string UserAgent
        {
            get { return m_UserAgent; }
            set
            {
                lock (this)
                {
                    m_UserAgent = value;
                    // Rewrite the cached user-agent header line in place.
                    for (int i = 0; i < m_RequestHeaders.Count; i++)
                    {
                        if (m_RequestHeaders[i].StartsWith("user-agent:", StringComparison.OrdinalIgnoreCase))
                        {
                            m_RequestHeaders[i] = string.Concat("user-agent: ", m_UserAgent, "\r\n");
                            break;
                        }
                    }
                }
            }
        }

        public int Timeout { get { return m_Timeout; } set { m_Timeout = value; } }
        // Valid after a successful Send(); length of the (decoded) response body.
        public int ContentLength { get { return m_ContentLength; } }
        public string ContentType { get { return m_ContentType; } }
        public int StatusCode { get { return m_StatusCode; } }
        // Raw response bytes; only the first ContentLength bytes are meaningful.
        public byte[] RawData { get { return m_Buffer; } }

        /// <summary>
        /// Opens a fresh TCP connection to the resolved server address,
        /// closing any previous socket first. Returns false on failure.
        /// </summary>
        public bool Connect()
        {
            lock (this)
            {
                if (TraceNetwork) Trace.WriteLine(string.Concat(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"), " Opening socket connection"));
                if (m_Socket != null)
                {
                    if (TraceNetwork) Trace.WriteLine("\t Closing existing socket");
                    try { m_Socket.Close(); } catch { }
                }
                ClientEndpoint = null;
                m_Socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                try
                {
                    // Best-effort tuning; failures are non-fatal.
                    m_Socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.SendTimeout, 1000);
                    m_Socket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.TypeOfService, 0x03);
                    m_Socket.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.NoDelay, true);
                }
                catch { }
                try
                {
                    if (TraceNetwork) Trace.WriteLine(string.Concat("\t Connecting ", m_ServerAddresses[0].ToString(), ":", m_ServerPort.ToString()));
                    m_Socket.Connect(m_ServerAddresses, m_ServerPort);
                }
                catch
                {
                    m_Socket = null;
                }
                if (m_Socket != null)
                {
                    if (TraceNetwork) Trace.WriteLine("\t Connected");
                    ClientEndpoint = m_Socket.LocalEndPoint as IPEndPoint;
                    return true;
                }
                if (TraceNetwork) Trace.WriteLine(" Failed");
                return false;
            }
        }

        // Appends encoded text to m_Buffer, growing it in 256-byte steps.
        private void Add(string text, Encoding encoding)
        {
            int Size = encoding.GetByteCount(text);
            if (m_BufferPos + Size > m_Buffer.Length)
            {
                byte[] NewBuffer = new byte[(((m_BufferPos + Size) / 256) + 1) * 256];
                if (m_BufferPos > 0) Buffer.BlockCopy(m_Buffer, 0, NewBuffer, 0, m_BufferPos);
                m_Buffer = NewBuffer;
            }
            m_BufferPos += encoding.GetBytes(text, 0, text.Length, m_Buffer, m_BufferPos);
        }

        private void Add(string text)
        {
            Add(text, Encoding.UTF8);
        }

        // Writes the cached request headers plus accept-encoding and any cookies
        // whose path prefixes the request path.
        private void AddHeaders(string path)
        {
            for (int i = 0; i < m_RequestHeaders.Count; i++)
            {
                Add(m_RequestHeaders[i], Encoding.UTF8);
            }
            Add("accept-encoding: deflate, gzip\r\n");
            if (m_Cookies.Count > 0)
            {
                bool First = true;
                CookieCollection Cookies = m_Cookies.GetCookies(m_Uri);
                for (int i = 0; i < Cookies.Count; i++)
                {
                    Cookie C = Cookies[i];
                    if (path.StartsWith(C.Path, StringComparison.OrdinalIgnoreCase))
                    {
                        if (!First)
                        {
                            Add(";", Encoding.UTF8);
                        }
                        else
                        {
                            Add("cookie: ", Encoding.UTF8);
                            First = false;
                        }
                        Add(C.Name, Encoding.UTF8);
                        Add("=", Encoding.UTF8);
                        Add(C.Value, Encoding.UTF8);
                    }
                }
                if (!First) Add("\r\n", Encoding.UTF8);
            }
        }

        // Records one parsed response header and mirrors the well-known ones
        // into their dedicated fields.
        private void OnResponseHeader(string name, string value)
        {
            // FIX: the original used SortedDictionary.Add outside the try block,
            // which threw on any duplicated header name (e.g. multiple Set-Cookie
            // lines) and aborted response parsing. Last value wins now.
            m_ResponseHeaders[name] = value;
            try
            {
                if (name.Equals("content-length", StringComparison.OrdinalIgnoreCase))
                {
                    m_ContentLength = Convert.ToInt32(value);
                }
                else if (name.Equals("set-cookie", StringComparison.OrdinalIgnoreCase))
                {
                    // CookieContainer parses the full header itself; the original
                    // also hand-parsed name/value/path into unused locals (removed).
                    m_Cookies.SetCookies(m_Uri, value);
                }
                else if (name.Equals("content-type", StringComparison.OrdinalIgnoreCase))
                {
                    m_ContentType = value;
                }
                else if (name.Equals("content-encoding", StringComparison.OrdinalIgnoreCase))
                {
                    m_ContentEncoding = value;
                }
            }
            catch { }
        }

        /// <summary>
        /// Reads and parses one HTTP/1.1 response from the socket: status line,
        /// headers (with continuation-line folding), then exactly Content-Length
        /// body bytes, finally inflating deflate/gzip bodies into m_Buffer.
        /// Returns false (and drops the socket) on timeout, abort or malformed data.
        /// </summary>
        private bool GetResponse()
        {
            int Available;
            bool Complete = false;
            // -1 while still parsing headers; afterwards the body length to read.
            int GetRawData = -1;
            string CurHeader = null;
            string CurValue = null;
            m_StatusCode = -1;
            m_BufferPos = 0;
            m_ResponseHeaders.Clear();
            m_ContentLength = 0;
            m_ContentType = string.Empty;
            m_ContentEncoding = null;
            while (!Complete)
            {
                // Poll in slices so an Abort() is noticed before the full timeout.
                for (int i = 0; i < 100; i++)
                {
                    if (m_Socket.Poll(m_Timeout * 10, SelectMode.SelectRead) || m_Aborted) break;
                }
                if (m_Aborted || (Available = m_Socket.Available) <= 0)
                {
                    try { m_Socket.Close(); } catch { }
                    m_Socket = null;
                    return false;
                }
                if (m_BufferPos + Available > m_Buffer.Length)
                {
                    byte[] NewBuffer = new byte[(((m_BufferPos + Available) / 256) + 1) * 256];
                    if (m_BufferPos > 0) Buffer.BlockCopy(m_Buffer, 0, NewBuffer, 0, m_BufferPos);
                    m_Buffer = NewBuffer;
                }
                int Received = m_Socket.Receive(m_Buffer, m_BufferPos, Available, SocketFlags.None);
                if (Received == 0)
                {
                    try { m_Socket.Close(); } catch { }
                    m_Socket = null;
                    return false;
                }
                m_BufferPos += Received;
                if (GetRawData < 0)
                {
                    // Header phase: split the buffer into CR/LF-terminated lines,
                    // compacting consumed bytes to the front of m_Buffer as we go.
                    byte LastSep = 0;
                    int StartPos = 0;
                    for (int i = 0; i < m_BufferPos; i++)
                    {
                        if (m_Buffer[i] == '\r' || m_Buffer[i] == '\n')
                        {
                            if (LastSep != 0 && m_Buffer[i] != LastSep)
                            {
                                // Second half of a CRLF pair: skip it.
                                LastSep = 0;
                                StartPos++;
                                continue;
                            }
                            LastSep = m_Buffer[i];
                            string TextLine = Encoding.UTF8.GetString(m_Buffer, StartPos, i - StartPos);
                            i++;
                            if (i < m_BufferPos && (m_Buffer[i] == '\r' || m_Buffer[i] == '\n') && m_Buffer[i] != LastSep)
                            {
                                i++;
                                LastSep = 0;
                            }
                            if (m_BufferPos > i)
                            {
                                m_BufferPos -= i;
                                Buffer.BlockCopy(m_Buffer, i, m_Buffer, 0, m_BufferPos);
                                i = -1;
                            }
                            else
                            {
                                m_BufferPos = 0;
                            }
                            if (TextLine.Length == 0)
                            {
                                // Blank line: end of headers, body (if any) follows.
                                if (CurHeader != null)
                                {
                                    OnResponseHeader(CurHeader, CurValue);
                                    CurHeader = null;
                                    CurValue = null;
                                }
                                GetRawData = m_ContentLength;
                                break;
                            }
                            else
                            {
                                if (StatusCode >= 200)
                                {
                                    // Header line; leading whitespace marks a folded
                                    // continuation of the previous header value.
                                    if (TextLine[0] == ' ' || TextLine[0] == '\t')
                                    {
                                        if (CurValue != null) CurValue = string.Concat(CurValue, TextLine.TrimStart(WhiteSpace));
                                        else CurValue = TextLine.TrimStart(WhiteSpace);
                                    }
                                    else
                                    {
                                        if (CurHeader != null)
                                        {
                                            OnResponseHeader(CurHeader, CurValue);
                                        }
                                        string[] Header = TextLine.Split(HeaderSeparator, 2);
                                        CurHeader = Header[0];
                                        CurValue = (Header.Length > 1) ? Header[1].TrimStart(WhiteSpace) : string.Empty;
                                    }
                                }
                                else
                                {
                                    // Expecting the status line first.
                                    if (TextLine.StartsWith("HTTP/1.1", StringComparison.OrdinalIgnoreCase))
                                    {
                                        string[] Parts = TextLine.Split(WhiteSpace, StringSplitOptions.RemoveEmptyEntries);
                                        if (Parts.Length >= 2)
                                        {
                                            m_StatusCode = Convert.ToInt32(Parts[1]);
                                        }
                                    }
                                    else
                                    {
                                        return false;
                                    }
                                }
                            }
                        }
                    }
                }
                if (GetRawData >= 0)
                {
                    Complete = (m_BufferPos >= GetRawData);
                }
            }
            if (StatusCode >= 300)
            {
                // Redirects/errors: do not keep the connection alive.
                if (m_Socket != null)
                {
                    try { m_Socket.Close(); } catch { }
                    m_Socket = null;
                }
            }
            if (GetRawData > 0)
            {
                if (!string.IsNullOrEmpty(m_ContentEncoding))
                {
                    // Decompress deflate/gzip bodies back into m_Buffer and update
                    // m_ContentLength to the decoded size.
                    Stream RawStream = null;
                    Stream EncodingStream = null;
                    if (m_ContentEncoding.Equals("deflate", StringComparison.OrdinalIgnoreCase))
                    {
                        byte[] RawBuffer = new byte[GetRawData];
                        Buffer.BlockCopy(m_Buffer, 0, RawBuffer, 0, GetRawData);
                        RawStream = new MemoryStream(RawBuffer, 0, GetRawData, false);
                        RawStream.Seek(0, SeekOrigin.Begin);
                        EncodingStream = new System.IO.Compression.DeflateStream(RawStream, System.IO.Compression.CompressionMode.Decompress);
                    }
                    else if (m_ContentEncoding.Equals("gzip", StringComparison.OrdinalIgnoreCase))
                    {
                        byte[] RawBuffer = new byte[GetRawData];
                        Buffer.BlockCopy(m_Buffer, 0, RawBuffer, 0, GetRawData);
                        RawStream = new MemoryStream(RawBuffer, 0, GetRawData, false);
                        RawStream.Seek(0, SeekOrigin.Begin);
                        EncodingStream = new System.IO.Compression.GZipStream(RawStream, System.IO.Compression.CompressionMode.Decompress);
                    }
                    if (EncodingStream != null)
                    {
                        int Read;
                        m_BufferPos = 0;
                        while ((Read = EncodingStream.Read(m_Buffer, m_BufferPos, m_Buffer.Length - m_BufferPos)) > 0)
                        {
                            m_BufferPos += Read;
                            if (m_BufferPos == m_Buffer.Length)
                            {
                                byte[] NewBuffer = new byte[m_BufferPos + 512];
                                Buffer.BlockCopy(m_Buffer, 0, NewBuffer, 0, m_BufferPos);
                                m_Buffer = NewBuffer;
                            }
                        }
                        m_ContentLength = m_BufferPos;
                        EncodingStream.Close();
                    }
                }
            }
            return Complete;
        }

        /// <summary>
        /// Builds the request head in m_Buffer, sends head + body over the raw
        /// socket (reconnecting and retrying up to 5 times) and then parses the
        /// response via GetResponse(). Returns true on a fully received response.
        /// </summary>
        protected bool Send(string method, string query, byte[] postData, int offset, int count)
        {
            int Retry = 5;
            m_Aborted = false;
            lock (this)
            {
                StringBuilder SB = null;
                if (TraceNetwork)
                {
                    SB = new StringBuilder();
                    SB.Append(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"));
                    SB.Append(" ").Append(method).Append(" ");
                    SB.Append(query);
                    SB.AppendLine();
                    if (count > 0) SB.Append("\t").AppendLine(Encoding.UTF8.GetString(postData, offset, count).Replace("\r", "").Replace("\n", "\r\n\t"));
                }
                while (Retry-- > 0)
                {
                    // Compose "<METHOD> <query> HTTP/1.1" + headers into m_Buffer.
                    m_BufferPos = 0;
                    Add(method, Encoding.UTF8);
                    Add(" ", Encoding.UTF8);
                    Add(query, Encoding.UTF8);
                    Add(" HTTP/1.1\r\n", Encoding.UTF8);
                    AddHeaders(query);
                    if (count > 0)
                    {
                        Add("content-type: ");
                        Add(m_ContentType);
                        Add(string.Format("\r\ncontent-length: {0}\r\n", count));
                    }
                    Add("\r\n", Encoding.UTF8);
                    // Piggy-back as much of the body as fits into the first block.
                    int SameSend = 0;
                    if (count > 0 && m_BufferPos < PreferedBlockSize)
                    {
                        SameSend = PreferedBlockSize - m_BufferPos;
                        if (SameSend > count) SameSend = count;
                        Buffer.BlockCopy(postData, offset, m_Buffer, m_BufferPos, SameSend);
                        m_BufferPos += SameSend;
                    }
                    int BufferSent = 0;
                    int PostSent = SameSend;
                    if (m_Socket == null)
                    {
                        if (!Connect()) continue;
                    }
                    // Send the head (plus piggy-backed body bytes).
                    while (BufferSent < m_BufferPos)
                    {
                        int Sent = 0;
                        try
                        {
                            Sent = m_Socket.Send(m_Buffer, BufferSent, m_BufferPos - BufferSent, SocketFlags.None);
                        }
                        catch (SocketException SockEx)
                        {
                            Trace.WriteLine(string.Concat(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"), " ", SockEx.ToString()), "HttpClientSocket");
                            Sent = 0;
                        }
                        catch (Exception Ex)
                        {
                            Trace.WriteLine(string.Concat(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"), " ", Ex.ToString()), "HttpClientSocket");
                        }
                        if (m_Aborted)
                        {
                            try { m_Socket.Close(); } catch { }
                            m_Socket = null;
                            return false;
                        }
                        if (Sent == 0)
                        {
                            try { m_Socket.Close(); } catch { }
                            m_Socket = null;
                            break;
                        }
                        else
                        {
                            BufferSent += Sent;
                        }
                    }
                    if (BufferSent < m_BufferPos) continue;
                    // Send the remainder of the body in preferred-size chunks.
                    while (PostSent < count)
                    {
                        int Sent = 0;
                        int ToSend = count - PostSent;
                        if (ToSend > PreferedBlockSize) ToSend = PreferedBlockSize;
                        try
                        {
                            Sent = m_Socket.Send(postData, offset + PostSent, ToSend, SocketFlags.None);
                        }
                        catch (SocketException SockEx)
                        {
                            Trace.WriteLine(string.Concat(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff"), " ", SockEx.ToString()), "HttpClientSocket");
                            Sent = 0;
                        }
                        catch { }
                        if (m_Aborted)
                        {
                            try { m_Socket.Close(); } catch { }
                            m_Socket = null;
                            return false;
                        }
                        if (Sent == 0)
                        {
                            try { m_Socket.Close(); } catch { }
                            m_Socket = null;
                            break;
                        }
                        else
                        {
                            PostSent += Sent;
                        }
                    }
                    if (PostSent < count) continue;
                    if (GetResponse())
                    {
                        if (TraceNetwork)
                        {
                            SB.Append(DateTime.Now.ToString("dd/MM/yyyy HH:mm:ss.fff")).Append(" HTTP ").Append(m_StatusCode).Append(" ").Append(m_ContentLength);
                            SB.AppendLine();
                            if (m_ContentLength > 0)
                            {
                                SB.Append("\t").AppendLine(Encoding.UTF8.GetString(m_Buffer, 0, m_ContentLength).Replace("\r", "").Replace("\n", "\r\n\t"));
                            }
                            Trace.WriteLine(SB.ToString());
                        }
                        return true;
                    }
                    if (m_Socket != null)
                    {
                        try { m_Socket.Close(); } catch { }
                        m_Socket = null;
                    }
                }
                return false;
            }
        }

        public bool Get(string query)
        {
            m_ContentType = string.Empty;
            return Send("GET", query, null, 0, 0);
        }

        public bool Post(string query, byte[] postData)
        {
            m_ContentType = "application/octet-stream";
            return Send("POST", query, postData, 0, postData.Length);
        }

        public bool Post(string query, byte[] postData, string contentType)
        {
            m_ContentType = contentType;
            return Send("POST", query, postData, 0, postData.Length);
        }

        public bool Post(string query, byte[] postData, int offset, int count)
        {
            m_ContentType = "application/octet-stream";
            return Send("POST", query, postData, offset, count);
        }

        public bool Post(string query, byte[] postData, int offset, int count, string contentType)
        {
            m_ContentType = contentType;
            return Send("POST", query, postData, offset, count);
        }

        // Requests cancellation of an in-flight Send/GetResponse.
        public void Abort()
        {
            m_Aborted = true;
        }

        public void ClearCookies()
        {
            m_Cookies = new CookieContainer(5);
        }
    }
}
//------------------------------------------------------------------------------ // <copyright file="WebPermission.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ namespace System.Net { using System.Collections; using System.Security; using System.Security.Permissions; using System.Text.RegularExpressions; using System.Globalization; using System.Runtime.Serialization; //NOTE: While WebPermissionAttribute resides in System.DLL, // no classes from that DLL are able to make declarative usage of WebPermission. // THE syntax of this attribute is as followed // [WebPermission(SecurityAction.Assert, Connect="http://hostname/path/url", Accept="http://localhost/path/url")] // [WebPermission(SecurityAction.Assert, ConnectPattern="http://hostname/www\.microsoft\.*/url/*", AcceptPattern="http://localhost/*")] // WHERE: //======= // - 'Connect' and 'Accept' keywords allow you to specify the final URI // - 'ConnectPattern' and 'AcceptPattern' keywords allow you to specify a set of URI in escaped Regex form // - They take '.*' as special "everything" indicators, which are fast-pathed. 
[   AttributeUsage( AttributeTargets.Method | AttributeTargets.Constructor |
                    AttributeTargets.Class  | AttributeTargets.Struct      |
                    AttributeTargets.Assembly,
                    AllowMultiple = true, Inherited = false )]
[Serializable()]
sealed public class WebPermissionAttribute: CodeAccessSecurityAttribute
{
    // Each slot holds one of: a string (final URI), a DelayedRegex (URI pattern),
    // or bool true (the ".*" match-everything fast path). null == not configured.
    // A slot may be set exactly once; a second set throws.
    private object m_accept  = null;
    private object m_connect = null;

    public WebPermissionAttribute( SecurityAction action ): base( action )
    {
    }

    /// <summary>Final URI the code may connect to. May be assigned only once.</summary>
    public string Connect {
        get { return m_connect as string; }
        set {
            if (m_connect != null) {
                throw new ArgumentException(SR.GetString(SR.net_perm_attrib_multi, "Connect", value), "value");
            }
            m_connect = value;
        }
    }

    /// <summary>Final URI the code may accept connections on. May be assigned only once.</summary>
    public string Accept {
        get { return m_accept as string; }
        set {
            if (m_accept != null) {
                throw new ArgumentException(SR.GetString(SR.net_perm_attrib_multi, "Accept", value), "value");
            }
            m_accept = value;
        }
    }

    /// <summary>
    /// Escaped-regex pattern of URIs the code may connect to. The special value
    /// ".*" (WebPermission.MatchAll) is stored as bool true and fast-pathed.
    /// May be assigned only once.
    /// </summary>
    public string ConnectPattern {
        get {
            return m_connect is DelayedRegex ? m_connect.ToString() :
                   m_connect is bool && (bool) m_connect ? WebPermission.MatchAll : null;
        }
        set {
            if (m_connect != null) {
                // BUGFIX: error message previously said "ConnectPatern" (misspelled).
                throw new ArgumentException(SR.GetString(SR.net_perm_attrib_multi, "ConnectPattern", value), "value");
            }
            if (value == WebPermission.MatchAll) {
                m_connect = true;
            }
            else {
                m_connect = new DelayedRegex(value);
            }
        }
    }

    /// <summary>
    /// Escaped-regex pattern of URIs the code may accept connections on.
    /// ".*" is stored as bool true and fast-pathed. May be assigned only once.
    /// </summary>
    public string AcceptPattern {
        get {
            return m_accept is DelayedRegex ? m_accept.ToString() :
                   m_accept is bool && (bool) m_accept ? WebPermission.MatchAll : null;
        }
        set {
            if (m_accept != null) {
                throw new ArgumentException(SR.GetString(SR.net_perm_attrib_multi, "AcceptPattern", value), "value");
            }
            if (value == WebPermission.MatchAll) {
                m_accept = true;
            }
            else {
                m_accept = new DelayedRegex(value);
            }
        }
    }

    /// <summary>
    /// Materializes the WebPermission described by this attribute.
    /// bool slots become the corresponding NetworkAccess flags on the permission;
    /// DelayedRegex slots are added as patterns; string slots as final URIs.
    /// </summary>
    public override IPermission CreatePermission() {
        WebPermission perm = null;
        if (Unrestricted) {
            perm = new WebPermission( PermissionState.Unrestricted);
        }
        else {
            NetworkAccess access = (NetworkAccess) 0;
            // Consume the ".*" fast-path flags first so the null checks below
            // only see pattern/string entries.
            if (m_connect is bool) {
                if ((bool) m_connect) {
                    access |= NetworkAccess.Connect;
                }
                m_connect = null;
            }
            if (m_accept is bool) {
                if ((bool) m_accept) {
                    access |= NetworkAccess.Accept;
                }
                m_accept = null;
            }
            perm = new WebPermission(access);
            if (m_accept != null) {
                if (m_accept is DelayedRegex) {
                    perm.AddAsPattern(NetworkAccess.Accept, (DelayedRegex)m_accept);
                }
                else {
                    perm.AddPermission(NetworkAccess.Accept, (string)m_accept);
                }
            }
            if (m_connect != null) {
                if (m_connect is DelayedRegex) {
                    perm.AddAsPattern(NetworkAccess.Connect, (DelayedRegex)m_connect);
                }
                else {
                    perm.AddPermission(NetworkAccess.Connect, (string)m_connect);
                }
            }
        }
        return perm;
    }
}

/// <summary>
/// Wraps a regex pattern string, deferring Regex compilation until first use.
/// The compiled form appends "[/]?" so a trailing slash on the tested URI is
/// tolerated, and is case-insensitive / culture-invariant.
/// </summary>
[Serializable]
internal class DelayedRegex {

    private Regex  _AsRegex;
    private string _AsString;

    internal DelayedRegex(string regexString) {
        if (regexString == null)
            throw new ArgumentNullException("regexString");
        _AsString = regexString;
    }

    internal DelayedRegex(Regex regex) {
        if (regex == null)
            throw new ArgumentNullException("regex");
        _AsRegex = regex;
    }

    /// <summary>Compiled form; created lazily on first access.</summary>
    internal Regex AsRegex {
        get {
            if (_AsRegex == null) {
                _AsRegex = new Regex(_AsString + "[/]?", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline | RegexOptions.CultureInvariant);
            }
            return _AsRegex;
        }
    }

    /// <summary>Pattern text; cached from the Regex if constructed from one.</summary>
    public override string ToString() {
        return _AsString != null ? _AsString : (_AsString = _AsRegex.ToString());
    }
}

/// <devdoc>
///    <para>
///       Controls rights to make or accept connections on a Web address.
///    </para>
/// </devdoc>
[Serializable]
public sealed class WebPermission : CodeAccessPermission, IUnrestrictedPermission {

    // true == PermissionState.Unrestricted: every demand passes.
    private bool m_noRestriction;
    // Per-direction ".*" fast paths; [OptionalField] keeps serialization
    // compatible with earlier versions of this type that lacked them.
    [OptionalField]
    private bool m_UnrestrictedConnect;
    [OptionalField]
    private bool m_UnrestrictedAccept;
    // Elements are string (non-Uri text), Uri (final URI), or DelayedRegex (pattern).
    private ArrayList m_connectList = new ArrayList();
    private ArrayList m_acceptList = new ArrayList();

    // The special "match everything" pattern, fast-pathed throughout.
    internal const string MatchAll = ".*";

    // Lazily created; volatile so the benign initialization race publishes safely.
    private static volatile Regex s_MatchAllRegex;
    internal static Regex MatchAllRegex {
        get {
            if (s_MatchAllRegex == null) {
                s_MatchAllRegex = new Regex(".*");
            }
            return s_MatchAllRegex;
        }
    }

    /// <devdoc>
    ///    <para>
    ///       Returns the enumeration of permissions to connect a remote URI.
    ///    </para>
    /// </devdoc>
    public IEnumerator ConnectList {
        get {
            if (m_UnrestrictedConnect) {
                return (new Regex[] { MatchAllRegex }).GetEnumerator();
            }
            // Enumerate over a snapshot: DelayedRegex entries surface as compiled
            // Regex, Uri entries as their escaped http-request-url string form.
            ArrayList cloned = new ArrayList(m_connectList.Count);
            for (int i = 0; i < m_connectList.Count; ++i)
                cloned.Add(m_connectList[i] is DelayedRegex? (object)((DelayedRegex)m_connectList[i]).AsRegex :
                           m_connectList[i] is Uri? (object)((Uri)m_connectList[i]).GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped) :
                           m_connectList[i]);
            return cloned.GetEnumerator();
        }
    }

    /// <devdoc>
    ///    <para>
    ///       Returns the enumeration of permissions to export a local URI.
    ///    </para>
    /// </devdoc>
    public IEnumerator AcceptList {
        get {
            if (m_UnrestrictedAccept) {
                return (new Regex[] { MatchAllRegex }).GetEnumerator();
            }
            // Same snapshot/normalization strategy as ConnectList above.
            ArrayList cloned = new ArrayList(m_acceptList.Count);
            for (int i = 0; i < m_acceptList.Count; ++i)
                cloned.Add(m_acceptList[i] is DelayedRegex? (object)((DelayedRegex)m_acceptList[i]).AsRegex :
                           m_acceptList[i] is Uri? (object)((Uri)m_acceptList[i]).GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped) :
                           m_acceptList[i]);
            return cloned.GetEnumerator();
        }
    }

    /// <devdoc>
    ///    <para>
    ///       Creates a new instance of the <see cref='System.Net.WebPermission'/>
    ///       class that passes all demands or
    ///       that fails all demands.
    ///    </para>
    /// </devdoc>
    public WebPermission(PermissionState state) {
        m_noRestriction = (state == PermissionState.Unrestricted);
    }

    internal WebPermission(bool unrestricted) {
        m_noRestriction = unrestricted;
    }

    /// <devdoc>
    ///    <para>
    ///       Creates a new instance of the <see cref='System.Net.WebPermission'/> class.
    ///    </para>
    /// </devdoc>
    public WebPermission() {
    }

    // Grants the ".*" fast path for the requested direction(s) only.
    internal WebPermission(NetworkAccess access) {
        m_UnrestrictedConnect = (access & NetworkAccess.Connect) != 0;
        m_UnrestrictedAccept = (access & NetworkAccess.Accept) != 0;
    }

    /// <devdoc>
    ///    <para>
    ///       Creates a new instance of the <see cref='System.Net.WebPermission'/>
    ///       class with the specified access rights for
    ///       the specified URI Pattern.
    ///       Suitable only for WebPermission policy object construction
    ///    </para>
    /// </devdoc>
    public WebPermission(NetworkAccess access, Regex uriRegex) {
        AddPermission(access, uriRegex);
    }

    /// <devdoc>
    ///    <para>
    ///       Creates a new instance of the <see cref='System.Net.WebPermission'/>
    ///       class with the specified access rights for
    ///       the specified Uniform Resource Identifier .
    ///       Suitable for requesting particular WebPermission
    ///    </para>
    /// </devdoc>
    // <
    public WebPermission(NetworkAccess access, String uriString) {
        AddPermission(access, uriString);
    }
    //
    // <
    internal WebPermission(NetworkAccess access, Uri uri) {
        AddPermission(access, uri);
    }

    // Methods specific to this class

    /// <devdoc>
    ///    <para>
    ///       Adds a new instance of the WebPermission
    ///       class with the specified access rights for the particular Uniform Resource Identifier.
    ///    </para>
    /// </devdoc>
    // <
    public void AddPermission(NetworkAccess access, String uriString) {
        if (uriString == null) {
            throw new ArgumentNullException("uriString");
        }
        // An unrestricted permission already covers everything; nothing to add.
        if (m_noRestriction) {
            return;
        }
        Uri uri;
        // Absolute URIs are stored as Uri; anything else is kept as raw string.
        if (Uri.TryCreate(uriString, UriKind.Absolute, out uri))
            AddPermission(access, uri);
        else {
            // Collect the target list(s) for the requested direction(s), skipping
            // directions already covered by the ".*" fast path.
            ArrayList lists = new ArrayList();
            if ((access & NetworkAccess.Connect) != 0 && !m_UnrestrictedConnect)
                lists.Add(m_connectList);
            if ((access & NetworkAccess.Accept) != 0 && !m_UnrestrictedAccept)
                lists.Add(m_acceptList);
            foreach (ArrayList list in lists) {
                // avoid duplicated uris in the list (case-insensitive)
                bool found = false;
                foreach (object obj in list) {
                    string str = obj as string;
                    if (str != null && string.Compare(str, uriString, StringComparison.OrdinalIgnoreCase ) == 0) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    list.Add(uriString);
                }
            }
        }
    }

    // <
    internal void AddPermission(NetworkAccess access, Uri uri) {
        if (uri == null) {
            throw new ArgumentNullException("uri");
        }
        if (m_noRestriction) {
            return;
        }
        ArrayList lists = new ArrayList();
        if ((access & NetworkAccess.Connect) != 0 && !m_UnrestrictedConnect)
            lists.Add(m_connectList);
        if ((access & NetworkAccess.Accept) != 0 && !m_UnrestrictedAccept)
            lists.Add(m_acceptList);
        foreach (ArrayList list in lists) {
            // avoid duplicated uris in the list (Uri.Equals semantics)
            bool found = false;
            foreach (object permObj in list) {
                if ((permObj is Uri) && uri.Equals(permObj)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                list.Add(uri);
            }
        }
    }

    /// <devdoc>
    /// <para>Adds a new instance of the <see cref='System.Net.WebPermission'/>
    ///    class with the specified access rights for the specified URI Pattern.
    ///    Should be used during a policy object creation and not for particular URI permission check</para>
    /// </devdoc>
    public void AddPermission(NetworkAccess access, Regex uriRegex) {
        if (uriRegex == null) {
            throw new ArgumentNullException("uriRegex");
        }
        if (m_noRestriction) {
            return;
        }
        // ".*" upgrades the direction to the unrestricted fast path and
        // makes the individual entries for that direction redundant.
        if (uriRegex.ToString() == MatchAll) {
            if (!m_UnrestrictedConnect && (access & NetworkAccess.Connect) != 0) {
                m_UnrestrictedConnect = true;
                m_connectList.Clear();
            }
            if (!m_UnrestrictedAccept && (access & NetworkAccess.Accept) != 0) {
                m_UnrestrictedAccept = true;
                m_acceptList.Clear();
            }
            return;
        }
        AddAsPattern(access, new DelayedRegex(uriRegex));
    }

    // Overloaded form using string inputs
    // Enforces case-insensitive matching
    /// Adds a new instance of the System.Net.WebPermission
    /// class with the specified access rights for the specified URI Pattern
    internal void AddAsPattern(NetworkAccess access, DelayedRegex uriRegexPattern) {
        ArrayList lists = new ArrayList();
        if ((access & NetworkAccess.Connect) != 0 && !m_UnrestrictedConnect)
            lists.Add(m_connectList);
        if ((access & NetworkAccess.Accept) != 0 && !m_UnrestrictedAccept)
            lists.Add(m_acceptList);
        foreach (ArrayList list in lists) {
            // avoid duplicated regexes in the list (pattern text compared case-insensitively)
            bool found = false;
            foreach (object obj in list) {
                if ((obj is DelayedRegex) && (string.Compare(uriRegexPattern.ToString(), obj.ToString(), StringComparison.OrdinalIgnoreCase ) == 0)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                list.Add(uriRegexPattern);
            }
        }
    }

    // IUnrestrictedPermission interface methods

    /// <devdoc>
    ///    <para>
    ///       Checks the overall permission state of the object.
    ///    </para>
    /// </devdoc>
    public bool IsUnrestricted() {
        return m_noRestriction;
    }

    // IPermission interface methods

    /// <devdoc>
    ///    <para>
    ///       Creates a copy of a <see cref='System.Net.WebPermission'/> instance.
    ///    </para>
    /// </devdoc>
    public override IPermission Copy() {
        if (m_noRestriction) {
            return new WebPermission(true);
        }
        // Preserve the per-direction unrestricted flags, then deep-copy the lists
        // (shallow element copy is fine: entries are immutable once added).
        WebPermission wp = new WebPermission((m_UnrestrictedConnect ? NetworkAccess.Connect : (NetworkAccess) 0) |
                                             (m_UnrestrictedAccept ? NetworkAccess.Accept : (NetworkAccess)0));
        wp.m_acceptList = (ArrayList)m_acceptList.Clone();
        wp.m_connectList = (ArrayList)m_connectList.Clone();
        return wp;
    }

    /// <devdoc>
    /// <para>Compares two <see cref='System.Net.WebPermission'/> instances.</para>
    /// </devdoc>
    public override bool IsSubsetOf(IPermission target) {
        // Pattern suggested by security engine
        if (target == null) {
            // Only a completely empty, fully restricted permission is a subset of null.
            return !m_noRestriction && !m_UnrestrictedConnect && !m_UnrestrictedAccept && m_connectList.Count == 0 && m_acceptList.Count == 0;
        }
        WebPermission other = target as WebPermission;
        if (other == null) {
            throw new ArgumentException(SR.GetString(SR.net_perm_target), "target");
        }
        if (other.m_noRestriction) {
            return true;
        }
        else if (m_noRestriction) {
            return false;
        }
        //
        // Besides SPECIAL case, this method is restricted to only final URIs (strings) on
        // the current object.
        // The restriction comes from the problem of finding a Regex to be a subset of another Regex
        //
        DelayedRegex regex = null;
        if (!other.m_UnrestrictedAccept) {
            if (m_UnrestrictedAccept) {
                return false;
            }
            else if (m_acceptList.Count != 0) {
                if (other.m_acceptList.Count == 0) {
                    return false;
                }
                foreach(object obj in this.m_acceptList) {
                    regex = obj as DelayedRegex;
                    if(regex != null) {
                        // Regex-vs-regex subset is undecidable here except for the
                        // textual-identity special case; anything else is unsupported.
                        if(isSpecialSubsetCase(obj.ToString(), other.m_acceptList))
                            continue;
                        throw new NotSupportedException(SR.GetString(SR.net_perm_both_regex));
                    }
                    if(!isMatchedURI(obj, other.m_acceptList))
                        return false;
                }
            }
        }
        if (!other.m_UnrestrictedConnect) {
            if (m_UnrestrictedConnect) {
                return false;
            }
            else if (m_connectList.Count != 0) {
                if (other.m_connectList.Count == 0) {
                    return false;
                }
                foreach(object obj in this.m_connectList) {
                    regex = obj as DelayedRegex;
                    if(regex != null) {
                        if(isSpecialSubsetCase(obj.ToString(), other.m_connectList))
                            continue;
                        throw new NotSupportedException(SR.GetString(SR.net_perm_both_regex));
                    }
                    if(!isMatchedURI(obj, other.m_connectList))
                        return false;
                }
            }
        }
        return true;
    }

    //Checks special case when testing Regex to be a subset of other Regex
    //Support only the case when both Regexes are identical as strings.
    private static bool isSpecialSubsetCase(String regexToCheck, ArrayList permList) {
        Uri uri;
        foreach(object uriPattern in permList) {
            DelayedRegex regex = uriPattern as DelayedRegex;
            if(regex != null) {
                //regex parameter against regex permission
                if (String.Compare(regexToCheck, regex.ToString(), StringComparison.OrdinalIgnoreCase ) == 0)
                    return true;
            }
            else if ((uri = uriPattern as Uri) != null) {
                //regex parameter against Uri permission
                if (String.Compare(regexToCheck, Regex.Escape(uri.GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped)), StringComparison.OrdinalIgnoreCase ) == 0)
                    return true;
            }
            else if (String.Compare(regexToCheck, Regex.Escape(uriPattern.ToString()), StringComparison.OrdinalIgnoreCase ) == 0) {
                //regex parameter against string permission
                return true;
            }
        }
        return false;
    }

    // The union of two web permissions is formed by concatenating
    // the list of allowed regular expressions.
    // There is no check
    // for duplicates/overlaps

    /// <devdoc>
    /// <para>Returns the logical union between two <see cref='System.Net.WebPermission'/> instances.</para>
    /// </devdoc>
    public override IPermission Union(IPermission target) {
        // Pattern suggested by Security engine
        if (target==null) {
            return this.Copy();
        }
        WebPermission other = target as WebPermission;
        if(other == null) {
            throw new ArgumentException(SR.GetString(SR.net_perm_target), "target");
        }
        if (m_noRestriction || other.m_noRestriction) {
            return new WebPermission(true);
        }
        WebPermission result = new WebPermission();
        if (m_UnrestrictedConnect || other.m_UnrestrictedConnect) {
            result.m_UnrestrictedConnect = true;
        }
        else {
            // Start from the other side's entries, then re-add ours through
            // AddPermission/AddAsPattern so duplicates are filtered.
            result.m_connectList = (ArrayList) other.m_connectList.Clone();
            for (int i = 0; i < m_connectList.Count; i++) {
                DelayedRegex uriPattern = m_connectList[i] as DelayedRegex;
                if(uriPattern == null)
                    if (m_connectList[i] is string)
                        result.AddPermission(NetworkAccess.Connect, (string)m_connectList[i]);
                    else
                        result.AddPermission(NetworkAccess.Connect, (Uri)m_connectList[i]);
                else result.AddAsPattern(NetworkAccess.Connect, uriPattern);
            }
        }
        if (m_UnrestrictedAccept || other.m_UnrestrictedAccept) {
            result.m_UnrestrictedAccept = true;
        }
        else {
            result.m_acceptList = (ArrayList) other.m_acceptList.Clone();
            for (int i = 0; i < m_acceptList.Count; i++) {
                DelayedRegex uriPattern = m_acceptList[i] as DelayedRegex;
                if(uriPattern == null)
                    if (m_acceptList[i] is string)
                        result.AddPermission(NetworkAccess.Accept, (string)m_acceptList[i]);
                    else
                        result.AddPermission(NetworkAccess.Accept, (Uri)m_acceptList[i]);
                else result.AddAsPattern(NetworkAccess.Accept, uriPattern);
            }
        }
        return result;
    }

    /// <devdoc>
    /// <para>Returns the logical intersection between two <see cref='System.Net.WebPermission'/> instances.</para>
    /// </devdoc>
    public override IPermission Intersect(IPermission target) {
        // Pattern suggested by Security engine
        if (target == null) {
            return null;
        }
        WebPermission other = target as WebPermission;
        if(other == null) {
            throw new ArgumentException(SR.GetString(SR.net_perm_target), "target");
        }
        if (m_noRestriction) {
            // Unrestricted intersected with anything is that other thing.
            return other.Copy();
        }
        if (other.m_noRestriction) {
            return Copy();
        }
        WebPermission result = new WebPermission();
        if (m_UnrestrictedConnect && other.m_UnrestrictedConnect) {
            result.m_UnrestrictedConnect = true;
        }
        else if (m_UnrestrictedConnect || other.m_UnrestrictedConnect) {
            // One side is ".*": the intersection is the restricted side's list.
            result.m_connectList = (ArrayList) (m_UnrestrictedConnect ? other : this).m_connectList.Clone();
        }
        else {
            intersectList(m_connectList, other.m_connectList, result.m_connectList);
        }
        if (m_UnrestrictedAccept && other.m_UnrestrictedAccept) {
            result.m_UnrestrictedAccept = true;
        }
        else if (m_UnrestrictedAccept || other.m_UnrestrictedAccept) {
            result.m_acceptList = (ArrayList) (m_UnrestrictedAccept ? other : this).m_acceptList.Clone();
        }
        else {
            intersectList(m_acceptList, other.m_acceptList, result.m_acceptList);
        }
        // return null if resulting permission is restricted and empty
        if (!result.m_UnrestrictedConnect && !result.m_UnrestrictedAccept &&
            result.m_connectList.Count == 0 && result.m_acceptList.Count == 0) {
            return null;
        }
        return result;
    }

    /// <devdoc>
    /// Restores this permission from its XML encoding; see ToXml for the shape.
    /// </devdoc>
    public override void FromXml(SecurityElement securityElement) {
        if (securityElement == null) {
            //
            // null SecurityElement
            //
            throw new ArgumentNullException("securityElement");
        }
        if (!securityElement.Tag.Equals("IPermission")) {
            //
            // SecurityElement must be a permission element
            //
            throw new ArgumentException(SR.GetString(SR.net_not_ipermission), "securityElement");
        }
        string className = securityElement.Attribute("class");
        if (className == null) {
            //
            // SecurityElement must be a permission element for this type
            //
            throw new ArgumentException(SR.GetString(SR.net_no_classname), "securityElement");
        }
        if (className.IndexOf(this.GetType().FullName) < 0) {
            //
            // SecurityElement must be a permission element for this type
            //
            throw new ArgumentException(SR.GetString(SR.net_no_typename), "securityElement");
        }
        String str = securityElement.Attribute("Unrestricted");
        // Reset to a clean, fully restricted state before repopulating.
        m_connectList = new ArrayList();
        m_acceptList = new ArrayList();
        m_UnrestrictedAccept = m_UnrestrictedConnect = false;
        if (str != null && string.Compare(str, "true", StringComparison.OrdinalIgnoreCase ) == 0) {
            m_noRestriction = true;
            return;
        }
        m_noRestriction = false;
        SecurityElement et = securityElement.SearchForChildByTag("ConnectAccess");
        string uriPattern;
        if (et != null) {
            foreach(SecurityElement uriElem in et.Children) {
                //NOTE: Any stuff coming from XML is treated as URI PATTERN!
                if (uriElem.Tag.Equals("URI")) {
                    try {
                        uriPattern = uriElem.Attribute("uri");
                    }
                    catch {
                        uriPattern = null;
                    }
                    if (uriPattern == null) {
                        throw new ArgumentException(SR.GetString(SR.net_perm_invalid_val_in_element), "ConnectAccess");
                    }
                    if (uriPattern == MatchAll) {
                        // ".*" wins over everything already parsed for this direction.
                        m_UnrestrictedConnect = true;
                        m_connectList = new ArrayList();
                        break;
                    }
                    else {
                        AddAsPattern(NetworkAccess.Connect, new DelayedRegex(uriPattern));
                    }
                }
                else {
                    // improper tag found, just ignore
                }
            }
        }
        et = securityElement.SearchForChildByTag("AcceptAccess");
        if (et != null) {
            foreach(SecurityElement uriElem in et.Children) {
                //NOTE: Any stuff coming from XML is treated as URI PATTERN!
                if (uriElem.Tag.Equals("URI")) {
                    try {
                        uriPattern = uriElem.Attribute("uri");
                    }
                    catch {
                        uriPattern = null;
                    }
                    if (uriPattern == null) {
                        throw new ArgumentException(SR.GetString(SR.net_perm_invalid_val_in_element), "AcceptAccess");
                    }
                    if (uriPattern == MatchAll) {
                        m_UnrestrictedAccept = true;
                        m_acceptList = new ArrayList();
                        break;
                    }
                    else {
                        AddAsPattern(NetworkAccess.Accept, new DelayedRegex(uriPattern));
                    }
                }
                else {
                    // improper tag found, just ignore
                }
            }
        }
    }

    /// <devdoc>
    /// Serializes this permission to a SecurityElement tree
    /// (IPermission root with optional ConnectAccess/AcceptAccess URI children).
    /// </devdoc>
    public override SecurityElement ToXml() {
        SecurityElement securityElement = new SecurityElement("IPermission");
        securityElement.AddAttribute( "class", this.GetType().FullName + ", " + this.GetType().Module.Assembly.FullName.Replace( '\"', '\'' ) );
        securityElement.AddAttribute( "version", "1" );
        if (!IsUnrestricted()) {
            String tempStr=null;
            if (m_UnrestrictedConnect || m_connectList.Count > 0) {
                SecurityElement connectElement = new SecurityElement( "ConnectAccess" );
                if (m_UnrestrictedConnect) {
                    SecurityElement uripattern = new SecurityElement("URI");
                    uripattern.AddAttribute("uri", SecurityElement.Escape(MatchAll));
                    connectElement.AddChild(uripattern);
                }
                else {
                    //NOTE All strings going to XML will become URI PATTERNS i.e. escaped to Regex
                    foreach(object obj in m_connectList) {
                        Uri uri = obj as Uri;
                        if(uri != null)
                            tempStr=Regex.Escape(uri.GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped));
                        else
                            tempStr=obj.ToString();
                        if (obj is string)
                            tempStr = Regex.Escape(tempStr);
                        SecurityElement uripattern = new SecurityElement("URI");
                        uripattern.AddAttribute("uri", SecurityElement.Escape(tempStr));
                        connectElement.AddChild(uripattern);
                    }
                }
                securityElement.AddChild( connectElement );
            }
            if (m_UnrestrictedAccept || m_acceptList.Count > 0) {
                SecurityElement acceptElement = new SecurityElement("AcceptAccess");
                if (m_UnrestrictedAccept) {
                    SecurityElement uripattern = new SecurityElement("URI");
                    uripattern.AddAttribute("uri", SecurityElement.Escape(MatchAll));
                    acceptElement.AddChild(uripattern);
                }
                else {
                    //NOTE All strings going to XML will become URI PATTERNS i.e. escaped to Regex
                    foreach(object obj in m_acceptList) {
                        Uri uri = obj as Uri;
                        if(uri != null)
                            tempStr=Regex.Escape(uri.GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped));
                        else
                            tempStr=obj.ToString();
                        if (obj is string)
                            tempStr = Regex.Escape(tempStr);
                        SecurityElement uripattern = new SecurityElement("URI");
                        uripattern.AddAttribute("uri", SecurityElement.Escape(tempStr));
                        acceptElement.AddChild(uripattern);
                    }
                }
                securityElement.AddChild( acceptElement );
            }
        }
        else {
            securityElement.AddAttribute( "Unrestricted", "true" );
        }
        return securityElement;
    }

    // Verifies a single Uri against a set of regular expressions
    private static bool isMatchedURI(object uriToCheck, ArrayList uriPatternList) {
        string stringUri = uriToCheck as string;
        foreach(object uriPattern in uriPatternList) {
            DelayedRegex R = uriPattern as DelayedRegex;
            //perform case insensitive comparison of final URIs or strings, a Uri is never equal compares a string (strings are invalid Uris)
            if(R == null) {
                if (uriToCheck.GetType() == uriPattern.GetType()) {
                    if (stringUri != null && string.Compare(stringUri, (string)uriPattern, StringComparison.OrdinalIgnoreCase ) == 0) {
                        return true;
                    }
                    else if(stringUri == null && uriToCheck.Equals(uriPattern)) {
                        return true;
                    }
                }
                continue;
            }
            //Otherwise trying match final URI against given Regex pattern
            string s = stringUri != null? stringUri: ((Uri)uriToCheck).GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped);
            Match M = R.AsRegex.Match(s);
            if ((M != null)                     // Found match for the regular expression?
                && (M.Index == 0)               // ... which starts at the beginning
                && (M.Length == s.Length)) {    // ... and the whole string matched
                return true;
            }
            if (stringUri != null)
                continue;
            //
            // check if the URI was presented in non-canonical form
            //
            s = ((Uri)uriToCheck).GetComponents(UriComponents.HttpRequestUrl, UriFormat.SafeUnescaped);
            M = R.AsRegex.Match(s);
            if ((M != null)                     // Found match for the regular expression?
                && (M.Index == 0)               // ... which starts at the beginning
                && (M.Length == s.Length)) {    // ... and the whole string matched
                return true;
            }
        }
        return false;
    }

    // We should keep the result as compact as possible since otherwise even
    // simple scenarios in Policy Wizard won't work due to repeated Union/Intersect calls
    // The issue comes from the "hard" Regex.IsSubsetOf(Regex) problem.
    // Intersects lists A and B into result. Two rounds: (1) copy entries that are
    // identical in both lists; (2) pairwise-intersect the leftovers via intersectPair,
    // de-duplicating against what is already in result.
    private static void intersectList(ArrayList A, ArrayList B, ArrayList result) {
        bool[]  aDone = new bool[A.Count];
        bool[]  bDone = new bool[B.Count];
        int     ia=0, ib;
        // The optimization is done according to the following truth
        // (A|B|C) intersect (B|C|E|D)) == B|C|(A inter E)|(A inter D)
        //
        // We also check on any duplicates in the result

        // Round 1st
        // Getting rid of same permissions in the input arrays (assuming X /\ X = X)
        foreach (object a in A) {
            ib = 0;
            foreach (object b in B) {
                // check to see if b is in the result already
                if (!bDone[ib]) {
                    //if both are regexes or both are Uris or both are strings
                    if (a.GetType() == b.GetType()) {
                        if (a is Uri) {
                            // both are uris
                            if (a.Equals(b)) {
                                result.Add(a);
                                aDone[ia]=bDone[ib]=true;
                                //since permissions are ORed we can break and go to the next A
                                break;
                            }
                        }
                        else {
                            // regexes and strings uses ToString() output
                            if (string.Compare(a.ToString(), b.ToString(), StringComparison.OrdinalIgnoreCase ) == 0) {
                                result.Add(a);
                                aDone[ia]=bDone[ib]=true;
                                //since permissions are ORed we can break and go to the next A
                                break;
                            }
                        }
                    }
                }
                ++ib;
            } //foreach b in B
            ++ia;
        } //foreach a in A

        ia = 0;
        // Round second
        // Grab only intersections of objects not found in both A and B
        foreach (object a in A) {
            if (!aDone[ia]) {
                ib = 0;
                foreach(object b in B) {
                    if (!bDone[ib]) {
                        bool resultUri;
                        object intesection = intersectPair(a, b, out resultUri);
                        if (intesection != null) {
                            bool found = false;
                            // check to see if we already have the same result
                            foreach (object obj in result) {
                                if (resultUri == (obj is Uri)) {
                                    // Uris compare with Equals, everything else by text.
                                    if(resultUri ? intesection.Equals(obj) : string.Compare(obj.ToString(), intesection.ToString(), StringComparison.OrdinalIgnoreCase ) == 0) {
                                        found = true;
                                        break;
                                    }
                                }
                            }
                            if (!found) {
                                result.Add(intesection);
                            }
                        }
                    }
                    ++ib;
                }
            }
            ++ia;
        }
    }

    // Intersects a single pair of entries. Returns the intersection entry (or null if
    // disjoint) and reports via isUri whether that entry is a Uri.
    private static object intersectPair(object L, object R, out bool isUri) {
        //VERY OLD OPTION: return new Regex("(?=(" + ((Regex)X[i]).ToString()+ "))(" + ((Regex)Y[j]).ToString() + ")","i");
        //STILL OLD OPTION: return new Regex("(?=.*?(" + L.ToString() + "))" + "(?=.*?(" + R.ToString() + "))"); // check RegexSpec.doc
        //CURRENT OPTION: return new Regex("(?=(" + L.ToString() + "))(" + R.ToString() + ")", RegexOptions.IgnoreCase );
        isUri = false;
        DelayedRegex L_Pattern =L as DelayedRegex;
        DelayedRegex R_Pattern =R as DelayedRegex;
        if(L_Pattern != null && R_Pattern != null) {
            //both are Regex: combine via a lookahead so a candidate must match both.
            return new DelayedRegex("(?=(" + L_Pattern.ToString() + "))(" + R_Pattern.ToString() + ")");
        }
        else if(L_Pattern != null && R_Pattern == null) {
            //only L is a Regex: keep R iff the pattern fully matches it.
            isUri = R is Uri;
            string uriString = isUri? ((Uri)R).GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped): R.ToString();
            Match M = L_Pattern.AsRegex.Match(uriString);
            if ((M != null)                             // Found match for the regular expression?
                && (M.Index == 0)                       // ... which starts at the beginning
                && (M.Length == uriString.Length)) {    // ... and the whole string matched
                return R;
            }
            return null;
        }
        else if(L_Pattern == null && R_Pattern != null) {
            //only R is a Regex: keep L iff the pattern fully matches it.
            isUri = L is Uri;
            string uriString = isUri? ((Uri)L).GetComponents(UriComponents.HttpRequestUrl, UriFormat.UriEscaped): L.ToString();
            Match M = R_Pattern.AsRegex.Match(uriString);
            if ((M != null)                             // Found match for the regular expression?
                && (M.Index == 0)                       // ... which starts at the beginning
                && (M.Length == uriString.Length)) {    // ... and the whole string matched
                return L;
            }
            return null;
        }
        //both are Uris or strings: intersection is the entry itself when equal, else empty.
        isUri = L is Uri;
        if (isUri)
            return L.Equals(R)? L : null;
        else
            return string.Compare(L.ToString(), R.ToString(), StringComparison.OrdinalIgnoreCase ) == 0? L : null;
    }
} // class WebPermission
} // namespace System.Net
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Xunit.Performance;
using Xunit;

namespace System.Memory.Tests
{
    /// <summary>
    /// Micro-benchmarks comparing <c>Array.BinarySearch</c> with the
    /// <c>Span&lt;T&gt;.BinarySearch</c> extension for int and string keys.
    /// Each scenario probes a characteristic position in a sorted sequence:
    /// first, middle and last element, plus the two "not found" results just
    /// before (index -1) and just after (~size) the range.
    /// </summary>
    public class Perf_Span_BinarySearch
    {
        // Number of searches folded into a single measured benchmark iteration.
        private const int InnerCount = 100000;

        // "D9" zero-pads numbers to nine digits so the lexicographic order of
        // the generated strings matches the numeric order, keeping the string
        // arrays/spans sorted as binary search requires.
        private const string NumberFormat = "D9";

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_Int_FirstIndex(int size)
        {
            BenchmarkAndAssertArray(size, 0, 0);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_Int_MiddleIndex(int size)
        {
            BenchmarkAndAssertArray(size, (size - 1) / 2, (size - 1) / 2);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_Int_LastIndex(int size)
        {
            BenchmarkAndAssertArray(size, size - 1, size - 1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_Int_NotFoundBefore(int size)
        {
            // -1 is smaller than every element; BinarySearch returns ~0 == -1.
            BenchmarkAndAssertArray(size, -1, -1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_Int_NotFoundAfter(int size)
        {
            // 'size' is larger than every element; BinarySearch returns ~size.
            BenchmarkAndAssertArray(size, size, ~size);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_String_FirstIndex(int size)
        {
            BenchmarkAndAssertArray(size, 0.ToString(NumberFormat), 0);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_String_MiddleIndex(int size)
        {
            BenchmarkAndAssertArray(size, ((size - 1) / 2).ToString(NumberFormat), (size - 1) / 2);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_String_LastIndex(int size)
        {
            BenchmarkAndAssertArray(size, (size - 1).ToString(NumberFormat), size - 1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_String_NotFoundBefore(int size)
        {
            // "/" is just before zero in character table
            BenchmarkAndAssertArray(size, "/", -1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void ArrayBinarySearch_String_NotFoundAfter(int size)
        {
            BenchmarkAndAssertArray(size, (size).ToString(NumberFormat), ~size);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_Int_FirstIndex(int size)
        {
            BenchmarkAndAssertSpan(size, 0, 0);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_Int_MiddleIndex(int size)
        {
            BenchmarkAndAssertSpan(size, (size - 1) / 2, (size - 1) / 2);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_Int_LastIndex(int size)
        {
            BenchmarkAndAssertSpan(size, size - 1, size - 1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_Int_NotFoundBefore(int size)
        {
            BenchmarkAndAssertSpan(size, -1, -1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_Int_NotFoundAfter(int size)
        {
            BenchmarkAndAssertSpan(size, size, ~size);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_String_FirstIndex(int size)
        {
            BenchmarkAndAssertSpan(size, 0.ToString(NumberFormat), 0);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_String_MiddleIndex(int size)
        {
            BenchmarkAndAssertSpan(size, ((size - 1) / 2).ToString(NumberFormat), (size - 1) / 2);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_String_LastIndex(int size)
        {
            BenchmarkAndAssertSpan(size, (size - 1).ToString(NumberFormat), size - 1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_String_NotFoundBefore(int size)
        {
            // "/" is just before zero in character table
            BenchmarkAndAssertSpan(size, "/", -1);
        }

        [Benchmark(InnerIterationCount = InnerCount)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        [InlineData(1000)]
        public void SpanBinarySearch_String_NotFoundAfter(int size)
        {
            BenchmarkAndAssertSpan(size, (size).ToString(NumberFormat), ~size);
        }

        // Convenience overload: int keys, identity value generator.
        private static void BenchmarkAndAssertArray(int size, int value, int expectedIndex)
        {
            BenchmarkAndAssertArray(size, i => i, value, expectedIndex);
        }

        // Convenience overload: string keys, zero-padded value generator.
        private static void BenchmarkAndAssertArray(int size, string value, int expectedIndex)
        {
            BenchmarkAndAssertArray(size, i => i.ToString(NumberFormat), value, expectedIndex);
        }

        /// <summary>
        /// Builds a sorted array of <paramref name="size"/> elements via
        /// <paramref name="toValue"/>, measures repeated Array.BinarySearch
        /// calls, and asserts the (constant) result afterwards.
        /// </summary>
        private static void BenchmarkAndAssertArray<T>(int size, Func<int, T> toValue, T value, int expectedIndex)
            where T : IComparable<T>
        {
            var array = new T[size];
            for (int i = 0; i < array.Length; i++)
            {
                array[i] = toValue(i);
            }
            int index = 0;
            foreach (BenchmarkIteration iteration in Benchmark.Iterations)
            {
                using (iteration.StartMeasurement())
                {
                    for (int i = 0; i < Benchmark.InnerIterationCount; i++)
                    {
                        // OR-fold the result so the call cannot be optimized
                        // away. Every call returns the same value, so the fold
                        // equals that value in the final assert (0 | v | v == v).
                        index |= Array.BinarySearch(array, value);
                    }
                }
            }
            Assert.Equal(expectedIndex, index);
        }

        // Convenience overload: int keys, identity value generator.
        private static void BenchmarkAndAssertSpan(int size, int value, int expectedIndex)
        {
            BenchmarkAndAssertSpan(size, i => i, value, expectedIndex);
        }

        // Convenience overload: string keys, zero-padded value generator.
        private static void BenchmarkAndAssertSpan(int size, string value, int expectedIndex)
        {
            BenchmarkAndAssertSpan(size, i => i.ToString(NumberFormat), value, expectedIndex);
        }

        /// <summary>
        /// Builds a sorted span of <paramref name="size"/> elements via
        /// <paramref name="toValue"/>, measures repeated Span.BinarySearch
        /// calls, and asserts the (constant) result afterwards.
        /// </summary>
        private static void BenchmarkAndAssertSpan<T>(int size, Func<int, T> toValue, T value, int expectedIndex)
            where T : IComparable<T>
        {
            Span<T> span = new T[size];
            for (int i = 0; i < span.Length; i++)
            {
                span[i] = toValue(i);
            }
            int index = 0;
            foreach (BenchmarkIteration iteration in Benchmark.Iterations)
            {
                using (iteration.StartMeasurement())
                {
                    for (int i = 0; i < Benchmark.InnerIterationCount; i++)
                    {
                        // OR-fold for the same dead-code-elimination reason as
                        // in the array helper above.
                        index |= span.BinarySearch(value);
                    }
                }
            }
            Assert.Equal(expectedIndex, index);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using Moonfish.Tags;

namespace Moonfish.Guerilla
{
    /// <summary>
    /// A <see cref="BinaryWriter"/> that lets callers queue variable-sized data
    /// (child block arrays and raw buffers) for deferred writing. Each queued
    /// item is assigned a <see cref="BlamPointer"/> at the current end of the
    /// serialized layout (<c>_queueAddress</c>), so pointers to the data can be
    /// written into the fixed part of the stream before the data itself is
    /// flushed by <see cref="WriteQueue"/>.
    /// </summary>
    public class QueueableBinaryWriter : BinaryWriter
    {
        // Maps the queued array instance (reference identity) to its queue item
        // so WritePointer can look up the pointer assigned to a given field.
        private readonly Dictionary<object, QueueItem> _lookupDictionary;
        // Items are written back in the exact order they were enqueued.
        private readonly Queue<QueueItem> _queue;
        // Next free address in the layout; advanced every time an item is enqueued.
        private int _queueAddress;

        /// <summary>
        /// Initializes the writer over <paramref name="output"/>; the stream is
        /// left open when the writer is disposed (leaveOpen: true).
        /// </summary>
        /// <param name="output">Destination stream.</param>
        /// <param name="serializedSize">Size of the fixed (root) data; queued
        /// data is laid out starting at this address.</param>
        public QueueableBinaryWriter( Stream output, int serializedSize ) : base( output, Encoding.Default, true )
        {
            _queueAddress = serializedSize;
            _queue = new Queue<QueueItem>( 100 );
            _lookupDictionary = new Dictionary<object, QueueItem>( 100 );
        }

        /// <summary>
        /// Queues an array of child blocks for deferred writing, then lets each
        /// block queue its own nested writes (depth-first layout).
        /// </summary>
        public void QueueWrite( GuerillaBlock[] dataBlocks )
        {
            // if the array is empty there's nothing to write, so return
            if ( dataBlocks.Length <= 0 ) return;

            Enqueue( dataBlocks );
            // all guerilla blocks implement IWriteQueueable
            foreach ( IWriteQueueable block in dataBlocks ) block.QueueWrites( this );
        }

        /// <summary>Queues a raw byte buffer for deferred writing.</summary>
        public void QueueWrite( byte[] data )
        {
            // if the array is empty there's nothing to write, so return
            if ( data.Length <= 0 ) return;

            Enqueue( data );
        }

        /// <summary>Queues a raw short buffer for deferred writing.</summary>
        public void QueueWrite( short[] data )
        {
            // if the array is empty there's nothing to write, so return
            if ( data.Length <= 0 ) return;

            Enqueue( data );
        }

        /// <summary>
        /// Writes the BlamPointer previously assigned to <paramref name="instanceFIeld"/>
        /// by a QueueWrite call, or <see cref="BlamPointer.Null"/> if the field
        /// was never queued (e.g. it was empty).
        /// </summary>
        public void WritePointer( object instanceFIeld )
        {
            QueueItem queueItem;
            this.Write( _lookupDictionary.TryGetValue( instanceFIeld, out queueItem )
                ? queueItem.Pointer
                : BlamPointer.Null );
        }

        /// <summary>
        /// Drains the queue, seeking to each item's assigned start address and
        /// writing its data. Items are processed strictly in enqueue order, so
        /// the stream only ever seeks forward (asserted in DEBUG builds).
        /// </summary>
        public void WriteQueue( )
        {
            while ( _queue.Count > 0 )
            {
                var item = Dequeue( );

                // if the pointer has data, and the stream is not already at the data start address
                // then seek the data start address using a current stream offset to preserve the read/write
                // cache.
                if ( !BlamPointer.IsNull( item.Pointer ) && BaseStream.Position != item.Pointer.StartAddress )
                {
#if DEBUG
                    var offset = item.Pointer.StartAddress - BaseStream.Position;
                    if ( offset < 0 )
                    {
                        // Backwards seek means the precomputed layout is wrong.
                        throw new Exception( "That breaks the maps" );
                    }
#endif
                    // NOTE(review): single-argument Seek — presumably a stream
                    // extension method in this project that seeks relative to
                    // the current position's cache window; confirm.
                    BaseStream.Seek( item.Pointer.StartAddress );
                }

                var dataQueueItem = item as ByteDataQueueItem;
                if ( dataQueueItem != null )
                {
                    Write( dataQueueItem.Data );
                    continue;
                }
                var shortDataQueueItem = item as ShortDataQueueItem;
                if ( shortDataQueueItem != null )
                {
                    // Write element-wise so each short goes through the writer's
                    // endianness handling rather than as a raw byte copy.
                    var buffer = shortDataQueueItem.Data;
                    for ( var i = 0; i < buffer.Length; ++i )
                        Write( shortDataQueueItem.Data[ i ] );
                    continue;
                }
                var guerillaBlockQueueItem = item as GuerillaQueueItem;
                if ( guerillaBlockQueueItem == null ) continue;

                // then foreach element in the block array call the write method
                foreach ( GuerillaBlock block in guerillaBlockQueueItem.DataBlocks )
                {
                    block.Write_( this );
                }
            }
        }

        /// <summary>Queues a byte buffer with an explicit address alignment.</summary>
        internal void QueueWrite( byte[] data, int alignment )
        {
            Enqueue( data, alignment );
        }

        // Removes the next item from both the queue and the pointer lookup.
        private QueueItem Dequeue( )
        {
            var queueItem = _queue.Dequeue( );
            _lookupDictionary.Remove( queueItem.ReferenceField );
            return queueItem;
        }

        // Assigns the next layout address to the byte buffer and records it.
        private void Enqueue( byte[] data, int alignment = 4 )
        {
            var blamPointer = new BlamPointer( data.Length, _queueAddress, 1, alignment );
            var dataQueueItem = new ByteDataQueueItem( data ) {Pointer = blamPointer};
            _lookupDictionary[ data ] = dataQueueItem;
            _queue.Enqueue( dataQueueItem );
            // Layout grows past this item; next enqueue starts after it.
            _queueAddress = blamPointer.EndAddress;
        }

        // Assigns the next layout address to the short buffer (element size 2).
        private void Enqueue( short[] data )
        {
            var blamPointer = new BlamPointer( data.Length, _queueAddress, 2 );
            var dataQueueItem = new ShortDataQueueItem( data ) {Pointer = blamPointer};
            _lookupDictionary[ data ] = dataQueueItem;
            _queue.Enqueue( dataQueueItem );
            _queueAddress = blamPointer.EndAddress;
        }

        // Assigns the next layout address to the block array using the blocks'
        // own element size and alignment.
        private void Enqueue( GuerillaBlock[] dataBlocks )
        {
            var elementSize = dataBlocks.GetElementSize( );
            var alignment = dataBlocks.GetAlignment( );
            var blamPointer = new BlamPointer( dataBlocks.Length, _queueAddress, elementSize, alignment );
            var guerillaQueueItem = new GuerillaQueueItem( dataBlocks ) {Pointer = blamPointer};
            _lookupDictionary[ dataBlocks ] = guerillaQueueItem;
            _queue.Enqueue( guerillaQueueItem );
            _queueAddress = blamPointer.EndAddress;
        }

        /// <summary>
        /// Base class for queued entries: pairs the queued reference with the
        /// BlamPointer describing where its data will be written.
        /// </summary>
        private abstract class QueueItem
        {
            public abstract BlamPointer Pointer { get; set; }
            // The original array instance, used as the lookup key.
            public abstract object ReferenceField { get; }
        };

        // Queued raw byte buffer.
        private class ByteDataQueueItem : QueueItem
        {
            public ByteDataQueueItem( byte[] data )
            {
                Data = data;
            }

            public byte[] Data { get; private set; }
            public override BlamPointer Pointer { get; set; }

            public override object ReferenceField
            {
                get { return Data; }
            }
        }

        // Queued raw short buffer.
        private class ShortDataQueueItem : QueueItem
        {
            public ShortDataQueueItem( short[] data )
            {
                Data = data;
            }

            public short[] Data { get; private set; }
            public override BlamPointer Pointer { get; set; }

            public override object ReferenceField
            {
                get { return Data; }
            }
        }

        // Queued array of child guerilla blocks.
        private class GuerillaQueueItem : QueueItem
        {
            public GuerillaQueueItem( GuerillaBlock[] dataBlocks )
            {
                DataBlocks = dataBlocks;
            }

            public GuerillaBlock[] DataBlocks { get; private set; }
            public override BlamPointer Pointer { get; set; }

            public override object ReferenceField
            {
                get { return DataBlocks; }
            }
        };
    };
}
// Copyright (C) 2014 dot42
//
// Original filename: FileStream.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using Java.IO;

namespace System.IO
{
    /// <summary>
    /// Minimal FileStream implementation backed by a Java
    /// <see cref="RandomAccessFile"/> (dot42 bridge).
    /// </summary>
    public class FileStream : Stream
    {
        private readonly RandomAccessFile file;
        private readonly FileAccess access;

        /// <summary>
        /// Initialize a filestream from and input and/or output stream.
        /// </summary>
        private FileStream(RandomAccessFile file, FileAccess access)
        {
            if (file == null)
                throw new ArgumentNullException("file");
            this.file = file;
            this.access = access;
        }

        /// <summary>
        /// Open a new file stream for the given path, mode and access.
        /// </summary>
        /// <param name="path">Path of the file</param>
        /// <param name="mode">How to open the file</param>
        /// <param name="access">Read/write access</param>
        public FileStream(string path, FileMode mode, FileAccess access)
            : this(Open(path, mode, access), access)
        {
        }

        /// <summary>
        /// Open a new file stream for the given path, mode and access.
        /// The <paramref name="share"/> value is accepted for API compatibility
        /// but ignored by this implementation.
        /// </summary>
        /// <param name="path">Path of the file</param>
        /// <param name="mode">How to open the file</param>
        /// <param name="access">Read/write access</param>
        /// <param name="share">The file share</param>
        public FileStream(string path, FileMode mode, FileAccess access, FileShare share)
            : this(Open(path, mode, access), access)
        {
        }

        /// <summary>
        /// Open a new file stream for the given path, mode and access.
        /// The <paramref name="share"/> and <paramref name="bufferSize"/> values
        /// are accepted for API compatibility but ignored (no buffering layer).
        /// </summary>
        public FileStream(string path, FileMode mode, FileAccess access, FileShare share, int bufferSize)
            : this(Open(path, mode, access), access)
        {
        }

        /// <summary>
        /// Open a new file stream for the given path and mode; access is derived
        /// from the mode (Append => Write, otherwise ReadWrite).
        /// </summary>
        /// <param name="path">Path of the file</param>
        /// <param name="mode">How to open the file</param>
        public FileStream(string path, FileMode mode)
            : this(path, mode, ModeToAccess(mode))
        {
        }

        /// <summary>
        /// Is reading from this stream supported?
        /// </summary>
        public override bool CanRead
        {
            get { return ((access & FileAccess.Read) == FileAccess.Read); }
        }

        /// <summary>
        /// Is writing to this stream supported?
        /// </summary>
        public override bool CanWrite
        {
            get { return ((access & FileAccess.Write) == FileAccess.Write); }
        }

        /// <summary>
        /// Is seeking supported?
        /// </summary>
        public override bool CanSeek
        {
            get { return true; }
        }

        /// <summary>
        /// Gets the length of this sequence in bytes.
        /// </summary>
        public override long Length
        {
            get { return file.Length(); }
        }

        /// <summary>
        /// Gets/sets the position within this stream.
        /// </summary>
        public override long Position
        {
            get { return file.FilePointer; }
            set { file.Seek(value); }
        }

        /// <summary>
        /// Ensure that any buffered data is written to the underlying device.
        /// RandomAccessFile writes go straight through, so there is nothing to flush.
        /// </summary>
        public override void Flush()
        {
        }

        /// <summary>
        /// Read a sequence of bytes from this stream and advance the position of this stream.
        /// </summary>
        /// <param name="buffer">Destination</param>
        /// <param name="offset">Offset within the buffer</param>
        /// <param name="count">Number of bytes to read.</param>
        /// <returns>The total number of bytes read or 0 if the end of the stream has been reached.</returns>
        public override int Read(byte[] buffer, int offset, int count)
        {
            // Java returns -1 at end-of-file; .NET streams return 0.
            var rc = file.Read(buffer, offset, count);
            return (rc < 0) ? 0 : rc;
        }

        /// <summary>
        /// Set the position of this stream.
        /// </summary>
        /// <param name="offset">Byte offset relative to origin.</param>
        /// <param name="origin">Reference point.</param>
        /// <returns>New position of the stream.</returns>
        /// <exception cref="IOException">Target position is outside the file.</exception>
        public override long Seek(long offset, SeekOrigin origin)
        {
            long target;
            switch (origin)
            {
                case SeekOrigin.Current:
                    target = Position + offset;
                    break;
                case SeekOrigin.Begin:
                    target = offset;
                    break;
                case SeekOrigin.End:
                    target = file.Length() + offset;
                    break;
                default:
                    throw new ArgumentException("Invalid origin");
            }
            // NOTE(review): .NET FileStream allows seeking past the end of the
            // file; this implementation rejects it. Kept as-is because callers
            // may rely on the exception — confirm before relaxing.
            if ((target < 0) || (target > file.Length()))
            {
                throw new IOException("Seek outside file limits");
            }
            file.Seek(target);
            return Position;
        }

        /// <summary>
        /// Sets the length of the current stream.
        /// </summary>
        /// <param name="value">The new length of the stream.</param>
        public override void SetLength(long value)
        {
            file.SetLength(value);
        }

        /// <summary>
        /// Write a sequence of bytes to this stream and advance the position of this stream.
        /// </summary>
        /// <param name="buffer">Source buffer</param>
        /// <param name="offset">Offset within the buffer</param>
        /// <param name="count">Number of bytes to write.</param>
        public override void Write(byte[] buffer, int offset, int count)
        {
            file.Write(buffer, offset, count);
        }

        /// <summary>
        /// Open a random access file for the given path, mode and access.
        /// </summary>
        /// <exception cref="IOException">CreateNew and the file already exists.</exception>
        /// <exception cref="FileNotFoundException">Open and the file does not exist.</exception>
        private static RandomAccessFile Open(string path, FileMode mode, FileAccess access)
        {
            var file = new Java.IO.File(path);
            switch (mode)
            {
                case FileMode.CreateNew:
                    if (file.Exists())
                        throw new IOException("File already exists");
                    break;
                case FileMode.Open:
                    if (!file.Exists())
                        throw new FileNotFoundException(path);
                    break;
                case FileMode.Append:
                    // Append implies write-only access.
                    access = FileAccess.Write;
                    break;
            }
            switch (mode)
            {
                case FileMode.Create:
                case FileMode.CreateNew:
                case FileMode.OpenOrCreate:
                    if (access == FileAccess.Read)
                    {
                        // Create an empty file first so it can be opened again
                        // with read-only rights; otherwise a FileNotFoundException
                        // is thrown. Close it immediately — the original left this
                        // handle open, leaking a file descriptor until finalization.
                        var createFile = new RandomAccessFile(file, "rw");
                        createFile.Close();
                    }
                    break;
            }
            var jMode = (access == FileAccess.Read) ? "r" : "rw";
            var randomAccessFile = new RandomAccessFile(file, jMode);
            switch (mode)
            {
                case FileMode.Truncate:
                    randomAccessFile.SetLength(0);
                    break;
                case FileMode.Create:
                    // FileMode.Create means "create or overwrite": an existing
                    // file must lose its old content. The original skipped this,
                    // leaving stale data behind the write position. Truncation is
                    // impossible (and meaningless) on a read-only handle.
                    if (access != FileAccess.Read)
                    {
                        randomAccessFile.SetLength(0);
                    }
                    break;
                case FileMode.Append:
                    // Start writing at the current end of the file.
                    randomAccessFile.Seek(randomAccessFile.Length());
                    break;
            }
            return randomAccessFile;
        }

        /// <summary>
        /// Derive a file access from a file mode.
        /// </summary>
        private static FileAccess ModeToAccess(FileMode mode)
        {
            switch (mode)
            {
                case FileMode.Append:
                    return FileAccess.Write;
                default:
                    return FileAccess.ReadWrite;
            }
        }

        /// <summary>
        /// Closes the underlying file. Close failures are deliberately
        /// swallowed: Dispose must not throw.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                try
                {
                    file.Close();
                }
                catch
                {
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Reflection.Emit;
using System.Runtime.CompilerServices;
using System.Dynamic.Utils;
using static System.Linq.Expressions.CachedReflectionInfo;
using System.Collections;

namespace System.Linq.Expressions.Compiler
{
    internal enum VariableStorageKind
    {
        Local,
        Hoisted
    }

    /// <summary>
    /// CompilerScope is the data structure which the Compiler keeps information
    /// related to compiling scopes. It stores the following information:
    ///   1. Parent relationship (for resolving variables)
    ///   2. Information about hoisted variables
    ///   3. Information for resolving closures
    ///
    /// Instances are produced by VariableBinder, which does a tree walk
    /// looking for scope nodes: LambdaExpression, BlockExpression, and CatchBlock.
    /// </summary>
    internal sealed partial class CompilerScope
    {
        /// <summary>
        /// parent scope, if any
        /// </summary>
        private CompilerScope _parent;

        /// <summary>
        /// The expression node for this scope
        /// Can be LambdaExpression, BlockExpression, or CatchBlock
        /// </summary>
        internal readonly object Node;

        /// <summary>
        /// True if this node corresponds to an IL method.
        /// Can only be true if the Node is a LambdaExpression.
        /// But inlined lambdas will have it set to false.
        /// </summary>
        internal readonly bool IsMethod;

        /// <summary>
        /// Does this scope (or any inner scope) close over variables from any
        /// parent scope?
        /// Populated by VariableBinder
        /// </summary>
        internal bool NeedsClosure;

        /// <summary>
        /// Variables defined in this scope, and whether they're hoisted or not
        /// Populated by VariableBinder
        /// </summary>
        // Fix: the original also had a field initializer allocating a dictionary
        // that the constructor unconditionally replaced — a wasted allocation
        // per scope. The constructor is the single point of assignment now.
        internal readonly Dictionary<ParameterExpression, VariableStorageKind> Definitions;

        /// <summary>
        /// Each variable referenced within this scope, and how often it was referenced
        /// Populated by VariableBinder
        /// </summary>
        internal Dictionary<ParameterExpression, int> ReferenceCount;

        /// <summary>
        /// Scopes whose variables were merged into this one
        ///
        /// Created lazily as we create hundreds of compiler scopes w/o merging scopes when compiling rules.
        /// </summary>
        internal HashSet<BlockExpression> MergedScopes;

        /// <summary>
        /// The scope's hoisted locals, if any.
        /// Provides storage for variables that are referenced from nested lambdas
        /// </summary>
        private HoistedLocals _hoistedLocals;

        /// <summary>
        /// The closed over hoisted locals
        /// </summary>
        private HoistedLocals _closureHoistedLocals;

        /// <summary>
        /// Mutable dictionary that maps non-hoisted variables to either local
        /// slots or argument slots
        /// </summary>
        private readonly Dictionary<ParameterExpression, Storage> _locals = new Dictionary<ParameterExpression, Storage>();

        internal CompilerScope(object node, bool isMethod)
        {
            Node = node;
            IsMethod = isMethod;
            IReadOnlyList<ParameterExpression> variables = GetVariables(node);

            // All variables start as plain locals; VariableBinder may promote
            // some to Hoisted later.
            Definitions = new Dictionary<ParameterExpression, VariableStorageKind>(variables.Count);
            foreach (ParameterExpression v in variables)
            {
                Definitions.Add(v, VariableStorageKind.Local);
            }
        }

        /// <summary>
        /// This scope's hoisted locals, or the closed over locals, if any
        /// Equivalent to: _hoistedLocals ?? _closureHoistedLocals
        /// </summary>
        internal HoistedLocals NearestHoistedLocals
        {
            get { return _hoistedLocals ?? _closureHoistedLocals; }
        }

        /// <summary>
        /// Called when entering a lambda/block. Performs all variable allocation
        /// needed, including creating hoisted locals and IL locals for accessing
        /// parent locals
        /// </summary>
        internal CompilerScope Enter(LambdaCompiler lc, CompilerScope parent)
        {
            SetParent(lc, parent);

            AllocateLocals(lc);

            if (IsMethod && _closureHoistedLocals != null)
            {
                EmitClosureAccess(lc, _closureHoistedLocals);
            }

            EmitNewHoistedLocals(lc);

            if (IsMethod)
            {
                EmitCachedVariables();
            }

            return this;
        }

        /// <summary>
        /// Frees unnamed locals, clears state associated with this compiler
        /// </summary>
        internal CompilerScope Exit()
        {
            // free scope's variables
            if (!IsMethod)
            {
                foreach (Storage storage in _locals.Values)
                {
                    storage.FreeLocal();
                }
            }

            // Clear state that is associated with this parent
            // (because the scope can be reused in another context)
            CompilerScope parent = _parent;
            _parent = null;
            _hoistedLocals = null;
            _closureHoistedLocals = null;
            _locals.Clear();

            return parent;
        }

        #region RuntimeVariablesExpression support

        internal void EmitVariableAccess(LambdaCompiler lc, ReadOnlyCollection<ParameterExpression> vars)
        {
            if (NearestHoistedLocals != null && vars.Count > 0)
            {
                // Find what array each variable is on & its index
                var indexes = new ArrayBuilder<long>(vars.Count);

                foreach (ParameterExpression variable in vars)
                {
                    // For each variable, find what array it's defined on
                    ulong parents = 0;
                    HoistedLocals locals = NearestHoistedLocals;
                    while (!locals.Indexes.ContainsKey(variable))
                    {
                        parents++;
                        locals = locals.Parent;
                        Debug.Assert(locals != null);
                    }

                    // combine the number of parents we walked, with the
                    // real index of variable to get the index to emit.
                    ulong index = (parents << 32) | (uint)locals.Indexes[variable];

                    indexes.UncheckedAdd((long)index);
                }

                EmitGet(NearestHoistedLocals.SelfVariable);
                lc.EmitConstantArray(indexes.ToArray());

                lc.IL.Emit(OpCodes.Call, RuntimeOps_CreateRuntimeVariables_ObjectArray_Int64Array);
            }
            else
            {
                // No visible variables
                lc.IL.Emit(OpCodes.Call, RuntimeOps_CreateRuntimeVariables);
            }
        }

        #endregion

        #region Variable access

        /// <summary>
        /// Adds a new virtual variable corresponding to an IL local
        /// </summary>
        internal void AddLocal(LambdaCompiler gen, ParameterExpression variable)
        {
            _locals.Add(variable, new LocalStorage(gen, variable));
        }

        internal void EmitGet(ParameterExpression variable)
        {
            ResolveVariable(variable).EmitLoad();
        }

        internal void EmitSet(ParameterExpression variable)
        {
            ResolveVariable(variable).EmitStore();
        }

        internal void EmitAddressOf(ParameterExpression variable)
        {
            ResolveVariable(variable).EmitAddress();
        }

        private Storage ResolveVariable(ParameterExpression variable)
        {
            return ResolveVariable(variable, NearestHoistedLocals);
        }

        /// <summary>
        /// Resolve a local variable in this scope or a closed over scope
        /// Throws if the variable is not defined
        /// </summary>
        private Storage ResolveVariable(ParameterExpression variable, HoistedLocals hoistedLocals)
        {
            // Search IL locals and arguments, but only in this lambda
            for (CompilerScope s = this; s != null; s = s._parent)
            {
                Storage storage;
                if (s._locals.TryGetValue(variable, out storage))
                {
                    return storage;
                }

                // if this is a lambda, we're done
                if (s.IsMethod)
                {
                    break;
                }
            }

            // search hoisted locals
            for (HoistedLocals h = hoistedLocals; h != null; h = h.Parent)
            {
                int index;
                if (h.Indexes.TryGetValue(variable, out index))
                {
                    return new ElementBoxStorage(
                        ResolveVariable(h.SelfVariable, hoistedLocals),
                        index,
                        variable
                    );
                }
            }

            //
            // If this is an unbound variable in the lambda, the error will be
            // thrown from VariableBinder. So an error here is generally caused
            // by an internal error, e.g. a scope was created but it bypassed
            // VariableBinder.
            //
            throw Error.UndefinedVariable(variable.Name, variable.Type, CurrentLambdaName);
        }

        #endregion

        // Wires this scope into the scope chain and creates hoisted storage for
        // any variables VariableBinder marked as Hoisted.
        private void SetParent(LambdaCompiler lc, CompilerScope parent)
        {
            Debug.Assert(_parent == null && parent != this);
            _parent = parent;

            if (NeedsClosure && _parent != null)
            {
                _closureHoistedLocals = _parent.NearestHoistedLocals;
            }

            ReadOnlyCollection<ParameterExpression> hoistedVars = GetVariables().Where(p => Definitions[p] == VariableStorageKind.Hoisted).ToReadOnly();

            if (hoistedVars.Count > 0)
            {
                _hoistedLocals = new HoistedLocals(_closureHoistedLocals, hoistedVars);
                AddLocal(lc, _hoistedLocals.SelfVariable);
            }
        }

        // Emits creation of the hoisted local storage
        private void EmitNewHoistedLocals(LambdaCompiler lc)
        {
            if (_hoistedLocals == null)
            {
                return;
            }

            // create the array
            lc.IL.EmitPrimitive(_hoistedLocals.Variables.Count);
            lc.IL.Emit(OpCodes.Newarr, typeof(object));

            // initialize all elements
            int i = 0;
            foreach (ParameterExpression v in _hoistedLocals.Variables)
            {
                // array[i] = new StrongBox<T>(...);
                lc.IL.Emit(OpCodes.Dup);
                lc.IL.EmitPrimitive(i++);
                Type boxType = typeof(StrongBox<>).MakeGenericType(v.Type);

                int index;
                if (IsMethod && (index = lc.Parameters.IndexOf(v)) >= 0)
                {
                    // array[i] = new StrongBox<T>(argument);
                    lc.EmitLambdaArgument(index);
                    lc.IL.Emit(OpCodes.Newobj, boxType.GetConstructor(new Type[] { v.Type }));
                }
                else if (v == _hoistedLocals.ParentVariable)
                {
                    // array[i] = new StrongBox<T>(closure.Locals);
                    ResolveVariable(v, _closureHoistedLocals).EmitLoad();
                    lc.IL.Emit(OpCodes.Newobj, boxType.GetConstructor(new Type[] { v.Type }));
                }
                else
                {
                    // array[i] = new StrongBox<T>();
                    lc.IL.Emit(OpCodes.Newobj, boxType.GetConstructor(Type.EmptyTypes));
                }
                // if we want to cache this into a local, do it now
                if (ShouldCache(v))
                {
                    lc.IL.Emit(OpCodes.Dup);
                    CacheBoxToLocal(lc, v);
                }
                lc.IL.Emit(OpCodes.Stelem_Ref);
            }

            // store it
            EmitSet(_hoistedLocals.SelfVariable);
        }

        // If hoisted variables are referenced "enough", we cache the
        // StrongBox<T> in an IL local, which saves an array index and a cast
        // when we go to look it up later
        private void EmitCachedVariables()
        {
            if (ReferenceCount == null)
            {
                return;
            }

            foreach (KeyValuePair<ParameterExpression, int> refCount in ReferenceCount)
            {
                if (ShouldCache(refCount.Key, refCount.Value))
                {
                    var storage = ResolveVariable(refCount.Key) as ElementBoxStorage;
                    if (storage != null)
                    {
                        storage.EmitLoadBox();
                        CacheBoxToLocal(storage.Compiler, refCount.Key);
                    }
                }
            }
        }

        private bool ShouldCache(ParameterExpression v, int refCount)
        {
            // This caching is too aggressive in the face of conditionals and
            // switch. Also, it is too conservative for variables used inside
            // of loops.
            return refCount > 2 && !_locals.ContainsKey(v);
        }

        private bool ShouldCache(ParameterExpression v)
        {
            if (ReferenceCount == null)
            {
                return false;
            }

            int refCount;
            return ReferenceCount.TryGetValue(v, out refCount) && ShouldCache(v, refCount);
        }

        // Stores the StrongBox currently on the evaluation stack into a fresh
        // IL local and registers that local for subsequent lookups.
        private void CacheBoxToLocal(LambdaCompiler lc, ParameterExpression v)
        {
            Debug.Assert(ShouldCache(v) && !_locals.ContainsKey(v));
            var local = new LocalBoxStorage(lc, v);
            local.EmitStoreBox();
            _locals.Add(v, local);
        }

        // Creates IL locals for accessing closures
        private void EmitClosureAccess(LambdaCompiler lc, HoistedLocals locals)
        {
            if (locals == null)
            {
                return;
            }

            EmitClosureToVariable(lc, locals);

            while ((locals = locals.Parent) != null)
            {
                ParameterExpression v = locals.SelfVariable;
                var local = new LocalStorage(lc, v);
                local.EmitStore(ResolveVariable(v));
                _locals.Add(v, local);
            }
        }

        // Loads the closure's Locals array into a local for this lambda.
        private void EmitClosureToVariable(LambdaCompiler lc, HoistedLocals locals)
        {
            lc.EmitClosureArgument();
            lc.IL.Emit(OpCodes.Ldfld, Closure_Locals);
            AddLocal(lc, locals.SelfVariable);
            EmitSet(locals.SelfVariable);
        }

        // Allocates slots for IL locals or IL arguments
        private void AllocateLocals(LambdaCompiler lc)
        {
            foreach (ParameterExpression v in GetVariables())
            {
                if (Definitions[v] == VariableStorageKind.Local)
                {
                    //
                    // If v is in lc.Parameters, it is a parameter.
                    // Otherwise, it is a local variable.
                    //
                    // Also, for inlined lambdas we'll create a local, which
                    // is possibly a byref local if the parameter is byref.
                    //
                    Storage s;
                    if (IsMethod && lc.Parameters.Contains(v))
                    {
                        s = new ArgumentStorage(lc, v);
                    }
                    else
                    {
                        s = new LocalStorage(lc, v);
                    }
                    _locals.Add(v, s);
                }
            }
        }

        // This scope's own variables, plus those of any merged scopes.
        private IEnumerable<ParameterExpression> GetVariables() =>
            MergedScopes == null ? GetVariables(Node) : GetVariablesIncludingMerged();

        private IEnumerable<ParameterExpression> GetVariablesIncludingMerged()
        {
            foreach (ParameterExpression param in GetVariables(Node))
            {
                yield return param;
            }

            foreach (BlockExpression scope in MergedScopes)
            {
                foreach (ParameterExpression param in scope.Variables)
                {
                    yield return param;
                }
            }
        }

        // Extracts the variable list from any of the three scope node kinds.
        private static IReadOnlyList<ParameterExpression> GetVariables(object scope)
        {
            var lambda = scope as LambdaExpression;
            if (lambda != null)
            {
                return new ParameterList(lambda);
            }
            var block = scope as BlockExpression;
            if (block != null)
            {
                return block.Variables;
            }
            return new[] { ((CatchBlock)scope).Variable };
        }

        // Name of the nearest enclosing lambda, used in error messages.
        private string CurrentLambdaName
        {
            get
            {
                CompilerScope s = this;
                while (s != null)
                {
                    var lambda = s.Node as LambdaExpression;
                    if (lambda != null)
                    {
                        return lambda.Name;
                    }
                    s = s._parent;
                }
                throw ContractUtils.Unreachable;
            }
        }
    }

    /// <summary>
    /// Linear-search helpers over <see cref="IParameterProvider"/> that mirror
    /// List semantics (IndexOf returns -1 when absent).
    /// </summary>
    internal static class ParameterProviderExtensions
    {
        public static int IndexOf(this IParameterProvider provider, ParameterExpression parameter)
        {
            for (int i = 0, n = provider.ParameterCount; i < n; i++)
            {
                if (provider.GetParameter(i) == parameter)
                {
                    return i;
                }
            }

            return -1;
        }

        public static bool Contains(this IParameterProvider provider, ParameterExpression parameter)
        {
            return provider.IndexOf(parameter) >= 0;
        }
    }

    /// <summary>
    /// Read-only list view over an <see cref="IParameterProvider"/>'s
    /// parameters, avoiding a copy of the parameter collection.
    /// </summary>
    internal sealed class ParameterList : IReadOnlyList<ParameterExpression>
    {
        private readonly IParameterProvider _provider;

        public ParameterList(IParameterProvider provider)
        {
            _provider = provider;
        }

        public ParameterExpression this[int index]
        {
            get { return _provider.GetParameter(index); }
        }

        public int Count => _provider.ParameterCount;

        public IEnumerator<ParameterExpression> GetEnumerator()
        {
            for (int i = 0, n = _provider.ParameterCount; i < n; i++)
            {
                yield return _provider.GetParameter(i);
            }
        }

        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;

namespace System.Data.SqlClient.SNI
{
    /// <summary>
    /// SNI MARS connection. Multiple MARS streams will be overlaid on this connection.
    /// All shared mutable state (_sessions, _currentHeader, _currentPacket, byte counters)
    /// is guarded by locking this instance.
    /// </summary>
    internal class SNIMarsConnection
    {
        private readonly Guid _connectionId = Guid.NewGuid();
        private readonly Dictionary<int, SNIMarsHandle> _sessions = new Dictionary<int, SNIMarsHandle>();
        private readonly byte[] _headerBytes = new byte[SNISMUXHeader.HEADER_LENGTH];

        private SNIHandle _lowerHandle;
        private ushort _nextSessionId = 0;
        private int _currentHeaderByteCount = 0;
        private int _dataBytesLeft = 0;
        private SNISMUXHeader _currentHeader;
        private SNIPacket _currentPacket;

        /// <summary>
        /// Connection ID
        /// </summary>
        public Guid ConnectionId
        {
            get
            {
                return _connectionId;
            }
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="lowerHandle">Lower handle</param>
        public SNIMarsConnection(SNIHandle lowerHandle)
        {
            _lowerHandle = lowerHandle;
            _lowerHandle.SetAsyncCallbacks(HandleReceiveComplete, HandleSendComplete);
        }

        /// <summary>
        /// Create a new MARS session multiplexed on this connection
        /// </summary>
        /// <param name="callbackObject">Callback object for the session</param>
        /// <param name="async">true if the session is to operate asynchronously</param>
        /// <returns>The new session handle</returns>
        public SNIMarsHandle CreateSession(object callbackObject, bool async)
        {
            lock (this)
            {
                // Session ids are allocated sequentially; ushort wrap-around is
                // acceptable because stale ids are removed from _sessions on SMUX_FIN.
                ushort sessionId = _nextSessionId++;
                SNIMarsHandle handle = new SNIMarsHandle(this, sessionId, callbackObject, async);
                _sessions.Add(sessionId, handle);
                return handle;
            }
        }

        /// <summary>
        /// Start receiving
        /// </summary>
        /// <returns></returns>
        public uint StartReceive()
        {
            SNIPacket packet = null;

            if (ReceiveAsync(ref packet) == TdsEnums.SNI_SUCCESS_IO_PENDING)
            {
                return TdsEnums.SNI_SUCCESS_IO_PENDING;
            }

            return SNICommon.ReportSNIError(SNIProviders.SMUX_PROV, 0, SNICommon.ConnNotUsableError, string.Empty);
        }

        /// <summary>
        /// Send a packet synchronously
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <returns>SNI error code</returns>
        public uint Send(SNIPacket packet)
        {
            lock (this)
            {
                return _lowerHandle.Send(packet);
            }
        }

        /// <summary>
        /// Send a packet asynchronously
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <param name="callback">Completion callback</param>
        /// <returns>SNI error code</returns>
        public uint SendAsync(SNIPacket packet, SNIAsyncCallback callback)
        {
            lock (this)
            {
                return _lowerHandle.SendAsync(packet, callback);
            }
        }

        /// <summary>
        /// Receive a packet asynchronously
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <returns>SNI error code</returns>
        public uint ReceiveAsync(ref SNIPacket packet)
        {
            lock (this)
            {
                return _lowerHandle.ReceiveAsync(ref packet);
            }
        }

        /// <summary>
        /// Check SNI handle connection
        /// </summary>
        /// <returns>SNI error status</returns>
        public uint CheckConnection()
        {
            lock (this)
            {
                return _lowerHandle.CheckConnection();
            }
        }

        /// <summary>
        /// Process a receive error. Must be invoked while holding the lock on this
        /// instance; the error is fanned out to every live session.
        /// </summary>
        public void HandleReceiveError()
        {
            Debug.Assert(Monitor.IsEntered(this), "HandleReceiveError was called without being locked.");
            foreach (SNIMarsHandle handle in _sessions.Values)
            {
                handle.HandleReceiveError();
            }
        }

        /// <summary>
        /// Process a send completion
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <param name="sniErrorCode">SNI error code</param>
        public void HandleSendComplete(SNIPacket packet, uint sniErrorCode)
        {
            packet.InvokeCompletionCallback(sniErrorCode);
        }

        /// <summary>
        /// Process a receive completion. Demultiplexes SMUX frames from the lower
        /// transport: assembles the 16-byte SMUX header, drains the frame payload
        /// (possibly across multiple receives), then dispatches DATA/ACK/FIN to the
        /// owning session. Session dispatch happens outside the lock using local
        /// snapshots of the header/packet taken under the lock.
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <param name="sniErrorCode">SNI error code</param>
        public void HandleReceiveComplete(SNIPacket packet, uint sniErrorCode)
        {
            SNISMUXHeader currentHeader = null;
            SNIPacket currentPacket = null;
            SNIMarsHandle currentSession = null;

            if (sniErrorCode != TdsEnums.SNI_SUCCESS)
            {
                lock (this)
                {
                    HandleReceiveError();
                    return;
                }
            }

            while (true)
            {
                lock (this)
                {
                    if (_currentHeaderByteCount != SNISMUXHeader.HEADER_LENGTH)
                    {
                        currentHeader = null;
                        currentPacket = null;
                        currentSession = null;

                        // The header may straddle packet boundaries; keep pulling bytes
                        // until the full header has been accumulated.
                        while (_currentHeaderByteCount != SNISMUXHeader.HEADER_LENGTH)
                        {
                            int bytesTaken = packet.TakeData(_headerBytes, _currentHeaderByteCount, SNISMUXHeader.HEADER_LENGTH - _currentHeaderByteCount);
                            _currentHeaderByteCount += bytesTaken;

                            if (bytesTaken == 0)
                            {
                                // Packet exhausted mid-header; request more data.
                                sniErrorCode = ReceiveAsync(ref packet);

                                if (sniErrorCode == TdsEnums.SNI_SUCCESS_IO_PENDING)
                                {
                                    return;
                                }

                                HandleReceiveError();
                                return;
                            }
                        }

                        // Header fields are little-endian in the wire buffer; length
                        // includes the header itself, so subtract it to get the payload size.
                        _currentHeader = new SNISMUXHeader()
                        {
                            SMID = _headerBytes[0],
                            flags = _headerBytes[1],
                            sessionId = BitConverter.ToUInt16(_headerBytes, 2),
                            length = BitConverter.ToUInt32(_headerBytes, 4) - SNISMUXHeader.HEADER_LENGTH,
                            sequenceNumber = BitConverter.ToUInt32(_headerBytes, 8),
                            highwater = BitConverter.ToUInt32(_headerBytes, 12)
                        };

                        _dataBytesLeft = (int)_currentHeader.length;
                        _currentPacket = new SNIPacket(null);
                        _currentPacket.Allocate((int)_currentHeader.length);
                    }

                    // Snapshot the in-progress frame so it can be dispatched outside the lock.
                    currentHeader = _currentHeader;
                    currentPacket = _currentPacket;

                    if (_currentHeader.flags == (byte)SNISMUXFlags.SMUX_DATA)
                    {
                        if (_dataBytesLeft > 0)
                        {
                            int length = packet.TakeData(_currentPacket, _dataBytesLeft);
                            _dataBytesLeft -= length;

                            if (_dataBytesLeft > 0)
                            {
                                // Payload incomplete; request more data and resume later.
                                sniErrorCode = ReceiveAsync(ref packet);

                                if (sniErrorCode == TdsEnums.SNI_SUCCESS_IO_PENDING)
                                {
                                    return;
                                }

                                HandleReceiveError();
                                return;
                            }
                        }
                    }

                    _currentHeaderByteCount = 0;

                    if (!_sessions.ContainsKey(_currentHeader.sessionId))
                    {
                        // Frame addressed to an unknown session: the connection state is
                        // corrupt; tear everything down.
                        SNILoadHandle.SingletonInstance.LastError = new SNIError(SNIProviders.SMUX_PROV, 0, SNICommon.InvalidParameterError, string.Empty);
                        HandleReceiveError();
                        _lowerHandle.Dispose();
                        _lowerHandle = null;
                        return;
                    }

                    if (_currentHeader.flags == (byte)SNISMUXFlags.SMUX_FIN)
                    {
                        _sessions.Remove(_currentHeader.sessionId);
                    }
                    else
                    {
                        currentSession = _sessions[_currentHeader.sessionId];
                    }
                }

                // Dispatch outside the lock. FIX: both flag checks below now use the
                // local snapshot 'currentHeader'; the ACK branch previously read the
                // shared '_currentHeader' field here without holding the lock, which is
                // both inconsistent with the DATA branch and an unsynchronized read.
                if (currentHeader.flags == (byte)SNISMUXFlags.SMUX_DATA)
                {
                    currentSession.HandleReceiveComplete(currentPacket, currentHeader);
                }

                if (currentHeader.flags == (byte)SNISMUXFlags.SMUX_ACK)
                {
                    try
                    {
                        currentSession.HandleAck(currentHeader.highwater);
                    }
                    catch (Exception e)
                    {
                        SNICommon.ReportSNIError(SNIProviders.SMUX_PROV, SNICommon.InternalExceptionError, e);
                    }
                }

                lock (this)
                {
                    if (packet.DataLeft == 0)
                    {
                        // Current packet fully consumed; post the next receive.
                        sniErrorCode = ReceiveAsync(ref packet);

                        if (sniErrorCode == TdsEnums.SNI_SUCCESS_IO_PENDING)
                        {
                            return;
                        }

                        HandleReceiveError();
                        return;
                    }
                }
            }
        }

        /// <summary>
        /// Enable SSL
        /// </summary>
        public uint EnableSsl(uint options)
        {
            return _lowerHandle.EnableSsl(options);
        }

        /// <summary>
        /// Disable SSL
        /// </summary>
        public void DisableSsl()
        {
            _lowerHandle.DisableSsl();
        }

#if DEBUG
        /// <summary>
        /// Test handle for killing underlying connection
        /// </summary>
        public void KillConnection()
        {
            _lowerHandle.KillConnection();
        }
#endif
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace Fabrikam.Module1.Uc1.Services.WebApi.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // DefaultSampleObjectFactory (ObjectGenerator-based) is the only factory by
            // default; callers may insert overrides before it or append fallbacks after it.
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            // NOTE(review): Add throws on duplicate media types here — presumably each
            // direct action sample is registered under a unique media type per direction;
            // verify against the sample registration code.
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Direct action samples (added above) win over generated ones.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters",
            Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // An explicit registration in ActualHttpMessageTypes overrides what the
            // ApiDescription reports (used when actions take/return raw Http messages).
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }
            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns></returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // Sync-over-async is acceptable here: help page generation is an
                    // offline/dev-time operation writing to an in-memory stream.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print known text formats; fall back to raw text otherwise.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Unwraps AggregateException (thrown by the .Wait() above) to surface the
        // underlying formatter exception in the InvalidSample message.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter can serve a sample only if it can read (request) or
        // write (response) the resolved CLR type.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Yields every registered action sample matching the controller/action/direction;
        // a sample keyed with parameter names "*" matches any parameter list.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw string samples are wrapped in TextSample so the view renders them as text.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using Microsoft.AspNetCore.Cryptography.KeyDerivation;
using Microsoft.Extensions.Identity.Core;
using Microsoft.Extensions.Options;

namespace Microsoft.AspNetCore.Identity
{
    /// <summary>
    /// Implements the standard Identity password hashing.
    /// </summary>
    /// <typeparam name="TUser">The type used to represent a user.</typeparam>
    public class PasswordHasher<TUser> : IPasswordHasher<TUser> where TUser : class
    {
        /* =======================
         * HASHED PASSWORD FORMATS
         * =======================
         *
         * Version 2:
         * PBKDF2 with HMAC-SHA1, 128-bit salt, 256-bit subkey, 1000 iterations.
         * (See also: SDL crypto guidelines v5.1, Part III)
         * Format: { 0x00, salt, subkey }
         *
         * Version 3:
         * PBKDF2 with HMAC-SHA256, 128-bit salt, 256-bit subkey, 10000 iterations.
         * Format: { 0x01, prf (UInt32), iter count (UInt32), salt length (UInt32), salt, subkey }
         * (All UInt32s are stored big-endian.)
         */

        private readonly PasswordHasherCompatibilityMode _compatibilityMode;
        private readonly int _iterCount;
        private readonly RandomNumberGenerator _rng;

        /// <summary>
        /// Creates a new instance of <see cref="PasswordHasher{TUser}"/>.
        /// </summary>
        /// <param name="optionsAccessor">The options for this instance.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the compatibility mode is unknown, or when V3 mode is configured
        /// with an iteration count below 1.
        /// </exception>
        public PasswordHasher(IOptions<PasswordHasherOptions> optionsAccessor = null)
        {
            // Fall back to default options when no accessor (or no value) was supplied.
            var options = optionsAccessor?.Value ?? new PasswordHasherOptions();

            _compatibilityMode = options.CompatibilityMode;
            switch (_compatibilityMode)
            {
                case PasswordHasherCompatibilityMode.IdentityV2:
                    // nothing else to do
                    break;

                case PasswordHasherCompatibilityMode.IdentityV3:
                    _iterCount = options.IterationCount;
                    if (_iterCount < 1)
                    {
                        throw new InvalidOperationException(Resources.InvalidPasswordHasherIterationCount);
                    }
                    break;

                default:
                    throw new InvalidOperationException(Resources.InvalidPasswordHasherCompatibilityMode);
            }
            _rng = options.Rng;
        }

#if NETSTANDARD2_0 || NETFRAMEWORK
        // Compares two byte arrays for equality. The method is specifically written so that the loop is not optimized.
        // The &= accumulation (instead of early return) keeps the comparison time
        // independent of where the first mismatch occurs, resisting timing attacks.
        [MethodImpl(MethodImplOptions.NoInlining | MethodImplOptions.NoOptimization)]
        private static bool ByteArraysEqual(byte[] a, byte[] b)
        {
            if (a == null && b == null)
            {
                return true;
            }
            if (a == null || b == null || a.Length != b.Length)
            {
                return false;
            }
            var areSame = true;
            for (var i = 0; i < a.Length; i++)
            {
                areSame &= (a[i] == b[i]);
            }
            return areSame;
        }
#endif

        /// <summary>
        /// Returns a hashed representation of the supplied <paramref name="password"/> for the specified <paramref name="user"/>.
        /// </summary>
        /// <param name="user">The user whose password is to be hashed.</param>
        /// <param name="password">The password to hash.</param>
        /// <returns>A hashed representation of the supplied <paramref name="password"/> for the specified <paramref name="user"/>.</returns>
        public virtual string HashPassword(TUser user, string password)
        {
            if (password == null)
            {
                throw new ArgumentNullException(nameof(password));
            }

            // Select the payload format (see the format comment at the top of the class)
            // based on the configured compatibility mode; the result is Base64-encoded.
            if (_compatibilityMode == PasswordHasherCompatibilityMode.IdentityV2)
            {
                return Convert.ToBase64String(HashPasswordV2(password, _rng));
            }
            else
            {
                return Convert.ToBase64String(HashPasswordV3(password, _rng));
            }
        }

        private static byte[] HashPasswordV2(string password, RandomNumberGenerator rng)
        {
            const KeyDerivationPrf Pbkdf2Prf = KeyDerivationPrf.HMACSHA1; // default for Rfc2898DeriveBytes
            const int Pbkdf2IterCount = 1000; // default for Rfc2898DeriveBytes
            const int Pbkdf2SubkeyLength = 256 / 8; // 256 bits
            const int SaltSize = 128 / 8; // 128 bits

            // Produce a version 2 (see comment above) text hash.
            byte[] salt = new byte[SaltSize];
            rng.GetBytes(salt);
            byte[] subkey = KeyDerivation.Pbkdf2(password, salt, Pbkdf2Prf, Pbkdf2IterCount, Pbkdf2SubkeyLength);

            // Layout: { 0x00 marker, salt, subkey }
            var outputBytes = new byte[1 + SaltSize + Pbkdf2SubkeyLength];
            outputBytes[0] = 0x00; // format marker
            Buffer.BlockCopy(salt, 0, outputBytes, 1, SaltSize);
            Buffer.BlockCopy(subkey, 0, outputBytes, 1 + SaltSize, Pbkdf2SubkeyLength);
            return outputBytes;
        }

        private byte[] HashPasswordV3(string password, RandomNumberGenerator rng)
        {
            return HashPasswordV3(password, rng,
                prf: KeyDerivationPrf.HMACSHA256,
                iterCount: _iterCount,
                saltSize: 128 / 8,
                numBytesRequested: 256 / 8);
        }

        private static byte[] HashPasswordV3(string password, RandomNumberGenerator rng, KeyDerivationPrf prf, int iterCount, int saltSize, int numBytesRequested)
        {
            // Produce a version 3 (see comment above) text hash.
            byte[] salt = new byte[saltSize];
            rng.GetBytes(salt);
            byte[] subkey = KeyDerivation.Pbkdf2(password, salt, prf, iterCount, numBytesRequested);

            // Layout: { 0x01 marker, prf (4), iterCount (4), saltSize (4), salt, subkey };
            // header is 13 bytes, all UInt32s big-endian.
            var outputBytes = new byte[13 + salt.Length + subkey.Length];
            outputBytes[0] = 0x01; // format marker
            WriteNetworkByteOrder(outputBytes, 1, (uint)prf);
            WriteNetworkByteOrder(outputBytes, 5, (uint)iterCount);
            WriteNetworkByteOrder(outputBytes, 9, (uint)saltSize);
            Buffer.BlockCopy(salt, 0, outputBytes, 13, salt.Length);
            Buffer.BlockCopy(subkey, 0, outputBytes, 13 + saltSize, subkey.Length);
            return outputBytes;
        }

        // Reads a big-endian UInt32 from the buffer at the given offset.
        private static uint ReadNetworkByteOrder(byte[] buffer, int offset)
        {
            return ((uint)(buffer[offset + 0]) << 24)
                | ((uint)(buffer[offset + 1]) << 16)
                | ((uint)(buffer[offset + 2]) << 8)
                | ((uint)(buffer[offset + 3]));
        }

        /// <summary>
        /// Returns a <see cref="PasswordVerificationResult"/> indicating the result of a password hash comparison.
        /// </summary>
        /// <param name="user">The user whose password should be verified.</param>
        /// <param name="hashedPassword">The hash value for a user's stored password.</param>
        /// <param name="providedPassword">The password supplied for comparison.</param>
        /// <returns>A <see cref="PasswordVerificationResult"/> indicating the result of a password hash comparison.</returns>
        /// <remarks>Implementations of this method should be time consistent.</remarks>
        public virtual PasswordVerificationResult VerifyHashedPassword(TUser user, string hashedPassword, string providedPassword)
        {
            if (hashedPassword == null)
            {
                throw new ArgumentNullException(nameof(hashedPassword));
            }
            if (providedPassword == null)
            {
                throw new ArgumentNullException(nameof(providedPassword));
            }

            byte[] decodedHashedPassword = Convert.FromBase64String(hashedPassword);

            // read the format marker from the hashed password
            if (decodedHashedPassword.Length == 0)
            {
                return PasswordVerificationResult.Failed;
            }
            switch (decodedHashedPassword[0])
            {
                case 0x00:
                    if (VerifyHashedPasswordV2(decodedHashedPassword, providedPassword))
                    {
                        // This is an old password hash format - the caller needs to rehash if we're not running in an older compat mode.
                        return (_compatibilityMode == PasswordHasherCompatibilityMode.IdentityV3)
                            ? PasswordVerificationResult.SuccessRehashNeeded
                            : PasswordVerificationResult.Success;
                    }
                    else
                    {
                        return PasswordVerificationResult.Failed;
                    }

                case 0x01:
                    int embeddedIterCount;
                    if (VerifyHashedPasswordV3(decodedHashedPassword, providedPassword, out embeddedIterCount))
                    {
                        // If this hasher was configured with a higher iteration count, change the entry now.
                        return (embeddedIterCount < _iterCount)
                            ? PasswordVerificationResult.SuccessRehashNeeded
                            : PasswordVerificationResult.Success;
                    }
                    else
                    {
                        return PasswordVerificationResult.Failed;
                    }

                default:
                    return PasswordVerificationResult.Failed; // unknown format marker
            }
        }

        private static bool VerifyHashedPasswordV2(byte[] hashedPassword, string password)
        {
            const KeyDerivationPrf Pbkdf2Prf = KeyDerivationPrf.HMACSHA1; // default for Rfc2898DeriveBytes
            const int Pbkdf2IterCount = 1000; // default for Rfc2898DeriveBytes
            const int Pbkdf2SubkeyLength = 256 / 8; // 256 bits
            const int SaltSize = 128 / 8; // 128 bits

            // We know ahead of time the exact length of a valid hashed password payload.
            if (hashedPassword.Length != 1 + SaltSize + Pbkdf2SubkeyLength)
            {
                return false; // bad size
            }

            byte[] salt = new byte[SaltSize];
            Buffer.BlockCopy(hashedPassword, 1, salt, 0, salt.Length);

            byte[] expectedSubkey = new byte[Pbkdf2SubkeyLength];
            Buffer.BlockCopy(hashedPassword, 1 + salt.Length, expectedSubkey, 0, expectedSubkey.Length);

            // Hash the incoming password and verify it
            byte[] actualSubkey = KeyDerivation.Pbkdf2(password, salt, Pbkdf2Prf, Pbkdf2IterCount, Pbkdf2SubkeyLength);
#if NETSTANDARD2_0 || NETFRAMEWORK
            return ByteArraysEqual(actualSubkey, expectedSubkey);
#elif NETCOREAPP
            // Framework-provided constant-time comparison on newer targets.
            return CryptographicOperations.FixedTimeEquals(actualSubkey, expectedSubkey);
#else
#error Update target frameworks
#endif
        }

        private static bool VerifyHashedPasswordV3(byte[] hashedPassword, string password, out int iterCount)
        {
            iterCount = default(int);

            try
            {
                // Read header information
                KeyDerivationPrf prf = (KeyDerivationPrf)ReadNetworkByteOrder(hashedPassword, 1);
                iterCount = (int)ReadNetworkByteOrder(hashedPassword, 5);
                int saltLength = (int)ReadNetworkByteOrder(hashedPassword, 9);

                // Read the salt: must be >= 128 bits
                if (saltLength < 128 / 8)
                {
                    return false;
                }
                byte[] salt = new byte[saltLength];
                Buffer.BlockCopy(hashedPassword, 13, salt, 0, salt.Length);

                // Read the subkey (the rest of the payload): must be >= 128 bits
                int subkeyLength = hashedPassword.Length - 13 - salt.Length;
                if (subkeyLength < 128 / 8)
                {
                    return false;
                }
                byte[] expectedSubkey = new byte[subkeyLength];
                Buffer.BlockCopy(hashedPassword, 13 + salt.Length, expectedSubkey, 0, expectedSubkey.Length);

                // Hash the incoming password and verify it
                byte[] actualSubkey = KeyDerivation.Pbkdf2(password, salt, prf, iterCount, subkeyLength);
#if NETSTANDARD2_0 || NETFRAMEWORK
                return ByteArraysEqual(actualSubkey, expectedSubkey);
#elif NETCOREAPP
                return CryptographicOperations.FixedTimeEquals(actualSubkey, expectedSubkey);
#else
#error Update target frameworks
#endif
            }
            catch
            {
                // This should never occur except in the case of a malformed payload, where
                // we might go off the end of the array. Regardless, a malformed payload
                // implies verification failed.
                return false;
            }
        }

        // Writes a UInt32 into the buffer in big-endian (network) byte order.
        private static void WriteNetworkByteOrder(byte[] buffer, int offset, uint value)
        {
            buffer[offset + 0] = (byte)(value >> 24);
            buffer[offset + 1] = (byte)(value >> 16);
            buffer[offset + 2] = (byte)(value >> 8);
            buffer[offset + 3] = (byte)(value >> 0);
        }
    }
}
// Copyright 2021 Esri.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
// language governing permissions and limitations under the License.

using Esri.ArcGISRuntime;
using Esri.ArcGISRuntime.Geometry;
using Esri.ArcGISRuntime.Mapping;
using Esri.ArcGISRuntime.UI.Controls;
using Esri.ArcGISRuntime.UI.GeoAnalysis;
using Foundation;
using System;
using System.Diagnostics;
using UIKit;

namespace ArcGISRuntime.Samples.DistanceMeasurement
{
    [Register("DistanceMeasurement")]
    [ArcGISRuntime.Samples.Shared.Attributes.Sample(
        name: "Distance measurement analysis",
        category: "Analysis",
        description: "Measure distances between two points in 3D.",
        instructions: "Choose a unit system for the measurement. Tap any location in the scene to start measuring. Move the mouse to an end location, and tap to complete the measurement. Tap a new location to clear and start a new measurement.",
        tags: new[] { "3D", "analysis", "distance", "measure" })]
    public class DistanceMeasurement : UIViewController
    {
        // Hold references to UI controls.
        private SceneView _mySceneView;
        private UILabel _resultLabel;
        private UIBarButtonItem _helpButton;
        private UIBarButtonItem _changeUnitsButton;

        // URLs to various services used to provide an interesting scene for the sample.
        private readonly Uri _buildingService = new Uri("https://tiles.arcgis.com/tiles/P3ePLMYs2RVChkJx/arcgis/rest/services/Buildings_Brest/SceneServer/layers/0");
        private readonly Uri _worldElevationService = new Uri("https://elevation3d.arcgis.com/arcgis/rest/services/WorldElevation3D/Terrain3D/ImageServer");

        // Reference to the measurement.
        private LocationDistanceMeasurement _distanceMeasurement;

        public DistanceMeasurement()
        {
            Title = "Distance measurement analysis";
        }

        /// <summary>
        /// Builds the scene, adds the building layer, creates the initial distance
        /// measurement analysis, and positions the camera.
        /// </summary>
        private void Initialize()
        {
            // Create a scene with elevation.
            Surface sceneSurface = new Surface();
            sceneSurface.ElevationSources.Add(new ArcGISTiledElevationSource(_worldElevationService));
            Scene myScene = new Scene(Basemap.CreateTopographic())
            {
                BaseSurface = sceneSurface
            };

            // Create and add a building layer.
            ArcGISSceneLayer buildingsLayer = new ArcGISSceneLayer(_buildingService);
            myScene.OperationalLayers.Add(buildingsLayer);

            // Create and add an analysis overlay.
            AnalysisOverlay measureAnalysisOverlay = new AnalysisOverlay();
            _mySceneView.AnalysisOverlays.Add(measureAnalysisOverlay);

            // Create an initial distance measurement and show it.
            MapPoint start = new MapPoint(-4.494677, 48.384472, 24.772694, SpatialReferences.Wgs84);
            MapPoint end = new MapPoint(-4.495646, 48.384377, 58.501115, SpatialReferences.Wgs84);
            _distanceMeasurement = new LocationDistanceMeasurement(start, end);
            measureAnalysisOverlay.Analyses.Add(_distanceMeasurement);

            // NOTE: MeasurementChanged is subscribed in ViewWillAppear (and removed in
            // ViewDidDisappear). Subscribing here as well caused the handler to be attached
            // twice on first appearance and to accumulate one extra subscription per
            // appear/disappear cycle, because ViewDidDisappear only removes one handler.

            // Show the scene in the view.
            _mySceneView.Scene = myScene;
            _mySceneView.SetViewpointCamera(new Camera(start, 200, 45, 45, 0));
        }

        /// <summary>
        /// Updates the result label with the latest direct/vertical/horizontal distances.
        /// </summary>
        private void MeasurementChanged(object sender, EventArgs e)
        {
            // This is needed because measurement change events occur on a non-UI thread and this code accesses UI object.
            BeginInvokeOnMainThread(() =>
            {
                // Update the labels with new values in the format {value} {unit system}.
                string direct = $"{_distanceMeasurement.DirectDistance.Value:F} {_distanceMeasurement.DirectDistance.Unit.Abbreviation}";
                string vertical = $"{_distanceMeasurement.VerticalDistance.Value:F} {_distanceMeasurement.VerticalDistance.Unit.Abbreviation}";
                string horizontal = $"{_distanceMeasurement.HorizontalDistance.Value:F} {_distanceMeasurement.HorizontalDistance.Unit.Abbreviation}";
                _resultLabel.Text = $"Direct: {direct}, V: {vertical}, H: {horizontal}";
            });
        }

        /// <summary>
        /// Moves the measurement's end point to the tapped scene location.
        /// </summary>
        private async void MySceneView_GeoViewTapped(object sender, GeoViewInputEventArgs e)
        {
            try
            {
                // Get the geographic location for the current mouse position.
                MapPoint geoPoint = await _mySceneView.ScreenToLocationAsync(e.Position);

                if (geoPoint == null) return;

                // Update the location distance measurement.
                _distanceMeasurement.EndLocation = geoPoint;
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.ToString());
            }
        }

        /// <summary>
        /// Presents an action sheet listing the available unit systems.
        /// </summary>
        private void UnitChangeButton_TouchUpInside(object sender, EventArgs e)
        {
            // Create the view controller that will present the list of unit systems.
            UIAlertController unitSystemSelectionAlert = UIAlertController.Create(null, "Change unit system", UIAlertControllerStyle.ActionSheet);

            // Needed to prevent a crash on iPad.
            UIPopoverPresentationController presentationPopover = unitSystemSelectionAlert.PopoverPresentationController;
            if (presentationPopover != null)
            {
                presentationPopover.BarButtonItem = (UIBarButtonItem)sender;
                presentationPopover.PermittedArrowDirections = UIPopoverArrowDirection.Down;
            }

            // Show an option for each unit system.
            foreach (UnitSystem system in Enum.GetValues(typeof(UnitSystem)))
            {
                // Upon selecting a unit system, update the distance measure.
                unitSystemSelectionAlert.AddAction(UIAlertAction.Create(system.ToString(), UIAlertActionStyle.Default, action => _distanceMeasurement.UnitSystem = system));
            }

            // Show the alert.
            PresentViewController(unitSystemSelectionAlert, true, null);
        }

        /// <summary>
        /// Shows a short help alert explaining how to use the sample.
        /// </summary>
        private void ShowHelp_Click(object sender, EventArgs e)
        {
            // Prompt for the type of convex hull to create.
            UIAlertController unionAlert = UIAlertController.Create("Tap to update", "Tap in the scene to set a new end point for the distance measurement.", UIAlertControllerStyle.Alert);
            unionAlert.AddAction(UIAlertAction.Create("Ok", UIAlertActionStyle.Default, null));

            // Show the alert.
            PresentViewController(unionAlert, true, null);
        }

        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            Initialize();
        }

        public override void LoadView()
        {
            // Create and configure the views.
            View = new UIView { BackgroundColor = ApplicationTheme.BackgroundColor };

            _mySceneView = new SceneView();
            _mySceneView.TranslatesAutoresizingMaskIntoConstraints = false;

            UIToolbar toolbar = new UIToolbar();
            toolbar.TranslatesAutoresizingMaskIntoConstraints = false;

            _resultLabel = new UILabel
            {
                Text = "Tap to measure distance.",
                BackgroundColor = UIColor.FromWhiteAlpha(0f, .6f),
                TextColor = UIColor.White,
                TextAlignment = UITextAlignment.Center,
                TranslatesAutoresizingMaskIntoConstraints = false
            };

            _helpButton = new UIBarButtonItem();
            _helpButton.Title = "Help";

            _changeUnitsButton = new UIBarButtonItem();
            _changeUnitsButton.Title = "Change units";

            toolbar.Items = new[]
            {
                _changeUnitsButton,
                new UIBarButtonItem(UIBarButtonSystemItem.FlexibleSpace),
                _helpButton
            };

            // Add the views.
            View.AddSubviews(_mySceneView, toolbar, _resultLabel);

            // Lay out the views.
            NSLayoutConstraint.ActivateConstraints(new[]
            {
                _mySceneView.TopAnchor.ConstraintEqualTo(View.SafeAreaLayoutGuide.TopAnchor),
                _mySceneView.LeadingAnchor.ConstraintEqualTo(View.LeadingAnchor),
                _mySceneView.TrailingAnchor.ConstraintEqualTo(View.TrailingAnchor),
                _mySceneView.BottomAnchor.ConstraintEqualTo(toolbar.TopAnchor),

                toolbar.BottomAnchor.ConstraintEqualTo(View.SafeAreaLayoutGuide.BottomAnchor),
                toolbar.LeadingAnchor.ConstraintEqualTo(View.LeadingAnchor),
                toolbar.TrailingAnchor.ConstraintEqualTo(View.TrailingAnchor),

                _resultLabel.TopAnchor.ConstraintEqualTo(View.SafeAreaLayoutGuide.TopAnchor),
                _resultLabel.TrailingAnchor.ConstraintEqualTo(View.TrailingAnchor),
                _resultLabel.LeadingAnchor.ConstraintEqualTo(View.LeadingAnchor),
                _resultLabel.HeightAnchor.ConstraintEqualTo(40)
            });
        }

        public override void ViewWillAppear(bool animated)
        {
            base.ViewWillAppear(animated);

            // Subscribe to events.
            _mySceneView.GeoViewTapped += MySceneView_GeoViewTapped;
            _helpButton.Clicked += ShowHelp_Click;
            _changeUnitsButton.Clicked += UnitChangeButton_TouchUpInside;
            if (_distanceMeasurement != null) _distanceMeasurement.MeasurementChanged += MeasurementChanged;
        }

        public override void ViewDidDisappear(bool animated)
        {
            base.ViewDidDisappear(animated);

            // Unsubscribe from events, per best practice.
            _mySceneView.GeoViewTapped -= MySceneView_GeoViewTapped;
            _helpButton.Clicked -= ShowHelp_Click;
            _changeUnitsButton.Clicked -= UnitChangeButton_TouchUpInside;
            if (_distanceMeasurement != null) _distanceMeasurement.MeasurementChanged -= MeasurementChanged;
        }
    }
}
/******************************************************** * ADO.NET 2.0 Data Provider for SQLite Version 3.X * Written by Robert Simpson (robert@blackcastlesoft.com) * * Released to the public domain, use at your own risk! ********************************************************/ namespace Mono.Data.Sqlite { using System; using System.Data; using System.Runtime.InteropServices; using System.Collections.Generic; using System.Globalization; /// <summary> /// This class implements SQLiteBase completely, and is the guts of the code that interop's SQLite with .NET /// </summary> internal class SQLite3 : SQLiteBase { /// <summary> /// The opaque pointer returned to us by the sqlite provider /// </summary> protected SqliteConnectionHandle _sql; protected string _fileName; protected bool _usePool; protected int _poolVersion = 0; #if !PLATFORM_COMPACTFRAMEWORK private bool _buildingSchema = false; #endif #if MONOTOUCH GCHandle gch; #endif /// <summary> /// The user-defined functions registered on this connection /// </summary> protected SqliteFunction[] _functionsArray; internal SQLite3(SQLiteDateFormats fmt) : base(fmt) { #if MONOTOUCH gch = GCHandle.Alloc (this); #endif } protected override void Dispose(bool bDisposing) { if (bDisposing) Close(); } // It isn't necessary to cleanup any functions we've registered. If the connection // goes to the pool and is resurrected later, re-registered functions will overwrite the // previous functions. The SqliteFunctionCookieHandle will take care of freeing unmanaged // resources belonging to the previously-registered functions. 
internal override void Close() { if (_sql != null) { if (_usePool) { SQLiteBase.ResetConnection(_sql); SqliteConnectionPool.Add(_fileName, _sql, _poolVersion); } else _sql.Dispose(); } _sql = null; #if MONOTOUCH if (gch.IsAllocated) gch.Free (); #endif } internal override void Cancel() { UnsafeNativeMethods.sqlite3_interrupt(_sql); } internal override string Version { get { return SQLite3.SQLiteVersion; } } internal static string SQLiteVersion { get { return UTF8ToString(UnsafeNativeMethods.sqlite3_libversion(), -1); } } internal override int Changes { get { return UnsafeNativeMethods.sqlite3_changes(_sql); } } internal override void Open(string strFilename, SQLiteOpenFlagsEnum flags, int maxPoolSize, bool usePool) { if (_sql != null) return; _usePool = usePool; if (usePool) { _fileName = strFilename; _sql = SqliteConnectionPool.Remove(strFilename, maxPoolSize, out _poolVersion); } if (_sql == null) { IntPtr db; #if !SQLITE_STANDARD int n = UnsafeNativeMethods.sqlite3_open_interop(ToUTF8(strFilename), (int)flags, out db); #else // Compatibility with versions < 3.5.0 int n; try { n = UnsafeNativeMethods.sqlite3_open_v2(ToUTF8(strFilename), out db, (int)flags, IntPtr.Zero); } catch (EntryPointNotFoundException) { Console.WriteLine ("Your sqlite3 version is old - please upgrade to at least v3.5.0!"); n = UnsafeNativeMethods.sqlite3_open (ToUTF8 (strFilename), out db); } #endif if (n > 0) throw new SqliteException(n, null); _sql = db; } // Bind functions to this connection. If any previous functions of the same name // were already bound, then the new bindings replace the old. 
_functionsArray = SqliteFunction.BindFunctions(this); SetTimeout(0); } internal override void ClearPool() { SqliteConnectionPool.ClearPool(_fileName); } internal override void SetTimeout(int nTimeoutMS) { int n = UnsafeNativeMethods.sqlite3_busy_timeout(_sql, nTimeoutMS); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override bool Step(SqliteStatement stmt) { int n; Random rnd = null; uint starttick = (uint)Environment.TickCount; uint timeout = (uint)(stmt._command._commandTimeout * 1000); while (true) { n = UnsafeNativeMethods.sqlite3_step(stmt._sqlite_stmt); if (n == 100) return true; if (n == 101) return false; if (n > 0) { int r; // An error occurred, attempt to reset the statement. If the reset worked because the // schema has changed, re-try the step again. If it errored our because the database // is locked, then keep retrying until the command timeout occurs. r = Reset(stmt); if (r == 0) throw new SqliteException(n, SQLiteLastError()); else if ((r == 6 || r == 5) && stmt._command != null) // SQLITE_LOCKED || SQLITE_BUSY { // Keep trying if (rnd == null) // First time we've encountered the lock rnd = new Random(); // If we've exceeded the command's timeout, give up and throw an error if ((uint)Environment.TickCount - starttick > timeout) { throw new SqliteException(r, SQLiteLastError()); } else { // Otherwise sleep for a random amount of time up to 150ms System.Threading.Thread.CurrentThread.Join(rnd.Next(1, 150)); } } } } } internal override int Reset(SqliteStatement stmt) { int n; #if !SQLITE_STANDARD n = UnsafeNativeMethods.sqlite3_reset_interop(stmt._sqlite_stmt); #else n = UnsafeNativeMethods.sqlite3_reset(stmt._sqlite_stmt); #endif // If the schema changed, try and re-prepare it if (n == 17) // SQLITE_SCHEMA { // Recreate a dummy statement string str; using (SqliteStatement tmp = Prepare(null, stmt._sqlStatement, null, (uint)(stmt._command._commandTimeout * 1000), out str)) { // Finalize the existing statement 
stmt._sqlite_stmt.Dispose(); // Reassign a new statement pointer to the old statement and clear the temporary one stmt._sqlite_stmt = tmp._sqlite_stmt; tmp._sqlite_stmt = null; // Reapply parameters stmt.BindParameters(); } return -1; // Reset was OK, with schema change } else if (n == 6 || n == 5) // SQLITE_LOCKED || SQLITE_BUSY return n; if (n > 0) throw new SqliteException(n, SQLiteLastError()); return 0; // We reset OK, no schema changes } internal override string SQLiteLastError() { return SQLiteBase.SQLiteLastError(_sql); } internal override SqliteStatement Prepare(SqliteConnection cnn, string strSql, SqliteStatement previous, uint timeoutMS, out string strRemain) { IntPtr stmt = IntPtr.Zero; IntPtr ptr = IntPtr.Zero; int len = 0; int n = 17; int retries = 0; byte[] b = ToUTF8(strSql); string typedefs = null; SqliteStatement cmd = null; Random rnd = null; uint starttick = (uint)Environment.TickCount; GCHandle handle = GCHandle.Alloc(b, GCHandleType.Pinned); IntPtr psql = handle.AddrOfPinnedObject(); try { while ((n == 17 || n == 6 || n == 5) && retries < 3) { #if !SQLITE_STANDARD n = UnsafeNativeMethods.sqlite3_prepare_interop(_sql, psql, b.Length - 1, out stmt, out ptr, out len); #else n = UnsafeNativeMethods.sqlite3_prepare(_sql, psql, b.Length - 1, out stmt, out ptr); len = -1; #endif if (n == 17) retries++; else if (n == 1) { if (String.Compare(SQLiteLastError(), "near \"TYPES\": syntax error", StringComparison.OrdinalIgnoreCase) == 0) { int pos = strSql.IndexOf(';'); if (pos == -1) pos = strSql.Length - 1; typedefs = strSql.Substring(0, pos + 1); strSql = strSql.Substring(pos + 1); strRemain = ""; while (cmd == null && strSql.Length > 0) { cmd = Prepare(cnn, strSql, previous, timeoutMS, out strRemain); strSql = strRemain; } if (cmd != null) cmd.SetTypes(typedefs); return cmd; } #if !PLATFORM_COMPACTFRAMEWORK else if (_buildingSchema == false && String.Compare(SQLiteLastError(), 0, "no such table: TEMP.SCHEMA", 0, 26, StringComparison.OrdinalIgnoreCase) 
== 0) { strRemain = ""; _buildingSchema = true; try { ISQLiteSchemaExtensions ext = ((IServiceProvider)SqliteFactory.Instance).GetService(typeof(ISQLiteSchemaExtensions)) as ISQLiteSchemaExtensions; if (ext != null) ext.BuildTempSchema(cnn); while (cmd == null && strSql.Length > 0) { cmd = Prepare(cnn, strSql, previous, timeoutMS, out strRemain); strSql = strRemain; } return cmd; } finally { _buildingSchema = false; } } #endif } else if (n == 6 || n == 5) // Locked -- delay a small amount before retrying { // Keep trying if (rnd == null) // First time we've encountered the lock rnd = new Random(); // If we've exceeded the command's timeout, give up and throw an error if ((uint)Environment.TickCount - starttick > timeoutMS) { throw new SqliteException(n, SQLiteLastError()); } else { // Otherwise sleep for a random amount of time up to 150ms System.Threading.Thread.CurrentThread.Join(rnd.Next(1, 150)); } } } if (n > 0) throw new SqliteException(n, SQLiteLastError()); strRemain = UTF8ToString(ptr, len); if (stmt != IntPtr.Zero) cmd = new SqliteStatement(this, stmt, strSql.Substring(0, strSql.Length - strRemain.Length), previous); return cmd; } finally { handle.Free(); } } internal override void Bind_Double(SqliteStatement stmt, int index, double value) { #if !PLATFORM_COMPACTFRAMEWORK int n = UnsafeNativeMethods.sqlite3_bind_double(stmt._sqlite_stmt, index, value); #else int n = UnsafeNativeMethods.sqlite3_bind_double_interop(stmt._sqlite_stmt, index, ref value); #endif if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void Bind_Int32(SqliteStatement stmt, int index, int value) { int n = UnsafeNativeMethods.sqlite3_bind_int(stmt._sqlite_stmt, index, value); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void Bind_Int64(SqliteStatement stmt, int index, long value) { #if !PLATFORM_COMPACTFRAMEWORK int n = UnsafeNativeMethods.sqlite3_bind_int64(stmt._sqlite_stmt, index, value); #else int n = 
UnsafeNativeMethods.sqlite3_bind_int64_interop(stmt._sqlite_stmt, index, ref value); #endif if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void Bind_Text(SqliteStatement stmt, int index, string value) { byte[] b = ToUTF8(value); int n = UnsafeNativeMethods.sqlite3_bind_text(stmt._sqlite_stmt, index, b, b.Length - 1, (IntPtr)(-1)); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void Bind_DateTime(SqliteStatement stmt, int index, DateTime dt) { byte[] b = ToUTF8(dt); int n = UnsafeNativeMethods.sqlite3_bind_text(stmt._sqlite_stmt, index, b, b.Length - 1, (IntPtr)(-1)); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void Bind_Blob(SqliteStatement stmt, int index, byte[] blobData) { int n = UnsafeNativeMethods.sqlite3_bind_blob(stmt._sqlite_stmt, index, blobData, blobData.Length, (IntPtr)(-1)); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void Bind_Null(SqliteStatement stmt, int index) { int n = UnsafeNativeMethods.sqlite3_bind_null(stmt._sqlite_stmt, index); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override int Bind_ParamCount(SqliteStatement stmt) { return UnsafeNativeMethods.sqlite3_bind_parameter_count(stmt._sqlite_stmt); } internal override string Bind_ParamName(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return UTF8ToString(UnsafeNativeMethods.sqlite3_bind_parameter_name_interop(stmt._sqlite_stmt, index, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_bind_parameter_name(stmt._sqlite_stmt, index), -1); #endif } internal override int Bind_ParamIndex(SqliteStatement stmt, string paramName) { return UnsafeNativeMethods.sqlite3_bind_parameter_index(stmt._sqlite_stmt, ToUTF8(paramName)); } internal override int ColumnCount(SqliteStatement stmt) { return UnsafeNativeMethods.sqlite3_column_count(stmt._sqlite_stmt); } internal override string 
ColumnName(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return UTF8ToString(UnsafeNativeMethods.sqlite3_column_name_interop(stmt._sqlite_stmt, index, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_column_name(stmt._sqlite_stmt, index), -1); #endif } internal override TypeAffinity ColumnAffinity(SqliteStatement stmt, int index) { return UnsafeNativeMethods.sqlite3_column_type(stmt._sqlite_stmt, index); } internal override string ColumnType(SqliteStatement stmt, int index, out TypeAffinity nAffinity) { int len; #if !SQLITE_STANDARD IntPtr p = UnsafeNativeMethods.sqlite3_column_decltype_interop(stmt._sqlite_stmt, index, out len); #else len = -1; IntPtr p = UnsafeNativeMethods.sqlite3_column_decltype(stmt._sqlite_stmt, index); #endif nAffinity = ColumnAffinity(stmt, index); if (p != IntPtr.Zero) return UTF8ToString(p, len); else { string[] ar = stmt.TypeDefinitions; if (ar != null) { if (index < ar.Length && ar[index] != null) return ar[index]; } return String.Empty; //switch (nAffinity) //{ // case TypeAffinity.Int64: // return "BIGINT"; // case TypeAffinity.Double: // return "DOUBLE"; // case TypeAffinity.Blob: // return "BLOB"; // default: // return "TEXT"; //} } } internal override int ColumnIndex(SqliteStatement stmt, string columnName) { int x = ColumnCount(stmt); for (int n = 0; n < x; n++) { if (String.Compare(columnName, ColumnName(stmt, n), true, CultureInfo.InvariantCulture) == 0) return n; } return -1; } internal override string ColumnOriginalName(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return UTF8ToString(UnsafeNativeMethods.sqlite3_column_origin_name_interop(stmt._sqlite_stmt, index, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_column_origin_name(stmt._sqlite_stmt, index), -1); #endif } internal override string ColumnDatabaseName(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return 
UTF8ToString(UnsafeNativeMethods.sqlite3_column_database_name_interop(stmt._sqlite_stmt, index, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_column_database_name(stmt._sqlite_stmt, index), -1); #endif } internal override string ColumnTableName(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return UTF8ToString(UnsafeNativeMethods.sqlite3_column_table_name_interop(stmt._sqlite_stmt, index, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_column_table_name(stmt._sqlite_stmt, index), -1); #endif } internal override void ColumnMetaData(string dataBase, string table, string column, out string dataType, out string collateSequence, out bool notNull, out bool primaryKey, out bool autoIncrement) { IntPtr dataTypePtr; IntPtr collSeqPtr; int nnotNull; int nprimaryKey; int nautoInc; int n; int dtLen; int csLen; #if !SQLITE_STANDARD n = UnsafeNativeMethods.sqlite3_table_column_metadata_interop(_sql, ToUTF8(dataBase), ToUTF8(table), ToUTF8(column), out dataTypePtr, out collSeqPtr, out nnotNull, out nprimaryKey, out nautoInc, out dtLen, out csLen); #else dtLen = -1; csLen = -1; n = UnsafeNativeMethods.sqlite3_table_column_metadata(_sql, ToUTF8(dataBase), ToUTF8(table), ToUTF8(column), out dataTypePtr, out collSeqPtr, out nnotNull, out nprimaryKey, out nautoInc); #endif if (n > 0) throw new SqliteException(n, SQLiteLastError()); dataType = UTF8ToString(dataTypePtr, dtLen); collateSequence = UTF8ToString(collSeqPtr, csLen); notNull = (nnotNull == 1); primaryKey = (nprimaryKey == 1); autoIncrement = (nautoInc == 1); } internal override double GetDouble(SqliteStatement stmt, int index) { double value; #if !PLATFORM_COMPACTFRAMEWORK value = UnsafeNativeMethods.sqlite3_column_double(stmt._sqlite_stmt, index); #else UnsafeNativeMethods.sqlite3_column_double_interop(stmt._sqlite_stmt, index, out value); #endif return value; } internal override int GetInt32(SqliteStatement stmt, int index) { return 
UnsafeNativeMethods.sqlite3_column_int(stmt._sqlite_stmt, index); } internal override long GetInt64(SqliteStatement stmt, int index) { long value; #if !PLATFORM_COMPACTFRAMEWORK value = UnsafeNativeMethods.sqlite3_column_int64(stmt._sqlite_stmt, index); #else UnsafeNativeMethods.sqlite3_column_int64_interop(stmt._sqlite_stmt, index, out value); #endif return value; } internal override string GetText(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return UTF8ToString(UnsafeNativeMethods.sqlite3_column_text_interop(stmt._sqlite_stmt, index, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_column_text(stmt._sqlite_stmt, index), -1); #endif } internal override DateTime GetDateTime(SqliteStatement stmt, int index) { #if !SQLITE_STANDARD int len; return ToDateTime(UnsafeNativeMethods.sqlite3_column_text_interop(stmt._sqlite_stmt, index, out len), len); #else return ToDateTime(UnsafeNativeMethods.sqlite3_column_text(stmt._sqlite_stmt, index), -1); #endif } internal override long GetBytes(SqliteStatement stmt, int index, int nDataOffset, byte[] bDest, int nStart, int nLength) { IntPtr ptr; int nlen; int nCopied = nLength; nlen = UnsafeNativeMethods.sqlite3_column_bytes(stmt._sqlite_stmt, index); ptr = UnsafeNativeMethods.sqlite3_column_blob(stmt._sqlite_stmt, index); if (bDest == null) return nlen; if (nCopied + nStart > bDest.Length) nCopied = bDest.Length - nStart; if (nCopied + nDataOffset > nlen) nCopied = nlen - nDataOffset; unsafe { if (nCopied > 0) Marshal.Copy((IntPtr)((byte*)ptr + nDataOffset), bDest, nStart, nCopied); else nCopied = 0; } return nCopied; } internal override long GetChars(SqliteStatement stmt, int index, int nDataOffset, char[] bDest, int nStart, int nLength) { int nlen; int nCopied = nLength; string str = GetText(stmt, index); nlen = str.Length; if (bDest == null) return nlen; if (nCopied + nStart > bDest.Length) nCopied = bDest.Length - nStart; if (nCopied + nDataOffset > nlen) nCopied = nlen - nDataOffset; 
if (nCopied > 0) str.CopyTo(nDataOffset, bDest, nStart, nCopied); else nCopied = 0; return nCopied; } internal override bool IsNull(SqliteStatement stmt, int index) { return (ColumnAffinity(stmt, index) == TypeAffinity.Null); } internal override int AggregateCount(IntPtr context) { return UnsafeNativeMethods.sqlite3_aggregate_count(context); } internal override void CreateFunction(string strFunction, int nArgs, bool needCollSeq, SQLiteCallback func, SQLiteCallback funcstep, SQLiteFinalCallback funcfinal) { int n; #if !SQLITE_STANDARD n = UnsafeNativeMethods.sqlite3_create_function_interop(_sql, ToUTF8(strFunction), nArgs, 4, IntPtr.Zero, func, funcstep, funcfinal, (needCollSeq == true) ? 1 : 0); if (n == 0) n = UnsafeNativeMethods.sqlite3_create_function_interop(_sql, ToUTF8(strFunction), nArgs, 1, IntPtr.Zero, func, funcstep, funcfinal, (needCollSeq == true) ? 1 : 0); #else n = UnsafeNativeMethods.sqlite3_create_function(_sql, ToUTF8(strFunction), nArgs, 4, IntPtr.Zero, func, funcstep, funcfinal); if (n == 0) n = UnsafeNativeMethods.sqlite3_create_function(_sql, ToUTF8(strFunction), nArgs, 1, IntPtr.Zero, func, funcstep, funcfinal); #endif if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void CreateCollation(string strCollation, SQLiteCollation func, SQLiteCollation func16) { int n = UnsafeNativeMethods.sqlite3_create_collation(_sql, ToUTF8(strCollation), 2, IntPtr.Zero, func16); if (n == 0) UnsafeNativeMethods.sqlite3_create_collation(_sql, ToUTF8(strCollation), 1, IntPtr.Zero, func); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override int ContextCollateCompare(CollationEncodingEnum enc, IntPtr context, string s1, string s2) { #if !SQLITE_STANDARD byte[] b1; byte[] b2; System.Text.Encoding converter = null; switch (enc) { case CollationEncodingEnum.UTF8: converter = System.Text.Encoding.UTF8; break; case CollationEncodingEnum.UTF16LE: converter = System.Text.Encoding.Unicode; break; case 
CollationEncodingEnum.UTF16BE: converter = System.Text.Encoding.BigEndianUnicode; break; } b1 = converter.GetBytes(s1); b2 = converter.GetBytes(s2); return UnsafeNativeMethods.sqlite3_context_collcompare(context, b1, b1.Length, b2, b2.Length); #else throw new NotImplementedException(); #endif } internal override int ContextCollateCompare(CollationEncodingEnum enc, IntPtr context, char[] c1, char[] c2) { #if !SQLITE_STANDARD byte[] b1; byte[] b2; System.Text.Encoding converter = null; switch (enc) { case CollationEncodingEnum.UTF8: converter = System.Text.Encoding.UTF8; break; case CollationEncodingEnum.UTF16LE: converter = System.Text.Encoding.Unicode; break; case CollationEncodingEnum.UTF16BE: converter = System.Text.Encoding.BigEndianUnicode; break; } b1 = converter.GetBytes(c1); b2 = converter.GetBytes(c2); return UnsafeNativeMethods.sqlite3_context_collcompare(context, b1, b1.Length, b2, b2.Length); #else throw new NotImplementedException(); #endif } internal override CollationSequence GetCollationSequence(SqliteFunction func, IntPtr context) { #if !SQLITE_STANDARD CollationSequence seq = new CollationSequence(); int len; int type; int enc; IntPtr p = UnsafeNativeMethods.sqlite3_context_collseq(context, out type, out enc, out len); if (p != null) seq.Name = UTF8ToString(p, len); seq.Type = (CollationTypeEnum)type; seq._func = func; seq.Encoding = (CollationEncodingEnum)enc; return seq; #else throw new NotImplementedException(); #endif } internal override long GetParamValueBytes(IntPtr p, int nDataOffset, byte[] bDest, int nStart, int nLength) { IntPtr ptr; int nlen; int nCopied = nLength; nlen = UnsafeNativeMethods.sqlite3_value_bytes(p); ptr = UnsafeNativeMethods.sqlite3_value_blob(p); if (bDest == null) return nlen; if (nCopied + nStart > bDest.Length) nCopied = bDest.Length - nStart; if (nCopied + nDataOffset > nlen) nCopied = nlen - nDataOffset; unsafe { if (nCopied > 0) Marshal.Copy((IntPtr)((byte*)ptr + nDataOffset), bDest, nStart, nCopied); else nCopied 
= 0; } return nCopied; } internal override double GetParamValueDouble(IntPtr ptr) { double value; #if !PLATFORM_COMPACTFRAMEWORK value = UnsafeNativeMethods.sqlite3_value_double(ptr); #else UnsafeNativeMethods.sqlite3_value_double_interop(ptr, out value); #endif return value; } internal override int GetParamValueInt32(IntPtr ptr) { return UnsafeNativeMethods.sqlite3_value_int(ptr); } internal override long GetParamValueInt64(IntPtr ptr) { Int64 value; #if !PLATFORM_COMPACTFRAMEWORK value = UnsafeNativeMethods.sqlite3_value_int64(ptr); #else UnsafeNativeMethods.sqlite3_value_int64_interop(ptr, out value); #endif return value; } internal override string GetParamValueText(IntPtr ptr) { #if !SQLITE_STANDARD int len; return UTF8ToString(UnsafeNativeMethods.sqlite3_value_text_interop(ptr, out len), len); #else return UTF8ToString(UnsafeNativeMethods.sqlite3_value_text(ptr), -1); #endif } internal override TypeAffinity GetParamValueType(IntPtr ptr) { return UnsafeNativeMethods.sqlite3_value_type(ptr); } internal override void ReturnBlob(IntPtr context, byte[] value) { UnsafeNativeMethods.sqlite3_result_blob(context, value, value.Length, (IntPtr)(-1)); } internal override void ReturnDouble(IntPtr context, double value) { #if !PLATFORM_COMPACTFRAMEWORK UnsafeNativeMethods.sqlite3_result_double(context, value); #else UnsafeNativeMethods.sqlite3_result_double_interop(context, ref value); #endif } internal override void ReturnError(IntPtr context, string value) { UnsafeNativeMethods.sqlite3_result_error(context, ToUTF8(value), value.Length); } internal override void ReturnInt32(IntPtr context, int value) { UnsafeNativeMethods.sqlite3_result_int(context, value); } internal override void ReturnInt64(IntPtr context, long value) { #if !PLATFORM_COMPACTFRAMEWORK UnsafeNativeMethods.sqlite3_result_int64(context, value); #else UnsafeNativeMethods.sqlite3_result_int64_interop(context, ref value); #endif } internal override void ReturnNull(IntPtr context) { 
UnsafeNativeMethods.sqlite3_result_null(context); } internal override void ReturnText(IntPtr context, string value) { byte[] b = ToUTF8(value); UnsafeNativeMethods.sqlite3_result_text(context, ToUTF8(value), b.Length - 1, (IntPtr)(-1)); } internal override IntPtr AggregateContext(IntPtr context) { return UnsafeNativeMethods.sqlite3_aggregate_context(context, 1); } internal override void SetPassword(byte[] passwordBytes) { int n = UnsafeNativeMethods.sqlite3_key(_sql, passwordBytes, passwordBytes.Length); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } internal override void ChangePassword(byte[] newPasswordBytes) { int n = UnsafeNativeMethods.sqlite3_rekey(_sql, newPasswordBytes, (newPasswordBytes == null) ? 0 : newPasswordBytes.Length); if (n > 0) throw new SqliteException(n, SQLiteLastError()); } #if MONOTOUCH SQLiteUpdateCallback update_callback; SQLiteCommitCallback commit_callback; SQLiteRollbackCallback rollback_callback; [MonoTouch.MonoPInvokeCallback (typeof (SQLiteUpdateCallback))] static void update (IntPtr puser, int type, IntPtr database, IntPtr table, Int64 rowid) { SQLite3 instance = GCHandle.FromIntPtr (puser).Target as SQLite3; instance.update_callback (puser, type, database, table, rowid); } internal override void SetUpdateHook (SQLiteUpdateCallback func) { update_callback = func; if (func == null) UnsafeNativeMethods.sqlite3_update_hook (_sql, null, IntPtr.Zero); else UnsafeNativeMethods.sqlite3_update_hook (_sql, update, GCHandle.ToIntPtr (gch)); } [MonoTouch.MonoPInvokeCallback (typeof (SQLiteCommitCallback))] static int commit (IntPtr puser) { SQLite3 instance = GCHandle.FromIntPtr (puser).Target as SQLite3; return instance.commit_callback (puser); } internal override void SetCommitHook (SQLiteCommitCallback func) { commit_callback = func; if (func == null) UnsafeNativeMethods.sqlite3_commit_hook (_sql, null, IntPtr.Zero); else UnsafeNativeMethods.sqlite3_commit_hook (_sql, commit, GCHandle.ToIntPtr (gch)); } 
    // Static trampoline for the rollback hook; see the update/commit trampolines above
    // for why MonoTouch requires this [MonoPInvokeCallback] indirection.
    [MonoTouch.MonoPInvokeCallback (typeof (SQLiteRollbackCallback))]
    static void rollback (IntPtr puser)
    {
      // puser carries a GCHandle to the owning SQLite3 instance.
      SQLite3 instance = GCHandle.FromIntPtr (puser).Target as SQLite3;
      instance.rollback_callback (puser);
    }

    /// <summary>Registers (or, with null, clears) the transaction-rollback hook.</summary>
    internal override void SetRollbackHook (SQLiteRollbackCallback func)
    {
      rollback_callback = func;
      if (func == null)
        UnsafeNativeMethods.sqlite3_rollback_hook (_sql, null, IntPtr.Zero);
      else
        UnsafeNativeMethods.sqlite3_rollback_hook (_sql, rollback, GCHandle.ToIntPtr (gch));
    }
#else
    // Non-MonoTouch builds can pass the managed delegate straight to native code.

    /// <summary>Registers the row-update hook with the native library.</summary>
    internal override void SetUpdateHook(SQLiteUpdateCallback func)
    {
      UnsafeNativeMethods.sqlite3_update_hook(_sql, func, IntPtr.Zero);
    }

    /// <summary>Registers the transaction-commit hook with the native library.</summary>
    internal override void SetCommitHook(SQLiteCommitCallback func)
    {
      UnsafeNativeMethods.sqlite3_commit_hook(_sql, func, IntPtr.Zero);
    }

    /// <summary>Registers the transaction-rollback hook with the native library.</summary>
    internal override void SetRollbackHook(SQLiteRollbackCallback func)
    {
      UnsafeNativeMethods.sqlite3_rollback_hook(_sql, func, IntPtr.Zero);
    }
#endif

    /// <summary>
    /// Helper function to retrieve a column of data from an active statement.
    /// </summary>
    /// <param name="stmt">The statement being step()'d through</param>
    /// <param name="index">The column index to retrieve</param>
    /// <param name="typ">The type of data contained in the column.  If Uninitialized, this function will retrieve the datatype information.</param>
    /// <returns>Returns the data in the column</returns>
    internal override object GetValue(SqliteStatement stmt, int index, SQLiteType typ)
    {
      if (IsNull(stmt, index)) return DBNull.Value;
      TypeAffinity aff = typ.Affinity;
      Type t = null;

      // When the declared DbType is known, let it override the raw affinity so the
      // value is converted to the CLR type the caller declared.
      if (typ.Type != DbType.Object)
      {
        t = SqliteConvert.SQLiteTypeToType(typ);
        aff = TypeToAffinity(t);
      }

      switch (aff)
      {
        case TypeAffinity.Blob:
          // A Guid column may arrive as text rather than a 16-byte blob.
          if (typ.Type == DbType.Guid && typ.Affinity == TypeAffinity.Text)
            return new Guid(GetText(stmt, index));

          // First call with a null buffer returns the blob length; second call copies it.
          int n = (int)GetBytes(stmt, index, 0, null, 0, 0);
          byte[] b = new byte[n];
          GetBytes(stmt, index, 0, b, 0, n);

          if (typ.Type == DbType.Guid && n == 16)
            return new Guid(b);

          return b;
        case TypeAffinity.DateTime:
          return GetDateTime(stmt, index);
        case TypeAffinity.Double:
          if (t == null) return GetDouble(stmt, index);
          else return Convert.ChangeType(GetDouble(stmt, index), t, null);
        case TypeAffinity.Int64:
          if (t == null) return GetInt64(stmt, index);
          else return Convert.ChangeType(GetInt64(stmt, index), t, null);
        default:
          // Text affinity (and anything unrecognized) is returned as a string.
          return GetText(stmt, index);
      }
    }

    /// <summary>
    /// Returns the b-tree cursor number used by the statement for the given root page,
    /// or -1 on standard SQLite builds that lack the interop extension.
    /// </summary>
    internal override int GetCursorForTable(SqliteStatement stmt, int db, int rootPage)
    {
#if !SQLITE_STANDARD
      return UnsafeNativeMethods.sqlite3_table_cursor(stmt._sqlite_stmt, db, rootPage);
#else
      return -1;
#endif
    }

    /// <summary>
    /// Returns the rowid the given cursor is positioned on, or 0 on failure or on
    /// standard SQLite builds that lack the interop extension.
    /// </summary>
    internal override long GetRowIdForCursor(SqliteStatement stmt, int cursor)
    {
#if !SQLITE_STANDARD
      long rowid;
      int rc = UnsafeNativeMethods.sqlite3_cursor_rowid(stmt._sqlite_stmt, cursor, out rowid);
      if (rc == 0) return rowid;

      return 0;
#else
      return 0;
#endif
    }

    /// <summary>
    /// Retrieves sort order, conflict behavior and collation for an index column.
    /// On standard SQLite builds (no interop extension) fixed defaults are returned.
    /// </summary>
    internal override void GetIndexColumnExtendedInfo(string database, string index, string column, out int sortMode, out int onError, out string collationSequence)
    {
#if !SQLITE_STANDARD
      IntPtr coll;
      int colllen;
      int rc;

      rc = UnsafeNativeMethods.sqlite3_index_column_info_interop(_sql, ToUTF8(database), ToUTF8(index), ToUTF8(column), out sortMode, out onError, out coll, out colllen);
      if (rc != 0) throw new SqliteException(rc, "");

      collationSequence = UTF8ToString(coll, colllen);
#else
      sortMode = 0;
      onError = 2;
      collationSequence = "BINARY";
#endif
    }
  }
}
// SharpMath - C# Mathematical Library
// Copyright (c) 2014 Morten Bakkedal
// This code is published under the MIT License.

using System;

using SharpMath.LinearAlgebra.AlgLib;

namespace SharpMath.LinearAlgebra
{
	/// <summary>
	/// LU decomposition.
	/// </summary>
	public sealed class LUDecomposition
	{
		// m = rows, n = columns, q = min(m, n) (the number of pivot steps).
		private int m, n, q;
		// In-place LU factors as produced by AlgLib's rmatrixlu: L below the
		// diagonal (unit diagonal implied), U on and above the diagonal.
		private double[,] a;
		// Row-interchange indices in AlgLib's compact format (pivots[i] is the row
		// swapped with row i at step i).
		private int[] pivots;
		// Lazily computed and cached by the corresponding properties below.
		private Matrix lowerTriangular, upperTriangular, permutation;

		private LUDecomposition(Matrix matrix)
		{
			m = matrix.Rows;
			n = matrix.Columns;
			q = Math.Min(m, n);

			a = matrix.ToArray();
			pivots = new int[0];

			if (n == 0 && m == 0)
			{
				// Ignore degenerate case.
				return;
			}

			// Factorizes a in place and fills pivots.
			trfac.rmatrixlu(ref a, m, n, ref pivots);
		}

		public static LUDecomposition Decompose(Matrix matrix)
		{
			return new LUDecomposition(matrix);
		}

		/*public Vector Solve(Vector rightVector)
		{
			if (m != n)
			{
				throw new ArgumentException("The matrix is not a square matrix.");
			}

			if (rightVector.Length != n)
			{
				throw new ArgumentException("The length of the vector is invalid.");
			}

			double[] b = rightVector.ToArray();

			double[] x = null;
			int info = 0;
			densesolver.densesolverreport rep = new densesolver.densesolverreport();
			densesolver.rmatrixlusolve(ref a, ref pivots, n, ref b, ref info, ref rep, ref x);

			if (info != 1)
			{
				throw new ArithmeticException("The matrix is singular.");
			}

			return new Vector(x);
		}*/

		/// <summary>
		/// Matrix determinant using LU decomposition.
		/// </summary>
		public double Determinant()
		{
			if (m != n)
			{
				throw new ArgumentException("The matrix is not a square matrix.");
			}

			if (n == 0 && m == 0)
			{
				// Handle degenerate case.
				return 0.0;
			}

			return matdet.rmatrixludet(ref a, ref pivots, n);
		}

		/// <summary>
		/// Matrix inversion using LU decomposition.
		/// </summary>
		public bool TryInverse(out Matrix inverse)
		{
			if (n == 0 && m == 0)
			{
				// Handle degenerate case.
				inverse = Matrix.Zero(0, 0);
				return true;
			}

			if (n == m)
			{
				// Work on a copy so the cached factorization in a stays intact
				// for Determinant and the triangular-factor properties.
				double[,] x = (double[,])a.Clone();

				int info = 0;
				matinv.matinvreport rep = new matinv.matinvreport();

				// Compute the inverse.
				matinv.rmatrixluinverse(ref x, ref pivots, n, ref info, ref rep);

				// info == 1 signals success; anything else means a singular matrix.
				if (info == 1)
				{
					inverse = new Matrix(x);
					return true;
				}
			}

			inverse = null;
			return false;
		}

		/// <summary>
		/// Matrix inversion using LU decomposition.
		/// </summary>
		public Matrix Inverse()
		{
			Matrix inverse;
			if (!TryInverse(out inverse))
			{
				throw new ArithmeticException("The matrix is singular.");
			}

			return inverse;
		}

		/// <summary>
		/// Matrix determinant using LU decomposition.
		/// </summary>
		public static double Determinant(Matrix matrix)
		{
			return new LUDecomposition(matrix).Determinant();
		}

		/// <summary>
		/// Matrix inversion using LU decomposition.
		/// </summary>
		public static bool TryInverse(Matrix matrix, out Matrix inverse)
		{
			return new LUDecomposition(matrix).TryInverse(out inverse);
		}

		/// <summary>
		/// Matrix inversion using LU decomposition.
		/// </summary>
		public static Matrix Inverse(Matrix matrix)
		{
			return new LUDecomposition(matrix).Inverse();
		}

		/// <summary>
		/// The lower unitriangular matrix.
		/// </summary>
		public Matrix LowerTriangular
		{
			get
			{
				if (lowerTriangular == null)
				{
					// L is m-by-q: unit diagonal plus the strictly-lower part of a.
					double[,] al = new double[m, q];
					for (int j = 0; j < q; j++)
					{
						al[j, j] = 1.0;
						for (int i = j + 1; i < m; i++)
						{
							al[i, j] = a[i, j];
						}
					}

					lowerTriangular = new Matrix(al);
				}

				return lowerTriangular;
			}
		}

		/// <summary>
		/// The upper triangular matrix.
		/// </summary>
		public Matrix UpperTriangular
		{
			get
			{
				if (upperTriangular == null)
				{
					// U is q-by-n: the diagonal and strictly-upper part of a.
					double[,] au = new double[q, n];
					for (int i = 0; i < q; i++)
					{
						for (int j = i; j < n; j++)
						{
							au[i, j] = a[i, j];
						}
					}

					upperTriangular = new Matrix(au);
				}

				return upperTriangular;
			}
		}

		/// <summary>
		/// The permutation matrix.
		/// </summary>
		public Matrix Permutation
		{
			get
			{
				if (permutation == null)
				{
					// Transform to a more natural representation.
					// Replay the recorded row swaps in reverse to obtain the row
					// permutation p0 such that row i of PA is row p0[i] of A.
					int[] p0 = new int[m];
					for (int i = 0; i < m; i++)
					{
						p0[i] = i;
					}

					for (int i = q - 1; i >= 0; i--)
					{
						int j = pivots[i];
						int r = p0[i];
						p0[i] = p0[j];
						p0[j] = r;
					}

					// Compute the permutation matrix.
					double[,] p = new double[m, m];
					for (int i = 0; i < m; i++)
					{
						p[i, p0[i]] = 1.0;
					}

					permutation = new Matrix(p);
				}

				return permutation;
			}
		}
	}
}
using System; using System.IO; using System.Text; using Tamir.SharpSsh.jsch; using Tamir.Streams; /* * Scp.cs * * Copyright (c) 2006 Tamir Gal, http://www.tamirgal.com, All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the distribution. * * 3. The names of the authors may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR * *OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * **/ namespace Tamir.SharpSsh { /// <summary> /// Class for handling SCP file transfers over SSH connection. 
/// </summary> public class Scp : SshTransferProtocolBase { private bool m_recursive = false; private bool m_verbos = false; private bool m_cancelled = false; public Scp(string host, string user, string password) : base(host, user, password) { } public Scp(string host, string user) : base(host, user) { } public static Scp Clone(SshBase baseConnection) { var clone = new Scp(baseConnection.Host, baseConnection.Username, baseConnection.Password); clone.Session = baseConnection.Session; return clone; } protected override string ChannelType { get { return "exec"; } } /// <summary> ///This function is empty, so no channel is connected ///on session connect /// </summary> protected override void ConnectChannel() { } /// <summary> /// Gets or sets a value indicating the default recursive transfer behaviour /// </summary> public bool Recursive { get { return m_recursive; } set { m_recursive = value; } } /// <summary> /// Gets or sets a value indicating whether trace information should be printed /// </summary> public bool Verbos { get { return m_verbos; } set { m_verbos = value; } } public override void Cancel() { m_cancelled = true; } /// <summary> /// Creates a directory on the remot server /// </summary> /// <param name="dir">The new directory</param> public override void Mkdir(string dir) { SCP_CheckConnectivity(); Channel channel = null; Stream server = null; m_cancelled = false; SCP_ConnectTo(out channel, out server, dir, true); SCP_EnterIntoDir(server, dir); channel.disconnect(); } public override void Put(string fromFilePath, string toFilePath) { this.To(fromFilePath, toFilePath); } public override void Get(string fromFilePath, string toFilePath) { this.From(fromFilePath, toFilePath); } /// <summary> /// Copies a file from local machine to a remote SSH machine. 
/// </summary> /// <param name="localPath">The local file path.</param> /// <param name="remotePath">The path of the remote file.</param> public void To(string localPath, string remotePath) { this.To(localPath, remotePath, Recursive); } /// <summary> /// Copies a file from local machine to a remote SSH machine. /// </summary> /// <param name="localPath">The local file path.</param> /// <param name="remotePath">The path of the remote file.</param> public void To(string localPath, string remotePath, bool _recursive) { SCP_CheckConnectivity(); Channel channel = null; Stream server = null; m_cancelled = false; try { //if we are sending a single file if (File.Exists(localPath)) { SCP_ConnectTo(out channel, out server, remotePath, _recursive); SCP_SendFile(server, localPath, remotePath); channel.disconnect(); } //else, if we are sending a local directory else if (Directory.Exists(localPath)) { if (!_recursive) { throw new SshTransferException(Path.GetFileName("'" + localPath) + "' is a directory, you should use recursive transfer."); } SCP_ConnectTo(out channel, out server, remotePath, true); ToRecursive(server, localPath, remotePath); channel.disconnect(); } else { throw new SshTransferException("File not found: " + localPath); } } catch (Exception e) { if (Verbos) Console.WriteLine("Error: " + e.Message); //SendEndMessage(remoteFile, localFile, filesize,filesize, "Transfer ended with an error."); try { channel.disconnect(); } catch { } throw e; } } /// <summary> /// Copies files and directories from local machine to a remote SSH machine using SCP. 
/// </summary> /// <param name="server">I/O Stream for the remote server</param> /// <param name="src">Source to copy</param> /// <param name="dst">Destination path</param> private void ToRecursive(Stream server, string src, string dst) { if (Directory.Exists(src)) { SCP_EnterIntoDir(server, Path.GetFileName(dst)); foreach (string file in Directory.GetFiles(src)) { SCP_SendFile(server, file, Path.GetFileName(file)); } if (m_cancelled) { return; } foreach (string dir in Directory.GetDirectories(src)) { ToRecursive(server, dir, Path.GetFileName(dir)); } SCP_EnterIntoParent(server); } else if (File.Exists(src)) { SCP_SendFile(server, src, Path.GetFileName(src)); } else { throw new SshTransferException("File not found: " + src); } } /// <summary> /// Copies a file from a remote SSH machine to the local machine using SCP. /// </summary> /// <param name="remoteFile">The remmote file name</param> /// <param name="localPath">The local destination path</param> public void From(string remoteFile, string localPath) { this.From(remoteFile, localPath, Recursive); } /// <summary> /// Copies a file from a remote SSH machine to the local machine using SCP. 
/// </summary> /// <param name="remoteFile">The remmote file name</param> /// <param name="localPath">The local destination path</param> /// <param name="recursive">Value indicating whether a recursive transfer should take place</param> public void From(string remoteFile, string localPath, bool _recursive) { SCP_CheckConnectivity(); Channel channel = null; Stream server = null; m_cancelled = false; Int64 filesize = 0; String filename = null; string cmd = null; try { String dir = null; if (Directory.Exists(localPath)) { dir = Path.GetFullPath(localPath); } SCP_ConnectFrom(out channel, out server, remoteFile, _recursive); byte[] buf = new byte[1024]; // send '\0' SCP_SendAck(server); int c = SCP_CheckAck(server); //parse scp commands while ((c == 'D') || (c == 'C') || (c == 'E')) { if (m_cancelled) break; cmd = "" + (char) c; if (c == 'E') { c = SCP_CheckAck(server); dir = Path.GetDirectoryName(dir); if (Verbos) Console.WriteLine("E"); //send '\0' SCP_SendAck(server); c = (char) SCP_CheckAck(server); continue; } // read '0644 ' or '0755 ' server.Read(buf, 0, 5); for (int i = 0; i < 5; i++) cmd += (char) buf[i]; //reading file size filesize = 0; while (true) { server.Read(buf, 0, 1); if (buf[0] == ' ') break; filesize = filesize*10 + (buf[0] - '0'); } //reading file name for (int i = 0;; i++) { server.Read(buf, i, 1); if (buf[i] == (byte) 0x0a) { filename = Util.getString(buf, 0, i); break; } } cmd += " " + filesize + " " + filename; // send '\0' SCP_SendAck(server); //Receive file if (c == 'C') { if (Verbos) Console.WriteLine("Sending file modes: " + cmd); SCP_ReceiveFile(server, remoteFile, dir == null ? 
localPath : dir + "/" + filename, filesize); if (m_cancelled) break; // send '\0' SCP_SendAck(server); } //Enter directory else if (c == 'D') { if (dir == null) { if (File.Exists(localPath)) throw new SshTransferException("'" + localPath + "' is not a directory"); dir = localPath; Directory.CreateDirectory(dir); } if (Verbos) Console.WriteLine("Entering directory: " + cmd); dir += "/" + filename; Directory.CreateDirectory(dir); } c = SCP_CheckAck(server); } channel.disconnect(); } catch (Exception e) { if (Verbos) Console.WriteLine("Error: " + e.Message); try { channel.disconnect(); } catch { } throw e; } } #region SCP private functions /// <summary> /// Checks is a channel is already connected by this instance /// </summary> protected void SCP_CheckConnectivity() { if (!Connected) throw new Exception("Channel is down."); } /// <summary> /// Connect a channel to the remote server using the 'SCP TO' command ('scp -t') /// </summary> /// <param name="channel">Will contain the new connected channel</param> /// <param name="server">Will contaun the new connected server I/O stream</param> /// <param name="rfile">The remote path on the server</param> /// <param name="recursive">Idicate a recursive scp transfer</param> protected void SCP_ConnectTo(out Channel channel, out Stream server, string rfile, bool recursive) { string scpCommand = "scp -p -t "; if (recursive) scpCommand += "-r "; scpCommand += "\"" + rfile + "\""; channel = (ChannelExec) m_session.openChannel(ChannelType); ((ChannelExec) channel).setCommand(scpCommand); server = new CombinedStream (channel.getInputStream(), channel.getOutputStream()); channel.connect(); SCP_CheckAck(server); } /// <summary> /// Connect a channel to the remote server using the 'SCP From' command ('scp -f') /// </summary> /// <param name="channel">Will contain the new connected channel</param> /// <param name="server">Will contaun the new connected server I/O stream</param> /// <param name="rfile">The remote path on the 
server</param> /// <param name="recursive">Idicate a recursive scp transfer</param> protected void SCP_ConnectFrom(out Channel channel, out Stream server, string rfile, bool recursive) { string scpCommand = "scp -f "; if (recursive) scpCommand += "-r "; scpCommand += "\"" + rfile + "\""; channel = (ChannelExec) m_session.openChannel(ChannelType); ((ChannelExec) channel).setCommand(scpCommand); server = new CombinedStream (channel.getInputStream(), channel.getOutputStream()); channel.connect(); //SCP_CheckAck(server); } /// <summary> /// Transfer a file to the remote server /// </summary> /// <param name="server">A connected server I/O stream</param> /// <param name="src">The source file to copy</param> /// <param name="dst">The remote destination path</param> protected void SCP_SendFile(Stream server, string src, string dst) { Int64 filesize = 0; Int64 copied = 0; filesize = (new FileInfo(src)).Length; byte[] tmp = new byte[1]; // send "C0644 filesize filename", where filename should not include '/' string command = "C0644 " + filesize + " " + Path.GetFileName(dst) + "\n"; if (Verbos) Console.WriteLine("Sending file modes: " + command); SendStartMessage(src, dst, filesize, "Starting transfer."); byte[] buff = Util.getBytes(command); server.Write(buff, 0, buff.Length); server.Flush(); if (SCP_CheckAck(server) != 0) { throw new SshTransferException("Error openning communication channel."); } // send a content of lfile SendProgressMessage(src, dst, copied, filesize, "Transferring..."); FileStream fis = File.OpenRead(src); byte[] buf = new byte[1024*10*2]; while (!m_cancelled) { int len = fis.Read(buf, 0, buf.Length); if (len <= 0) break; server.Write(buf, 0, len); server.Flush(); copied += len; SendProgressMessage(src, dst, copied, filesize, "Transferring..."); } fis.Close(); if (m_cancelled) return; // send '\0' buf[0] = 0; server.Write(buf, 0, 1); server.Flush(); SendProgressMessage(src, dst, copied, filesize, "Verifying transfer..."); if (SCP_CheckAck(server) != 0) 
{ SendEndMessage(src, dst, copied, filesize, "Transfer ended with an error."); throw new SshTransferException("Unknow error during file transfer."); } SendEndMessage(src, dst, copied, filesize, "Transfer completed successfully (" + copied + " bytes)."); } /// <summary> /// Transfer a file from the remote server /// </summary> /// <param name="server">A connected server I/O stream</param> /// <param name="rfile">The remote file to copy</param> /// <param name="lfile">The local destination path</param> protected void SCP_ReceiveFile(Stream server, string rfile, string lfile, Int64 size) { Int64 copied = 0; SendStartMessage(rfile, lfile, size, "Connected, starting transfer."); // read a content of lfile FileStream fos = File.OpenWrite(lfile); int foo; Int64 filesize = size; byte[] buf = new byte[1024]; while (!m_cancelled) { if (buf.Length < filesize) foo = buf.Length; else foo = (int) filesize; Int64 len = server.Read(buf, 0, foo); copied += len; fos.Write(buf, 0, foo); SendProgressMessage(rfile, lfile, copied, size, "Transferring..."); filesize -= foo; if (filesize == 0) break; } fos.Close(); if (m_cancelled) return; SCP_CheckAck(server); SendEndMessage(rfile, lfile, copied, size, "Transfer completed successfully (" + filesize + " bytes)."); } /// <summary> /// Instructs the remote server to enter into a directory /// </summary> /// <param name="server">A connected server I/O stream</param> /// <param name="dir">The directory name/param> protected void SCP_EnterIntoDir(Stream server, string dir) { try { byte[] tmp = new byte[1]; // send "C0644 filesize filename", where filename should not include '/' string command = "D0755 0 " + Path.GetFileName(dir) + "\n"; if (Verbos) Console.WriteLine("Enter directory: " + command); byte[] buff = Util.getBytes(command); server.Write(buff, 0, buff.Length); server.Flush(); if (SCP_CheckAck(server) != 0) { throw new SshTransferException("Error openning communication channel."); } } catch { } } /// <summary> /// Instructs the remote 
server to go up one level /// </summary> /// <param name="server">A connected server I/O stream</param> protected void SCP_EnterIntoParent(Stream server) { try { byte[] tmp = new byte[1]; // send "C0644 filesize filename", where filename should not include '/' string command = "E\n"; if (Verbos) Console.WriteLine(command); byte[] buff = Util.getBytes(command); server.Write(buff, 0, buff.Length); server.Flush(); if (SCP_CheckAck(server) != 0) { throw new SshTransferException("Error openning communication channel."); } } catch { } } /// <summary> /// Gets server acknowledgment /// </summary> /// <param name="ins">A connected server I/O stream</param> private int SCP_CheckAck(Stream ins) { int b = ins.ReadByte(); // b may be 0 for success, // 1 for error, // 2 for fatal error, // -1 if (b == 0) return b; if (b == -1) return b; if (b == 1 || b == 2) { StringBuilder sb = new StringBuilder(); int c; do { c = ins.ReadByte(); sb.Append((char) c); } while (c != '\n'); if (b == 1) { // error //Console.WriteLine(sb.ToString()); throw new SshTransferException(sb.ToString()); } if (b == 2) { // fatal error //Console.WriteLine(sb.ToString()); throw new SshTransferException(sb.ToString()); } } return b; } /// <summary> /// Sends acknowledgment to remote server /// </summary> /// <param name="server">A connected server I/O stream</param> private void SCP_SendAck(Stream server) { server.WriteByte(0); server.Flush(); } #endregion SCP private functions } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

using Internal.Runtime.CompilerServices;

#if BIT64
using nuint = System.UInt64;
#else
using nuint = System.UInt32;
#endif

namespace System
{
    internal static partial class Marvin
    {
        /// <summary>
        /// Compute a Marvin hash and collapse it into a 32-bit hash.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static int ComputeHash32(ReadOnlySpan<byte> data, ulong seed) => ComputeHash32(ref MemoryMarshal.GetReference(data), (uint)data.Length, (uint)seed, (uint)(seed >> 32));

        /// <summary>
        /// Compute a Marvin hash and collapse it into a 32-bit hash.
        /// p0/p1 carry the low and high halves of the 64-bit seed and double as
        /// the running hash state.
        /// </summary>
        public static int ComputeHash32(ref byte data, uint count, uint p0, uint p1)
        {
            // Control flow of this method generally flows top-to-bottom, trying to
            // minimize the number of branches taken for large (>= 8 bytes, 4 chars) inputs.
            // If small inputs (< 8 bytes, 4 chars) are given, this jumps to a "small inputs"
            // handler at the end of the method.

            if (count < 8)
            {
                // We can't run the main loop, but we might still have 4 or more bytes available to us.
                // If so, jump to the 4 .. 7 bytes logic immediately after the main loop.

                if (count >= 4)
                {
                    goto Between4And7BytesRemain;
                }
                else
                {
                    goto InputTooSmallToEnterMainLoop;
                }
            }

            // Main loop - read 8 bytes at a time.
            // The block function is unrolled 2x in this loop.

            uint loopCount = count / 8;
            Debug.Assert(loopCount > 0, "Shouldn't reach this code path for small inputs.");
            do
            {
                // Most x86 processors have two dispatch ports for reads, so we can read 2x 32-bit
                // values in parallel. We opt for this instead of a single 64-bit read since the
                // typical use case for Marvin32 is computing String hash codes, and the particular
                // layout of String instances means the starting data is never 8-byte aligned when
                // running in a 64-bit process.

                p0 += Unsafe.ReadUnaligned<uint>(ref data);
                uint nextUInt32 = Unsafe.ReadUnaligned<uint>(ref Unsafe.AddByteOffset(ref data, 4));

                // One block round for each of the 32-bit integers we just read, 2x rounds total.

                Block(ref p0, ref p1);
                p0 += nextUInt32;
                Block(ref p0, ref p1);

                // Bump the data reference pointer and decrement the loop count.

                // Decrementing by 1 every time and comparing against zero allows the JIT to produce
                // better codegen compared to a standard 'for' loop with an incrementing counter.
                // Requires https://github.com/dotnet/coreclr/issues/7566 to be addressed first
                // before we can realize the full benefits of this.

                data = ref Unsafe.AddByteOffset(ref data, 8);
            } while (--loopCount > 0);

            // n.b. We've not been updating the original 'count' parameter, so its actual value is
            // still the original data length. However, we can still rely on its least significant
            // 3 bits to tell us how much data remains (0 .. 7 bytes) after the loop above is
            // completed.

            if ((count & 0b_0100) == 0)
            {
                goto DoFinalPartialRead;
            }

        Between4And7BytesRemain:

            // If after finishing the main loop we still have 4 or more leftover bytes, or if we had
            // 4 .. 7 bytes to begin with and couldn't enter the loop in the first place, we need to
            // consume 4 bytes immediately and send them through one round of the block function.

            Debug.Assert(count >= 4, "Only should've gotten here if the original count was >= 4.");

            p0 += Unsafe.ReadUnaligned<uint>(ref data);
            Block(ref p0, ref p1);

        DoFinalPartialRead:

            // Finally, we have 0 .. 3 bytes leftover. Since we know the original data length was at
            // least 4 bytes (smaller lengths are handled at the end of this routine), we can safely
            // read the 4 bytes at the end of the buffer without reading past the beginning of the
            // original buffer. This necessarily means the data we're about to read will overlap with
            // some data we've already processed, but we can handle that below.

            Debug.Assert(count >= 4, "Only should've gotten here if the original count was >= 4.");

            // Read the last 4 bytes of the buffer.

            uint partialResult = Unsafe.ReadUnaligned<uint>(ref Unsafe.Add(ref Unsafe.AddByteOffset(ref data, (nuint)count & 7), -4));

            // The 'partialResult' local above contains any data we have yet to read, plus some number
            // of bytes which we've already read from the buffer. An example of this is given below
            // for little-endian architectures. In this table, AA BB CC are the bytes which we still
            // need to consume, and ## are bytes which we want to throw away since we've already
            // consumed them as part of a previous read.
            //
            //                                  (partialResult contains)        (we want it to contain)
            // count mod 4 = 0 -> [ ## ## ## ## | ]             -> 0x####_####  -> 0x0000_0080
            // count mod 4 = 1 -> [ ## ## ## ## | AA ]          -> 0xAA##_####  -> 0x0000_80AA
            // count mod 4 = 2 -> [ ## ## ## ## | AA BB ]       -> 0xBBAA_####  -> 0x0080_BBAA
            // count mod 4 = 3 -> [ ## ## ## ## | AA BB CC ]    -> 0xCCBB_AA##  -> 0x80CC_BBAA

            // Only the low 5 bits of the shift count below matter, so ~count works as
            // a cheap stand-in for the needed (8 * bytes-to-discard) shift amount.
            count = ~count << 3;

            if (BitConverter.IsLittleEndian)
            {
                partialResult >>= 8; // make some room for the 0x80 byte
                partialResult |= 0x8000_0000u; // put the 0x80 byte at the beginning
                partialResult >>= (int)count & 0x1F; // shift out all previously consumed bytes
            }
            else
            {
                partialResult <<= 8; // make some room for the 0x80 byte
                partialResult |= 0x80u; // put the 0x80 byte at the end
                partialResult <<= (int)count & 0x1F; // shift out all previously consumed bytes
            }

        DoFinalRoundsAndReturn:

            // Now that we've computed the final partial result, merge it in and run two rounds of
            // the block function to finish out the Marvin algorithm.

            p0 += partialResult;
            Block(ref p0, ref p1);
            Block(ref p0, ref p1);

            return (int)(p1 ^ p0);

        InputTooSmallToEnterMainLoop:

            // We had only 0 .. 3 bytes to begin with, so we can't perform any 32-bit reads.
            // This means that we're going to be building up the final result right away and
            // will only ever run two rounds total of the block function. Let's initialize
            // the partial result to "no data".

            if (BitConverter.IsLittleEndian)
            {
                partialResult = 0x80u;
            }
            else
            {
                partialResult = 0x80000000u;
            }

            if ((count & 0b_0001) != 0)
            {
                // If the buffer is 1 or 3 bytes in length, let's read a single byte now
                // and merge it into our partial result. This will result in partialResult
                // having one of the two values below, where AA BB CC are the buffer bytes.
                //
                //                   (little-endian / big-endian)
                // [ AA ]         -> 0x0000_80AA / 0xAA80_0000
                // [ AA BB CC ]   -> 0x0000_80CC / 0xCC80_0000

                // (count & 2) selects offset 0 for a 1-byte buffer and offset 2 (the last
                // byte) for a 3-byte buffer.
                partialResult = Unsafe.AddByteOffset(ref data, (nuint)count & 2);

                if (BitConverter.IsLittleEndian)
                {
                    partialResult |= 0x8000;
                }
                else
                {
                    partialResult <<= 24;
                    partialResult |= 0x800000u;
                }
            }

            if ((count & 0b_0010) != 0)
            {
                // If the buffer is 2 or 3 bytes in length, let's read a single ushort now
                // and merge it into the partial result. This will result in partialResult
                // having one of the two values below, where AA BB CC are the buffer bytes.
                //
                //                   (little-endian / big-endian)
                // [ AA BB ]      -> 0x0080_BBAA / 0xAABB_8000
                // [ AA BB CC ]   -> 0x80CC_BBAA / 0xAABB_CC80 (carried over from above)

                if (BitConverter.IsLittleEndian)
                {
                    partialResult <<= 16;
                    partialResult |= (uint)Unsafe.ReadUnaligned<ushort>(ref data);
                }
                else
                {
                    partialResult |= (uint)Unsafe.ReadUnaligned<ushort>(ref data);
                    partialResult = BitOperations.RotateLeft(partialResult, 16);
                }
            }

            // Everything is consumed! Go perform the final rounds and return.

            goto DoFinalRoundsAndReturn;
        }

        // One round of the Marvin32 mixing function: xor/rotate/add over the two
        // 32-bit state halves.  Works on locals and writes back once for codegen.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static void Block(ref uint rp0, ref uint rp1)
        {
            uint p0 = rp0;
            uint p1 = rp1;

            p1 ^= p0;
            p0 = BitOperations.RotateLeft(p0, 20);

            p0 += p1;
            p1 = BitOperations.RotateLeft(p1, 9);

            p1 ^= p0;
            p0 = BitOperations.RotateLeft(p0, 27);

            p0 += p1;
            p1 = BitOperations.RotateLeft(p1, 19);

            rp0 = p0;
            rp1 = p1;
        }

        // Process-wide random seed so hash codes differ across runs (hash-DoS hardening).
        public static ulong DefaultSeed { get; } = GenerateSeed();

        private static unsafe ulong GenerateSeed()
        {
            ulong seed;
            Interop.GetRandomBytes((byte*)&seed, sizeof(ulong));
            return seed;
        }
    }
}
using System;
using System.Collections.Generic;

namespace Server.Mobiles
{
    /// <summary>
    /// The satyr, an ML-era fey creature. Beyond normal melee combat, its music
    /// can pacify, undress, suppress (temporary skill loss), and provoke nearby
    /// targets; each ability is driven from <see cref="OnThink"/> and gated by
    /// its own cooldown and random chance.
    /// </summary>
    [CorpseName( "a satyr's corpse" )]
    public class Satyr : BaseCreature
    {
        [Constructable]
        public Satyr() : base( AIType.AI_Animal, FightMode.Aggressor, 10, 1, 0.2, 0.4 )
        {
            Name = "a satyr";
            Body = 271;
            BaseSoundID = 0x586;

            SetStr( 177, 195 );
            SetDex( 251, 269 );
            SetInt( 153, 170 );

            SetHits( 350, 400 );

            SetDamage( 13, 24 );

            SetDamageType( ResistanceType.Physical, 100 );

            SetResistance( ResistanceType.Physical, 55, 60 );
            SetResistance( ResistanceType.Fire, 25, 35 );
            SetResistance( ResistanceType.Cold, 30, 40 );
            SetResistance( ResistanceType.Poison, 30, 40 );
            SetResistance( ResistanceType.Energy, 30, 40 );

            SetSkill( SkillName.MagicResist, 55.0, 65.0 );
            SetSkill( SkillName.Tactics, 80.0, 100.0 );
            SetSkill( SkillName.Wrestling, 80.0, 100.0 );

            Fame = 5000;
            Karma = 0;

            VirtualArmor = 28; // Don't know what it should be

            //PackArcanceScroll( 0.05 );
        }

        public override OppositionGroup OppositionGroup
        {
            get { return OppositionGroup.FeyAndUndead; }
        }

        public override void GenerateLoot()
        {
            AddLoot( LootPack.MlRich );
        }

        public override void OnThink()
        {
            base.OnThink();

            // Each ability checks its own cooldown/chance, so calling all four
            // every tick is cheap.
            Peace( Combatant );
            Undress( Combatant );
            Suppress( Combatant );
            Provoke( Combatant );
        }

        #region Peace
        private DateTime m_NextPeace;

        /// <summary>
        /// 10% chance, at most once per 10 seconds, to pacify a player target
        /// for one minute and clear its combatant.
        /// </summary>
        public void Peace( Mobile target )
        {
            if ( target == null || Deleted || !Alive || m_NextPeace > DateTime.Now || 0.1 < Utility.RandomDouble() )
                return;

            PlayerMobile p = target as PlayerMobile; // applies to players only

            if ( p != null && p.PeacedUntil < DateTime.Now && !p.Hidden && CanBeHarmful( p ) )
            {
                p.PeacedUntil = DateTime.Now + TimeSpan.FromMinutes( 1 );
                p.SendLocalizedMessage( 500616 ); // You hear lovely music, and forget to continue battling!
                p.FixedParticles( 0x376A, 1, 32, 0x15BD, EffectLayer.Waist );
                p.Combatant = null;

                PlaySound( 0x58D );
            }

            m_NextPeace = DateTime.Now + TimeSpan.FromSeconds( 10 );
        }
        #endregion

        #region Suppress
        // Tracks per-target animation timers so a target is never suppressed twice
        // concurrently; entries are removed by SuppressRemove.
        private static Dictionary<Mobile, Timer> m_Suppressed = new Dictionary<Mobile, Timer>();
        private DateTime m_NextSuppress;

        /// <summary>
        /// 10% chance, at most once per 10 seconds, to lower all of the target's
        /// skills by 28% for 20-80 seconds, with a repeating waist effect.
        /// </summary>
        public void Suppress( Mobile target )
        {
            if ( target == null || m_Suppressed.ContainsKey( target ) || Deleted || !Alive || m_NextSuppress > DateTime.Now || 0.1 < Utility.RandomDouble() )
                return;

            TimeSpan delay = TimeSpan.FromSeconds( Utility.RandomMinMax( 20, 80 ) );

            if ( !target.Hidden && CanBeHarmful( target ) )
            {
                target.SendLocalizedMessage( 1072061 ); // You hear jarring music, suppressing your strength.

                for ( int i = 0; i < target.Skills.Length; i++ )
                {
                    Skill s = target.Skills[ i ];

                    target.AddSkillMod( new TimedSkillMod( s.SkillName, true, s.Base * -0.28, delay ) );
                }

                // One animation tick every 1.25s for the duration of the debuff.
                int count = (int) Math.Round( delay.TotalSeconds / 1.25 );

                Timer timer = new AnimateTimer( target, count );
                m_Suppressed.Add( target, timer );
                timer.Start();

                PlaySound( 0x58C );
            }

            m_NextSuppress = DateTime.Now + TimeSpan.FromSeconds( 10 );
        }

        /// <summary>
        /// Stops and forgets the animation timer associated with
        /// <paramref name="target"/>, if any.
        /// </summary>
        public static void SuppressRemove( Mobile target )
        {
            if ( target == null )
                return;

            Timer timer;

            // TryGetValue avoids the ContainsKey + indexer double lookup.
            if ( m_Suppressed.TryGetValue( target, out timer ) )
            {
                // Bug fix: this previously read 'timer != null || timer.Running',
                // which dereferences 'timer' (NullReferenceException) whenever the
                // stored value is null. '&&' is the intended guard.
                if ( timer != null && timer.Running )
                    timer.Stop();

                m_Suppressed.Remove( target );
            }
        }

        // Repeats the suppression particle effect until the duration elapses or
        // the target dies/is deleted, then unregisters itself.
        private class AnimateTimer : Timer
        {
            private Mobile m_Owner;
            private int m_Count;

            public AnimateTimer( Mobile owner, int count ) : base( TimeSpan.Zero, TimeSpan.FromSeconds( 1.25 ) )
            {
                m_Owner = owner;
                m_Count = count;
            }

            protected override void OnTick()
            {
                if ( m_Owner.Deleted || !m_Owner.Alive || m_Count-- < 0 )
                {
                    SuppressRemove( m_Owner );
                }
                else
                    m_Owner.FixedParticles( 0x376A, 1, 32, 0x15BD, EffectLayer.Waist );
            }
        }
        #endregion

        #region Undress
        private DateTime m_NextUndress;

        /// <summary>
        /// 0.5% chance, at most once per minute, to move a female player's
        /// clothing into her backpack.
        /// </summary>
        public void Undress( Mobile target )
        {
            if ( target == null || Deleted || !Alive || m_NextUndress > DateTime.Now || 0.005 < Utility.RandomDouble() )
                return;

            if ( target.Player && target.Female && !target.Hidden && CanBeHarmful( target ) )
            {
                UndressItem( target, Layer.OuterTorso );
                UndressItem( target, Layer.InnerTorso );
                UndressItem( target, Layer.MiddleTorso );
                UndressItem( target, Layer.Pants );
                UndressItem( target, Layer.Shirt );

                target.SendLocalizedMessage( 1072196 ); // The satyr's music makes your blood race. Your clothing is too confining.
            }

            m_NextUndress = DateTime.Now + TimeSpan.FromMinutes( 1 );
        }

        // Moves the item on the given layer into the mobile's backpack, if movable.
        public void UndressItem( Mobile m, Layer layer )
        {
            Item item = m.FindItemOnLayer( layer );

            if ( item != null && item.Movable )
                m.PlaceInBackpack( item );
        }
        #endregion

        #region Provoke
        private DateTime m_NextProvoke;

        /// <summary>
        /// 5% chance, at most once per 10 seconds, to provoke one nearby
        /// creature onto the target.
        /// </summary>
        public void Provoke( Mobile target )
        {
            if ( target == null || Deleted || !Alive || m_NextProvoke > DateTime.Now || 0.05 < Utility.RandomDouble() )
                return;

            foreach ( Mobile m in GetMobilesInRange( RangePerception ) )
            {
                if ( m is BaseCreature )
                {
                    BaseCreature c = (BaseCreature) m;

                    if ( c == this || c == target || c.Unprovokable || c.IsParagon || c.BardProvoked || c.AccessLevel != AccessLevel.Player || !c.CanBeHarmful( target ) )
                        continue;

                    c.Provoke( this, target, true );

                    if ( target.Player )
                        target.SendLocalizedMessage( 1072062 ); // You hear angry music, and start to fight.

                    PlaySound( 0x58A );
                    break; // only one creature is provoked per attempt
                }
            }

            m_NextProvoke = DateTime.Now + TimeSpan.FromSeconds( 10 );
        }
        #endregion

        public override int Meat { get { return 1; } }

        public Satyr( Serial serial ) : base( serial )
        {
        }

        public override void Serialize( GenericWriter writer )
        {
            base.Serialize( writer );
            writer.Write( (int) 0 ); // version
        }

        public override void Deserialize( GenericReader reader )
        {
            base.Deserialize( reader );
            int version = reader.ReadInt();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;

namespace JIT.HardwareIntrinsics.Arm
{
    public static partial class Program
    {
        // Driver for the AdvSimd.AbsScalar(Vector64<Single>) test group: runs every
        // scenario when the intrinsic is supported, otherwise verifies it throws.
        private static void AbsScalarSingle()
        {
            var test = new SimpleUnaryOpTest__AbsScalarSingle();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (AdvSimd.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (AdvSimd.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                if (AdvSimd.IsSupported)
                {
                    // Validates passing a static member works, using pinning and Load
                    test.RunClsVarScenario_Load();
                }

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (AdvSimd.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();
                }

                // Validates passing the field of a local class works
                test.RunClassLclFldScenario();

                if (AdvSimd.IsSupported)
                {
                    // Validates passing the field of a local class works, using pinning and Load
                    test.RunClassLclFldScenario_Load();
                }

                // Validates passing an instance member of a class works
                test.RunClassFldScenario();

                if (AdvSimd.IsSupported)
                {
                    // Validates passing an instance member of a class works, using pinning and Load
                    test.RunClassFldScenario_Load();
                }

                // Validates passing the field of a local struct works
                test.RunStructLclFldScenario();

                if (AdvSimd.IsSupported)
                {
                    // Validates passing the field of a local struct works, using pinning and Load
                    test.RunStructLclFldScenario_Load();
                }

                // Validates passing an instance member of a struct works
                test.RunStructFldScenario();

                if (AdvSimd.IsSupported)
                {
                    // Validates passing an instance member of a struct works, using pinning and Load
                    test.RunStructFldScenario_Load();
                }
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    public sealed unsafe class SimpleUnaryOpTest__AbsScalarSingle
    {
        // Owns pinned over-sized byte buffers and hands out pointers aligned to
        // 'alignment' for the input and output vectors.
        private struct DataTable
        {
            private byte[] inArray1;
            private byte[] outArray;

            private GCHandle inHandle1;
            private GCHandle outHandle;

            private ulong alignment;

            public DataTable(Single[] inArray1, Single[] outArray, int alignment)
            {
                int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
                int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
                if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutArray)
                {
                    throw new ArgumentException("Invalid value of alignment");
                }

                // Allocate twice the alignment so an aligned window always fits.
                this.inArray1 = new byte[alignment * 2];
                this.outArray = new byte[alignment * 2];

                this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
                this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);

                this.alignment = (ulong)alignment;

                Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
            }

            public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
            public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);

            public void Dispose()
            {
                inHandle1.Free();
                outHandle.Free();
            }

            // Rounds 'buffer' up to the next multiple of 'expectedAlignment'
            // (expectedAlignment must be a power of two).
            private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
            {
                return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
            }
        }

        private struct TestStruct
        {
            public Vector64<Single> _fld1;

            public static TestStruct Create()
            {
                var testStruct = new TestStruct();

                for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetSingle(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());

                return testStruct;
            }

            public void RunStructFldScenario(SimpleUnaryOpTest__AbsScalarSingle testClass)
            {
                var result = AdvSimd.AbsScalar(_fld1);

                Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
            }

            public void RunStructFldScenario_Load(SimpleUnaryOpTest__AbsScalarSingle testClass)
            {
                fixed (Vector64<Single>* pFld1 = &_fld1)
                {
                    var result = AdvSimd.AbsScalar(
                        AdvSimd.LoadVector64((Single*)(pFld1))
                    );

                    Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                    testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
                }
            }
        }

        private static readonly int LargestVectorSize = 8;

        private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);
        private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Single>>() / sizeof(Single);

        private static Single[] _data1 = new Single[Op1ElementCount];

        private static Vector64<Single> _clsVar1;

        private Vector64<Single> _fld1;

        private DataTable _dataTable;

        static SimpleUnaryOpTest__AbsScalarSingle()
        {
            // Inputs are negated so AbsScalar has real work to do.
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetSingle(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());
        }

        public SimpleUnaryOpTest__AbsScalarSingle()
        {
            Succeeded = true;

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetSingle(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Single>>());

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetSingle(); }
            _dataTable = new DataTable(_data1, new Single[RetElementCount], LargestVectorSize);
        }

        public bool IsSupported => AdvSimd.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

            var result = AdvSimd.AbsScalar(
                Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr)
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

            var result = AdvSimd.AbsScalar(
                AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

            var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.AbsScalar), new Type[] { typeof(Vector64<Single>) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr)
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

            var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.AbsScalar), new Type[] { typeof(Vector64<Single>) })
                                     .Invoke(null, new object[] {
                                        AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Single>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

            var result = AdvSimd.AbsScalar(
                _clsVar1
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));

            fixed (Vector64<Single>* pClsVar1 = &_clsVar1)
            {
                var result = AdvSimd.AbsScalar(
                    AdvSimd.LoadVector64((Single*)(pClsVar1))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_clsVar1, _dataTable.outArrayPtr);
            }
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var op1 = Unsafe.Read<Vector64<Single>>(_dataTable.inArray1Ptr);
            var result = AdvSimd.AbsScalar(op1);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var op1 = AdvSimd.LoadVector64((Single*)(_dataTable.inArray1Ptr));
            var result = AdvSimd.AbsScalar(op1);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new SimpleUnaryOpTest__AbsScalarSingle();
            var result = AdvSimd.AbsScalar(test._fld1);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

            var test = new SimpleUnaryOpTest__AbsScalarSingle();

            fixed (Vector64<Single>* pFld1 = &test._fld1)
            {
                var result = AdvSimd.AbsScalar(
                    AdvSimd.LoadVector64((Single*)(pFld1))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(test._fld1, _dataTable.outArrayPtr);
            }
        }

        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = AdvSimd.AbsScalar(_fld1);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _dataTable.outArrayPtr);
        }

        public void RunClassFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

            fixed (Vector64<Single>* pFld1 = &_fld1)
            {
                var result = AdvSimd.AbsScalar(
                    AdvSimd.LoadVector64((Single*)(pFld1))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_fld1, _dataTable.outArrayPtr);
            }
        }

        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = AdvSimd.AbsScalar(test._fld1);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, _dataTable.outArrayPtr);
        }

        public void RunStructLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));

            var test = TestStruct.Create();
            var result = AdvSimd.AbsScalar(
                AdvSimd.LoadVector64((Single*)(&test._fld1))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, _dataTable.outArrayPtr);
        }

        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        public void RunStructFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));

            var test = TestStruct.Create();
            test.RunStructFldScenario_Load(this);
        }

        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        private void ValidateResult(Vector64<Single> op1, void* result, [CallerMemberName] string method = "")
        {
            Single[] inArray1 = new Single[Op1ElementCount];
            Single[] outArray = new Single[RetElementCount];

            Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());

            ValidateResult(inArray1, outArray, method);
        }

        private void ValidateResult(void* op1, void* result, [CallerMemberName] string method = "")
        {
            Single[] inArray1 = new Single[Op1ElementCount];
            Single[] outArray = new Single[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Single>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Single>>());

            ValidateResult(inArray1, outArray, method);
        }

        private void ValidateResult(Single[] firstOp, Single[] result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            // AbsScalar: element 0 must bitwise-equal |firstOp[0]|; all upper
            // result elements must be zero.
            if (BitConverter.SingleToInt32Bits(result[0]) != BitConverter.SingleToInt32Bits(Math.Abs(firstOp[0])))
            {
                succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (BitConverter.SingleToInt32Bits(result[i]) != 0)
                    {
                        succeeded = false;
                        break;
                    }
                }
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.AbsScalar)}<Single>(Vector64<Single>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
                TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
using RefactoringEssentials.CSharp.CodeRefactorings;
using Xunit;

namespace RefactoringEssentials.Tests.CSharp.CodeRefactorings
{
    /// <summary>
    /// Tests for the "LINQ query to fluent syntax" refactoring. In each input
    /// fixture the '$' character marks the caret position where the action is
    /// invoked. All tests are currently skipped: the action is not implemented.
    /// </summary>
    public class LinqQueryToFluentTests : CSharpCodeRefactoringTestBase
    {
        // from/select -> .Select
        [Fact(Skip="Not implemented!")]
        public void TestSimpleQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () select x; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().Select (x => x); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // Generated transparent-identifier names must not collide with locals
        // already in scope (_, _1, _2 here).
        [Fact(Skip="Not implemented!")]
        public void TestName()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { int _; var _2 = $from x in System.Enumerable.Empty<int> () let _1 = x select x; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { int _; var _2 = System.Enumerable.Empty<int> ().Select (x => new { x, _1 = x }).Select (_3 => _3.x); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // A low-precedence source expression must be parenthesized before the
        // method call is appended.
        [Fact(Skip="Not implemented!")]
        public void TestPrecedence()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in true ? System.Enumerable.Empty<int> () : null select x; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = (true ? System.Enumerable.Empty<int> () : null).Select (x => x); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // where -> .Where (trailing trivial select is dropped)
        [Fact(Skip="Not implemented!")]
        public void TestWhereQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () where x > 1 select x; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().Where (x => x > 1); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // orderby a, b descending -> .OrderBy(...).ThenByDescending(...)
        [Fact(Skip="Not implemented!")]
        public void TestOrderByQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () orderby x, x * 2 descending select x; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().OrderBy (x => x).ThenByDescending (x => x * 2); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // from/from/select -> .SelectMany with a result selector
        [Fact(Skip="Not implemented!")]
        public void TestDoubleFromWithSelectQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = System.Enumerable.Empty<int> (); var data = $from x in System.Enumerable.Empty<int> () from y in newEnumerable select x * y; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = System.Enumerable.Empty<int> (); var data = System.Enumerable.Empty<int> ().SelectMany (x => newEnumerable, (x, y) => x * y); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // A typed range variable in the inner from adds a .Cast<T>() call.
        [Fact(Skip="Not implemented!")]
        public void TestDoubleFromWithCastQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = System.Enumerable.Empty<int> (); var data = $from x in System.Enumerable.Empty<int> () from float y in newEnumerable select x * y; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = System.Enumerable.Empty<int> (); var data = System.Enumerable.Empty<int> ().SelectMany (x => newEnumerable.Cast<float> (), (x, y) => x * y); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // A clause between the second from and the final select forces a
        // transparent identifier (anonymous type pair).
        [Fact(Skip="Not implemented!")]
        public void TestDoubleFromWithIntermediateQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = System.Enumerable.Empty<int> (); var data = $from x in System.Enumerable.Empty<int> () from y in newEnumerable where x > y select x * y; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = System.Enumerable.Empty<int> (); var data = System.Enumerable.Empty<int> ().SelectMany (x => newEnumerable, (x, y) => new { x, y }).Where (_ => _.x > _.y).Select (_1 => _1.x * _1.y); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // let -> .Select into an anonymous type carrying both names
        [Fact(Skip="Not implemented!")]
        public void TestLetQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () let y = x * 2 select x * y; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().Select (x => new { x, y = x * 2 }).Select (_ => _.x * _.y); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // Nested lets chain transparent identifiers (_._ access paths).
        [Fact(Skip="Not implemented!")]
        public void TestLongChainQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () let y = x * 2 let z = x * y * 2 select x * y * z; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().Select (x => new { x, y = x * 2 }).Select (_ => new { _, z = _.x * _.y * 2 }).Select (_1 => _1._.x * _1._.y * _1.z); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // A typed range variable in the leading from adds .Cast<T>() up front.
        [Fact(Skip="Not implemented!")]
        public void TestCastQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from float x in System.Enumerable.Empty<int> () select x; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().Cast<float> ().Select (x => x); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // join ... on ... equals ... -> .Join with key and result selectors
        [Fact(Skip="Not implemented!")]
        public void TestJoinQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 4, 5, 6 }; var data = $from x in System.Enumerable.Empty<int> () join float yy in newEnumerable on x * 2 equals yy select x * yy; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 4, 5, 6 }; var data = System.Enumerable.Empty<int> ().Join (newEnumerable.Cast<float> (), x => x * 2, yy => yy, (x, yy) => x * yy); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // join followed by another clause needs a transparent identifier.
        [Fact(Skip="Not implemented!")]
        public void TestJoinWithIntermediateQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 4, 5, 6 }; var data = $from x in System.Enumerable.Empty<int> () join float y in newEnumerable on x * 2 equals y where x == 2 select x * y; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 4, 5, 6 }; var data = System.Enumerable.Empty<int> ().Join (newEnumerable.Cast<float> (), x => x * 2, y => y, (x, y) => new { x, y }).Where (_ => _.x == 2).Select (_1 => _1.x * _1.y); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // join ... into -> .GroupJoin
        [Fact(Skip="Not implemented!")]
        public void TestJoinWithIntoSelectQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 1, 2, 3 }; var data = $from x in System.Enumerable.Empty<int> () join y in newEnumerable on x * 2 equals y into g select g; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 1, 2, 3 }; var data = System.Enumerable.Empty<int> ().GroupJoin (newEnumerable, x => x * 2, y => y, (x, g) => g); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // group-join followed by another clause needs a transparent identifier.
        [Fact(Skip="Not implemented!")]
        public void TestJoinWithIntoIntermediateQuery()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 1, 2, 3 }; var data = $from x in System.Enumerable.Empty<int> () join y in newEnumerable on x * 2 equals y into g where true select g; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var newEnumerable = new int[] { 1, 2, 3 }; var data = System.Enumerable.Empty<int> ().GroupJoin (newEnumerable, x => x * 2, y => y, (x, g) => new { x, g }).Where (_ => true).Select (_1 => _1.g); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // group x by key -> .GroupBy(keySelector)
        [Fact(Skip="Not implemented!")]
        public void TestSimpleGroup()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () group x by x % 10; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().GroupBy (x => x % 10); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // group expr by key -> .GroupBy(keySelector, elementSelector)
        [Fact(Skip="Not implemented!")]
        public void TestDifferentGroup()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () group x / 10 by x % 10; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().GroupBy (x => x % 10, x => x / 10); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // select ... into -> two chained .Select calls (query continuation)
        [Fact(Skip="Not implemented!")]
        public void TestInto()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var data = $from x in System.Enumerable.Empty<int> () select x * 2 into y select y * 3; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var data = System.Enumerable.Empty<int> ().Select (x => x * 2).Select (y => y * 3); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // second from followed by let: SelectMany pair, then let's Select
        [Fact(Skip="Not implemented!")]
        public void TestDoubleFromWithLet()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var src = System.Enumerable.Empty<int> (); var data = $from x in src from y in src let k = x * y select k; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var src = System.Enumerable.Empty<int> (); var data = src.SelectMany (x => src, (x, y) => new { x, y }).Select (_ => new { _, k = _.x * _.y }).Select (_1 => _1.k); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }

        // let before the second from: Select pair first, then SelectMany
        [Fact(Skip="Not implemented!")]
        public void TestDoubleFromWithMidLet()
        {
            string input = @" using System.Linq; public class TestClass { public void TestMethod() { var src = System.Enumerable.Empty<int> (); var data = $from x in src let k = x * x from y in src select k * y; } } ";
            string output = @" using System.Linq; public class TestClass { public void TestMethod() { var src = System.Enumerable.Empty<int> (); var data = src.Select (x => new { x, k = x * x }).SelectMany (_ => src, (_1, y) => _1.k * y); } } ";

            Assert.Equal(output, RunContextAction(new LinqQueryToFluentAction(), input));
        }
    }
}