context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
using UnityEngine;
using UnityEngine.Serialization;
using System.Collections;
using System.Collections.Generic;
namespace UMA
{
/// <summary>
/// Slot data contains mesh information and overlay references.
/// </summary>
[System.Serializable]
#if !UMA2_LEAN_AND_CLEAN
public partial class SlotData : System.IEquatable<SlotData>
#else
public class SlotData : System.IEquatable<SlotData>, ISerializationCallbackReceiver
#endif
{
    /// <summary>
    /// The asset contains the immutable portions of the slot.
    /// </summary>
    public SlotDataAsset asset;
    /// <summary>
    /// Adjusts the resolution of slot overlays.
    /// </summary>
    public float overlayScale = 1.0f;
#if UMA2_LEAN_AND_CLEAN
    /// <summary>Name of the slot, mirrored from the asset.</summary>
    public string slotName { get { return asset.slotName; } }
#endif
    /// <summary>
    /// List of overlays used to texture the slot. May contain null placeholders
    /// (see <see cref="SetOverlay(int, OverlayData)"/>).
    /// </summary>
    private List<OverlayData> overlayList = new List<OverlayData>();
    /// <summary>
    /// Constructor for slot using the given asset.
    /// </summary>
    /// <param name="asset">Asset.</param>
    public SlotData(SlotDataAsset asset)
    {
        this.asset = asset;
#if !UMA2_LEAN_AND_CLEAN
#pragma warning disable 0649, 0169, 0618
        // Mirror asset values into the obsolete version-1 fields for backwards compatibility.
        slotName = asset.slotName;
        materialSample = asset.materialSample;
#endif
        overlayScale = asset.overlayScale;
    }
    /// <summary>
    /// Deep copy of the SlotData. The asset reference is shared; overlays are
    /// duplicated. Null placeholder entries are dropped, so overlay indices may
    /// shift in the copy.
    /// </summary>
    public SlotData Copy()
    {
        var res = new SlotData(asset);
        int overlayCount = overlayList.Count;
        res.overlayList = new List<OverlayData>(overlayCount);
        for (int i = 0; i < overlayCount; i++)
        {
            OverlayData overlay = overlayList[i];
            if (overlay != null)
            {
                res.overlayList.Add(overlay.Duplicate());
            }
        }
        return res;
    }
    public int GetTextureChannelCount(UMAGeneratorBase generator)
    {
        return asset.GetTextureChannelCount(generator);
    }
    /// <summary>
    /// Removes the first overlay matching each of the given names.
    /// </summary>
    /// <returns>True if at least one overlay was removed.</returns>
    public bool RemoveOverlay(params string[] names)
    {
        bool changed = false;
        foreach (var name in names)
        {
            for (int i = 0; i < overlayList.Count; i++)
            {
                // Null guard: SetOverlay can leave null placeholders in the list.
                if (overlayList[i] != null && overlayList[i].asset.overlayName == name)
                {
                    overlayList.RemoveAt(i);
                    changed = true;
                    break; // only the first match per name is removed
                }
            }
        }
        return changed;
    }
    /// <summary>
    /// Sets the tint color on every overlay matching any of the given names.
    /// </summary>
    /// <returns>True if at least one overlay was recolored.</returns>
    public bool SetOverlayColor(Color32 color, params string[] names)
    {
        bool changed = false;
        foreach (var name in names)
        {
            foreach (var overlay in overlayList)
            {
                if (overlay != null && overlay.asset.overlayName == name)
                {
                    overlay.colorData.color = color;
                    changed = true;
                }
            }
        }
        return changed;
    }
    /// <summary>
    /// Gets the first overlay matching any of the given names, or null.
    /// </summary>
    public OverlayData GetOverlay(params string[] names)
    {
        foreach (var name in names)
        {
            foreach (var overlay in overlayList)
            {
                if (overlay != null && overlay.asset.overlayName == name)
                {
                    return overlay;
                }
            }
        }
        return null;
    }
    /// <summary>
    /// Places an overlay at the given index, growing the list with null
    /// placeholders as needed.
    /// </summary>
    public void SetOverlay(int index, OverlayData overlay)
    {
        if (index >= overlayList.Count)
        {
            overlayList.Capacity = index + 1;
            while (index >= overlayList.Count)
            {
                overlayList.Add(null);
            }
        }
        overlayList[index] = overlay;
    }
    /// <summary>
    /// Gets the overlay at the given index, or null when the index is out of range.
    /// </summary>
    public OverlayData GetOverlay(int index)
    {
        if (index < 0 || index >= overlayList.Count)
            return null;
        return overlayList[index];
    }
    /// <summary>
    /// Attempts to find an equivalent overlay in the slot.
    /// </summary>
    /// <returns>The equivalent overlay (or null, if no equivalent).</returns>
    /// <param name="overlay">Overlay.</param>
    public OverlayData GetEquivalentOverlay(OverlayData overlay)
    {
        foreach (OverlayData overlay2 in overlayList)
        {
            if (OverlayData.Equivalent(overlay, overlay2))
            {
                return overlay2;
            }
        }
        return null;
    }
    /// <summary>Number of overlay entries, including null placeholders.</summary>
    public int OverlayCount { get { return overlayList.Count; } }
    /// <summary>
    /// Sets the complete list of overlays.
    /// </summary>
    /// <param name="overlayList">The overlay list.</param>
    public void SetOverlayList(List<OverlayData> overlayList)
    {
        this.overlayList = overlayList;
    }
    /// <summary>
    /// Add an overlay to the slot. Null (or asset-less) overlays are ignored.
    /// </summary>
    /// <param name="overlayData">Overlay.</param>
    public void AddOverlay(OverlayData overlayData)
    {
        if (overlayData)
            overlayList.Add(overlayData);
    }
    /// <summary>
    /// Gets the complete list of overlays (a live reference, not a copy).
    /// </summary>
    /// <returns>The overlay list.</returns>
    public List<OverlayData> GetOverlayList()
    {
        return overlayList;
    }
    /// <summary>
    /// Checks the slot and its overlays for configuration errors, logging every
    /// problem found. Also repairs undersized overlay color data in place.
    /// </summary>
    /// <returns>True when the slot is usable.</returns>
    internal bool Validate()
    {
        bool valid = true;
        if (asset.meshData != null)
        {
            if (asset.material == null)
            {
                Debug.LogError(string.Format("Slot '{0}' has a mesh but no material.", asset.slotName), asset);
                valid = false;
            }
            else
            {
                if (asset.material.material == null)
                {
                    Debug.LogError(string.Format("Slot '{0}' has an umaMaterial without a material assigned.", asset.slotName), asset);
                    valid = false;
                }
                else
                {
                    for (int i = 0; i < asset.material.channels.Length; i++)
                    {
                        var channel = asset.material.channels[i];
                        if (!asset.material.material.HasProperty(channel.materialPropertyName))
                        {
                            Debug.LogError(string.Format("Slot '{0}' Material Channel {1} refers to material property '{2}' but no such property exists.", asset.slotName, i, channel.materialPropertyName), asset);
                            valid = false;
                        }
                    }
                }
            }
            for (int i = 0; i < overlayList.Count; i++)
            {
                var overlayData = overlayList[i];
                if (overlayData != null)
                {
                    if (overlayData.asset.material != asset.material)
                    {
                        Debug.LogError(string.Format("Slot '{0}' and Overlay '{1}' don't have the same UMA Material", asset.slotName, overlayData.asset.overlayName));
                        valid = false;
                    }
                    // BUGFIX: the channel comparisons below dereference asset.material;
                    // skip them when the material is missing (already reported above)
                    // instead of throwing a NullReferenceException.
                    if (asset.material != null)
                    {
                        if ((overlayData.asset.textureList == null) || (overlayData.asset.textureList.Length != asset.material.channels.Length))
                        {
                            Debug.LogError(string.Format("Overlay '{0}' doesn't have the right number of channels", overlayData.asset.overlayName));
                            valid = false;
                        }
                        else
                        {
                            for (int j = 0; j < asset.material.channels.Length; j++)
                            {
                                if ((overlayData.asset.textureList[j] == null) && (asset.material.channels[j].channelType != UMAMaterial.ChannelType.MaterialColor))
                                {
                                    Debug.LogError(string.Format("Overlay '{0}' missing required texture in channel {1}", overlayData.asset.overlayName, j));
                                    valid = false;
                                }
                            }
                        }
                        if (overlayData.colorData.channelMask.Length < asset.material.channels.Length)
                        {
                            // Fixup colorData if moving from Legacy to PBR materials
                            int oldsize = overlayData.colorData.channelMask.Length;
                            System.Array.Resize(ref overlayData.colorData.channelMask, asset.material.channels.Length);
                            System.Array.Resize(ref overlayData.colorData.channelAdditiveMask, asset.material.channels.Length);
                            // BUGFIX: the loop condition was 'j > Length', which never ran,
                            // leaving the newly added entries at default (transparent black)
                            // instead of the intended white mask / black additive defaults.
                            for (int j = oldsize; j < asset.material.channels.Length; j++)
                            {
                                overlayData.colorData.channelMask[j] = Color.white;
                                overlayData.colorData.channelAdditiveMask[j] = Color.black;
                            }
                            Debug.LogWarning(string.Format("Overlay '{0}' missing required color data on Asset: " + asset.name + " Resizing and adding defaults", overlayData.asset.overlayName));
                        }
                    }
                }
            }
        }
        else
        {
#if !UMA2_LEAN_AND_CLEAN
            if (asset.meshRenderer != null)
            {
                Debug.LogError(string.Format("Slot '{0}' is a UMA 1x slot... you need to upgrade it by selecting it and using the UMA|Optimize Slot Meshes.", asset.slotName), asset);
                valid = false;
            }
#endif
            if (asset.material != null)
            {
                // BUGFIX: guard against a missing inner material before probing its
                // shader properties (the mesh branch above already handles this case).
                if (asset.material.material == null)
                {
                    Debug.LogError(string.Format("Slot '{0}' has an umaMaterial without a material assigned.", asset.slotName), asset);
                    valid = false;
                }
                else
                {
                    for (int i = 0; i < asset.material.channels.Length; i++)
                    {
                        var channel = asset.material.channels[i];
                        if (!asset.material.material.HasProperty(channel.materialPropertyName))
                        {
                            Debug.LogError(string.Format("Slot '{0}' Material Channel {1} refers to material property '{2}' but no such property exists.", asset.slotName, i, channel.materialPropertyName), asset);
                            valid = false;
                        }
                    }
                }
            }
        }
        return valid;
    }
    public override string ToString()
    {
        return "SlotData: " + asset.slotName;
    }
#if !UMA2_LEAN_AND_CLEAN
    #region obsolete junk from version 1
    [System.Obsolete("SlotData.materialSample is obsolete use asset.materialSample!", false)]
    public Material materialSample;
    [System.Obsolete("SlotData.slotName is obsolete use asset.slotName!", false)]
    public string slotName;
    [System.Obsolete("SlotData.listID is obsolete.", false)]
    public int listID = -1;
    [System.Obsolete("SlotData.meshRenderer is obsolete.", true)]
    public SkinnedMeshRenderer meshRenderer;
    [System.Obsolete("SlotData.boneNameHashes is obsolete.", true)]
    public int[] boneNameHashes;
    [System.Obsolete("SlotData.boneWeights is obsolete.", true)]
    public BoneWeight[] boneWeights;
    [System.Obsolete("SlotData.umaBoneData is obsolete.", true)]
    public Transform[] umaBoneData;
    [System.Obsolete("SlotData.animatedBones is obsolete, use SlotDataAsset.animatedBones.", true)]
    public Transform[] animatedBones = new Transform[0];
    [System.Obsolete("SlotData.textureNameList is obsolete, use SlotDataAsset.textureNameList.", true)]
    public string[] textureNameList;
    [System.Obsolete("SlotData.slotDNA is obsolete, use SlotDataAsset.slotDNA.", true)]
    public DnaConverterBehaviour slotDNA;
    [System.Obsolete("SlotData.subMeshIndex is obsolete, use SlotDataAsset.subMeshIndex.", true)]
    public int subMeshIndex;
    /// <summary>
    /// Use this to identify slots that serves the same purpose
    /// Eg. ChestArmor, Helmet, etc.
    /// </summary>
    [System.Obsolete("SlotData.slotGroup is obsolete, use SlotDataAsset.slotGroup.", false)]
    public string slotGroup;
    /// <summary>
    /// Use this to identify what kind of overlays fit this slotData
    /// Eg. BaseMeshSkin, BaseMeshOverlays, GenericPlateArmor01
    /// </summary>
    [System.Obsolete("SlotData.tags is obsolete, use SlotDataAsset.tags.", false)]
    public string[] tags;
    #endregion
#endif
    #region operator ==, != and similar HACKS, seriously.....
    [System.Obsolete("You can no longer cast UnityEngine.Object to SlotData, perhaps you want to cast it into SlotDataAsset instead?", false)]
    public static implicit operator SlotData(UnityEngine.Object obj)
    {
        throw new System.NotImplementedException("You can no longer cast UnityEngine.Object to SlotData, perhaps you want to cast it into SlotDataAsset instead?");
    }
    // A SlotData is "true" only when it is non-null AND has an asset,
    // mimicking Unity's destroyed-object boolean semantics.
    public static implicit operator bool(SlotData obj)
    {
        return ((System.Object)obj) != null && obj.asset != null;
    }
    public bool Equals(SlotData other)
    {
        return (this == other);
    }
    public override bool Equals(object other)
    {
        return Equals(other as SlotData);
    }
    // Two slots are == when both are "valid" (see implicit bool) and are the
    // same reference, OR when both are "invalid"; a valid and an invalid slot
    // never compare equal.
    public static bool operator ==(SlotData slot, SlotData obj)
    {
        if (slot)
        {
            if (obj)
            {
                return System.Object.ReferenceEquals(slot, obj);
            }
            return false;
        }
        return !((bool)obj);
    }
    public static bool operator !=(SlotData slot, SlotData obj)
    {
        if (slot)
        {
            if (obj)
            {
                return !System.Object.ReferenceEquals(slot, obj);
            }
            return true;
        }
        return ((bool)obj);
    }
    public override int GetHashCode()
    {
        return base.GetHashCode();
    }
    #endregion
    #region ISerializationCallbackReceiver Members
    /// <summary>
    /// Unity serialization callback; guarantees overlayList is never null after load.
    /// </summary>
    public void OnAfterDeserialize()
    {
        if (overlayList == null) overlayList = new List<OverlayData>();
    }
    public void OnBeforeSerialize()
    {
    }
    #endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
/*============================================================
**
**
**
** Purpose: Managed ACL wrapper for Win32 mutexes.
**
**
===========================================================*/
using System;
using System.Collections;
using System.Security.Principal;
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System.Runtime.InteropServices;
using System.Threading;
namespace System.Security.AccessControl
{
// Derive this list of values from winnt.h and MSDN docs:
// http://msdn.microsoft.com/library/default.asp?url=/library/en-us/dllproc/base/synchronization_object_security_and_access_rights.asp
// In order to call ReleaseMutex, you must have an ACL granting you
// MUTEX_MODIFY_STATE rights (0x0001). The other interesting value
// in a Mutex's ACL is MUTEX_ALL_ACCESS (0x1F0001).
// You need SYNCHRONIZE to be able to open a handle to a mutex.
[Flags]
public enum MutexRights
{
/// <summary>MUTEX_MODIFY_STATE (0x0001) — required to call ReleaseMutex.</summary>
Modify = 0x000001,
/// <summary>Standard DELETE right.</summary>
Delete = 0x010000,
/// <summary>READ_CONTROL — read the object's security descriptor.</summary>
ReadPermissions = 0x020000,
/// <summary>WRITE_DAC — modify the object's DACL.</summary>
ChangePermissions = 0x040000,
/// <summary>WRITE_OWNER — take ownership of the object.</summary>
TakeOwnership = 0x080000,
Synchronize = 0x100000, // SYNCHRONIZE — required to open/wait on the mutex
FullControl = 0x1F0001 // MUTEX_ALL_ACCESS
}
public sealed class MutexAccessRule : AccessRule
{
    /// <summary>
    /// Creates a rule allowing or denying <paramref name="eventRights"/> to the
    /// given identity. Mutexes are not containers, so no inheritance or
    /// propagation flags apply.
    /// </summary>
    public MutexAccessRule(IdentityReference identity, MutexRights eventRights, AccessControlType type)
        : this(identity, (int)eventRights, false, InheritanceFlags.None, PropagationFlags.None, type)
    {
    }

    /// <summary>
    /// Convenience overload that resolves <paramref name="identity"/> as an NT account name.
    /// </summary>
    public MutexAccessRule(String identity, MutexRights eventRights, AccessControlType type)
        : this(new NTAccount(identity), (int)eventRights, false, InheritanceFlags.None, PropagationFlags.None, type)
    {
    }

    // Internal constructor that the public constructors and the
    // MutexSecurity.AccessRuleFactory method funnel through.
    internal MutexAccessRule(
        IdentityReference identity,
        int accessMask,
        bool isInherited,
        InheritanceFlags inheritanceFlags,
        PropagationFlags propagationFlags,
        AccessControlType type)
        : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags, type)
    {
    }

    /// <summary>The mutex rights encoded in this rule's access mask.</summary>
    public MutexRights MutexRights => (MutexRights)base.AccessMask;
}
public sealed class MutexAuditRule : AuditRule
{
    /// <summary>
    /// Creates an audit rule for the given identity and rights. Mutexes are not
    /// containers, so no inheritance or propagation flags apply.
    /// </summary>
    public MutexAuditRule(IdentityReference identity, MutexRights eventRights, AuditFlags flags)
        : this(identity, (int)eventRights, false, InheritanceFlags.None, PropagationFlags.None, flags)
    {
    }

    // NOTE: a string-identity convenience overload is deliberately omitted — it is
    // not part of the spec.

    // Internal constructor that the public constructor and the
    // MutexSecurity.AuditRuleFactory method funnel through.
    internal MutexAuditRule(IdentityReference identity, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AuditFlags flags)
        : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags, flags)
    {
    }

    /// <summary>The mutex rights encoded in this rule's access mask.</summary>
    public MutexRights MutexRights => (MutexRights)base.AccessMask;
}
public sealed class MutexSecurity : NativeObjectSecurity
{
    /// <summary>Creates an empty security descriptor for a kernel mutex object.</summary>
    public MutexSecurity()
        : base(true, ResourceType.KernelObject)
    {
    }

    /// <summary>Reads the requested descriptor sections of the named mutex.</summary>
    [System.Security.SecuritySafeCritical] // auto-generated
    public MutexSecurity(String name, AccessControlSections includeSections)
        : base(true, ResourceType.KernelObject, name, includeSections, _HandleErrorCode, null)
    {
        // Let the underlying ACL API's demand unmanaged code permission.
    }

    /// <summary>Reads the requested descriptor sections via an open mutex handle.</summary>
    [System.Security.SecurityCritical] // auto-generated
    internal MutexSecurity(SafeWaitHandle handle, AccessControlSections includeSections)
        : base(true, ResourceType.KernelObject, handle, includeSections, _HandleErrorCode, null)
    {
        // Let the underlying ACL API's demand unmanaged code permission.
    }

    // Maps Win32 error codes surfaced by the base class onto friendlier
    // exceptions; returning null keeps the base class's default translation.
    [System.Security.SecurityCritical] // auto-generated
    private static Exception _HandleErrorCode(int errorCode, string name, SafeHandle handle, object context)
    {
        if (errorCode == Interop.mincore.Errors.ERROR_INVALID_NAME ||
            errorCode == Interop.mincore.Errors.ERROR_INVALID_HANDLE ||
            errorCode == Interop.mincore.Errors.ERROR_FILE_NOT_FOUND)
        {
            return string.IsNullOrEmpty(name)
                ? new WaitHandleCannotBeOpenedException()
                : new WaitHandleCannotBeOpenedException(SR.Format(SR.WaitHandleCannotBeOpenedException_InvalidHandle, name));
        }

        return null;
    }

    public override AccessRule AccessRuleFactory(IdentityReference identityReference, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AccessControlType type) =>
        new MutexAccessRule(identityReference, accessMask, isInherited, inheritanceFlags, propagationFlags, type);

    public override AuditRule AuditRuleFactory(IdentityReference identityReference, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, AuditFlags flags) =>
        new MutexAuditRule(identityReference, accessMask, isInherited, inheritanceFlags, propagationFlags, flags);

    // Computes which descriptor sections carry pending modifications and so
    // must be written back by Persist.
    internal AccessControlSections GetAccessControlSectionsFromChanges()
    {
        AccessControlSections sections = AccessControlSections.None;
        if (AccessRulesModified)
            sections |= AccessControlSections.Access;
        if (AuditRulesModified)
            sections |= AccessControlSections.Audit;
        if (OwnerModified)
            sections |= AccessControlSections.Owner;
        if (GroupModified)
            sections |= AccessControlSections.Group;
        return sections;
    }

    // Writes any modified sections back to the kernel object, then clears the
    // dirty flags. Serialized via the base class's writer lock.
    [System.Security.SecurityCritical] // auto-generated
    internal void Persist(SafeWaitHandle handle)
    {
        // Let the underlying ACL API's demand unmanaged code.
        WriteLock();
        try
        {
            AccessControlSections sectionsToPersist = GetAccessControlSectionsFromChanges();
            if (sectionsToPersist == AccessControlSections.None)
            {
                return; // Nothing changed; nothing to write back.
            }

            base.Persist(handle, sectionsToPersist);
            OwnerModified = GroupModified = AuditRulesModified = AccessRulesModified = false;
        }
        finally
        {
            WriteUnlock();
        }
    }

    // Strongly-typed pass-throughs to the base class's rule management.
    public void AddAccessRule(MutexAccessRule rule) => base.AddAccessRule(rule);

    public void SetAccessRule(MutexAccessRule rule) => base.SetAccessRule(rule);

    public void ResetAccessRule(MutexAccessRule rule) => base.ResetAccessRule(rule);

    public bool RemoveAccessRule(MutexAccessRule rule) => base.RemoveAccessRule(rule);

    public void RemoveAccessRuleAll(MutexAccessRule rule) => base.RemoveAccessRuleAll(rule);

    public void RemoveAccessRuleSpecific(MutexAccessRule rule) => base.RemoveAccessRuleSpecific(rule);

    public void AddAuditRule(MutexAuditRule rule) => base.AddAuditRule(rule);

    public void SetAuditRule(MutexAuditRule rule) => base.SetAuditRule(rule);

    public bool RemoveAuditRule(MutexAuditRule rule) => base.RemoveAuditRule(rule);

    public void RemoveAuditRuleAll(MutexAuditRule rule) => base.RemoveAuditRuleAll(rule);

    public void RemoveAuditRuleSpecific(MutexAuditRule rule) => base.RemoveAuditRuleSpecific(rule);

    public override Type AccessRightType => typeof(MutexRights);

    public override Type AccessRuleType => typeof(MutexAccessRule);

    public override Type AuditRuleType => typeof(MutexAuditRule);
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Management.Automation.Runspaces;
using System.Collections.ObjectModel;
using System.Collections.Generic;
using System.Diagnostics;
using System.Management.Automation;
using System.Globalization;
using System.Management.Automation.Internal;
using System.Management.Automation.Help;
using System.Runtime.InteropServices;
namespace Microsoft.PowerShell.Commands
{
/// <summary>
/// This class implements get-help command
/// </summary>
[Cmdlet(VerbsCommon.Get, "Help", DefaultParameterSetName = "AllUsersView", HelpUri = "https://go.microsoft.com/fwlink/?LinkID=113316")]
public sealed class GetHelpCommand : PSCmdlet
{
/// <summary>
/// Help Views
/// </summary>
internal enum HelpView
{
Default = 0x00, // Default View — no view switch specified
DetailedView = 0x01, // selected by the -Detailed switch
FullView = 0x02, // selected by the -Full switch
ExamplesView = 0x03 // selected by the -Examples switch (plain values, not flags)
}
/// <summary>
/// Default constructor for the GetHelpCommand class
/// </summary>
public GetHelpCommand()
{
}
#region Cmdlet Parameters
/// <summary>
/// Target to search for help
/// </summary>
[Parameter(Position = 0, ValueFromPipelineByPropertyName = true)]
public string Name { get; set; } = "";
/// <summary>
/// Path to provider location that user is curious about.
/// </summary>
[Parameter]
public string Path { get; set; }
/// <summary>
/// List of help categories to search for help
/// </summary>
[Parameter]
[ValidateSet(
"Alias", "Cmdlet", "Provider", "General", "FAQ", "Glossary", "HelpFile", "ScriptCommand", "Function", "Filter", "ExternalScript", "All", "DefaultHelp", "Workflow", "DscResource", "Class", "Configuration",
IgnoreCase = true)]
public string[] Category { get; set; }
/// <summary>
/// List of Component's to search on.
/// </summary>
/// <value></value>
[Parameter]
public string[] Component { get; set; } = null;
/// <summary>
/// List of Functionality's to search on.
/// </summary>
/// <value></value>
[Parameter]
public string[] Functionality { get; set; } = null;
/// <summary>
/// List of Role's to search on.
/// </summary>
/// <value></value>
[Parameter]
public string[] Role { get; set; } = null;
private string _provider = "";
/// <summary>
/// Changes the view of HelpObject returned
/// </summary>
/// <remarks>
/// Currently we support following views:
///
/// 1. Reminder (Default - Experienced User)
/// 2. Detailed (Beginner - Beginning User)
/// 3. Full (All Users)
/// 4. Examples
/// 5. Parameters
///
/// Currently we support these views only for Cmdlets.
/// A SnapIn developer can however change these views.
/// </remarks>
[Parameter(ParameterSetName = "DetailedView", Mandatory = true)]
public SwitchParameter Detailed
{
set
{
if (value.ToBool())
{
_viewTokenToAdd = HelpView.DetailedView;
}
}
}
/// <summary>
/// Changes the view of HelpObject returned
/// </summary>
/// <remarks>
/// Currently we support following views:
///
/// 1. Reminder (Default - Experienced User)
/// 2. Detailed (Beginner - Beginning User)
/// 3. Full (All Users)
/// 4. Examples
/// 5. Parameters
///
/// Currently we support these views only for Cmdlets.
/// A SnapIn developer can however change these views.
/// </remarks>
[Parameter(ParameterSetName = "AllUsersView")]
public SwitchParameter Full
{
set
{
if (value.ToBool())
{
_viewTokenToAdd = HelpView.FullView;
}
}
}
/// <summary>
/// Changes the view of HelpObject returned
/// </summary>
/// <remarks>
/// Currently we support following views:
///
/// 1. Reminder (Default - Experienced User)
/// 2. Detailed (Beginner - Beginning User)
/// 3. Full (All Users)
/// 4. Examples
///
/// Currently we support these views only for Cmdlets.
/// A SnapIn developer can however change these views.
/// </remarks>
[Parameter(ParameterSetName = "Examples", Mandatory = true)]
public SwitchParameter Examples
{
set
{
if (value.ToBool())
{
_viewTokenToAdd = HelpView.ExamplesView;
}
}
}
/// <summary>
/// Parameter name.
/// </summary>
/// <remarks>
/// Support WildCard strings as supported by WildcardPattern class.
/// </remarks>
[Parameter(ParameterSetName = "Parameters", Mandatory = true)]
public string Parameter { set; get; }
/// <summary>
/// This parameter,if true, will direct get-help cmdlet to
/// navigate to a URL (stored in the command MAML file under
/// the uri node).
/// </summary>
[Parameter(ParameterSetName = "Online", Mandatory = true)]
public SwitchParameter Online
{
    set
    {
        _showOnlineHelp = value;
        if (_showOnlineHelp)
        {
            // -Online launches a browser on the local machine, so it is
            // rejected when the cmdlet runs in a remote runspace.
            VerifyParameterForbiddenInRemoteRunspace(this, "Online");
        }
    }

    get
    {
        return _showOnlineHelp;
    }
}
private bool _showOnlineHelp;
private bool _showWindow;
/// <summary>
/// Gets and sets a value indicating whether the help should be displayed in a separate window
/// </summary>
[Parameter(ParameterSetName = "ShowWindow", Mandatory = true)]
public SwitchParameter ShowWindow
{
    get
    {
        return _showWindow;
    }

    set
    {
        _showWindow = value;
        if (_showWindow)
        {
            // -ShowWindow opens a GUI window on the local machine, so it is
            // rejected when the cmdlet runs in a remote runspace.
            VerifyParameterForbiddenInRemoteRunspace(this, "ShowWindow");
        }
    }
}
// The following variable controls the view.
private HelpView _viewTokenToAdd = HelpView.Default;
#if !CORECLR
private GraphicalHostReflectionWrapper graphicalHostReflectionWrapper;
#endif
private readonly Stopwatch _timer = new Stopwatch();
private bool _updatedHelp;
#endregion
#region Cmdlet API implementation
/// <summary>
/// Implements the BeginProcessing() method for get-help command
/// </summary>
protected override void BeginProcessing()
{
    _timer.Start(); // elapsed time is reported via telemetry in ProcessRecord

    // Offer a one-time Update-Help prompt, but only when: help is not being
    // redirected with -Online, the help system still wants to prompt, the
    // process is interactive, and a network connection is available.
    // NOTE(review): HasInternetConnection() is defined outside this chunk —
    // confirm its cost/side effects before reordering the && chain.
    if (!Online.IsPresent && UpdatableHelpSystem.ShouldPromptToUpdateHelp() && HostUtilities.IsProcessInteractive(MyInvocation) && HasInternetConnection())
    {
        if (ShouldContinue(HelpDisplayStrings.UpdateHelpPromptBody, HelpDisplayStrings.UpdateHelpPromptTitle))
        {
            System.Management.Automation.PowerShell.Create(RunspaceMode.CurrentRunspace).AddCommand("Update-Help").Invoke();
            _updatedHelp = true; // recorded for telemetry
        }

        // Whether the user accepted or declined, never prompt again.
        UpdatableHelpSystem.SetDisablePromptToUpdateHelp();
    }
}
/// <summary>
/// Implements the ProcessRecord() method for get-help command
/// </summary>
protected override void ProcessRecord()
{
    try
    {
#if !CORECLR
        // -ShowWindow renders help in a GUI window; the helper type lives in the
        // graphical host assembly and is reached via reflection.
        if (this.ShowWindow)
        {
            this.graphicalHostReflectionWrapper = GraphicalHostReflectionWrapper.GetGraphicalHostReflectionWrapper(this, "Microsoft.PowerShell.Commands.Internal.HelpWindowHelper");
        }
#endif
        this.Context.HelpSystem.OnProgress += new HelpSystem.HelpProgressHandler(HelpSystem_OnProgress);

        // Fold the -Category strings into one flags value; invalid names are
        // reported through WriteError and flagged via 'failed'.
        bool failed = false;
        HelpCategory helpCategory = ToHelpCategory(Category, ref failed);

        if (failed)
            return;

        // Validate input parameters
        ValidateAndThrowIfError(helpCategory);

        HelpRequest helpRequest = new HelpRequest(this.Name, helpCategory);
        helpRequest.Provider = _provider;
        helpRequest.Component = Component;
        helpRequest.Role = Role;
        helpRequest.Functionality = Functionality;
        helpRequest.ProviderContext = new ProviderContext(
            this.Path,
            this.Context.Engine.Context,
            this.SessionState.Path);
        helpRequest.CommandOrigin = this.MyInvocation.CommandOrigin;

        // the idea is to use yield statement in the help lookup to speed up
        // perceived user experience....So HelpSystem.GetHelp returns an
        // IEnumerable..
        IEnumerable<HelpInfo> helpInfos = this.Context.HelpSystem.GetHelp(helpRequest);

        // HelpCommand acts differently when there is just one help object and when
        // there are more than one object...so handling this behavior through
        // some variables.
        HelpInfo firstHelpInfoObject = null;
        int countOfHelpInfos = 0;
        foreach (HelpInfo helpInfo in helpInfos)
        {
            // honor Ctrl-C from user.
            if (IsStopping)
            {
                return;
            }

            // Buffer the first result: if it turns out to be the only match it
            // is written after the loop as full help instead of a summary line.
            if (0 == countOfHelpInfos)
            {
                firstHelpInfoObject = helpInfo;
            }
            else
            {
                // write first help object only once.
                if (null != firstHelpInfoObject)
                {
                    WriteObjectsOrShowOnlineHelp(firstHelpInfoObject, false);
                    firstHelpInfoObject = null;
                }

                WriteObjectsOrShowOnlineHelp(helpInfo, false);
            }

            countOfHelpInfos++;
        }

        _timer.Stop();

        if (!string.IsNullOrEmpty(Name))
            Microsoft.PowerShell.Telemetry.Internal.TelemetryAPI.ReportGetHelpTelemetry(Name, countOfHelpInfos, _timer.ElapsedMilliseconds, _updatedHelp);

        // Write full help as there is only one help info object
        if (1 == countOfHelpInfos)
        {
            WriteObjectsOrShowOnlineHelp(firstHelpInfoObject, true);
        }
        else if (_showOnlineHelp && (countOfHelpInfos > 1))
        {
            // -Online cannot choose between multiple matching topics.
            throw PSTraceSource.NewInvalidOperationException(HelpErrors.MultipleOnlineTopicsNotSupported, "Online");
        }

        // show errors only if there is no wildcard search or VerboseHelpErrors is true.
        if (((countOfHelpInfos == 0) && (!WildcardPattern.ContainsWildcardCharacters(helpRequest.Target)))
            || this.Context.HelpSystem.VerboseHelpErrors)
        {
            // Check if there is any error happened. If yes,
            // pipe out errors.
            if (this.Context.HelpSystem.LastErrors.Count > 0)
            {
                foreach (ErrorRecord errorRecord in this.Context.HelpSystem.LastErrors)
                {
                    WriteError(errorRecord);
                }
            }
        }
    }
    finally
    {
        this.Context.HelpSystem.OnProgress -= new HelpSystem.HelpProgressHandler(HelpSystem_OnProgress);

        // finally clear the ScriptBlockAst -> Token[] cache
        this.Context.HelpSystem.ClearScriptBlockTokenCache();
    }
}
/// <summary>
/// Combines the user-supplied category names into a single
/// <see cref="HelpCategory"/> flags value. Unrecognized names are reported
/// through WriteError and flagged via <paramref name="failed"/>.
/// </summary>
/// <param name="category">Category names from the -Category parameter.</param>
/// <param name="failed">Set true when any name fails to parse; untouched when the input is empty.</param>
private HelpCategory ToHelpCategory(string[] category, ref bool failed)
{
    if (category == null || category.Length == 0)
    {
        return HelpCategory.None;
    }

    failed = false;
    HelpCategory result = HelpCategory.None;

    foreach (string categoryName in category)
    {
        try
        {
            // Case-insensitive parse, then OR the flag into the result.
            result |= (HelpCategory)Enum.Parse(typeof(HelpCategory), categoryName, true);
        }
        catch (ArgumentException argumentException)
        {
            Exception e = new HelpCategoryInvalidException(categoryName, argumentException);
            this.WriteError(new ErrorRecord(e, "InvalidHelpCategory", ErrorCategory.InvalidArgument, null));
            failed = true;
        }
    }

    return result;
}
/// <summary>
/// Change <paramref name="originalHelpObject"/> as per user request.
///
/// This method creates a new type to the existing typenames
/// depending on Detailed,Full,Example parameters and adds this
/// new type(s) to the top of the list.
/// </summary>
/// <param name="originalHelpObject">Full help object to transform.</param>
/// <returns>Transformed help object with new TypeNames.</returns>
/// <remarks>If Detailed and Full are not specified, nothing is changed.</remarks>
private PSObject TransformView(PSObject originalHelpObject)
{
    Diagnostics.Assert(originalHelpObject != null,
        "HelpObject should not be null");

    if (_viewTokenToAdd == HelpView.Default)
    {
        s_tracer.WriteLine("Detailed, Full, Examples are not selected. Constructing default view.");
        return originalHelpObject;
    }

    string tokenToAdd = _viewTokenToAdd.ToString();

    // We are changing the types without modifying the original object.
    // The contract between help command and helpsystem does not
    // allow us to modify returned help objects.
    PSObject objectToReturn = originalHelpObject.Copy();
    objectToReturn.TypeNames.Clear();

    if (originalHelpObject.TypeNames.Count == 0)
    {
        string typeToAdd = string.Format(CultureInfo.InvariantCulture, "HelpInfo#{0}", tokenToAdd);
        objectToReturn.TypeNames.Add(typeToAdd);
    }
    else
    {
        // User request at the top..
        foreach (string typeName in originalHelpObject.TypeNames)
        {
            // dont add new types for System.String and System.Object..
            // as they are handled differently for F&0..(bug935095)
            // FIX: use an ordinal case-insensitive comparison instead of
            // allocating lowercase copies via ToLowerInvariant() (CA1304).
            if (typeName.Equals("system.string", StringComparison.OrdinalIgnoreCase) ||
                typeName.Equals("system.object", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            string typeToAdd = string.Format(CultureInfo.InvariantCulture, "{0}#{1}", typeName, tokenToAdd);
            s_tracer.WriteLine("Adding type {0}", typeToAdd);
            objectToReturn.TypeNames.Add(typeToAdd);
        }

        // Existing typenames at the bottom..
        foreach (string typeName in originalHelpObject.TypeNames)
        {
            s_tracer.WriteLine("Adding type {0}", typeName);
            objectToReturn.TypeNames.Add(typeName);
        }
    }

    return objectToReturn;
}
/// <summary>
/// Gets the parameter info for patterns identified Parameter property.
/// Writes the parameter info(s) to the output stream. An error is thrown
/// if a parameter with a given pattern is not found.
/// </summary>
/// <param name="helpInfo">HelpInfo Object to look for the parameter.</param>
private void GetAndWriteParameterInfo(HelpInfo helpInfo)
{
    s_tracer.WriteLine("Searching parameters for {0}", helpInfo.Name);

    PSObject[] parameterInfos = helpInfo.GetParameter(Parameter);

    // No match: report an error against this help topic and bail out.
    if (parameterInfos == null || parameterInfos.Length == 0)
    {
        Exception innerException = PSTraceSource.NewArgumentException("Parameter",
            HelpErrors.NoParmsFound, Parameter);
        WriteError(new ErrorRecord(innerException, "NoParmsFound", ErrorCategory.InvalidArgument, helpInfo));
        return;
    }

    foreach (PSObject parameterInfo in parameterInfos)
    {
        WriteObject(parameterInfo);
    }
}
/// <summary>
/// Validates input parameters
/// </summary>
/// <param name="cat">Category specified by the user</param>
/// <exception cref="ArgumentException">
/// If the request cant be serviced.
/// </exception>
private void ValidateAndThrowIfError(HelpCategory cat)
{
    if (cat == HelpCategory.None)
    {
        return;
    }

    // categories that support -Parameter, -Role, -Functionality, -Component parameters
    const HelpCategory supportedCategories =
        HelpCategory.Alias | HelpCategory.Cmdlet | HelpCategory.ExternalScript |
        HelpCategory.Filter | HelpCategory.Function | HelpCategory.ScriptCommand | HelpCategory.Workflow;

    // At least one requested category supports the filter parameters — nothing to reject.
    if ((cat & supportedCategories) != 0)
    {
        return;
    }

    if (!string.IsNullOrEmpty(Parameter))
    {
        throw PSTraceSource.NewArgumentException("Parameter",
            HelpErrors.ParamNotSupported, "-Parameter");
    }

    if (Component != null)
    {
        throw PSTraceSource.NewArgumentException("Component",
            HelpErrors.ParamNotSupported, "-Component");
    }

    if (Role != null)
    {
        throw PSTraceSource.NewArgumentException("Role",
            HelpErrors.ParamNotSupported, "-Role");
    }

    if (Functionality != null)
    {
        throw PSTraceSource.NewArgumentException("Functionality",
            HelpErrors.ParamNotSupported, "-Functionality");
    }
}
/// <summary>
/// Helper method used to Write the help object onto the output
/// stream or show online help (URI extracted from the HelpInfo)
/// object.
/// </summary>
/// <param name="helpInfo">Help topic to emit; silently ignored when null.</param>
/// <param name="showFullHelp">True when this is the single matching topic (full,
/// online, or window help allowed); false writes the short summary form.</param>
private void WriteObjectsOrShowOnlineHelp(HelpInfo helpInfo, bool showFullHelp)
{
    if (helpInfo != null)
    {
        // online help can be showed only if showFullHelp is true..
        // showFullHelp will be false when the help tries to display multiple help topics..
        // -Online should not work when multiple help topics are displayed.
        if (showFullHelp && _showOnlineHelp)
        {
            bool onlineUriFound = false;
            // show online help
            s_tracer.WriteLine("Preparing to show help online.");
            Uri onlineUri = helpInfo.GetUriForOnlineHelp();
            if (null != onlineUri)
            {
                onlineUriFound = true;
                LaunchOnlineHelp(onlineUri);
                return;
            }

            // Topic has no online URI — -Online cannot be honored.
            if (!onlineUriFound)
            {
                throw PSTraceSource.NewInvalidOperationException(HelpErrors.NoURIFound);
            }
        }
        else if (showFullHelp && ShowWindow)
        {
            // On CORECLR builds this branch is compiled out, making -ShowWindow a no-op here.
#if !CORECLR
            graphicalHostReflectionWrapper.CallStaticMethod("ShowHelpWindow", helpInfo.FullHelp, this);
#endif
        }
        else
        {
            // show inline help
            if (showFullHelp)
            {
                if (!string.IsNullOrEmpty(Parameter))
                {
                    GetAndWriteParameterInfo(helpInfo);
                }
                else
                {
                    // Re-type the help object so the formatter picks the requested view.
                    PSObject objectToReturn = TransformView(helpInfo.FullHelp);
                    objectToReturn.IsHelpObject = true;
                    WriteObject(objectToReturn);
                }
            }
            else
            {
                if (!string.IsNullOrEmpty(Parameter))
                {
                    // When listing multiple topics with -Parameter, topics without a
                    // matching parameter are silently skipped (no error here).
                    PSObject[] pInfos = helpInfo.GetParameter(Parameter);
                    if ((pInfos == null) || (pInfos.Length == 0))
                    {
                        return;
                    }
                }

                WriteObject(helpInfo.ShortHelp);
            }
        }
    }
}
/// <summary>
/// Opens the Uri. System's default application will be used
/// to show the uri.
/// </summary>
/// <param name="uriToLaunch">The http/https URI to open; must not be null.</param>
/// <exception cref="InvalidOperationException">
/// When the scheme is not http(s), or when the browser process cannot be started.
/// </exception>
private void LaunchOnlineHelp(Uri uriToLaunch)
{
    Diagnostics.Assert(null != uriToLaunch, "uriToLaunch should not be null");

    // Only web URIs are opened - refuse file:, ftp:, etc. up front.
    if (!uriToLaunch.Scheme.Equals("http", StringComparison.OrdinalIgnoreCase) &&
        !uriToLaunch.Scheme.Equals("https", StringComparison.OrdinalIgnoreCase))
    {
        throw PSTraceSource.NewInvalidOperationException(HelpErrors.ProtocolNotSupported,
            uriToLaunch.ToString(),
            "http",
            "https");
    }

    Exception exception = null;
    try
    {
        this.WriteVerbose(string.Format(CultureInfo.InvariantCulture, HelpDisplayStrings.OnlineHelpUri, uriToLaunch.OriginalString));
        System.Diagnostics.Process browserProcess = new System.Diagnostics.Process();
#if UNIX
        // Hand the URI to the platform's URL opener.
        browserProcess.StartInfo.FileName = Platform.IsLinux ? "xdg-open" : /* OS X */ "open";
        browserProcess.StartInfo.Arguments = uriToLaunch.OriginalString;
        browserProcess.Start();
#elif CORECLR
        // Non-UNIX CoreCLR (e.g. NanoServer): no shell to delegate to.
        // NOTE: this throw is not handled by the catch clauses below and
        // propagates to the caller.
        throw new PlatformNotSupportedException();
#else
        // Full framework: shell-execute the URI so the default browser opens it.
        browserProcess.StartInfo.FileName = uriToLaunch.OriginalString;
        browserProcess.Start();
#endif
    }
    catch (InvalidOperationException ioe)
    {
        exception = ioe;
    }
    catch (System.ComponentModel.Win32Exception we)
    {
        exception = we;
    }

    // Surface any captured launch failure as a single, uniform error.
    if (null != exception)
    {
        throw PSTraceSource.NewInvalidOperationException(exception, HelpErrors.CannotLaunchURI, uriToLaunch.OriginalString);
    }
}
#endregion
/// <summary>
/// Relays help-system progress notifications onto the cmdlet's progress stream.
/// </summary>
private void HelpSystem_OnProgress(object sender, HelpProgressInfo arg)
{
    var progress = new ProgressRecord(0, this.CommandInfo.Name, arg.Activity)
    {
        PercentComplete = arg.PercentComplete,
    };

    WriteProgress(progress);
}
#if !CORECLR
// P/Invoke into WinInet: returns true when an internet connection is present;
// 'desc' receives connection-description flags, 'reserved' must be 0.
[DllImport("wininet.dll")]
private static extern bool InternetGetConnectedState(out int desc, int reserved);
#endif
/// <summary>
/// Checks if we can connect to the internet
/// </summary>
/// <returns>true when a connection is available (always true on CoreCLR).</returns>
private bool HasInternetConnection()
{
#if CORECLR
    // Optimistically assume connectivity where WinInet is unavailable.
    return true; // TODO:CORECLR wininet.dll is not present on NanoServer
#else
    int unused;
    return InternetGetConnectedState(out unused, 0);
#endif
}
#region Helper methods for verification of parameters against NoLanguage mode
/// <summary>
/// Throws a terminating error when <paramref name="parameterName"/> is used
/// while the cmdlet runs on the server side of a remote session, where the
/// parameter is not valid.
/// </summary>
internal static void VerifyParameterForbiddenInRemoteRunspace(Cmdlet cmdlet, string parameterName)
{
    if (!NativeCommandProcessor.IsServerSide)
    {
        return;
    }

    string message = StringUtil.Format(CommandBaseStrings.ParameterNotValidInRemoteRunspace,
        cmdlet.MyInvocation.InvocationName,
        parameterName);
    var error = new ErrorRecord(
        new InvalidOperationException(message),
        "ParameterNotValidInRemoteRunspace",
        ErrorCategory.InvalidArgument,
        null);
    cmdlet.ThrowTerminatingError(error);
}
#endregion
#region trace
// Tracer for this cmdlet's diagnostic output.
// NOTE(review): the source name carries a trailing space ("GetHelpCommand ");
// presumably historical - confirm before normalizing, since trace listeners
// match on the exact source name.
[TraceSourceAttribute("GetHelpCommand ", "GetHelpCommand ")]
private static PSTraceSource s_tracer = PSTraceSource.GetTracer("GetHelpCommand ", "GetHelpCommand ");
#endregion
}
/// <summary>
/// Helper methods used as powershell extension from a types file.
/// </summary>
public static class GetHelpCodeMethods
{
    /// <summary>
    /// Verifies whether the current runspace's InitialSessionState exposes
    /// exactly one public Get-Help command and that it is the in-box
    /// <see cref="GetHelpCommand"/> cmdlet (i.e. not a user proxy).
    /// </summary>
    /// <returns>true when the core Get-Help cmdlet is the only visible Get-Help.</returns>
    private static bool DoesCurrentRunspaceIncludeCoreHelpCmdlet()
    {
        InitialSessionState iss =
            System.Management.Automation.Runspaces.Runspace.DefaultRunspace.InitialSessionState;
        if (iss != null)
        {
            IEnumerable<SessionStateCommandEntry> publicGetHelpEntries = iss
                .Commands["Get-Help"]
                .Where(entry => entry.Visibility == SessionStateEntryVisibility.Public);
            // Require exactly one public Get-Help; more than one implies a proxy.
            if (publicGetHelpEntries.Count() != 1)
            {
                return false;
            }

            foreach (SessionStateCommandEntry getHelpEntry in publicGetHelpEntries)
            {
                SessionStateCmdletEntry getHelpCmdlet = getHelpEntry as SessionStateCmdletEntry;
                if ((null != getHelpCmdlet) && (getHelpCmdlet.ImplementingType.Equals(typeof(GetHelpCommand))))
                {
                    return true;
                }
            }
        }

        return false;
    }

    /// <summary>
    /// Retrieves the HelpUri given a CommandInfo instance.
    /// </summary>
    /// <param name="commandInfoPSObject">
    /// CommandInfo instance wrapped as PSObject
    /// </param>
    /// <returns>
    /// An empty string if <paramref name="commandInfoPSObject"/> is not a
    /// CommandInfo type, or if a HelpUri could not be retrieved either from
    /// CommandMetadata or from help content.
    /// </returns>
    [SuppressMessage("Microsoft.Design", "CA1055:UriReturnValuesShouldNotBeStrings")]
    public static string GetHelpUri(PSObject commandInfoPSObject)
    {
        if (null == commandInfoPSObject)
        {
            return string.Empty;
        }

        CommandInfo cmdInfo = PSObject.Base(commandInfoPSObject) as CommandInfo;
        // GetHelpUri helper method is expected to be used only by System.Management.Automation.CommandInfo
        // objects from types.ps1xml
        if ((null == cmdInfo) || (string.IsNullOrEmpty(cmdInfo.Name)))
        {
            return string.Empty;
        }

        // The type checking is needed to avoid a try..catch exception block as
        // the CommandInfo.CommandMetadata throws an InvalidOperationException
        // instead of returning null.
        if ((cmdInfo is CmdletInfo) || (cmdInfo is FunctionInfo) ||
            (cmdInfo is ExternalScriptInfo) || (cmdInfo is ScriptInfo))
        {
            if (!string.IsNullOrEmpty(cmdInfo.CommandMetadata.HelpUri))
            {
                return cmdInfo.CommandMetadata.HelpUri;
            }
        }

        // For aliases, prefer the resolved command's metadata.
        AliasInfo aliasInfo = cmdInfo as AliasInfo;
        if ((null != aliasInfo) &&
            (null != aliasInfo.ExternalCommandMetadata) &&
            (!string.IsNullOrEmpty(aliasInfo.ExternalCommandMetadata.HelpUri)))
        {
            return aliasInfo.ExternalCommandMetadata.HelpUri;
        }

        // if everything else fails..depend on Get-Help infrastructure to get us the Uri.
        // Module-qualify the name when possible to disambiguate the lookup.
        string cmdName = cmdInfo.Name;
        if (!string.IsNullOrEmpty(cmdInfo.ModuleName))
        {
            cmdName = string.Format(CultureInfo.InvariantCulture,
                "{0}\\{1}", cmdInfo.ModuleName, cmdInfo.Name);
        }

        if (DoesCurrentRunspaceIncludeCoreHelpCmdlet())
        {
            // Win8: 651300 if core get-help is present in the runspace (and it is the only get-help command), use
            // help system directly and avoid perf penalty.
            var currentContext = System.Management.Automation.Runspaces.LocalPipeline.GetExecutionContextFromTLS();
            if ((null != currentContext) && (null != currentContext.HelpSystem))
            {
                HelpRequest helpRequest = new HelpRequest(cmdName, cmdInfo.HelpCategory);
                helpRequest.ProviderContext = new ProviderContext(
                    string.Empty,
                    currentContext,
                    currentContext.SessionState.Path);
                helpRequest.CommandOrigin = CommandOrigin.Runspace;
                // Return the first non-null online URI any matching topic yields.
                foreach (
                    Uri result in
                    currentContext.HelpSystem.ExactMatchHelp(helpRequest).Select(
                        helpInfo => helpInfo.GetUriForOnlineHelp()).Where(result => null != result))
                {
                    return result.OriginalString;
                }
            }
        }
        else
        {
            // win8: 546025. Using Get-Help as command, instead of calling HelpSystem.ExactMatchHelp
            // for the following reasons:
            // 1. Exchange creates proxies for Get-Command and Get-Help in their scenario
            // 2. This method is primarily used to get uri faster while serializing the CommandInfo objects (from Get-Command)
            // 3. Exchange uses Get-Help proxy to not call Get-Help cmdlet at-all while serializing CommandInfo objects
            // 4. Using HelpSystem directly will not allow Get-Help proxy to do its job.
            System.Management.Automation.PowerShell getHelpPS = System.Management.Automation.PowerShell.Create(
                RunspaceMode.CurrentRunspace).AddCommand("get-help").
                AddParameter("Name", cmdName).AddParameter("Category",
                cmdInfo.HelpCategory.ToString());
            try
            {
                Collection<PSObject> helpInfos = getHelpPS.Invoke();
                if (null != helpInfos)
                {
                    for (int index = 0; index < helpInfos.Count; index++)
                    {
                        HelpInfo helpInfo;
                        if (LanguagePrimitives.TryConvertTo<HelpInfo>(helpInfos[index], out helpInfo))
                        {
                            Uri result = helpInfo.GetUriForOnlineHelp();
                            if (null != result)
                            {
                                return result.OriginalString;
                            }
                        }
                        else
                        {
                            // Not convertible to HelpInfo: extract the URI from the raw
                            // PSObject and return immediately (empty string when absent).
                            Uri result = BaseCommandHelpInfo.GetUriFromCommandPSObject(helpInfos[index]);
                            return (result != null) ? result.OriginalString : string.Empty;
                        }
                    }
                }
            }
            finally
            {
                getHelpPS.Dispose();
            }
        }

        return string.Empty;
    }
}
}
| |
//
// Method.cs: Represents a C++ method
//
// Author:
// Alexander Corrado (alexander.corrado@gmail.com)
// Andreia Gaita (shana@spoiledcat.net)
// Zoltan Varga <vargaz@gmail.com>
//
// Copyright (C) 2011 Novell Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.CodeDom;
using System.CodeDom.Compiler;
using Mono.VisualC.Interop;
/// <summary>
/// Represents a C++ method parsed from the input and knows how to emit both
/// the interface (binding) method and the public C# wrapper method via CodeDom.
/// </summary>
class Method
{
	public Method (Node node) {
		Node = node;
		Parameters = new List<Parameter> ();
		GenWrapperMethod = true;
	}

	// The AST node this method was parsed from.
	public Node Node {
		get; set;
	}

	// The C++ method name.
	public string Name {
		get; set;
	}

	public bool IsVirtual {
		get; set;
	}

	public bool IsStatic {
		get; set;
	}

	public bool IsConst {
		get; set;
	}

	public bool IsInline {
		get; set;
	}

	public bool IsArtificial {
		get; set;
	}

	public bool IsConstructor {
		get; set;
	}

	public bool IsDestructor {
		get; set;
	}

	public bool IsCopyCtor {
		get; set;
	}

	// When false, only the interface method is emitted, not the C# wrapper.
	public bool GenWrapperMethod {
		get; set;
	}

	public CppType ReturnType {
		get; set;
	}

	public List<Parameter> Parameters {
		get; set;
	}

	// The C# method name (PascalCased form of Name).
	public string FormattedName {
		get {
			// DRY: delegate to the shared name-formatting helper.
			return GetCSharpMethodName (Name);
		}
	}

	// Uppercases the first character to produce a C#-style method name.
	static string GetCSharpMethodName (string name) {
		// Guard: an empty/null name has no first character to uppercase.
		if (string.IsNullOrEmpty (name))
			return name;
		return "" + Char.ToUpper (name [0]) + name.Substring (1);
	}

	/// <summary>
	/// Emits the interface method declaration: the raw binding signature with
	/// MangleAs/ByVal/Virtual/... attributes understood by the C++ interop layer.
	/// </summary>
	public CodeMemberMethod GenerateIFaceMethod (Generator g) {
		var method = new CodeMemberMethod () {
			Name = Name
		};

		// Instance methods take the C++ 'this' pointer as an explicit first arg.
		if (!IsStatic)
			method.Parameters.Add (new CodeParameterDeclarationExpression (new CodeTypeReference ("CppInstancePtr"), "this"));

		CodeTypeReference rtype = g.CppTypeToCodeDomType (ReturnType);
		method.ReturnType = rtype;

		foreach (var p in Parameters) {
			CppType ptype = p.Type;
			bool byref;
			var ctype = g.CppTypeToCodeDomType (ptype, out byref);
			var param = new CodeParameterDeclarationExpression (ctype, p.Name);
			if (byref)
				param.Direction = FieldDirection.Ref;

			// Record the original C++ type for name mangling on non-virtuals.
			if (!IsVirtual && !ptype.ToString ().Equals (string.Empty))
				param.CustomAttributes.Add (new CodeAttributeDeclaration ("MangleAsAttribute", new CodeAttributeArgument (new CodePrimitiveExpression (ptype.ToString ()))));

			// FIXME: Structs too
			if (ptype.ElementType == CppTypes.Class && !ptype.Modifiers.Contains (CppModifiers.Reference) && !ptype.Modifiers.Contains (CppModifiers.Pointer))
				param.CustomAttributes.Add (new CodeAttributeDeclaration ("ByVal"));

			method.Parameters.Add (param);
		}

		// FIXME: Copy ctor

		if (IsVirtual)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Virtual"));
		if (IsConstructor)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Constructor"));
		if (IsDestructor)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Destructor"));
		if (IsConst)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Const"));
		if (IsInline)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Inline"));
		if (IsArtificial)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Artificial"));
		if (IsCopyCtor)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("CopyConstructor"));
		if (IsStatic)
			method.CustomAttributes.Add (new CodeAttributeDeclaration ("Static"));

		return method;
	}

	/// <summary>
	/// Emits the public C# wrapper (method or constructor) that forwards to
	/// the interface method on the 'impl' field.
	/// </summary>
	public CodeMemberMethod GenerateWrapperMethod (Generator g) {
		CodeMemberMethod method;

		if (IsConstructor)
			method = new CodeConstructor () {
				Name = GetCSharpMethodName (Name)
			};
		else
			method = new CodeMemberMethod () {
				Name = GetCSharpMethodName (Name)
			};

		method.Attributes = MemberAttributes.Public;
		if (IsStatic)
			method.Attributes |= MemberAttributes.Static;

		CodeTypeReference rtype = g.CppTypeToCodeDomType (ReturnType);
		method.ReturnType = rtype;

		foreach (var p in Parameters) {
			bool byref;
			var ptype = g.CppTypeToCodeDomType (p.Type, out byref);
			var param = new CodeParameterDeclarationExpression (ptype, p.Name);
			if (byref)
				param.Direction = FieldDirection.Ref;
			method.Parameters.Add (param);
		}

		if (IsConstructor) {
			//this.native_ptr = impl.Alloc(this);
			method.Statements.Add (new CodeAssignStatement (new CodeFieldReferenceExpression (null, "native_ptr"), new CodeMethodInvokeExpression (new CodeMethodReferenceExpression (new CodeFieldReferenceExpression (null, "impl"), "Alloc"), new CodeExpression [] { new CodeThisReferenceExpression () })));
		}

		// Call the iface method; instance calls pass 'Native' as the first arg.
		CodeExpression[] args = new CodeExpression [Parameters.Count + (IsStatic ? 0 : 1)];
		if (!IsStatic)
			args [0] = new CodeFieldReferenceExpression (null, "Native");
		for (int i = 0; i < Parameters.Count; ++i) {
			bool byref;
			g.CppTypeToCodeDomType (Parameters [i].Type, out byref);
			CodeExpression arg = new CodeArgumentReferenceExpression (Parameters [i].Name);
			if (byref)
				arg = new CodeDirectionExpression (FieldDirection.Ref, arg);
			args [i + (IsStatic ? 0 : 1)] = arg;
		}

		var call = new CodeMethodInvokeExpression (new CodeMethodReferenceExpression (new CodeFieldReferenceExpression (null, "impl"), Name), args);

		// void results (and ctors) are expression statements; otherwise return the call.
		if (rtype.BaseType == "System.Void" || IsConstructor)
			method.Statements.Add (call);
		else
			method.Statements.Add (new CodeMethodReturnStatement (call));

		return method;
	}
}
| |
namespace netduino.helpers.Imaging {
    /// <summary>
    /// Readable "binary literal" byte constants for bitmap/sprite data:
    /// each constant's name pictures its bit pattern, most significant bit
    /// first, with 'X' = 1 and '_' = 0 (e.g. B.X______X == 129).
    /// All 256 byte values are covered, in ascending order.
    /// </summary>
    public static class B {
        public const byte ________ = 0;
        public const byte _______X = 1;
        public const byte ______X_ = 2;
        public const byte ______XX = 3;
        public const byte _____X__ = 4;
        public const byte _____X_X = 5;
        public const byte _____XX_ = 6;
        public const byte _____XXX = 7;
        public const byte ____X___ = 8;
        public const byte ____X__X = 9;
        public const byte ____X_X_ = 10;
        public const byte ____X_XX = 11;
        public const byte ____XX__ = 12;
        public const byte ____XX_X = 13;
        public const byte ____XXX_ = 14;
        public const byte ____XXXX = 15;
        public const byte ___X____ = 16;
        public const byte ___X___X = 17;
        public const byte ___X__X_ = 18;
        public const byte ___X__XX = 19;
        public const byte ___X_X__ = 20;
        public const byte ___X_X_X = 21;
        public const byte ___X_XX_ = 22;
        public const byte ___X_XXX = 23;
        public const byte ___XX___ = 24;
        public const byte ___XX__X = 25;
        public const byte ___XX_X_ = 26;
        public const byte ___XX_XX = 27;
        public const byte ___XXX__ = 28;
        public const byte ___XXX_X = 29;
        public const byte ___XXXX_ = 30;
        public const byte ___XXXXX = 31;
        public const byte __X_____ = 32;
        public const byte __X____X = 33;
        public const byte __X___X_ = 34;
        public const byte __X___XX = 35;
        public const byte __X__X__ = 36;
        public const byte __X__X_X = 37;
        public const byte __X__XX_ = 38;
        public const byte __X__XXX = 39;
        public const byte __X_X___ = 40;
        public const byte __X_X__X = 41;
        public const byte __X_X_X_ = 42;
        public const byte __X_X_XX = 43;
        public const byte __X_XX__ = 44;
        public const byte __X_XX_X = 45;
        public const byte __X_XXX_ = 46;
        public const byte __X_XXXX = 47;
        public const byte __XX____ = 48;
        public const byte __XX___X = 49;
        public const byte __XX__X_ = 50;
        public const byte __XX__XX = 51;
        public const byte __XX_X__ = 52;
        public const byte __XX_X_X = 53;
        public const byte __XX_XX_ = 54;
        public const byte __XX_XXX = 55;
        public const byte __XXX___ = 56;
        public const byte __XXX__X = 57;
        public const byte __XXX_X_ = 58;
        public const byte __XXX_XX = 59;
        public const byte __XXXX__ = 60;
        public const byte __XXXX_X = 61;
        public const byte __XXXXX_ = 62;
        public const byte __XXXXXX = 63;
        public const byte _X______ = 64;
        public const byte _X_____X = 65;
        public const byte _X____X_ = 66;
        public const byte _X____XX = 67;
        public const byte _X___X__ = 68;
        public const byte _X___X_X = 69;
        public const byte _X___XX_ = 70;
        public const byte _X___XXX = 71;
        public const byte _X__X___ = 72;
        public const byte _X__X__X = 73;
        public const byte _X__X_X_ = 74;
        public const byte _X__X_XX = 75;
        public const byte _X__XX__ = 76;
        public const byte _X__XX_X = 77;
        public const byte _X__XXX_ = 78;
        public const byte _X__XXXX = 79;
        public const byte _X_X____ = 80;
        public const byte _X_X___X = 81;
        public const byte _X_X__X_ = 82;
        public const byte _X_X__XX = 83;
        public const byte _X_X_X__ = 84;
        public const byte _X_X_X_X = 85;
        public const byte _X_X_XX_ = 86;
        public const byte _X_X_XXX = 87;
        public const byte _X_XX___ = 88;
        public const byte _X_XX__X = 89;
        public const byte _X_XX_X_ = 90;
        public const byte _X_XX_XX = 91;
        public const byte _X_XXX__ = 92;
        public const byte _X_XXX_X = 93;
        public const byte _X_XXXX_ = 94;
        public const byte _X_XXXXX = 95;
        public const byte _XX_____ = 96;
        public const byte _XX____X = 97;
        public const byte _XX___X_ = 98;
        public const byte _XX___XX = 99;
        public const byte _XX__X__ = 100;
        public const byte _XX__X_X = 101;
        public const byte _XX__XX_ = 102;
        public const byte _XX__XXX = 103;
        public const byte _XX_X___ = 104;
        public const byte _XX_X__X = 105;
        public const byte _XX_X_X_ = 106;
        public const byte _XX_X_XX = 107;
        public const byte _XX_XX__ = 108;
        public const byte _XX_XX_X = 109;
        public const byte _XX_XXX_ = 110;
        public const byte _XX_XXXX = 111;
        public const byte _XXX____ = 112;
        public const byte _XXX___X = 113;
        public const byte _XXX__X_ = 114;
        public const byte _XXX__XX = 115;
        public const byte _XXX_X__ = 116;
        public const byte _XXX_X_X = 117;
        public const byte _XXX_XX_ = 118;
        public const byte _XXX_XXX = 119;
        public const byte _XXXX___ = 120;
        public const byte _XXXX__X = 121;
        public const byte _XXXX_X_ = 122;
        public const byte _XXXX_XX = 123;
        public const byte _XXXXX__ = 124;
        public const byte _XXXXX_X = 125;
        public const byte _XXXXXX_ = 126;
        public const byte _XXXXXXX = 127;
        public const byte X_______ = 128;
        public const byte X______X = 129;
        public const byte X_____X_ = 130;
        public const byte X_____XX = 131;
        public const byte X____X__ = 132;
        public const byte X____X_X = 133;
        public const byte X____XX_ = 134;
        public const byte X____XXX = 135;
        public const byte X___X___ = 136;
        public const byte X___X__X = 137;
        public const byte X___X_X_ = 138;
        public const byte X___X_XX = 139;
        public const byte X___XX__ = 140;
        public const byte X___XX_X = 141;
        public const byte X___XXX_ = 142;
        public const byte X___XXXX = 143;
        public const byte X__X____ = 144;
        public const byte X__X___X = 145;
        public const byte X__X__X_ = 146;
        public const byte X__X__XX = 147;
        public const byte X__X_X__ = 148;
        public const byte X__X_X_X = 149;
        public const byte X__X_XX_ = 150;
        public const byte X__X_XXX = 151;
        public const byte X__XX___ = 152;
        public const byte X__XX__X = 153;
        public const byte X__XX_X_ = 154;
        public const byte X__XX_XX = 155;
        public const byte X__XXX__ = 156;
        public const byte X__XXX_X = 157;
        public const byte X__XXXX_ = 158;
        public const byte X__XXXXX = 159;
        public const byte X_X_____ = 160;
        public const byte X_X____X = 161;
        public const byte X_X___X_ = 162;
        public const byte X_X___XX = 163;
        public const byte X_X__X__ = 164;
        public const byte X_X__X_X = 165;
        public const byte X_X__XX_ = 166;
        public const byte X_X__XXX = 167;
        public const byte X_X_X___ = 168;
        public const byte X_X_X__X = 169;
        public const byte X_X_X_X_ = 170;
        public const byte X_X_X_XX = 171;
        public const byte X_X_XX__ = 172;
        public const byte X_X_XX_X = 173;
        public const byte X_X_XXX_ = 174;
        public const byte X_X_XXXX = 175;
        public const byte X_XX____ = 176;
        public const byte X_XX___X = 177;
        public const byte X_XX__X_ = 178;
        public const byte X_XX__XX = 179;
        public const byte X_XX_X__ = 180;
        public const byte X_XX_X_X = 181;
        public const byte X_XX_XX_ = 182;
        public const byte X_XX_XXX = 183;
        public const byte X_XXX___ = 184;
        public const byte X_XXX__X = 185;
        public const byte X_XXX_X_ = 186;
        public const byte X_XXX_XX = 187;
        public const byte X_XXXX__ = 188;
        public const byte X_XXXX_X = 189;
        public const byte X_XXXXX_ = 190;
        public const byte X_XXXXXX = 191;
        public const byte XX______ = 192;
        public const byte XX_____X = 193;
        public const byte XX____X_ = 194;
        public const byte XX____XX = 195;
        public const byte XX___X__ = 196;
        public const byte XX___X_X = 197;
        public const byte XX___XX_ = 198;
        public const byte XX___XXX = 199;
        public const byte XX__X___ = 200;
        public const byte XX__X__X = 201;
        public const byte XX__X_X_ = 202;
        public const byte XX__X_XX = 203;
        public const byte XX__XX__ = 204;
        public const byte XX__XX_X = 205;
        public const byte XX__XXX_ = 206;
        public const byte XX__XXXX = 207;
        public const byte XX_X____ = 208;
        public const byte XX_X___X = 209;
        public const byte XX_X__X_ = 210;
        public const byte XX_X__XX = 211;
        public const byte XX_X_X__ = 212;
        public const byte XX_X_X_X = 213;
        public const byte XX_X_XX_ = 214;
        public const byte XX_X_XXX = 215;
        public const byte XX_XX___ = 216;
        public const byte XX_XX__X = 217;
        public const byte XX_XX_X_ = 218;
        public const byte XX_XX_XX = 219;
        public const byte XX_XXX__ = 220;
        public const byte XX_XXX_X = 221;
        public const byte XX_XXXX_ = 222;
        public const byte XX_XXXXX = 223;
        public const byte XXX_____ = 224;
        public const byte XXX____X = 225;
        public const byte XXX___X_ = 226;
        public const byte XXX___XX = 227;
        public const byte XXX__X__ = 228;
        public const byte XXX__X_X = 229;
        public const byte XXX__XX_ = 230;
        public const byte XXX__XXX = 231;
        public const byte XXX_X___ = 232;
        public const byte XXX_X__X = 233;
        public const byte XXX_X_X_ = 234;
        public const byte XXX_X_XX = 235;
        public const byte XXX_XX__ = 236;
        public const byte XXX_XX_X = 237;
        public const byte XXX_XXX_ = 238;
        public const byte XXX_XXXX = 239;
        public const byte XXXX____ = 240;
        public const byte XXXX___X = 241;
        public const byte XXXX__X_ = 242;
        public const byte XXXX__XX = 243;
        public const byte XXXX_X__ = 244;
        public const byte XXXX_X_X = 245;
        public const byte XXXX_XX_ = 246;
        public const byte XXXX_XXX = 247;
        public const byte XXXXX___ = 248;
        public const byte XXXXX__X = 249;
        public const byte XXXXX_X_ = 250;
        public const byte XXXXX_XX = 251;
        public const byte XXXXXX__ = 252;
        public const byte XXXXXX_X = 253;
        public const byte XXXXXXX_ = 254;
        public const byte XXXXXXXX = 255;
    }
}
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Runtime.GrainDirectory;
using Orleans.Runtime.Messaging;
using Orleans.Runtime.Placement;
using Orleans.Runtime.Scheduler;
using Orleans.Serialization.Invocation;
namespace Orleans.Runtime
{
internal class Dispatcher
{
private readonly MessageCenter messageCenter;              // outbound/inbound message hub
private readonly RuntimeMessagingTrace messagingTrace;     // structured messaging trace events
private readonly SiloAddress _siloAddress;                 // this silo's address
private readonly OrleansTaskScheduler scheduler;
private readonly Catalog catalog;                          // activation catalog; also the scheduling context
private readonly ILogger logger;
private readonly SiloMessagingOptions messagingOptions;    // snapshot taken at construction time
private readonly PlacementService placementService;        // resolves target addresses for messages
private readonly MessageFactory messageFactory;            // builds response/rejection messages
private readonly ActivationDirectory activationDirectory;  // local system-target lookup
private readonly ILocalGrainDirectory localGrainDirectory; // for invalidating stale cache entries
/// <summary>
/// Wires the dispatcher to the messaging, scheduling, placement and
/// directory services it routes messages between.
/// </summary>
internal Dispatcher(
    OrleansTaskScheduler scheduler,
    MessageCenter messageCenter,
    Catalog catalog,
    IOptionsMonitor<SiloMessagingOptions> messagingOptions,
    PlacementService placementService,
    ILocalGrainDirectory localGrainDirectory,
    MessageFactory messageFactory,
    ILoggerFactory loggerFactory,
    ActivationDirectory activationDirectory,
    RuntimeMessagingTrace messagingTrace,
    ILocalSiloDetails localSiloDetails)
{
    // Assignments follow field-declaration order.
    this.messageCenter = messageCenter;
    this.messagingTrace = messagingTrace;
    _siloAddress = localSiloDetails.SiloAddress;
    this.scheduler = scheduler;
    this.catalog = catalog;
    this.logger = loggerFactory.CreateLogger<Dispatcher>();
    // Snapshot the options once; later option changes are not observed here.
    this.messagingOptions = messagingOptions.CurrentValue;
    this.placementService = placementService;
    this.messageFactory = messageFactory;
    this.activationDirectory = activationDirectory;
    this.localGrainDirectory = localGrainDirectory;
}
/// <summary>
/// Sends a rejection response for <paramref name="message"/> when the sender
/// expects one (requests, or one-way messages carrying cache-invalidation
/// data); otherwise only traces that the rejection was discarded.
/// </summary>
public void RejectMessage(
    Message message,
    Message.RejectionTypes rejectionType,
    Exception exc,
    string rejectInfo = null)
{
    bool senderExpectsResponse =
        message.Direction == Message.Directions.Request
        || (message.Direction == Message.Directions.OneWay && message.HasCacheInvalidationHeader);

    if (!senderExpectsResponse)
    {
        this.messagingTrace.OnDispatcherDiscardedRejection(message, rejectionType, rejectInfo, exc);
        return;
    }

    this.messagingTrace.OnDispatcherRejectMessage(message, rejectionType, rejectInfo, exc);
    var reason = string.Format("{0} {1}", rejectInfo ?? "", exc == null ? "" : exc.ToString());
    var rejection = this.messageFactory.CreateRejectionResponse(message, rejectionType, reason, exc);
    messageCenter.SendMessage(rejection);
}
/// <summary>
/// Handles a batch of messages addressed to an activation that is no longer
/// valid: each is either rejected or forwarded to
/// <paramref name="forwardingAddress"/>, on the catalog's context rather
/// than the (now invalid) activation's.
/// </summary>
internal void ProcessRequestsToInvalidActivation(
    List<Message> messages,
    ActivationAddress oldAddress,
    ActivationAddress forwardingAddress,
    string failedOperation,
    Exception exc = null,
    bool rejectMessages = false)
{
    // IMPORTANT: do not do anything on activation context anymore, since this activation is invalid already.
    scheduler.QueueAction(
        () =>
        {
            foreach (var message in messages)
            {
                if (rejectMessages)
                {
                    RejectMessage(message, Message.RejectionTypes.Transient, exc, failedOperation);
                }
                else
                {
                    // NOTE(review): this traces the whole batch once per forwarded
                    // message; presumably intended to fire once before the loop - confirm.
                    this.messagingTrace.OnDispatcherForwardingMultiple(messages.Count, oldAddress, forwardingAddress, failedOperation, exc);
                    TryForwardRequest(message, oldAddress, forwardingAddress, failedOperation, exc);
                }
            }
        },
        catalog);
}
/// <summary>
/// Handles a single message addressed to an invalid activation: invalidates
/// the stale directory-cache entry, then rejects or forwards the message.
/// </summary>
internal void ProcessRequestToInvalidActivation(
    Message message,
    ActivationAddress oldAddress,
    ActivationAddress forwardingAddress,
    string failedOperation,
    Exception exc = null,
    bool rejectMessages = false)
{
    // Just use this opportunity to invalidate local Cache Entry as well.
    if (oldAddress != null)
    {
        this.localGrainDirectory.InvalidateCacheEntry(oldAddress);
    }

    // IMPORTANT: the activation is already invalid - do not touch its context.
    if (rejectMessages)
    {
        this.RejectMessage(message, Message.RejectionTypes.Transient, exc, failedOperation);
        return;
    }

    this.TryForwardRequest(message, oldAddress, forwardingAddress, failedOperation, exc);
}
/// <summary>
/// Deactivates an activation that appears stuck, then attempts to forward
/// the triggering message to the activation's forwarding address.
/// </summary>
public void ProcessRequestToStuckActivation(
    Message message,
    ActivationData activationData,
    string failedOperation)
{
    scheduler.RunOrQueueTask(
        async () =>
        {
            await catalog.DeactivateStuckActivation(activationData);
            TryForwardRequest(message, activationData.Address, activationData.ForwardingAddress, failedOperation);
        },
        catalog)
        // Fire-and-forget: the task's outcome is intentionally not awaited.
        .Ignore();
}
/// <summary>
/// Attempts to forward <paramref name="message"/> after
/// <paramref name="failedOperation"/> invalidated its target; rejects the
/// message when forwarding fails, and always sends a cache-invalidation
/// rejection for one-way messages.
/// </summary>
internal void TryForwardRequest(Message message, ActivationAddress oldAddress, ActivationAddress forwardingAddress, string failedOperation, Exception exc = null)
{
    bool forwardingSucceded = false;
    try
    {
        this.messagingTrace.OnDispatcherForwarding(message, oldAddress, forwardingAddress, failedOperation, exc);

        // Tell the sender its cached address for this activation is stale.
        if (oldAddress != null)
        {
            message.AddToCacheInvalidationHeader(oldAddress);
        }

        forwardingSucceded = this.TryForwardMessage(message, forwardingAddress);
    }
    catch (Exception exc2)
    {
        // Remember the failure so the rejection below carries the cause.
        forwardingSucceded = false;
        exc = exc2;
    }
    finally
    {
        var sentRejection = false;

        // If the message was a one-way message, send a cache invalidation response even if the message was successfully forwarded.
        if (message.Direction == Message.Directions.OneWay)
        {
            this.RejectMessage(
                message,
                Message.RejectionTypes.CacheInvalidation,
                exc,
                "OneWay message sent to invalid activation");

            sentRejection = true;
        }

        if (!forwardingSucceded)
        {
            this.messagingTrace.OnDispatcherForwardingFailed(message, oldAddress, forwardingAddress, failedOperation, exc);
            // Avoid sending a second rejection for the same message.
            if (!sentRejection)
            {
                var str = $"Forwarding failed: tried to forward message {message} for {message.ForwardCount} times after {failedOperation} to invalid activation. Rejecting now.";
                RejectMessage(message, Message.RejectionTypes.Transient, exc, str);
            }
        }
    }
}
/// <summary>
/// Reroutes a message that came in through a gateway by resending it
/// through the normal addressing path.
/// </summary>
/// <param name="message">The gateway message to reroute.</param>
internal void RerouteMessage(Message message) => ResendMessageImpl(message);
/// <summary>
/// Forwards a message when its hop budget allows it; returns false once the
/// maximum forward count has been reached.
/// </summary>
internal bool TryForwardMessage(Message message, ActivationAddress forwardingAddress)
{
    if (!MayForward(message, this.messagingOptions))
    {
        return false;
    }

    message.ForwardCount++;
    MessagingProcessingStatisticsGroup.OnDispatcherMessageForwared(message);
    ResendMessageImpl(message, forwardingAddress);
    return true;
}
/// <summary>
/// Re-sends a message: system-target messages go back to their target,
/// messages with an explicit forwarding address are redirected there, and
/// everything else is stripped of stale addressing and re-placed.
/// </summary>
private void ResendMessageImpl(Message message, ActivationAddress forwardingAddress = null)
{
    if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("Resend {0}", message);

    message.TargetHistory = message.GetTargetHistory();

    if (message.TargetGrain.IsSystemTarget())
    {
        this.PrepareSystemTargetMessage(message);
        this.messageCenter.SendMessage(message);
    }
    else if (forwardingAddress != null)
    {
        message.TargetAddress = forwardingAddress;
        message.IsNewPlacement = false;
        this.messageCenter.SendMessage(message);
    }
    else
    {
        // No known target: clear stale addressing and go through placement again.
        message.TargetActivation = default;
        message.TargetSilo = null;
        message.ClearTargetAddress();
        this.SendMessage(message);
    }
}
// Forwarding is used by the receiving silo when it cannot process a message
// itself (duplicate activation, outdated cache, silo shutting down/overloaded, ...)
// and passes it on; give up once the hop budget is exhausted.
private static bool MayForward(Message message, SiloMessagingOptions messagingOptions)
    => message.ForwardCount < messagingOptions.MaxForwardCount;
/// <summary>
/// Send an outgoing message, may complete synchronously
/// - may buffer for transaction completion / commit if it ends a transaction
/// - choose target placement address, maintaining send order
/// - add ordering info and maintain send order
///
/// Addressing failures are converted into unrecoverable rejections rather
/// than thrown to the caller.
/// </summary>
internal Task SendMessage(Message message, IGrainContext sendingActivation = null)
{
    try
    {
        var messageAddressingTask = placementService.AddressMessage(message);
        // Fast path: addressing completed synchronously; send inline.
        if (messageAddressingTask.Status != TaskStatus.RanToCompletion)
        {
            return SendMessageAsync(messageAddressingTask, message, sendingActivation);
        }

        messageCenter.SendMessage(message);
    }
    catch (Exception ex)
    {
        OnAddressingFailure(message, sendingActivation, ex);
    }

    return Task.CompletedTask;

    // Slow path: await the addressing task, then send (or reject on failure).
    async Task SendMessageAsync(Task addressMessageTask, Message m, IGrainContext activation)
    {
        try
        {
            await addressMessageTask;
        }
        catch (Exception ex)
        {
            OnAddressingFailure(m, activation, ex);
            return;
        }

        messageCenter.SendMessage(m);
    }

    void OnAddressingFailure(Message m, IGrainContext activation, Exception ex)
    {
        this.messagingTrace.OnDispatcherSelectTargetFailed(m, activation, ex);
        RejectMessage(m, Message.RejectionTypes.Unrecoverable, ex);
    }
}
/// <summary>
/// Builds the response message for <paramref name="request"/>, attaches the
/// response body, and sends it back to the caller.
/// </summary>
internal void SendResponse(Message request, Response response)
{
    var responseMessage = this.messageFactory.CreateResponseMessage(request);
    responseMessage.BodyObject = response;

    if (responseMessage.TargetGrain.IsSystemTarget())
    {
        PrepareSystemTargetMessage(responseMessage);
    }

    messageCenter.SendMessage(responseMessage);
}
/// <summary>
/// Fills in the category and any missing addressing fields for a message
/// headed to a system target.
/// </summary>
internal void PrepareSystemTargetMessage(Message message)
{
    // Membership-service traffic is categorized as Ping; everything else is System.
    if (message.TargetGrain.Equals(Constants.MembershipServiceType))
    {
        message.Category = Message.Categories.Ping;
    }
    else
    {
        message.Category = Message.Categories.System;
    }

    if (message.TargetSilo == null)
    {
        // Default to the local silo.
        message.TargetSilo = _siloAddress;
    }

    if (message.TargetActivation is null)
    {
        message.TargetActivation = ActivationId.GetDeterministic(message.TargetGrain);
    }
}
/// <summary>
/// Dispatches an incoming message to its target: a system target, a proxied
/// client, or a grain activation (created on demand for new placements).
/// </summary>
public void ReceiveMessage(Message msg)
{
    this.messagingTrace.OnIncomingMessageAgentReceiveMessage(msg);
    // Find the activation it targets; first check for a system activation, then an app activation
    if (msg.TargetGrain.IsSystemTarget())
    {
        SystemTarget target = this.activationDirectory.FindSystemTarget(msg.TargetActivation);
        if (target == null)
        {
            // Unknown system target: count the rejection and, for requests only,
            // send an unrecoverable rejection back to the caller.
            MessagingStatisticsGroup.OnRejectedMessage(msg);
            this.logger.LogWarning(
                (int) ErrorCode.MessagingMessageFromUnknownActivation,
                "Received a message {Message} for an unknown SystemTarget: {Target}",
                msg, msg.TargetAddress);
            // Send a rejection only on a request
            if (msg.Direction == Message.Directions.Request)
            {
                var response = this.messageFactory.CreateRejectionResponse(
                    msg,
                    Message.RejectionTypes.Unrecoverable,
                    $"SystemTarget {msg.TargetGrain} not active on this silo. Msg={msg}");
                this.messageCenter.SendMessage(response);
            }
            return;
        }
        target.ReceiveMessage(msg);
    }
    else if (messageCenter.TryDeliverToProxy(msg))
    {
        // Message was for a client connected via this silo's gateway; delivered.
        return;
    }
    else
    {
        try
        {
            // Look up (or, for new placements, create) the target activation.
            var targetActivation = catalog.GetOrCreateActivation(
                msg.TargetAddress,
                msg.IsNewPlacement,
                msg.RequestContextData);

            if (targetActivation is null)
            {
                // Activation does not exists and is not a new placement.
                if (msg.Direction == Message.Directions.Response)
                {
                    // Responses to a missing activation are dropped (logged only);
                    // there is no caller to forward them to.
                    logger.LogWarning(
                        (int)ErrorCode.Dispatcher_NoTargetActivation,
                        "No target activation {Activation} for response message: {Message}",
                        msg.TargetActivation,
                        msg);
                    return;
                }
                else
                {
                    // Requests to a missing activation go through the
                    // invalid-activation handling path.
                    logger.LogInformation(
                        (int)ErrorCode.Dispatcher_Intermediate_GetOrCreateActivation,
                        "Intermediate NonExistentActivation for message {Message}",
                        msg);

                    var nonExistentActivation = msg.TargetAddress;
                    ProcessRequestToInvalidActivation(msg, nonExistentActivation, null, "Non-existent activation");
                    return;
                }
            }

            targetActivation.ReceiveMessage(msg);
        }
        catch (Exception ex)
        {
            // Activation creation failed; reject as transient so the caller may retry.
            MessagingProcessingStatisticsGroup.OnDispatcherMessageProcessedError(msg);
            logger.LogError(
                (int)ErrorCode.Dispatcher_ErrorCreatingActivation,
                ex,
                "Error creating activation for grain {TargetGrain} (interface: {InterfaceType}). Message {Message}",
                msg.TargetGrain,
                msg.InterfaceType,
                msg);

            this.RejectMessage(msg, Message.RejectionTypes.Transient, ex);
        }
    }
}
}
}
| |
using YAF.Lucene.Net.Diagnostics;
using YAF.Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Text;
/*
* dk.brics.automaton
*
* Copyright (c) 2001-2009 Anders Moeller
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* this SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* this SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
namespace YAF.Lucene.Net.Util.Automaton
{
/// <summary>
/// <see cref="Automaton"/> state.
/// <para/>
/// @lucene.experimental
/// </summary>
public class State : IComparable<State>
{
    // True if this state accepts; false means reject.
    internal bool accept;

    [WritableArray]
    [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
    public Transition[] TransitionsArray => transitionsArray;
    // LUCENENET NOTE: Setter removed because it is apparently not in use outside of this class
    private Transition[] transitionsArray = Arrays.Empty<Transition>();
    // Number of valid entries in transitionsArray (the array may be oversized).
    internal int numTransitions = 0;// LUCENENET NOTE: Made internal because we already have a public property for access

    // State number assigned by Automaton.GetNumberedStates(); meaningless before that.
    internal int number;

    // Unique id by construction order; used for ordering and hashing.
    internal int id;
    internal static int next_id;

    /// <summary>
    /// Constructs a new state. Initially, the new state is a reject state.
    /// </summary>
    public State()
    {
        //ResetTransitions(); // LUCENENET: Let class initializer set these
        id = next_id++;
    }

    /// <summary>
    /// Resets transition set.
    /// </summary>
    internal void ResetTransitions()
    {
        transitionsArray = Arrays.Empty<Transition>();
        numTransitions = 0;
    }

    // Lazily enumerates the live prefix of the transitions array without copying it.
    internal class TransitionsEnumerable : IEnumerable<Transition>
    {
        private readonly State outerInstance;

        public TransitionsEnumerable(State outerInstance)
        {
            this.outerInstance = outerInstance;
        }

        public virtual IEnumerator<Transition> GetEnumerator()
        {
            return new TransitionsEnumerator(this);
        }

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        // Struct enumerator to avoid an allocation per enumeration.
        // NOTE: upTo is captured at construction, so transitions added during
        // enumeration are not visited.
        private struct TransitionsEnumerator : IEnumerator<Transition>
        {
            private readonly TransitionsEnumerable outerInstance;
            private Transition current;
            private int i;
            private readonly int upTo;

            public TransitionsEnumerator(TransitionsEnumerable outerInstance)
            {
                this.outerInstance = outerInstance;
                upTo = this.outerInstance.outerInstance.numTransitions;
                i = 0;
                current = default;
            }

            public bool MoveNext()
            {
                if (i < upTo)
                {
                    current = outerInstance.outerInstance.transitionsArray[i++];
                    return true;
                }
                return false;
            }

            public Transition Current => current;

            object System.Collections.IEnumerator.Current => Current;

            public void Reset()
            {
                throw UnsupportedOperationException.Create();
            }

            public void Dispose()
            {
            }
        }
    }

    /// <summary>
    /// Returns the set of outgoing transitions. Subsequent changes are reflected
    /// in the automaton.
    /// </summary>
    /// <returns> Transition set. </returns>
    public virtual IEnumerable<Transition> GetTransitions()
    {
        return new TransitionsEnumerable(this);
    }

    /// <summary>
    /// Number of outgoing transitions currently stored for this state.
    /// </summary>
    public virtual int NumTransitions => numTransitions;

    /// <summary>
    /// Replaces the transition set with the given array; the array is used
    /// directly (not copied) and its full length is taken as the count.
    /// </summary>
    public virtual void SetTransitions(Transition[] transitions)
    {
        this.numTransitions = transitions.Length;
        this.transitionsArray = transitions;
    }

    /// <summary>
    /// Adds an outgoing transition.
    /// </summary>
    /// <param name="t"> Transition. </param>
    public virtual void AddTransition(Transition t)
    {
        if (numTransitions == transitionsArray.Length)
        {
            // LUCENENET: Resize rather than copy
            Array.Resize(ref transitionsArray, ArrayUtil.Oversize(1 + numTransitions, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
        }
        transitionsArray[numTransitions++] = t;
    }

    /// <summary>
    /// Sets acceptance for this state. If <c>true</c>, this state is an accept state.
    /// </summary>
    public virtual bool Accept
    {
        get => accept;
        set => this.accept = value;
    }

    /// <summary>
    /// Performs lookup in transitions, assuming determinism.
    /// </summary>
    /// <param name="c"> Codepoint to look up. </param>
    /// <returns> Destination state, <c>null</c> if no matching outgoing transition. </returns>
    /// <seealso cref="Step(int, ICollection{State})"/>
    public virtual State Step(int c)
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(c >= 0);
        // Linear scan; returns the first matching interval (determinism assumed).
        for (int i = 0; i < numTransitions; i++)
        {
            Transition t = transitionsArray[i];
            if (t.min <= c && c <= t.max)
            {
                return t.to;
            }
        }
        return null;
    }

    /// <summary>
    /// Performs lookup in transitions, allowing nondeterminism.
    /// </summary>
    /// <param name="c"> Codepoint to look up. </param>
    /// <param name="dest"> Collection where destination states are stored. </param>
    /// <seealso cref="Step(int)"/>
    public virtual void Step(int c, ICollection<State> dest)
    {
        // Collects every destination whose interval contains c.
        for (int i = 0; i < numTransitions; i++)
        {
            Transition t = transitionsArray[i];
            if (t.min <= c && c <= t.max)
            {
                dest.Add(t.to);
            }
        }
    }

    /// <summary>
    /// Virtually adds an epsilon transition to the target
    /// <paramref name="to"/> state. this is implemented by copying all
    /// transitions from <paramref name="to"/> to this state, and if
    /// <paramref name="to"/> is an accept state then set accept for this state.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal virtual void AddEpsilon(State to)
    {
        if (to.accept)
        {
            accept = true;
        }
        foreach (Transition t in to.GetTransitions())
        {
            AddTransition(t);
        }
    }

    /// <summary>
    /// Downsizes transitionArray to numTransitions. </summary>
    public virtual void TrimTransitionsArray()
    {
        if (numTransitions < transitionsArray.Length)
        {
            Array.Resize(ref transitionsArray, numTransitions); // LUCENENET: Resize rather than copy
        }
    }

    /// <summary>
    /// Reduces this state. A state is "reduced" by combining overlapping
    /// and adjacent edge intervals with same destination.
    /// </summary>
    public virtual void Reduce()
    {
        if (numTransitions <= 1)
        {
            return;
        }
        // Sorting by destination groups mergeable intervals together; within a
        // destination, intervals are visited in min/max order so overlapping or
        // adjacent ranges (t.min <= max + 1) can be coalesced in one pass.
        SortTransitions(Transition.COMPARE_BY_DEST_THEN_MIN_MAX);
        State p = null;
        int min = -1, max = -1;
        int upto = 0;
        for (int i = 0; i < numTransitions; i++)
        {
            Transition t = transitionsArray[i];
            if (p == t.to)
            {
                if (t.min <= max + 1)
                {
                    // Overlapping/adjacent interval: extend the current one.
                    if (t.max > max)
                    {
                        max = t.max;
                    }
                }
                else
                {
                    // Gap within the same destination: flush and start a new interval.
                    if (p != null)
                    {
                        transitionsArray[upto++] = new Transition(min, max, p);
                    }
                    min = t.min;
                    max = t.max;
                }
            }
            else
            {
                // Destination changed: flush the accumulated interval.
                if (p != null)
                {
                    transitionsArray[upto++] = new Transition(min, max, p);
                }
                p = t.to;
                min = t.min;
                max = t.max;
            }
        }
        // Flush the final accumulated interval.
        if (p != null)
        {
            transitionsArray[upto++] = new Transition(min, max, p);
        }
        numTransitions = upto;
    }

    /// <summary>
    /// Sorts transitions array in-place. </summary>
    /// <param name="comparer"> Comparer to sort with. </param>
    public virtual void SortTransitions(IComparer<Transition> comparer)
    {
        // mergesort seems to perform better on already sorted arrays:
        if (numTransitions > 1)
        {
            ArrayUtil.TimSort(transitionsArray, 0, numTransitions, comparer);
        }
    }

    /// <summary>
    /// Return this state's number.
    /// <para/>
    /// Expert: Will be useless unless <see cref="Automaton.GetNumberedStates()"/>
    /// has been called first to number the states. </summary>
    /// <returns> The number. </returns>
    public virtual int Number => number;

    /// <summary>
    /// Returns string describing this state. Normally invoked via
    /// <see cref="Automaton.ToString()"/>.
    /// </summary>
    public override string ToString()
    {
        StringBuilder b = new StringBuilder();
        b.Append("state ").Append(number);
        if (accept)
        {
            b.Append(" [accept]");
        }
        else
        {
            b.Append(" [reject]");
        }
        b.Append(":\n");
        foreach (Transition t in GetTransitions())
        {
            b.Append("  ").Append(t.ToString()).Append("\n");
        }
        return b.ToString();
    }

    /// <summary>
    /// Compares this object with the specified object for order. States are
    /// ordered by the time of construction.
    /// Note: the comparison is <c>s.id - id</c>, i.e. reverse construction
    /// order (newer states sort first).
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public virtual int CompareTo(State s)
    {
        return s.id - id;
    }

    // LUCENENET NOTE: DO NOT IMPLEMENT Equals()!!!
    // Although it doesn't match GetHashCode(), checking for
    // reference equality is by design.
    // Implementing Equals() causes difficult to diagnose
    // IndexOutOfRangeExceptions when using FuzzyTermsEnum.
    // See GH-296.

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public override int GetHashCode()
    {
        return id;
    }
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="DocumentationServerProtocol.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web.Services.Protocols {
using System;
using System.Collections;
using System.IO;
using System.Reflection;
using System.Web.Services.Discovery;
using System.Web.UI;
using System.Diagnostics;
using System.Web.Services.Configuration;
using System.Xml.Serialization;
using System.Xml.Schema;
using System.Text;
using System.Net;
using System.Web.Services.Description;
using System.Threading;
using System.Web.Services.Diagnostics;
using System.Security.Permissions;
using System.Collections.Generic;
// Holds the reflected WSDL / schema metadata for a web-service type, cached
// per (protocol, type) by the documentation protocol.
internal class DocumentationServerType : ServerType {
    ServiceDescriptionCollection serviceDescriptions, serviceDescriptionsWithPost;
    XmlSchemas schemas, schemasWithPost;
    LogicalMethodInfo methodInfo;

    // Fixup delegates applied to rebase cached metadata onto the request URI;
    // null unless the cache key excludes scheme/host/port.
    public List<Action<Uri>> UriFixups { get; private set; }

    void AddUriFixup(Action<Uri> fixup)
    {
        // No-op when URI fixups are disabled (UriFixups == null).
        if (this.UriFixups != null)
        {
            this.UriFixups.Add(fixup);
        }
    }

    // See comment on the ServerProtocol.IsCacheUnderPressure method for explanation of the excludeSchemeHostPortFromCachingKey logic.
    internal DocumentationServerType(Type type, string uri, bool excludeSchemeHostPortFromCachingKey)
        : base(typeof(DocumentationServerProtocol))
    {
        if (excludeSchemeHostPortFromCachingKey)
        {
            this.UriFixups = new List<Action<Uri>>();
        }
        //
        // parse the uri from a string into a URI object
        //
        // NOTE(review): this is the obsolete Uri(string, bool dontEscape) constructor —
        // the uri is taken as already escaped; verify callers pass an escaped uri.
        Uri uriObject = new Uri(uri, true);
        //
        // and get rid of the query string if there's one
        //
        uri = uriObject.GetLeftPart(UriPartial.Path);
        // The protocol's "Documentation" method is the single logical method exposed.
        methodInfo = new LogicalMethodInfo(typeof(DocumentationServerProtocol).GetMethod("Documentation", BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic));
        // Reflect the service contract into WSDL documents and XML schemas,
        // both with and without HTTP-POST bindings.
        ServiceDescriptionReflector reflector = new ServiceDescriptionReflector(this.UriFixups);
        reflector.Reflect(type, uri);
        schemas = reflector.Schemas;
        serviceDescriptions = reflector.ServiceDescriptions;
        schemasWithPost = reflector.SchemasWithPost;
        serviceDescriptionsWithPost = reflector.ServiceDescriptionsWithPost;
    }

    internal LogicalMethodInfo MethodInfo {
        get { return methodInfo; }
    }

    internal XmlSchemas Schemas {
        get { return schemas; }
    }

    internal ServiceDescriptionCollection ServiceDescriptions {
        get { return serviceDescriptions; }
    }

    internal ServiceDescriptionCollection ServiceDescriptionsWithPost {
        get { return serviceDescriptionsWithPost; }
    }

    internal XmlSchemas SchemasWithPost {
        get { return schemasWithPost; }
    }
}
// Creates the documentation protocol for plain GET requests with no extra path info.
internal class DocumentationServerProtocolFactory : ServerProtocolFactory {
    protected override ServerProtocol CreateIfRequestCompatible(HttpRequest request) {
        // Requests with path info are handled by other protocols.
        if (request.PathInfo.Length > 0) {
            return null;
        }
        // Documentation pages are only served over GET; everything else
        // gets 405 Method Not Allowed.
        if (request.HttpMethod != "GET") {
            return new UnsupportedRequestProtocol(405);
        }
        return new DocumentationServerProtocol();
    }
}
// Serves the auto-generated documentation/help page (and WSDL metadata) for a
// web service in response to a plain GET request.
internal sealed class DocumentationServerProtocol : ServerProtocol {
    DocumentationServerType serverType;
    IHttpHandler handler = null;
    object syncRoot = new object();

    private const int MAX_PATH_SIZE = 1024;

    internal override bool Initialize() {
        //
        // see if we already cached a DocumentationServerType
        //
        // Double-checked locking: probe the cache (both key variants), then
        // reflect and cache under the lock only on a miss.
        if (null == (serverType = (DocumentationServerType)GetFromCache(typeof(DocumentationServerProtocol), Type))
            && null == (serverType = (DocumentationServerType)GetFromCache(typeof(DocumentationServerProtocol), Type, true))) {
            lock (InternalSyncObject) {
                if (null == (serverType = (DocumentationServerType)GetFromCache(typeof(DocumentationServerProtocol), Type))
                    && null == (serverType = (DocumentationServerType)GetFromCache(typeof(DocumentationServerProtocol), Type, true)))
                {
                    //
                    // if not create a new DocumentationServerType and cache it
                    //
                    //
                    bool excludeSchemeHostPortFromCachingKey = this.IsCacheUnderPressure(typeof(DocumentationServerProtocol), Type);
                    string escapedUri = RuntimeUtils.EscapeUri(Request.Url);
                    serverType = new DocumentationServerType(Type, escapedUri, excludeSchemeHostPortFromCachingKey);
                    AddToCache(typeof(DocumentationServerProtocol), Type, serverType, excludeSchemeHostPortFromCachingKey);
                }
            }
        }

        // Compile the configured WSDL help-generator page (if any); it renders
        // the documentation in WriteReturns.
        WebServicesSection config = WebServicesSection.Current;
        if (config.WsdlHelpGenerator.Href != null && config.WsdlHelpGenerator.Href.Length > 0)
        {
            TraceMethod caller = Tracing.On ? new TraceMethod(this, "Initialize") : null;
            if (Tracing.On) Tracing.Enter("ASP.NET", caller, new TraceMethod(typeof(PageParser), "GetCompiledPageInstance", config.WsdlHelpGenerator.HelpGeneratorVirtualPath, config.WsdlHelpGenerator.HelpGeneratorPath, Context));
            handler = GetCompiledPageInstance(config.WsdlHelpGenerator.HelpGeneratorVirtualPath,
                                              config.WsdlHelpGenerator.HelpGeneratorPath,
                                              Context);
            if (Tracing.On) Tracing.Exit("ASP.NET", caller);
        }

        return true;
    }

    // Asserts SecurityPermission and FileIOPermission.
    // Justification: Security Permission is demanded by PageParser.GetCompiledPageInstance() method.
    // It is used to initialize the IHttpHandler field of the DocumentationServerProtocol object.
    // FileIOPermission is required to access the inputFile passed in as a parameter.
    // It is used only to map the virtual path to the physical file path. The FileIOPermission is not used to access any file other than the one passed in.
    [SecurityPermission(SecurityAction.Assert, Unrestricted = true)]
    [FileIOPermissionAttribute(SecurityAction.Assert, Unrestricted = true)]
    private IHttpHandler GetCompiledPageInstance(string virtualPath, string inputFile, HttpContext context)
    {
        return PageParser.GetCompiledPageInstance(virtualPath, inputFile, context);
    }

    internal override ServerType ServerType {
        get { return serverType; }
    }

    internal override bool IsOneWay {
        get { return false; }
    }

    internal override LogicalMethodInfo MethodInfo {
        get { return serverType.MethodInfo; }
    }

    // The documentation page takes no parameters.
    internal override object[] ReadParameters() {
        return new object[0];
    }

    internal override void WriteReturns(object[] returnValues, Stream outputStream) {
        try {
            if (handler != null) {
                // Expose the reflected metadata to the help page via context items.
                Context.Items.Add("wsdls", serverType.ServiceDescriptions);
                Context.Items.Add("schemas", serverType.Schemas);

                // conditionally add post-enabled wsdls and schemas to support localhost-only post
                if (Context.Request.Url.IsLoopback || Context.Request.IsLocal) {
                    Context.Items.Add("wsdlsWithPost", serverType.ServiceDescriptionsWithPost);
                    Context.Items.Add("schemasWithPost", serverType.SchemasWithPost);
                }
                Context.Items.Add("conformanceWarnings", WebServicesSection.Current.EnabledConformanceWarnings);
                Response.ContentType = "text/html";
                if (this.serverType.UriFixups == null)
                {
                    handler.ProcessRequest(Context);
                }
                else
                {
                    // URI fixups mutate shared cached metadata, so serialize
                    // fixup + render under the instance lock.
                    lock (this.syncRoot)
                    {
                        this.RunUriFixups();
                        handler.ProcessRequest(Context);
                    }
                }
            }
        }
        catch (Exception e) {
            // Never wrap fatal exceptions; everything else becomes a help-generator error.
            if (e is ThreadAbortException || e is StackOverflowException || e is OutOfMemoryException) {
                throw;
            }
            throw new InvalidOperationException(Res.GetString(Res.HelpGeneratorInternalError), e);
        }
    }

    internal override bool WriteException(Exception e, Stream outputStream) {
        return false;
    }

    internal void Documentation() {
        // This is the "server method" that is called for this protocol
    }

    // Rebases the cached WSDL/schema URIs onto the current request URL.
    void RunUriFixups()
    {
        foreach (Action<Uri> fixup in this.serverType.UriFixups)
        {
            fixup(this.Context.Request.Url);
        }
    }
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.TimeZoneInfo.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System
{
// Auto-generated Code Contracts reference class for System.TimeZoneInfo.
// Bodies are stubs (return default) — only the Contract.Ensures postconditions
// carry meaning; this class is never executed.
sealed public partial class TimeZoneInfo : IEquatable<TimeZoneInfo>, System.Runtime.Serialization.ISerializable, System.Runtime.Serialization.IDeserializationCallback
{
    #region Methods and constructors
    public static void ClearCachedData()
    {
    }

    public static DateTime ConvertTime(DateTime dateTime, TimeZoneInfo sourceTimeZone, TimeZoneInfo destinationTimeZone)
    {
        return default(DateTime);
    }

    public static DateTimeOffset ConvertTime(DateTimeOffset dateTimeOffset, TimeZoneInfo destinationTimeZone)
    {
        return default(DateTimeOffset);
    }

    public static DateTime ConvertTime(DateTime dateTime, TimeZoneInfo destinationTimeZone)
    {
        return default(DateTime);
    }

    public static DateTime ConvertTimeBySystemTimeZoneId(DateTime dateTime, string destinationTimeZoneId)
    {
        return default(DateTime);
    }

    public static DateTime ConvertTimeBySystemTimeZoneId(DateTime dateTime, string sourceTimeZoneId, string destinationTimeZoneId)
    {
        return default(DateTime);
    }

    public static DateTimeOffset ConvertTimeBySystemTimeZoneId(DateTimeOffset dateTimeOffset, string destinationTimeZoneId)
    {
        return default(DateTimeOffset);
    }

    public static DateTime ConvertTimeFromUtc(DateTime dateTime, TimeZoneInfo destinationTimeZone)
    {
        return default(DateTime);
    }

    public static DateTime ConvertTimeToUtc(DateTime dateTime, TimeZoneInfo sourceTimeZone)
    {
        return default(DateTime);
    }

    public static DateTime ConvertTimeToUtc(DateTime dateTime)
    {
        return default(DateTime);
    }

    // NOTE(review): Ensures(false) on the CreateCustomTimeZone overloads reads as
    // "never returns normally", which looks like a generator artifact — verify
    // against the original contract source.
    public static TimeZoneInfo CreateCustomTimeZone(string id, TimeSpan baseUtcOffset, string displayName, string standardDisplayName, string daylightDisplayName, TimeZoneInfo.AdjustmentRule[] adjustmentRules)
    {
        Contract.Ensures(false);
        return default(TimeZoneInfo);
    }

    public static TimeZoneInfo CreateCustomTimeZone(string id, TimeSpan baseUtcOffset, string displayName, string standardDisplayName)
    {
        Contract.Ensures(false);
        return default(TimeZoneInfo);
    }

    public static TimeZoneInfo CreateCustomTimeZone(string id, TimeSpan baseUtcOffset, string displayName, string standardDisplayName, string daylightDisplayName, TimeZoneInfo.AdjustmentRule[] adjustmentRules, bool disableDaylightSavingTime)
    {
        Contract.Ensures(false);
        return default(TimeZoneInfo);
    }

    public bool Equals(TimeZoneInfo other)
    {
        return default(bool);
    }

    // Contract: never returns null (throws on unknown id in the real implementation).
    public static TimeZoneInfo FindSystemTimeZoneById(string id)
    {
        Contract.Ensures(Contract.Result<System.TimeZoneInfo>() != null);
        return default(TimeZoneInfo);
    }

    public static TimeZoneInfo FromSerializedString(string source)
    {
        return default(TimeZoneInfo);
    }

    public TimeZoneInfo.AdjustmentRule[] GetAdjustmentRules()
    {
        return default(TimeZoneInfo.AdjustmentRule[]);
    }

    public TimeSpan[] GetAmbiguousTimeOffsets(DateTime dateTime)
    {
        Contract.Ensures(Contract.Result<System.TimeSpan[]>() != null);
        return default(TimeSpan[]);
    }

    public TimeSpan[] GetAmbiguousTimeOffsets(DateTimeOffset dateTimeOffset)
    {
        Contract.Ensures(Contract.Result<System.TimeSpan[]>() != null);
        return default(TimeSpan[]);
    }

    public override int GetHashCode()
    {
        return default(int);
    }

    public static System.Collections.ObjectModel.ReadOnlyCollection<TimeZoneInfo> GetSystemTimeZones()
    {
        Contract.Ensures(Contract.Result<System.Collections.ObjectModel.ReadOnlyCollection<System.TimeZoneInfo>>() != null);
        return default(System.Collections.ObjectModel.ReadOnlyCollection<TimeZoneInfo>);
    }

    public TimeSpan GetUtcOffset(DateTimeOffset dateTimeOffset)
    {
        return default(TimeSpan);
    }

    public TimeSpan GetUtcOffset(DateTime dateTime)
    {
        return default(TimeSpan);
    }

    public bool HasSameRules(TimeZoneInfo other)
    {
        return default(bool);
    }

    public bool IsAmbiguousTime(DateTimeOffset dateTimeOffset)
    {
        return default(bool);
    }

    public bool IsAmbiguousTime(DateTime dateTime)
    {
        return default(bool);
    }

    public bool IsDaylightSavingTime(DateTimeOffset dateTimeOffset)
    {
        return default(bool);
    }

    public bool IsDaylightSavingTime(DateTime dateTime)
    {
        return default(bool);
    }

    public bool IsInvalidTime(DateTime dateTime)
    {
        return default(bool);
    }

    void System.Runtime.Serialization.IDeserializationCallback.OnDeserialization(Object sender)
    {
    }

    void System.Runtime.Serialization.ISerializable.GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
    {
    }

    internal TimeZoneInfo()
    {
    }

    public string ToSerializedString()
    {
        return default(string);
    }

    public override string ToString()
    {
        return default(string);
    }
    #endregion

    #region Properties and indexers
    public TimeSpan BaseUtcOffset
    {
        get
        {
            return default(TimeSpan);
        }
    }

    // Contract: the name/display-name properties below never return null.
    public string DaylightName
    {
        get
        {
            Contract.Ensures(Contract.Result<string>() != null);
            return default(string);
        }
    }

    public string DisplayName
    {
        get
        {
            Contract.Ensures(Contract.Result<string>() != null);
            return default(string);
        }
    }

    public string Id
    {
        get
        {
            return default(string);
        }
    }

    public static System.TimeZoneInfo Local
    {
        get
        {
            Contract.Ensures(Contract.Result<System.TimeZoneInfo>() != null);
            return default(System.TimeZoneInfo);
        }
    }

    public string StandardName
    {
        get
        {
            Contract.Ensures(Contract.Result<string>() != null);
            return default(string);
        }
    }

    public bool SupportsDaylightSavingTime
    {
        get
        {
            return default(bool);
        }
    }

    public static System.TimeZoneInfo Utc
    {
        get
        {
            Contract.Ensures(Contract.Result<System.TimeZoneInfo>() != null);
            return default(System.TimeZoneInfo);
        }
    }
    #endregion
}
}
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
namespace _4PosBackOffice.NET
{
[Microsoft.VisualBasic.CompilerServices.DesignerGenerated()]
partial class frmItemGroup
{
#region "Windows Form Designer generated code "
[System.Diagnostics.DebuggerNonUserCode()]
public frmItemGroup() : base()
{
    // Wire up form lifecycle handlers before designer initialization.
    Load += frmItemGroup_Load;
    KeyPress += frmItemGroup_KeyPress;
    //This call is required by the Windows Form Designer.
    InitializeComponent();
}
// Form overrides dispose to clean up the component list.
[System.Diagnostics.DebuggerNonUserCode()]
protected override void Dispose(bool Disposing)
{
    // Release designer-managed components only on an explicit dispose.
    if (Disposing && components != null) {
        components.Dispose();
    }
    base.Dispose(Disposing);
}
//Required by the Windows Form Designer
private System.ComponentModel.IContainer components;
public System.Windows.Forms.ToolTip ToolTip1;
private System.Windows.Forms.Button withEventsField_cmdExit;
// Exit button; the setter rewires the Click handler when the control is replaced.
public System.Windows.Forms.Button cmdExit {
    get { return withEventsField_cmdExit; }
    set {
        var previous = withEventsField_cmdExit;
        if (previous != null) {
            previous.Click -= cmdExit_Click;
        }
        withEventsField_cmdExit = value;
        if (value != null) {
            value.Click += cmdExit_Click;
        }
    }
}
private System.Windows.Forms.Button withEventsField_cmdLoad;
// Load-report button; the setter rewires the Click handler when the control is replaced.
public System.Windows.Forms.Button cmdLoad {
    get { return withEventsField_cmdLoad; }
    set {
        var previous = withEventsField_cmdLoad;
        if (previous != null) {
            previous.Click -= cmdLoad_Click;
        }
        withEventsField_cmdLoad = value;
        if (value != null) {
            value.Click += cmdLoad_Click;
        }
    }
}
public System.Windows.Forms.RadioButton _optDataType_1;
public System.Windows.Forms.RadioButton _optDataType_0;
private System.Windows.Forms.Button withEventsField_cmdGroup;
// Group-selection button; the setter rewires the Click handler when the control is replaced.
public System.Windows.Forms.Button cmdGroup {
    get { return withEventsField_cmdGroup; }
    set {
        var previous = withEventsField_cmdGroup;
        if (previous != null) {
            previous.Click -= cmdGroup_Click;
        }
        withEventsField_cmdGroup = value;
        if (value != null) {
            value.Click += cmdGroup_Click;
        }
    }
}
private System.Windows.Forms.Button withEventsField_cmdStockItem;
// Stock-item button; the setter rewires the Click handler when the control is replaced.
public System.Windows.Forms.Button cmdStockItem {
    get { return withEventsField_cmdStockItem; }
    set {
        var previous = withEventsField_cmdStockItem;
        if (previous != null) {
            previous.Click -= cmdStockItem_Click;
        }
        withEventsField_cmdStockItem = value;
        if (value != null) {
            value.Click += cmdStockItem_Click;
        }
    }
}
public System.Windows.Forms.Label _lbl_1;
public System.Windows.Forms.Label lblGroup;
public System.Windows.Forms.Label _lbl_0;
public System.Windows.Forms.Label lblItem;
public Microsoft.VisualBasic.PowerPacks.RectangleShape _Shape1_0;
public Microsoft.VisualBasic.PowerPacks.RectangleShape _Shape1_1;
//Public WithEvents lbl As Microsoft.VisualBasic.Compatibility.VB6.LabelArray
//Public WithEvents optDataType As Microsoft.VisualBasic.Compatibility.VB6.RadioButtonArray
public RectangleShapeArray Shape1;
public Microsoft.VisualBasic.PowerPacks.ShapeContainer ShapeContainer1;
//NOTE: The following procedure is required by the Windows Form Designer
//It can be modified using the Windows Form Designer.
//Do not modify it using the code editor.
// Designer-generated initialization. Statement order (SuspendLayout/BeginInit before
// property assignment, EndInit/ResumeLayout after) is significant — do not reorder.
[System.Diagnostics.DebuggerStepThrough()]
private void InitializeComponent()
{
// NOTE(review): 'resources' is assigned but never read in this method — presumably a
// leftover from the VB6 upgrade wizard; confirm before removing.
System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(frmItemGroup));
this.components = new System.ComponentModel.Container();
this.ToolTip1 = new System.Windows.Forms.ToolTip(components);
this.ShapeContainer1 = new Microsoft.VisualBasic.PowerPacks.ShapeContainer();
this.cmdExit = new System.Windows.Forms.Button();
this.cmdLoad = new System.Windows.Forms.Button();
this._optDataType_1 = new System.Windows.Forms.RadioButton();
this._optDataType_0 = new System.Windows.Forms.RadioButton();
this.cmdGroup = new System.Windows.Forms.Button();
this.cmdStockItem = new System.Windows.Forms.Button();
this._lbl_1 = new System.Windows.Forms.Label();
this.lblGroup = new System.Windows.Forms.Label();
this._lbl_0 = new System.Windows.Forms.Label();
this.lblItem = new System.Windows.Forms.Label();
this._Shape1_0 = new Microsoft.VisualBasic.PowerPacks.RectangleShape();
this._Shape1_1 = new Microsoft.VisualBasic.PowerPacks.RectangleShape();
//Me.lbl = New Microsoft.VisualBasic.Compatibility.VB6.LabelArray(components)
//Me.optDataType = New Microsoft.VisualBasic.Compatibility.VB6.RadioButtonArray(components)
this.Shape1 = new RectangleShapeArray(components);
this.SuspendLayout();
this.ToolTip1.Active = true;
//CType(Me.lbl, System.ComponentModel.ISupportInitialize).BeginInit()
//CType(Me.optDataType, System.ComponentModel.ISupportInitialize).BeginInit()
((System.ComponentModel.ISupportInitialize)this.Shape1).BeginInit();
// --- Form properties ---
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
this.Text = "Stock Item / Stock Group Compare";
this.ClientSize = new System.Drawing.Size(558, 196);
this.Location = new System.Drawing.Point(3, 22);
this.ControlBox = false;
this.KeyPreview = true;
this.MaximizeBox = false;
this.MinimizeBox = false;
this.ShowInTaskbar = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.BackColor = System.Drawing.SystemColors.Control;
this.Enabled = true;
this.Cursor = System.Windows.Forms.Cursors.Default;
this.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.HelpButton = false;
this.WindowState = System.Windows.Forms.FormWindowState.Normal;
this.Name = "frmItemGroup";
// --- cmdExit button ---
this.cmdExit.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
this.cmdExit.Text = "E&xit";
this.cmdExit.Size = new System.Drawing.Size(79, 31);
this.cmdExit.Location = new System.Drawing.Point(9, 144);
this.cmdExit.TabIndex = 9;
this.cmdExit.BackColor = System.Drawing.SystemColors.Control;
this.cmdExit.CausesValidation = true;
this.cmdExit.Enabled = true;
this.cmdExit.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdExit.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdExit.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdExit.TabStop = true;
this.cmdExit.Name = "cmdExit";
// --- cmdLoad button ---
this.cmdLoad.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
this.cmdLoad.Text = "&Load report >>";
this.cmdLoad.Size = new System.Drawing.Size(79, 31);
this.cmdLoad.Location = new System.Drawing.Point(465, 144);
this.cmdLoad.TabIndex = 8;
this.cmdLoad.BackColor = System.Drawing.SystemColors.Control;
this.cmdLoad.CausesValidation = true;
this.cmdLoad.Enabled = true;
this.cmdLoad.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdLoad.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdLoad.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdLoad.TabStop = true;
this.cmdLoad.Name = "cmdLoad";
// --- optDataType radio buttons (index 1 = Sales Value, index 0 = Sales Quantity) ---
this._optDataType_1.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
this._optDataType_1.Text = "Sales Value";
this._optDataType_1.Size = new System.Drawing.Size(145, 13);
this._optDataType_1.Location = new System.Drawing.Point(315, 162);
this._optDataType_1.TabIndex = 7;
this._optDataType_1.CheckAlign = System.Drawing.ContentAlignment.MiddleLeft;
this._optDataType_1.BackColor = System.Drawing.SystemColors.Control;
this._optDataType_1.CausesValidation = true;
this._optDataType_1.Enabled = true;
this._optDataType_1.ForeColor = System.Drawing.SystemColors.ControlText;
this._optDataType_1.Cursor = System.Windows.Forms.Cursors.Default;
this._optDataType_1.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._optDataType_1.Appearance = System.Windows.Forms.Appearance.Normal;
this._optDataType_1.TabStop = true;
this._optDataType_1.Checked = false;
this._optDataType_1.Visible = true;
this._optDataType_1.Name = "_optDataType_1";
this._optDataType_0.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
this._optDataType_0.Text = "Sales Quantity";
this._optDataType_0.Size = new System.Drawing.Size(145, 13);
this._optDataType_0.Location = new System.Drawing.Point(315, 144);
this._optDataType_0.TabIndex = 6;
// Sales Quantity is the default selection.
this._optDataType_0.Checked = true;
this._optDataType_0.CheckAlign = System.Drawing.ContentAlignment.MiddleLeft;
this._optDataType_0.BackColor = System.Drawing.SystemColors.Control;
this._optDataType_0.CausesValidation = true;
this._optDataType_0.Enabled = true;
this._optDataType_0.ForeColor = System.Drawing.SystemColors.ControlText;
this._optDataType_0.Cursor = System.Windows.Forms.Cursors.Default;
this._optDataType_0.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._optDataType_0.Appearance = System.Windows.Forms.Appearance.Normal;
this._optDataType_0.TabStop = true;
this._optDataType_0.Visible = true;
this._optDataType_0.Name = "_optDataType_0";
// --- cmdGroup / cmdStockItem buttons ---
this.cmdGroup.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
this.cmdGroup.Text = "Get a Group >>";
this.cmdGroup.Size = new System.Drawing.Size(97, 31);
this.cmdGroup.Location = new System.Drawing.Point(441, 90);
this.cmdGroup.TabIndex = 3;
this.cmdGroup.BackColor = System.Drawing.SystemColors.Control;
this.cmdGroup.CausesValidation = true;
this.cmdGroup.Enabled = true;
this.cmdGroup.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdGroup.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdGroup.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdGroup.TabStop = true;
this.cmdGroup.Name = "cmdGroup";
this.cmdStockItem.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
this.cmdStockItem.Text = "Get Stock Item >>";
this.cmdStockItem.Size = new System.Drawing.Size(97, 22);
this.cmdStockItem.Location = new System.Drawing.Point(441, 30);
this.cmdStockItem.TabIndex = 1;
this.cmdStockItem.BackColor = System.Drawing.SystemColors.Control;
this.cmdStockItem.CausesValidation = true;
this.cmdStockItem.Enabled = true;
this.cmdStockItem.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdStockItem.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdStockItem.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdStockItem.TabStop = true;
this.cmdStockItem.Name = "cmdStockItem";
// --- Labels (lbl array index 1 = group prompt, index 0 = stock-item prompt) ---
this._lbl_1.Text = "&2. Select a Group";
this._lbl_1.Size = new System.Drawing.Size(101, 13);
this._lbl_1.Location = new System.Drawing.Point(9, 66);
this._lbl_1.TabIndex = 5;
this._lbl_1.TextAlign = System.Drawing.ContentAlignment.TopLeft;
this._lbl_1.BackColor = System.Drawing.Color.Transparent;
this._lbl_1.Enabled = true;
this._lbl_1.ForeColor = System.Drawing.SystemColors.ControlText;
this._lbl_1.Cursor = System.Windows.Forms.Cursors.Default;
this._lbl_1.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._lbl_1.UseMnemonic = true;
this._lbl_1.Visible = true;
this._lbl_1.AutoSize = true;
this._lbl_1.BorderStyle = System.Windows.Forms.BorderStyle.None;
this._lbl_1.Name = "_lbl_1";
this.lblGroup.Text = "lblGroup";
this.lblGroup.Size = new System.Drawing.Size(421, 37);
this.lblGroup.Location = new System.Drawing.Point(15, 87);
this.lblGroup.TabIndex = 4;
this.lblGroup.TextAlign = System.Drawing.ContentAlignment.TopLeft;
this.lblGroup.BackColor = System.Drawing.SystemColors.Control;
this.lblGroup.Enabled = true;
this.lblGroup.ForeColor = System.Drawing.SystemColors.ControlText;
this.lblGroup.Cursor = System.Windows.Forms.Cursors.Default;
this.lblGroup.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.lblGroup.UseMnemonic = true;
this.lblGroup.Visible = true;
this.lblGroup.AutoSize = false;
this.lblGroup.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
this.lblGroup.Name = "lblGroup";
this._lbl_0.Text = "&1. Select a Stock Item";
this._lbl_0.Size = new System.Drawing.Size(128, 13);
this._lbl_0.Location = new System.Drawing.Point(9, 9);
this._lbl_0.TabIndex = 2;
this._lbl_0.TextAlign = System.Drawing.ContentAlignment.TopLeft;
this._lbl_0.BackColor = System.Drawing.Color.Transparent;
this._lbl_0.Enabled = true;
this._lbl_0.ForeColor = System.Drawing.SystemColors.ControlText;
this._lbl_0.Cursor = System.Windows.Forms.Cursors.Default;
this._lbl_0.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._lbl_0.UseMnemonic = true;
this._lbl_0.Visible = true;
this._lbl_0.AutoSize = true;
this._lbl_0.BorderStyle = System.Windows.Forms.BorderStyle.None;
this._lbl_0.Name = "_lbl_0";
this.lblItem.Size = new System.Drawing.Size(421, 19);
this.lblItem.Location = new System.Drawing.Point(15, 30);
this.lblItem.TabIndex = 0;
this.lblItem.TextAlign = System.Drawing.ContentAlignment.TopLeft;
this.lblItem.BackColor = System.Drawing.SystemColors.Control;
this.lblItem.Enabled = true;
this.lblItem.ForeColor = System.Drawing.SystemColors.ControlText;
this.lblItem.Cursor = System.Windows.Forms.Cursors.Default;
this.lblItem.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.lblItem.UseMnemonic = true;
this.lblItem.Visible = true;
this.lblItem.AutoSize = false;
this.lblItem.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
this.lblItem.Name = "lblItem";
// --- Decorative rectangle shapes ---
this._Shape1_0.BackColor = System.Drawing.Color.FromArgb(192, 192, 255);
this._Shape1_0.BackStyle = Microsoft.VisualBasic.PowerPacks.BackStyle.Opaque;
this._Shape1_0.Size = new System.Drawing.Size(535, 34);
this._Shape1_0.Location = new System.Drawing.Point(9, 24);
this._Shape1_0.BorderColor = System.Drawing.SystemColors.WindowText;
this._Shape1_0.BorderStyle = System.Drawing.Drawing2D.DashStyle.Solid;
this._Shape1_0.BorderWidth = 1;
this._Shape1_0.FillColor = System.Drawing.Color.Black;
this._Shape1_0.FillStyle = Microsoft.VisualBasic.PowerPacks.FillStyle.Transparent;
this._Shape1_0.Visible = true;
this._Shape1_0.Name = "_Shape1_0";
this._Shape1_1.BackColor = System.Drawing.Color.FromArgb(192, 192, 255);
this._Shape1_1.BackStyle = Microsoft.VisualBasic.PowerPacks.BackStyle.Opaque;
this._Shape1_1.Size = new System.Drawing.Size(535, 52);
this._Shape1_1.Location = new System.Drawing.Point(9, 81);
this._Shape1_1.BorderColor = System.Drawing.SystemColors.WindowText;
this._Shape1_1.BorderStyle = System.Drawing.Drawing2D.DashStyle.Solid;
this._Shape1_1.BorderWidth = 1;
this._Shape1_1.FillColor = System.Drawing.Color.Black;
this._Shape1_1.FillStyle = Microsoft.VisualBasic.PowerPacks.FillStyle.Transparent;
this._Shape1_1.Visible = true;
this._Shape1_1.Name = "_Shape1_1";
// --- Add controls to the form; shapes go into ShapeContainer1, which is itself a control ---
this.Controls.Add(cmdExit);
this.Controls.Add(cmdLoad);
this.Controls.Add(_optDataType_1);
this.Controls.Add(_optDataType_0);
this.Controls.Add(cmdGroup);
this.Controls.Add(cmdStockItem);
this.Controls.Add(_lbl_1);
this.Controls.Add(lblGroup);
this.Controls.Add(_lbl_0);
this.Controls.Add(lblItem);
this.ShapeContainer1.Shapes.Add(_Shape1_0);
this.ShapeContainer1.Shapes.Add(_Shape1_1);
this.Controls.Add(ShapeContainer1);
//Me.lbl.SetIndex(_lbl_1, CType(1, Short))
//Me.lbl.SetIndex(_lbl_0, CType(0, Short))
//Me.optDataType.SetIndex(_optDataType_1, CType(1, Short))
//Me.optDataType.SetIndex(_optDataType_0, CType(0, Short))
// Register the shapes under their VB6 control-array indices.
this.Shape1.SetIndex(_Shape1_0, Convert.ToInt16(0));
this.Shape1.SetIndex(_Shape1_1, Convert.ToInt16(1));
((System.ComponentModel.ISupportInitialize)this.Shape1).EndInit();
//CType(Me.optDataType, System.ComponentModel.ISupportInitialize).EndInit()
//CType(Me.lbl, System.ComponentModel.ISupportInitialize).EndInit()
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.DirectoryServices
{
using System;
using System.Runtime.InteropServices;
using System.Collections;
using System.Diagnostics;
using System.DirectoryServices.Interop;
using System.Security.Permissions;
using System.Globalization;
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection"]/*' />
/// <devdoc>
/// <para>Contains the properties on a <see cref='System.DirectoryServices.DirectoryEntry'/>.</para>
/// </devdoc>
public class PropertyCollection : IDictionary
{
// Entry whose properties this collection exposes.
private DirectoryEntry _entry;
// Cache of PropertyValueCollection wrappers, keyed by lower-cased property name.
internal Hashtable valueTable = null;
internal PropertyCollection(DirectoryEntry entry)
{
_entry = entry;
Hashtable tempTable = new Hashtable();
// Synchronized wrapper so concurrent indexer calls don't corrupt the cache.
valueTable = Hashtable.Synchronized(tempTable);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.this"]/*' />
/// <devdoc>
/// <para>Gets the property with the given name. The lookup is case-insensitive;
/// a wrapper is created and cached on first access.</para>
/// </devdoc>
public PropertyValueCollection this[string propertyName]
{
get
{
if (propertyName == null)
throw new ArgumentNullException("propertyName");
// Normalize for the case-insensitive cache key; the original casing is kept
// in the PropertyValueCollection itself.
string name = propertyName.ToLower(CultureInfo.InvariantCulture);
if (valueTable.Contains(name))
return (PropertyValueCollection)valueTable[name];
else
{
PropertyValueCollection value = new PropertyValueCollection(_entry, propertyName);
valueTable.Add(name, value);
return value;
}
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.Count"]/*' />
/// <devdoc>
/// <para>Gets the number of properties available on this entry. Requires the
/// underlying ADSI object to implement IAdsPropertyList; throws otherwise.</para>
/// </devdoc>
public int Count
{
get
{
if (!(_entry.AdsObject is UnsafeNativeMethods.IAdsPropertyList))
throw new NotSupportedException(SR.DSCannotCount);
// Populate the property cache before counting.
_entry.FillCache("");
UnsafeNativeMethods.IAdsPropertyList propList = (UnsafeNativeMethods.IAdsPropertyList)_entry.AdsObject;
return propList.PropertyCount;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyNames"]/*' />
/// <devdoc>
/// <para>Gets a collection of the property names on this entry.</para>
/// </devdoc>
public ICollection PropertyNames
{
get
{
return new KeysCollection(this);
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.Values"]/*' />
/// <devdoc>
/// <para>Gets a collection of the property values on this entry.</para>
/// </devdoc>
public ICollection Values
{
get
{
return new ValuesCollection(this);
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.Contains"]/*' />
/// <devdoc>
/// <para>Determines whether the entry has a property with the given name,
/// by probing the ADSI object via GetEx.</para>
/// </devdoc>
public bool Contains(string propertyName)
{
//entry.FillCache(propertyName);
object var;
int unmanagedResult = _entry.AdsObject.GetEx(propertyName, out var);
if (unmanagedResult != 0)
{
// property not found (IIS provider returns 0x80005006, other provides return 0x8000500D).
if ((unmanagedResult == unchecked((int)0x8000500D)) || (unmanagedResult == unchecked((int)0x80005006)))
{
return false;
}
else
{
// Any other HRESULT is a genuine failure, not "absent".
throw COMExceptionHelper.CreateFormattedComException(unmanagedResult);
}
}
return true;
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.CopyTo"]/*' />
/// <devdoc>
/// <para>Copies the elements of this instance into an <see cref='System.Array'/>, starting at a particular index into the array.</para>
/// </devdoc>
public void CopyTo(PropertyValueCollection[] array, int index)
{
((ICollection)this).CopyTo((Array)array, index);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.GetEnumerator"]/*' />
/// <devdoc>
/// <para>Returns an enumerator, which can be used to iterate through the collection.</para>
/// </devdoc>
public IDictionaryEnumerator GetEnumerator()
{
if (!(_entry.AdsObject is UnsafeNativeMethods.IAdsPropertyList))
throw new NotSupportedException(SR.DSCannotEmunerate);
// Once an object has been used for an enumerator once, it can't be used again, because it only
// maintains a single cursor. Re-bind to the ADSI object to get a new instance.
// That's why we must clone entry here. It will be automatically disposed inside Enumerator.
DirectoryEntry entryToUse = _entry.CloneBrowsable();
entryToUse.FillCache("");
// NOTE(review): propList is never read afterwards — the cast appears to serve only as
// an interface check on the clone; confirm before removing.
UnsafeNativeMethods.IAdsPropertyList propList = (UnsafeNativeMethods.IAdsPropertyList)entryToUse.AdsObject;
entryToUse.propertiesAlreadyEnumerated = true;
return new PropertyEnumerator(_entry, entryToUse);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.this"]/*' />
///<internalonly/>
object IDictionary.this[object key]
{
get
{
return this[(string)key];
}
set
{
// Properties are set through PropertyValueCollection, not through the dictionary.
throw new NotSupportedException(SR.DSPropertySetSupported);
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.IsFixedSize"]/*' />
///<internalonly/>
bool IDictionary.IsFixedSize
{
get
{
return true;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.IsReadOnly"]/*' />
///<internalonly/>
bool IDictionary.IsReadOnly
{
get
{
return true;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.Keys"]/*' />
///<internalonly/>
ICollection IDictionary.Keys
{
get
{
return new KeysCollection(this);
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.Add"]/*' />
///<internalonly/>
void IDictionary.Add(object key, object value)
{
throw new NotSupportedException(SR.DSAddNotSupported);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.Clear"]/*' />
///<internalonly/>
void IDictionary.Clear()
{
throw new NotSupportedException(SR.DSClearNotSupported);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.Contains"]/*' />
///<internalonly/>
bool IDictionary.Contains(object value)
{
// Note: the parameter is treated as a property *name* (key), matching Contains(string).
return this.Contains((string)value);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IDictionary.Remove"]/*' />
///<internalonly/>
void IDictionary.Remove(object key)
{
throw new NotSupportedException(SR.DSRemoveNotSupported);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.IEnumerable.GetEnumerator"]/*' />
///<internalonly/>
IEnumerator IEnumerable.GetEnumerator()
{
return (IEnumerator)GetEnumerator();
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ICollection.IsSynchronized"]/*' />
///<internalonly/>
bool ICollection.IsSynchronized
{
get
{
return false;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ICollection.SyncRoot"]/*' />
///<internalonly/>
object ICollection.SyncRoot
{
get
{
return this;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ICollection.CopyTo"]/*' />
///<internalonly/>
void ICollection.CopyTo(Array array, Int32 index)
{
if (array == null)
throw new ArgumentNullException("array");
if (array.Rank != 1)
throw new ArgumentException(SR.OnlyAllowSingleDimension, "array");
if (index < 0)
throw new ArgumentOutOfRangeException(SR.LessThanZero, "index");
// Second clause guards against integer overflow of index + Count.
if (((index + Count) > array.Length) || ((index + Count) < index))
throw new ArgumentException(SR.DestinationArrayNotLargeEnough);
foreach (PropertyValueCollection value in this)
{
array.SetValue(value, index);
index++;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator"]/*' />
///<internalonly/>
// Walks the ADSI property list via IAdsPropertyList.Next on a cloned entry,
// disposing the clone when the enumerator is disposed or finalized.
private class PropertyEnumerator : IDictionaryEnumerator, IDisposable
{
private DirectoryEntry _entry; // clone (to be disposed)
private DirectoryEntry _parentEntry; // original entry to pass to PropertyValueCollection
private string _currentPropName = null;
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.PropertyEnumerator"]/*' />
///<internalonly/>
public PropertyEnumerator(DirectoryEntry parent, DirectoryEntry clone)
{
_entry = clone;
_parentEntry = parent;
}
~PropertyEnumerator()
{
Dispose(true); // finalizer is called => Dispose has not been called yet.
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyEnumerator.Dispose"]/*' />
/// <devdoc>
/// Disposes the cloned entry and suppresses finalization.
/// </devdoc>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyEnumerator.Dispose1"]/*' />
/// <devdoc>
/// Standard dispose pattern; only the cloned entry needs cleanup.
/// </devdoc>
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_entry.Dispose();
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.Current"]/*' />
///<internalonly/>
public object Current
{
get
{
return Entry.Value;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.Entry"]/*' />
///<internalonly/>
public DictionaryEntry Entry
{
get
{
if (_currentPropName == null)
throw new InvalidOperationException(SR.DSNoCurrentProperty);
// Values are built against the original (parent) entry, not the clone.
return new DictionaryEntry(_currentPropName, new PropertyValueCollection(_parentEntry, _currentPropName));
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.Key"]/*' />
///<internalonly/>
public object Key
{
get
{
return Entry.Key;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.Value"]/*' />
///<internalonly/>
public object Value
{
get
{
return Entry.Value;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.MoveNext"]/*' />
///<internalonly/>
public bool MoveNext()
{
object prop;
int hr = 0;
try
{
// Advance the single ADSI cursor; non-zero HRESULT means end of list or error.
hr = ((UnsafeNativeMethods.IAdsPropertyList)_entry.AdsObject).Next(out prop);
}
catch (COMException e)
{
hr = e.ErrorCode;
prop = null;
}
if (hr == 0)
{
if (prop != null)
_currentPropName = ((UnsafeNativeMethods.IAdsPropertyEntry)prop).Name;
else
_currentPropName = null;
return true;
}
else
{
_currentPropName = null;
return false;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.PropertyEnumerator.Reset"]/*' />
///<internalonly/>
public void Reset()
{
((UnsafeNativeMethods.IAdsPropertyList)_entry.AdsObject).Reset();
_currentPropName = null;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection"]/*' />
///<internalonly/>
// Read-only view over the property values; also the base class for KeysCollection.
private class ValuesCollection : ICollection
{
protected PropertyCollection props;
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.ValuesCollection"]/*' />
///<internalonly/>
public ValuesCollection(PropertyCollection props)
{
this.props = props;
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.Count"]/*' />
///<internalonly/>
public int Count
{
get
{
return props.Count;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.IsReadOnly"]/*' />
///<internalonly/>
public bool IsReadOnly
{
get
{
return true;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.IsSynchronized"]/*' />
///<internalonly/>
public bool IsSynchronized
{
get
{
return false;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.SyncRoot"]/*' />
///<internalonly/>
public object SyncRoot
{
get
{
return ((ICollection)props).SyncRoot;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.CopyTo"]/*' />
///<internalonly/>
public void CopyTo(Array array, int index)
{
foreach (object value in this)
array.SetValue(value, index++);
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesCollection.GetEnumerator"]/*' />
///<internalonly/>
public virtual IEnumerator GetEnumerator()
{
return new ValuesEnumerator(props);
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.KeysCollection"]/*' />
///<internalonly/>
// Same shape as ValuesCollection but enumerates property names instead of values.
private class KeysCollection : ValuesCollection
{
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.KeysCollection.KeysCollection"]/*' />
///<internalonly/>
public KeysCollection(PropertyCollection props)
: base(props)
{
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.KeysCollection.GetEnumerator"]/*' />
///<internalonly/>
public override IEnumerator GetEnumerator()
{
// Ensure the property cache is populated before index-based enumeration.
props._entry.FillCache("");
return new KeysEnumerator(props);
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesEnumerator"]/*' />
///<internalonly/>
// Index-based enumerator over IAdsPropertyList.Item; _currentIndex == -1 means
// "before first" or "exhausted".
private class ValuesEnumerator : IEnumerator
{
private int _currentIndex = -1;
protected PropertyCollection propCollection;
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesEnumerator.ValuesEnumerator"]/*' />
///<internalonly/>
public ValuesEnumerator(PropertyCollection propCollection)
{
this.propCollection = propCollection;
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesEnumerator.CurrentIndex"]/*' />
///<internalonly/>
protected int CurrentIndex
{
get
{
if (_currentIndex == -1)
throw new InvalidOperationException(SR.DSNoCurrentValue);
return _currentIndex;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesEnumerator.Current"]/*' />
///<internalonly/>
public virtual object Current
{
get
{
UnsafeNativeMethods.IAdsPropertyList propList = (UnsafeNativeMethods.IAdsPropertyList)propCollection._entry.AdsObject;
// Resolve the name at the current index, then look up its value collection.
return propCollection[((UnsafeNativeMethods.IAdsPropertyEntry)propList.Item(CurrentIndex)).Name];
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesEnumerator.MoveNext"]/*' />
///<internalonly/>
public bool MoveNext()
{
_currentIndex++;
if (_currentIndex >= propCollection.Count)
{
_currentIndex = -1;
return false;
}
else
return true;
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.ValuesEnumerator.Reset"]/*' />
///<internalonly/>
public void Reset()
{
_currentIndex = -1;
}
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.KeysEnumerator"]/*' />
///<internalonly/>
// Reuses ValuesEnumerator's cursor but yields the property name at each index.
private class KeysEnumerator : ValuesEnumerator
{
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.KeysEnumerator.KeysEnumerator"]/*' />
///<internalonly/>
public KeysEnumerator(PropertyCollection collection)
: base(collection)
{
}
/// <include file='doc\PropertyCollection.uex' path='docs/doc[@for="PropertyCollection.KeysEnumerator.Current"]/*' />
///<internalonly/>
public override object Current
{
get
{
UnsafeNativeMethods.IAdsPropertyList propList = (UnsafeNativeMethods.IAdsPropertyList)propCollection._entry.AdsObject;
return ((UnsafeNativeMethods.IAdsPropertyEntry)propList.Item(CurrentIndex)).Name;
}
}
}
}
}
| |
namespace Azure.Security.Attestation
{
// Generated public API surface (ref-assembly style): member bodies are placeholders
// (`throw null`) and carry no implementation — do not edit by hand.
public partial class AttestationAdministrationClient
{
protected AttestationAdministrationClient() { }
public AttestationAdministrationClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { }
public AttestationAdministrationClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.Security.Attestation.AttestationClientOptions options) { }
public System.Uri Endpoint { get { throw null; } }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyCertificatesModificationResult> AddPolicyManagementCertificate(Azure.Security.Attestation.SecuredAttestationToken certificateToAdd, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyCertificatesModificationResult>> AddPolicyManagementCertificateAsync(Azure.Security.Attestation.SecuredAttestationToken certificateToAdd, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.StoredAttestationPolicy> GetPolicy(Azure.Security.Attestation.Models.AttestationType attestationType, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.StoredAttestationPolicy>> GetPolicyAsync(Azure.Security.Attestation.Models.AttestationType attestationType, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyCertificatesResult> GetPolicyManagementCertificates(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyCertificatesResult>> GetPolicyManagementCertificatesAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
// NOTE(review): the parameter here is named certificateToAdd although the operation removes
// a certificate — likely a generator copy/paste; renaming would be a source-breaking change
// for named-argument callers, so it is only flagged.
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyCertificatesModificationResult> RemovePolicyManagementCertificate(Azure.Security.Attestation.SecuredAttestationToken certificateToAdd, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyCertificatesModificationResult>> RemovePolicyManagementCertificateAsync(Azure.Security.Attestation.SecuredAttestationToken certificateToAdd, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyResult> ResetPolicy(Azure.Security.Attestation.Models.AttestationType attestationType, Azure.Security.Attestation.AttestationToken authorizationToken = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyResult>> ResetPolicyAsync(Azure.Security.Attestation.Models.AttestationType attestationType, Azure.Security.Attestation.AttestationToken authorizationToken = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyResult> SetPolicy(Azure.Security.Attestation.Models.AttestationType attestationType, Azure.Security.Attestation.AttestationToken policyToSet, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.PolicyResult>> SetPolicyAsync(Azure.Security.Attestation.Models.AttestationType attestationType, Azure.Security.Attestation.AttestationToken policyToSet, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
// Generated public API surface (ref-assembly style): member bodies are placeholders
// (`throw null`) and carry no implementation — do not edit by hand.
public partial class AttestationClient
{
protected AttestationClient() { }
public AttestationClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { }
public AttestationClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.Security.Attestation.AttestationClientOptions options) { }
public System.Uri Endpoint { get { throw null; } }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.AttestationResult> AttestOpenEnclave(System.ReadOnlyMemory<byte> report, System.BinaryData initTimeData, bool initTimeDataIsObject, System.BinaryData runTimeData, bool runTimeDataIsObject, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.AttestationResult>> AttestOpenEnclaveAsync(System.ReadOnlyMemory<byte> report, System.BinaryData initTimeData, bool initTimeDataIsObject, System.BinaryData runTimeData, bool runTimeDataIsObject, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.AttestationResult> AttestSgxEnclave(System.ReadOnlyMemory<byte> quote, System.BinaryData initTimeData, bool initTimeDataIsObject, System.BinaryData runTimeData, bool runTimeDataIsObject, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Security.Attestation.AttestationResponse<Azure.Security.Attestation.Models.AttestationResult>> AttestSgxEnclaveAsync(System.ReadOnlyMemory<byte> quote, System.BinaryData initTimeData, bool initTimeDataIsObject, System.BinaryData runTimeData, bool runTimeDataIsObject, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<System.BinaryData> AttestTpm(System.BinaryData request, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<System.BinaryData>> AttestTpmAsync(System.BinaryData request, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Security.Attestation.Models.AttestationSigner>> GetSigningCertificates(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Security.Attestation.Models.AttestationSigner>>> GetSigningCertificatesAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
// Configuration for AttestationClient/AttestationAdministrationClient:
// service version, an optional token-validation callback, and whether
// returned attestation tokens are validated at all (default true).
public partial class AttestationClientOptions : Azure.Core.ClientOptions
{
    public AttestationClientOptions(Azure.Security.Attestation.AttestationClientOptions.ServiceVersion version = Azure.Security.Attestation.AttestationClientOptions.ServiceVersion.V2020_10_01, System.Func<Azure.Security.Attestation.AttestationToken, Azure.Security.Attestation.Models.AttestationSigner, bool> validationCallback = null, bool validateAttestationTokens = true) { }
    // Caller-supplied predicate invoked during token validation; return false to reject.
    public System.Func<Azure.Security.Attestation.AttestationToken, Azure.Security.Attestation.Models.AttestationSigner, bool> ValidationCallback { get { throw null; } }
    // REST API versions supported by this client library.
    public enum ServiceVersion
    {
        V2020_10_01 = 1,
    }
}
// Azure.Response<T> specialization that additionally exposes the raw
// attestation Token from which Value was extracted.
public partial class AttestationResponse<T> : Azure.Response<T> where T : class
{
    internal AttestationResponse() { }
    // The signed attestation token carrying the response body.
    public Azure.Security.Attestation.AttestationToken Token { get { throw null; } }
    public override T Value { get { throw null; } }
    public override Azure.Response GetRawResponse() { throw null; }
}
// Represents an RFC 7515 style signed token returned by the attestation
// service, split into header / body / signature parts with both string and
// raw-byte accessors, plus standard time-window claims.
public partial class AttestationToken
{
    protected AttestationToken() { }
    // Thumbprint of the certificate that signed the token, if any.
    public string CertificateThumbprint { get { throw null; } }
    // Standard token validity claims (exp / iat / nbf).
    public System.DateTimeOffset ExpirationTime { get { throw null; } }
    public System.DateTimeOffset IssuedAtTime { get { throw null; } }
    public System.DateTimeOffset NotBeforeTime { get { throw null; } }
    public string TokenBody { get { throw null; } }
    public System.ReadOnlyMemory<byte> TokenBodyBytes { get { throw null; } }
    public string TokenHeader { get { throw null; } }
    public System.ReadOnlyMemory<byte> TokenHeaderBytes { get { throw null; } }
    public System.ReadOnlyMemory<byte> TokenSignatureBytes { get { throw null; } }
    // Deserializes the token body into T.
    public T GetBody<T>() where T : class { throw null; }
    public override string ToString() { throw null; }
    // Verifies the token against the given signing certificates; the optional
    // callback lets the caller apply additional acceptance criteria.
    public virtual bool ValidateToken(System.Collections.Generic.IReadOnlyList<Azure.Security.Attestation.Models.AttestationSigner> attestationSigningCertificates, System.Func<Azure.Security.Attestation.AttestationToken, Azure.Security.Attestation.Models.AttestationSigner, bool> validationCallback = null) { throw null; }
}
// AttestationToken signed by the caller. Overloads cover: body + explicit
// key + certificate, body + certificate, and body-less variants of each.
public partial class SecuredAttestationToken : Azure.Security.Attestation.AttestationToken
{
    public SecuredAttestationToken(object body, System.Security.Cryptography.AsymmetricAlgorithm signingKey, System.Security.Cryptography.X509Certificates.X509Certificate2 signingCertificate) { }
    public SecuredAttestationToken(object body, System.Security.Cryptography.X509Certificates.X509Certificate2 signingCertificate) { }
    public SecuredAttestationToken(System.Security.Cryptography.AsymmetricAlgorithm signingKey, System.Security.Cryptography.X509Certificates.X509Certificate2 signingCertificate) { }
    public SecuredAttestationToken(System.Security.Cryptography.X509Certificates.X509Certificate2 signingCertificate) { }
}
// AttestationToken with no signature (the JWS "alg":"none" form), with or
// without a body.
public partial class UnsecuredAttestationToken : Azure.Security.Attestation.AttestationToken
{
    public UnsecuredAttestationToken() { }
    public UnsecuredAttestationToken(object body) { }
}
}
// Model types for the attestation service. As above, every member body is a
// `throw null` stub: this is a generated API surface listing.
namespace Azure.Security.Attestation.Models
{
    // Claims extracted from an attestation token. Properties prefixed
    // "Deprecated" presumably mirror older claim names kept for
    // compatibility — confirm against the service's claim documentation.
    public partial class AttestationResult
    {
        internal AttestationResult() { }
        public object Confirmation { get { throw null; } }
        public byte[] DeprecatedEnclaveHeldData { get { throw null; } }
        public byte[] DeprecatedEnclaveHeldData2 { get { throw null; } }
        public bool? DeprecatedIsDebuggable { get { throw null; } }
        public string DeprecatedMrEnclave { get { throw null; } }
        public string DeprecatedMrSigner { get { throw null; } }
        public byte[] DeprecatedPolicyHash { get { throw null; } }
        public float? DeprecatedProductId { get { throw null; } }
        public string DeprecatedRpData { get { throw null; } }
        public object DeprecatedSgxCollateral { get { throw null; } }
        public float? DeprecatedSvn { get { throw null; } }
        public string DeprecatedTee { get { throw null; } }
        public string DeprecatedVersion { get { throw null; } }
        public byte[] EnclaveHeldData { get { throw null; } }
        public System.DateTimeOffset Expiration { get { throw null; } }
        public object InittimeClaims { get { throw null; } }
        public bool? IsDebuggable { get { throw null; } }
        public System.DateTimeOffset IssuedAt { get { throw null; } }
        public System.Uri Issuer { get { throw null; } }
        public string MrEnclave { get { throw null; } }
        public string MrSigner { get { throw null; } }
        public string Nonce { get { throw null; } }
        public System.DateTimeOffset NotBefore { get { throw null; } }
        public object PolicyClaims { get { throw null; } }
        public byte[] PolicyHash { get { throw null; } }
        public float? ProductId { get { throw null; } }
        public object RuntimeClaims { get { throw null; } }
        public object SgxCollateral { get { throw null; } }
        public float? Svn { get { throw null; } }
        public string UniqueIdentifier { get { throw null; } }
        public string VerifierType { get { throw null; } }
        public string Version { get { throw null; } }
    }
    // A token signer: one or more X.509 certificates plus an optional key id.
    public partial class AttestationSigner
    {
        public AttestationSigner(System.Security.Cryptography.X509Certificates.X509Certificate2[] signingCertificates, string certificateKeyId) { }
        public string CertificateKeyId { get { throw null; } }
        public System.Collections.Generic.IReadOnlyList<System.Security.Cryptography.X509Certificates.X509Certificate2> SigningCertificates { get { throw null; } }
    }
    // Extensible string-backed enum (struct with implicit string conversion)
    // naming the supported enclave technologies.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct AttestationType : System.IEquatable<Azure.Security.Attestation.Models.AttestationType>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public AttestationType(string value) { throw null; }
        public static Azure.Security.Attestation.Models.AttestationType OpenEnclave { get { throw null; } }
        public static Azure.Security.Attestation.Models.AttestationType SgxEnclave { get { throw null; } }
        public static Azure.Security.Attestation.Models.AttestationType Tpm { get { throw null; } }
        public bool Equals(Azure.Security.Attestation.Models.AttestationType other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Security.Attestation.Models.AttestationType left, Azure.Security.Attestation.Models.AttestationType right) { throw null; }
        public static implicit operator Azure.Security.Attestation.Models.AttestationType (string value) { throw null; }
        public static bool operator !=(Azure.Security.Attestation.Models.AttestationType left, Azure.Security.Attestation.Models.AttestationType right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Extensible string-backed enum describing whether a policy-management
    // certificate is present or absent after a modification call.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct CertificateModification : System.IEquatable<Azure.Security.Attestation.Models.CertificateModification>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public CertificateModification(string value) { throw null; }
        public static Azure.Security.Attestation.Models.CertificateModification IsAbsent { get { throw null; } }
        public static Azure.Security.Attestation.Models.CertificateModification IsPresent { get { throw null; } }
        public bool Equals(Azure.Security.Attestation.Models.CertificateModification other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Security.Attestation.Models.CertificateModification left, Azure.Security.Attestation.Models.CertificateModification right) { throw null; }
        public static implicit operator Azure.Security.Attestation.Models.CertificateModification (string value) { throw null; }
        public static bool operator !=(Azure.Security.Attestation.Models.CertificateModification left, Azure.Security.Attestation.Models.CertificateModification right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Wraps the certificate being added to / removed from the policy set.
    public partial class PolicyCertificateModification
    {
        public PolicyCertificateModification(System.Security.Cryptography.X509Certificates.X509Certificate2 bodyCertificate) { }
        public System.Security.Cryptography.X509Certificates.X509Certificate2 PolicyCertificate { get { throw null; } }
    }
    // Outcome of a policy-certificate add/remove operation.
    public partial class PolicyCertificatesModificationResult
    {
        internal PolicyCertificatesModificationResult() { }
        public Azure.Security.Attestation.Models.CertificateModification? CertificateResolution { get { throw null; } }
        public string CertificateThumbprint { get { throw null; } }
    }
    // Current set of policy-management certificates.
    public partial class PolicyCertificatesResult
    {
        public PolicyCertificatesResult() { }
        public System.Collections.Generic.IReadOnlyList<System.Security.Cryptography.X509Certificates.X509Certificate2> GetPolicyCertificates() { throw null; }
    }
    // Extensible string-backed enum: whether a SetPolicy/ResetPolicy call
    // updated or removed the policy.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct PolicyModification : System.IEquatable<Azure.Security.Attestation.Models.PolicyModification>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public PolicyModification(string value) { throw null; }
        public static Azure.Security.Attestation.Models.PolicyModification Removed { get { throw null; } }
        public static Azure.Security.Attestation.Models.PolicyModification Updated { get { throw null; } }
        public bool Equals(Azure.Security.Attestation.Models.PolicyModification other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Security.Attestation.Models.PolicyModification left, Azure.Security.Attestation.Models.PolicyModification right) { throw null; }
        public static implicit operator Azure.Security.Attestation.Models.PolicyModification (string value) { throw null; }
        public static bool operator !=(Azure.Security.Attestation.Models.PolicyModification left, Azure.Security.Attestation.Models.PolicyModification right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Result of a policy get/set/reset operation.
    public partial class PolicyResult
    {
        public PolicyResult() { }
        public Azure.Security.Attestation.Models.PolicyModification PolicyResolution { get { throw null; } }
        public Azure.Security.Attestation.Models.AttestationSigner PolicySigner { get { throw null; } }
        public byte[] PolicyTokenHash { get { throw null; } }
    }
    // Body type carrying a policy document to be stored.
    public partial class StoredAttestationPolicy
    {
        public StoredAttestationPolicy() { }
        public string AttestationPolicy { get { throw null; } set { } }
    }
    // Request/response payloads for the raw TPM attestation protocol.
    public partial class TpmAttestationRequest
    {
        public TpmAttestationRequest() { }
        public System.ReadOnlyMemory<byte> Data { get { throw null; } set { } }
    }
    public partial class TpmAttestationResponse
    {
        internal TpmAttestationResponse() { }
        public System.ReadOnlyMemory<byte> Data { get { throw null; } }
    }
}
| |
namespace Loon.Action
{
using System;
using System.Collections.Generic;
using Loon.Action.Map;
using Loon.Core.Geom;
using Loon.Utils;
using Loon.Core.Graphics.Component;
using Loon.Utils.Debugging;
public class MoveTo : ActionEvent
{
    // Shared cache of previously computed paths, keyed by GetHashCode() of the
    // action (map contents + start/end tiles + flag). Guarded by lock(pathCache).
    private static readonly Dictionary<Int32, List<Vector2f>> pathCache = new Dictionary<Int32, List<Vector2f>>(
            Loon.Core.LSystem.DEFAULT_MAX_CACHE_SIZE);

    // Private monitor for RandomPathFinder(). Replaces the previous
    // lock(typeof(MoveTo)), which exposed the monitor to any code able to
    // reference the type.
    private static readonly object randomHeuristicLock = new object();

    private Vector2f startLocation, endLocation;

    private Field2D layerMap;

    private bool flag, useCache, synchroLayerField;

    private List<Vector2f> pActorPath;

    private int startX, startY, endX, endY, moveX, moveY;

    private int direction, speed;

    private AStarFindHeuristic heuristic;

    private Vector2f pLocation;

    /// <summary>
    /// Moves the bound actor across <paramref name="map"/> to the pixel
    /// destination (x, y), following an A*-computed tile path.
    /// </summary>
    /// <param name="map">Tile map used for path finding.</param>
    /// <param name="x">Destination x coordinate (pixels).</param>
    /// <param name="y">Destination y coordinate (pixels).</param>
    /// <param name="flag_0">Forwarded to AStarFinder.Find; presumably toggles
    /// diagonal ("flying") movement — confirm against AStarFinder.</param>
    public MoveTo(Field2D map, int x, int y, bool flag_0)
    {
        this.pLocation = new Vector2f();
        this.startLocation = new Vector2f();
        this.endLocation = new Vector2f(x, y);
        this.layerMap = map;
        this.flag = flag_0;
        this.speed = 4;          // default pixels advanced per Update tick
        this.useCache = true;    // path caching on by default
        this.synchroLayerField = false;
    }

    /// <summary>
    /// Convenience overload taking the destination as a vector.
    /// </summary>
    public MoveTo(Field2D map, Vector2f pos, bool flag_0)
        : this(map, pos.X(), pos.Y(), flag_0)
    {
    }

    /// <summary>
    /// Picks one of the standard A* heuristics at random and installs it via
    /// <see cref="SetHeuristic"/>.
    /// </summary>
    public void RandomPathFinder()
    {
        lock (randomHeuristicLock)
        {
            AStarFindHeuristic afh = null;
            int index = Loon.Utils.MathUtils.Random(AStarFindHeuristic.MANHATTAN,
                    AStarFindHeuristic.CLOSEST_SQUARED);
            switch (index)
            {
                case AStarFindHeuristic.MANHATTAN:
                    // BUGFIX: previously mapped to ASTAR_EUCLIDEAN (copy-paste
                    // error; the EUCLIDEAN case below already covers that one).
                    afh = Loon.Action.Map.AStarFinder.ASTAR_MANHATTAN;
                    break;
                case AStarFindHeuristic.MIXING:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_MIXING;
                    break;
                case AStarFindHeuristic.DIAGONAL:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_DIAGONAL;
                    break;
                case AStarFindHeuristic.DIAGONAL_SHORT:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_DIAGONAL_SHORT;
                    break;
                case AStarFindHeuristic.EUCLIDEAN:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_EUCLIDEAN;
                    break;
                case AStarFindHeuristic.EUCLIDEAN_NOSQR:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_EUCLIDEAN_NOSQR;
                    break;
                case AStarFindHeuristic.CLOSEST:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_CLOSEST;
                    break;
                case AStarFindHeuristic.CLOSEST_SQUARED:
                    afh = Loon.Action.Map.AStarFinder.ASTAR_CLOSEST_SQUARED;
                    break;
            }
            SetHeuristic(afh);
        }
    }

    // Current segment start as a float pair (pixels).
    public float[] GetBeginPath()
    {
        return new float[] { startX, startY };
    }

    // Current segment end as a float pair (pixels).
    public float[] GetEndPath()
    {
        return new float[] { endX, endY };
    }

    /// <summary>
    /// Computes (or fetches from the shared cache) the tile path from the
    /// actor's current position to <c>endLocation</c>. Does nothing when the
    /// destination already lies inside the actor's bounding box.
    /// </summary>
    public override void OnLoad()
    {
        if (layerMap == null || original == null)
        {
            return;
        }
        if (!original.GetRectBox().Contains(endLocation.X(), endLocation.Y()))
        {
            if (useCache)
            {
                lock (pathCache)
                {
                    // Crude eviction: wipe everything once the cache grows too large.
                    if (pathCache.Count > Loon.Core.LSystem.DEFAULT_MAX_CACHE_SIZE * 10)
                    {
                        pathCache.Clear();
                    }
                    Int32 key = GetHashCode();
                    List<Vector2f> final_path = (List<Vector2f>)CollectionUtils.Get(pathCache, key);
                    if (final_path == null)
                    {
                        final_path = AStarFinder.Find(heuristic,
                                layerMap,
                                layerMap.PixelsToTilesWidth(startLocation.X()),
                                layerMap.PixelsToTilesHeight(startLocation.Y()),
                                layerMap.PixelsToTilesWidth(endLocation.X()),
                                layerMap.PixelsToTilesHeight(endLocation.Y()), flag);
                        CollectionUtils.Put(pathCache, key, final_path);
                    }
                    // Work on a copy so consuming the path never mutates the cached list.
                    pActorPath = new List<Vector2f>();
                    CollectionUtils.AddAll(final_path, pActorPath);
                }
            }
            else
            {
                pActorPath = Loon.Action.Map.AStarFinder.Find(heuristic, layerMap,
                        layerMap.PixelsToTilesWidth(startLocation.X()),
                        layerMap.PixelsToTilesHeight(startLocation.Y()),
                        layerMap.PixelsToTilesWidth(endLocation.X()),
                        layerMap.PixelsToTilesHeight(endLocation.Y()), flag);
            }
        }
    }

    /// <summary>
    /// Discards the remaining path, effectively ending the move.
    /// </summary>
    public void ClearPath()
    {
        if (pActorPath != null)
        {
            lock (pActorPath)
            {
                CollectionUtils.Clear(pActorPath);
                pActorPath = null;
            }
        }
    }

    /// <summary>
    /// Empties the shared path cache.
    /// </summary>
    public static void ClearPathCache()
    {
        // pathCache is a static readonly field initialized inline; the old
        // "if (pathCache != null)" guard could never fire and was removed.
        lock (pathCache)
        {
            pathCache.Clear();
        }
    }

    // Doubles as the path-cache key: combines the flag, start/end tiles, map
    // dimensions, tile size and map contents.
    // NOTE(review): GetHashCode is overridden without Equals; confirm no
    // hash-based collection relies on equality semantics for MoveTo instances.
    public override int GetHashCode()
    {
        if (layerMap == null || original == null)
        {
            return base.GetHashCode();
        }
        int hashCode = 1;
        hashCode = Loon.Core.LSystem.Unite(hashCode, flag);
        hashCode = Loon.Core.LSystem.Unite(hashCode,
                layerMap.PixelsToTilesWidth(original.X()));
        hashCode = Loon.Core.LSystem.Unite(hashCode,
                layerMap.PixelsToTilesHeight(original.Y()));
        hashCode = Loon.Core.LSystem.Unite(hashCode,
                layerMap.PixelsToTilesWidth(endLocation.X()));
        hashCode = Loon.Core.LSystem.Unite(hashCode,
                layerMap.PixelsToTilesHeight(endLocation.Y()));
        hashCode = Loon.Core.LSystem.Unite(hashCode, layerMap.GetWidth());
        hashCode = Loon.Core.LSystem.Unite(hashCode, layerMap.GetHeight());
        hashCode = Loon.Core.LSystem.Unite(hashCode, layerMap.GetTileWidth());
        hashCode = Loon.Core.LSystem.Unite(hashCode, layerMap.GetTileHeight());
        hashCode = Loon.Core.LSystem.Unite(hashCode,
                Loon.Utils.CollectionUtils.HashCode(layerMap.GetMap()));
        return hashCode;
    }

    public override void Start(ActionBind target)
    {
        base.Start(target);
        // Record where the actor begins so OnLoad can path from here.
        startLocation.Set(target.GetX(), target.GetY());
    }

    // Remaining tile path; may be null before OnLoad or after ClearPath.
    public List<Vector2f> GetPath()
    {
        return pActorPath;
    }

    // Last movement direction (a Config.* constant).
    public int GetDirection()
    {
        return direction;
    }

    public void SetField2D(Field2D field)
    {
        if (field != null)
        {
            this.layerMap = field;
        }
    }

    public Field2D GetField2D()
    {
        return layerMap;
    }

    /// <summary>
    /// Advances the actor by <c>speed</c> pixels toward the next waypoint,
    /// popping path segments as they are reached and updating the facing
    /// direction.
    /// </summary>
    public override void Update(long elapsedTime)
    {
        if (layerMap == null || original == null || pActorPath == null)
        {
            return;
        }
        lock (pActorPath)
        {
            if (synchroLayerField)
            {
                // Optionally follow the actor onto a different map.
                if (original != null)
                {
                    Field2D field = original.GetField2D();
                    if (field != null && layerMap != field)
                    {
                        this.layerMap = field;
                    }
                }
            }
            if (endX == startX && endY == startY)
            {
                // Reached the current waypoint: load the next segment.
                if (pActorPath.Count > 1)
                {
                    Vector2f moveStart = pActorPath[0];
                    Vector2f moveEnd = pActorPath[1];
                    startX = layerMap.TilesToWidthPixels(moveStart.X());
                    startY = layerMap.TilesToHeightPixels(moveStart.Y());
                    endX = moveEnd.X() * layerMap.GetTileWidth();
                    endY = moveEnd.Y() * layerMap.GetTileHeight();
                    moveX = moveEnd.X() - moveStart.X();
                    moveY = moveEnd.Y() - moveStart.Y();
                    // Only single-tile steps yield a meaningful direction.
                    if (moveX > -2 && moveY > -2 && moveX < 2 && moveY < 2)
                    {
                        direction = Loon.Action.Map.Field2D.GetDirection(moveX, moveY,
                                direction);
                    }
                }
                if (pActorPath.Count > 0)
                {
                    // Guard added: RemoveAt(0) on an exhausted path would throw.
                    CollectionUtils.RemoveAt(pActorPath, 0);
                }
            }
            // Step startX/startY toward endX/endY along the facing direction,
            // clamping at the target so the actor never overshoots.
            switch (direction)
            {
                case Config.TUP:
                    startY -= speed;
                    if (startY < endY)
                    {
                        startY = endY;
                    }
                    break;
                case Config.TDOWN:
                    startY += speed;
                    if (startY > endY)
                    {
                        startY = endY;
                    }
                    break;
                case Config.TLEFT:
                    startX -= speed;
                    if (startX < endX)
                    {
                        startX = endX;
                    }
                    break;
                case Config.TRIGHT:
                    startX += speed;
                    if (startX > endX)
                    {
                        startX = endX;
                    }
                    break;
                case Config.UP:
                    startX += speed;
                    startY -= speed;
                    if (startX > endX)
                    {
                        startX = endX;
                    }
                    if (startY < endY)
                    {
                        startY = endY;
                    }
                    break;
                case Config.DOWN:
                    startX -= speed;
                    startY += speed;
                    if (startX < endX)
                    {
                        startX = endX;
                    }
                    if (startY > endY)
                    {
                        startY = endY;
                    }
                    break;
                case Config.LEFT:
                    startX -= speed;
                    startY -= speed;
                    if (startX < endX)
                    {
                        startX = endX;
                    }
                    if (startY < endY)
                    {
                        startY = endY;
                    }
                    break;
                case Config.RIGHT:
                    startX += speed;
                    startY += speed;
                    if (startX > endX)
                    {
                        startX = endX;
                    }
                    if (startY > endY)
                    {
                        startY = endY;
                    }
                    break;
            }
            // NOTE(review): locks an externally supplied object; kept as-is to
            // preserve any existing synchronization with code that locks the actor.
            lock (original)
            {
                original.SetLocation(startX + offsetX, startY + offsetY);
            }
        }
    }

    /// <summary>
    /// Returns the position the actor is currently heading to, or its own
    /// position when no path remains.
    /// </summary>
    public Vector2f NextPos()
    {
        if (pActorPath != null)
        {
            lock (pActorPath)
            {
                int size = pActorPath.Count;
                if (size > 0)
                {
                    pLocation.Set(endX, endY);
                }
                else
                {
                    pLocation.Set(original.GetX(), original.GetY());
                }
                return pLocation;
            }
        }
        else
        {
            pLocation.Set(original.GetX(), original.GetY());
            return pLocation;
        }
    }

    public int GetSpeed()
    {
        return speed;
    }

    // Pixels moved per Update tick.
    public void SetSpeed(int speed_0)
    {
        this.speed = speed_0;
    }

    public override bool IsComplete()
    {
        return pActorPath == null || pActorPath.Count == 0 || isComplete
                || original == null;
    }

    public bool IsUseCache()
    {
        return useCache;
    }

    public void SetUseCache(bool useCache_0)
    {
        this.useCache = useCache_0;
    }

    public bool IsSynchroLayerField()
    {
        return synchroLayerField;
    }

    public void SetSynchroLayerField(bool syn)
    {
        this.synchroLayerField = syn;
    }

    public AStarFindHeuristic GetHeuristic()
    {
        return heuristic;
    }

    public void SetHeuristic(AStarFindHeuristic h)
    {
        this.heuristic = h;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Linq;
using System.Net.Test.Common;
using System.Security.Principal;
using System.Text;
using System.Threading.Tasks;
using Xunit;
namespace System.Net.Security.Tests
{
[PlatformSpecific(TestPlatforms.Windows)] // NegotiateStream client needs explicit credentials or SPNs on unix.
public abstract class NegotiateStreamStreamToStreamTest
{
public static bool IsNtlmInstalled => Capability.IsNtlmInstalled();
private const int PartialBytesToRead = 5;
private static readonly byte[] s_sampleMsg = Encoding.UTF8.GetBytes("Sample Test Message");
private const int MaxWriteDataSize = 63 * 1024; // NegoState.MaxWriteDataSize
private static string s_longString = new string('A', MaxWriteDataSize) + 'Z';
private static readonly byte[] s_longMsg = Encoding.ASCII.GetBytes(s_longString);
protected abstract Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName);
protected abstract Task AuthenticateAsServerAsync(NegotiateStream server);
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_StreamToStream_Authentication_Success()
{
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new NegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Assert.False(server.IsAuthenticated);
Task[] auth = new Task[2];
auth[0] = AuthenticateAsClientAsync(client, CredentialCache.DefaultNetworkCredentials, string.Empty);
auth[1] = AuthenticateAsServerAsync(server);
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(auth);
// Expected Client property values:
Assert.True(client.IsAuthenticated);
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted);
Assert.False(client.IsMutuallyAuthenticated);
Assert.False(client.IsServer);
Assert.True(client.IsSigned);
Assert.False(client.LeaveInnerStreamOpen);
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("NTLM", serverIdentity.AuthenticationType);
Assert.False(serverIdentity.IsAuthenticated);
Assert.Equal("", serverIdentity.Name);
// Expected Server property values:
Assert.True(server.IsAuthenticated);
Assert.Equal(TokenImpersonationLevel.Identification, server.ImpersonationLevel);
Assert.True(server.IsEncrypted);
Assert.False(server.IsMutuallyAuthenticated);
Assert.True(server.IsServer);
Assert.True(server.IsSigned);
Assert.False(server.LeaveInnerStreamOpen);
IIdentity clientIdentity = server.RemoteIdentity;
Assert.Equal("NTLM", clientIdentity.AuthenticationType);
Assert.True(clientIdentity.IsAuthenticated);
IdentityValidator.AssertIsCurrentIdentity(clientIdentity);
}
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_StreamToStream_Authentication_TargetName_Success()
{
string targetName = "testTargetName";
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new NegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Assert.False(server.IsAuthenticated);
Assert.False(client.IsMutuallyAuthenticated);
Assert.False(server.IsMutuallyAuthenticated);
Task[] auth = new Task[2];
auth[0] = AuthenticateAsClientAsync(client, CredentialCache.DefaultNetworkCredentials, targetName);
auth[1] = AuthenticateAsServerAsync(server);
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(auth);
// Expected Client property values:
Assert.True(client.IsAuthenticated);
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted);
Assert.False(client.IsMutuallyAuthenticated);
Assert.False(client.IsServer);
Assert.True(client.IsSigned);
Assert.False(client.LeaveInnerStreamOpen);
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("NTLM", serverIdentity.AuthenticationType);
Assert.True(serverIdentity.IsAuthenticated);
Assert.Equal(targetName, serverIdentity.Name);
// Expected Server property values:
Assert.True(server.IsAuthenticated);
Assert.Equal(TokenImpersonationLevel.Identification, server.ImpersonationLevel);
Assert.True(server.IsEncrypted);
Assert.False(server.IsMutuallyAuthenticated);
Assert.True(server.IsServer);
Assert.True(server.IsSigned);
Assert.False(server.LeaveInnerStreamOpen);
IIdentity clientIdentity = server.RemoteIdentity;
Assert.Equal("NTLM", clientIdentity.AuthenticationType);
Assert.True(clientIdentity.IsAuthenticated);
IdentityValidator.AssertIsCurrentIdentity(clientIdentity);
}
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_StreamToStream_Authentication_EmptyCredentials_Fails()
{
string targetName = "testTargetName";
// Ensure there is no confusion between DefaultCredentials / DefaultNetworkCredentials and a
// NetworkCredential object with empty user, password and domain.
NetworkCredential emptyNetworkCredential = new NetworkCredential("", "", "");
Assert.NotEqual(emptyNetworkCredential, CredentialCache.DefaultCredentials);
Assert.NotEqual(emptyNetworkCredential, CredentialCache.DefaultNetworkCredentials);
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new NegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Assert.False(server.IsAuthenticated);
Task[] auth = new Task[2];
auth[0] = AuthenticateAsClientAsync(client, emptyNetworkCredential, targetName);
auth[1] = AuthenticateAsServerAsync(server);
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(auth);
// Expected Client property values:
Assert.True(client.IsAuthenticated);
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted);
Assert.False(client.IsMutuallyAuthenticated);
Assert.False(client.IsServer);
Assert.True(client.IsSigned);
Assert.False(client.LeaveInnerStreamOpen);
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("NTLM", serverIdentity.AuthenticationType);
Assert.True(serverIdentity.IsAuthenticated);
Assert.Equal(targetName, serverIdentity.Name);
// Expected Server property values:
Assert.True(server.IsAuthenticated);
Assert.Equal(TokenImpersonationLevel.Identification, server.ImpersonationLevel);
Assert.True(server.IsEncrypted);
Assert.False(server.IsMutuallyAuthenticated);
Assert.True(server.IsServer);
Assert.True(server.IsSigned);
Assert.False(server.LeaveInnerStreamOpen);
IIdentity clientIdentity = server.RemoteIdentity;
Assert.Equal("NTLM", clientIdentity.AuthenticationType);
// TODO #5241: Behavior difference:
Assert.False(clientIdentity.IsAuthenticated);
// On .NET Desktop: Assert.True(clientIdentity.IsAuthenticated);
IdentityValidator.AssertHasName(clientIdentity, new SecurityIdentifier(WellKnownSidType.AnonymousSid, null).Translate(typeof(NTAccount)).Value);
}
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_StreamToStream_Successive_ClientWrite_Sync_Success()
{
byte[] recvBuf = new byte[s_sampleMsg.Length];
VirtualNetwork network = new VirtualNetwork();
int bytesRead = 0;
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new NegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Assert.False(server.IsAuthenticated);
Task[] auth = new Task[2];
auth[0] = AuthenticateAsClientAsync(client, CredentialCache.DefaultNetworkCredentials, string.Empty);
auth[1] = AuthenticateAsServerAsync(server);
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(auth);
client.Write(s_sampleMsg, 0, s_sampleMsg.Length);
server.Read(recvBuf, 0, s_sampleMsg.Length);
Assert.True(s_sampleMsg.SequenceEqual(recvBuf));
client.Write(s_sampleMsg, 0, s_sampleMsg.Length);
// Test partial sync read.
bytesRead = server.Read(recvBuf, 0, PartialBytesToRead);
Assert.Equal(PartialBytesToRead, bytesRead);
bytesRead = server.Read(recvBuf, PartialBytesToRead, s_sampleMsg.Length - PartialBytesToRead);
Assert.Equal(s_sampleMsg.Length - PartialBytesToRead, bytesRead);
Assert.True(s_sampleMsg.SequenceEqual(recvBuf));
}
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_StreamToStream_Successive_ClientWrite_Async_Success()
{
byte[] recvBuf = new byte[s_sampleMsg.Length];
VirtualNetwork network = new VirtualNetwork();
int bytesRead = 0;
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new NegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Assert.False(server.IsAuthenticated);
Task[] auth = new Task[2];
auth[0] = AuthenticateAsClientAsync(client, CredentialCache.DefaultNetworkCredentials, string.Empty);
auth[1] = AuthenticateAsServerAsync(server);
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(auth);
auth[0] = client.WriteAsync(s_sampleMsg, 0, s_sampleMsg.Length);
auth[1] = server.ReadAsync(recvBuf, 0, s_sampleMsg.Length);
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(auth);
Assert.True(s_sampleMsg.SequenceEqual(recvBuf));
await client.WriteAsync(s_sampleMsg, 0, s_sampleMsg.Length);
// Test partial async read.
bytesRead = await server.ReadAsync(recvBuf, 0, PartialBytesToRead);
Assert.Equal(PartialBytesToRead, bytesRead);
bytesRead = await server.ReadAsync(recvBuf, PartialBytesToRead, s_sampleMsg.Length - PartialBytesToRead);
Assert.Equal(s_sampleMsg.Length - PartialBytesToRead, bytesRead);
Assert.True(s_sampleMsg.SequenceEqual(recvBuf));
}
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_ReadWriteLongMsgSync_Success()
{
byte[] recvBuf = new byte[s_longMsg.Length];
var network = new VirtualNetwork();
int bytesRead = 0;
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new NegotiateStream(serverStream))
{
await TestConfiguration.WhenAllOrAnyFailedWithTimeout(
client.AuthenticateAsClientAsync(CredentialCache.DefaultNetworkCredentials, string.Empty),
server.AuthenticateAsServerAsync());
client.Write(s_longMsg, 0, s_longMsg.Length);
while (bytesRead < s_longMsg.Length)
{
bytesRead += server.Read(recvBuf, bytesRead, s_longMsg.Length - bytesRead);
}
Assert.True(s_longMsg.SequenceEqual(recvBuf));
}
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public async Task NegotiateStream_ReadWriteLongMsgAsync_Success()
{
    // Async counterpart of the long-message test: the client writes one large
    // payload via WriteAsync and the server drains it with repeated
    // ReadAsync calls.
    byte[] recvBuf = new byte[s_longMsg.Length];
    var network = new VirtualNetwork();
    int bytesRead = 0;

    using (var clientStream = new VirtualNetworkStream(network, isServer: false))
    using (var serverStream = new VirtualNetworkStream(network, isServer: true))
    using (var client = new NegotiateStream(clientStream))
    using (var server = new NegotiateStream(serverStream))
    {
        // Authenticate both ends concurrently with a timeout so a handshake
        // failure on either side fails the test promptly.
        await TestConfiguration.WhenAllOrAnyFailedWithTimeout(
            client.AuthenticateAsClientAsync(CredentialCache.DefaultNetworkCredentials, string.Empty),
            server.AuthenticateAsServerAsync());

        await client.WriteAsync(s_longMsg, 0, s_longMsg.Length);

        // ReadAsync may complete with fewer bytes than requested; accumulate
        // until the full message has arrived.
        while (bytesRead < s_longMsg.Length)
        {
            bytesRead += await server.ReadAsync(recvBuf, bytesRead, s_longMsg.Length - bytesRead);
        }

        Assert.True(s_longMsg.SequenceEqual(recvBuf));
    }
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public void NegotiateStream_StreamToStream_Flush_Propagated()
{
    // A synchronous Flush on the NegotiateStream must be forwarded to the
    // underlying transport stream.
    var virtualNetwork = new VirtualNetwork();
    using (var innerStream = new VirtualNetworkStream(virtualNetwork, isServer: false))
    {
        using (var wrapper = new NegotiateStream(innerStream))
        {
            Assert.False(innerStream.HasBeenSyncFlushed);
            wrapper.Flush();
            Assert.True(innerStream.HasBeenSyncFlushed);
        }
    }
}
[ConditionalFact(nameof(IsNtlmInstalled))]
public void NegotiateStream_StreamToStream_FlushAsync_Propagated()
{
    // FlushAsync must be forwarded to the inner stream: the returned task
    // only completes once the transport's pending async flush completes.
    var virtualNetwork = new VirtualNetwork();
    using (var innerStream = new VirtualNetworkStream(virtualNetwork, isServer: false))
    {
        using (var wrapper = new NegotiateStream(innerStream))
        {
            Task flushTask = wrapper.FlushAsync();
            Assert.False(flushTask.IsCompleted);

            innerStream.CompleteAsyncFlush();
            Assert.True(flushTask.IsCompleted);
        }
    }
}
}
public sealed class NegotiateStreamStreamToStreamTest_Async : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the plain Task-based async overloads.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return client.AuthenticateAsClientAsync(credential, targetName);
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return server.AuthenticateAsServerAsync();
    }
}
public sealed class NegotiateStreamStreamToStreamTest_Async_TestOverloadNullBinding : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the async overloads that take an explicit
    // (null) channel binding.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return client.AuthenticateAsClientAsync(credential, null, targetName);
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return server.AuthenticateAsServerAsync(null);
    }
}
public sealed class NegotiateStreamStreamToStreamTest_Async_TestOverloadProtectionLevel : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the async overloads that specify an
    // explicit ProtectionLevel and TokenImpersonationLevel.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return client.AuthenticateAsClientAsync(credential, targetName, ProtectionLevel.EncryptAndSign, TokenImpersonationLevel.Identification);
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return server.AuthenticateAsServerAsync((NetworkCredential)CredentialCache.DefaultCredentials, ProtectionLevel.EncryptAndSign, TokenImpersonationLevel.Identification);
    }
}
public sealed class NegotiateStreamStreamToStreamTest_Async_TestOverloadAllParameters : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the most fully-specified async overloads:
    // credential, (null) binding, target, protection level and impersonation level.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return client.AuthenticateAsClientAsync(credential, null, targetName, ProtectionLevel.EncryptAndSign, TokenImpersonationLevel.Identification);
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return server.AuthenticateAsServerAsync((NetworkCredential)CredentialCache.DefaultCredentials, null, ProtectionLevel.EncryptAndSign, TokenImpersonationLevel.Identification);
    }
}
public sealed class NegotiateStreamStreamToStreamTest_BeginEnd : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the legacy APM Begin/End pattern, bridged
    // to Task via Task.Factory.FromAsync.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return Task.Factory.FromAsync(client.BeginAuthenticateAsClient, client.EndAuthenticateAsClient, credential, targetName, null);
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return Task.Factory.FromAsync(server.BeginAuthenticateAsServer, server.EndAuthenticateAsServer, null);
    }
}
public sealed class NegotiateStreamStreamToStreamTest_Sync : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the synchronous APIs, offloaded to the
    // thread pool so both sides can handshake concurrently.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return Task.Run(() => client.AuthenticateAsClient(credential, targetName));
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return Task.Run(() => server.AuthenticateAsServer());
    }
}
public sealed class NegotiateStreamStreamToStreamTest_Sync_TestOverloadNullBinding : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the synchronous overloads that take an
    // explicit (null) channel binding, offloaded to the thread pool.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return Task.Run(() => client.AuthenticateAsClient(credential, null, targetName));
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return Task.Run(() => server.AuthenticateAsServer(null));
    }
}
public sealed class NegotiateStreamStreamToStreamTest_Sync_TestOverloadAllParameters : NegotiateStreamStreamToStreamTest
{
    // Runs the shared suite through the fully-specified synchronous overloads,
    // offloaded to the thread pool.
    protected override Task AuthenticateAsClientAsync(NegotiateStream client, NetworkCredential credential, string targetName)
    {
        return Task.Run(() => client.AuthenticateAsClient(credential, targetName, ProtectionLevel.EncryptAndSign, TokenImpersonationLevel.Identification));
    }

    protected override Task AuthenticateAsServerAsync(NegotiateStream server)
    {
        return Task.Run(() => server.AuthenticateAsServer((NetworkCredential)CredentialCache.DefaultCredentials, ProtectionLevel.EncryptAndSign, TokenImpersonationLevel.Identification));
    }
}
}
| |
using System;
using System.Collections.Generic;
using Xunit;
namespace Peddler {
public class MaybeDefaultDistinctGeneratorTests : MaybeDefaultGeneratorTests {

    // Test suite for MaybeDefaultDistinctGenerator<T>. The sealed overrides
    // below route every factory used by the inherited MaybeDefaultGeneratorTests
    // suite through the distinct-capable generator, so all base-class tests
    // also run against it; the members further down add NextDistinct-specific
    // cases. NOTE(review): numberOfAttempts, extendedNumberOfAttempts and
    // InvokeGenericMethod are presumably inherited from MaybeDefaultGeneratorTests
    // — confirm against the base class.

    protected sealed override MaybeDefaultGenerator<T> MaybeDefault<T>(
        IComparableGenerator<T> inner) {
        return this.MaybeDefaultDistinct<T>(inner);
    }

    protected sealed override MaybeDefaultGenerator<T> MaybeDefault<T>(
        IComparableGenerator<T> inner,
        T defaultValue) {
        return this.MaybeDefaultDistinct<T>(inner, defaultValue);
    }

    protected sealed override MaybeDefaultGenerator<T> MaybeDefault<T>(
        IComparableGenerator<T> inner,
        decimal percentage) {
        return this.MaybeDefaultDistinct<T>(inner, percentage);
    }

    protected sealed override MaybeDefaultGenerator<T> MaybeDefault<T>(
        IComparableGenerator<T> inner,
        T defaultValue,
        decimal percentage) {
        return this.MaybeDefaultDistinct<T>(inner, defaultValue, percentage);
    }

    // Factory helpers: each constructs the generator under test and
    // sanity-checks that DefaultValue matches the constructor overload used.

    protected virtual MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
        IComparableGenerator<T> inner) {
        var generator = new MaybeDefaultDistinctGenerator<T>(inner);
        Assert.Equal(default(T), generator.DefaultValue);
        return generator;
    }

    protected virtual MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
        IComparableGenerator<T> inner,
        T defaultValue) {
        var generator = new MaybeDefaultDistinctGenerator<T>(inner, defaultValue);
        Assert.Equal(defaultValue, generator.DefaultValue);
        return generator;
    }

    protected virtual MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
        IComparableGenerator<T> inner,
        decimal percentage) {
        var generator = new MaybeDefaultDistinctGenerator<T>(inner, percentage);
        Assert.Equal(default(T), generator.DefaultValue);
        return generator;
    }

    protected virtual MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
        IComparableGenerator<T> inner,
        T defaultValue,
        decimal percentage) {
        var generator = new MaybeDefaultDistinctGenerator<T>(inner, defaultValue, percentage);
        Assert.Equal(defaultValue, generator.DefaultValue);
        return generator;
    }

    // Pairs of (inner generator, alternate default value) used to exercise
    // both struct- and class-based inner generators. The custom default values
    // are deliberately outside each Int32Generator's range.
    public static IEnumerable<object[]> FakeGenerators {
        get {
            return new List<object[]> {
                new object[] {
                    new FakeStructGenerator(new Int32Generator(-10, -1)),
                    new FakeStruct { Value = -200 }
                },
                new object[] {
                    new FakeStructGenerator(new Int32Generator(-10, 10)),
                    new FakeStruct { Value = -100 }
                },
                new object[] {
                    new FakeStructGenerator(new Int32Generator(1, 10)),
                    new FakeStruct { Value = -50 }
                },
                new object[] {
                    new FakeStructGenerator(new Int32Generator(0, 2)),
                    new FakeStruct { Value = -10 }
                },
                new object[] {
                    new FakeClassGenerator(),
                    new FakeClass(-100)
                }
            };
        }
    }

    // Entry point is non-generic so xUnit can feed it loosely-typed member
    // data; the real, generic assertions live in the *Impl method, which is
    // located by name and closed over the runtime types via reflection.
    [Theory]
    [MemberData(nameof(FakeGenerators))]
    public void NextDistinct_InnerCanGenerateNonDefault(Object inner, Object defaultValue) {
        this.InvokeGenericMethod(
            nameof(NextDistinct_InnerCanGenerateNonDefaultImpl),
            inner,
            defaultValue
        );
    }

    // When asked for a value distinct from default(T), the generator must
    // never return default(T) — regardless of which DefaultValue it carries.
    protected void NextDistinct_InnerCanGenerateNonDefaultImpl<T>(
        IComparableGenerator<T> inner,
        T defaultValue) {
        var autoDefaultGenerator = this.MaybeDefaultDistinct<T>(inner);
        for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
            var value = autoDefaultGenerator.NextDistinct(default(T));
            Assert.NotEqual(default(T), value);
            Assert.False(
                autoDefaultGenerator.EqualityComparer.Equals(default(T), value)
            );
        }
        var specificDefaultGenerator = this.MaybeDefaultDistinct<T>(inner, defaultValue);
        for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
            var value = specificDefaultGenerator.NextDistinct(default(T));
            Assert.NotEqual(default(T), value);
            Assert.False(
                specificDefaultGenerator.EqualityComparer.Equals(default(T), value)
            );
        }
    }

    // Inner generators that can only ever emit their configured default value.
    public static IEnumerable<object[]> DefaultReturningGenerators {
        get {
            return new List<object[]> {
                new object[] { new DefaultGenerator<object>() },
                new object[] { new DefaultGenerator<int>() },
                new object[] { new DefaultGenerator<FakeClass>() },
                new object[] { new DefaultGenerator<FakeStruct>() },
                new object[] { new DefaultGenerator<object>(new object()) },
                new object[] { new DefaultGenerator<int>(5) },
                new object[] { new DefaultGenerator<FakeClass>(new FakeClass(-2)) }
            };
        }
    }

    [Theory]
    [MemberData(nameof(DefaultReturningGenerators))]
    public void NextDistinct_InnerOnlyReturnsDefault(Object inner) {
        this.InvokeGenericMethod(
            nameof(NextDistinct_InnerOnlyReturnsDefaultImpl),
            inner
        );
    }

    // If the inner generator can only produce the default value and that exact
    // value must be avoided, generation is impossible and must throw.
    protected void NextDistinct_InnerOnlyReturnsDefaultImpl<T>(
        DefaultGenerator<T> inner) {
        var generator = this.MaybeDefaultDistinct<T>(inner, inner.DefaultValue);
        Assert.Throws<UnableToGenerateValueException>(
            () => generator.NextDistinct(inner.DefaultValue)
        );
    }

    // Default-value percentages that should not matter for the scenario below.
    public static IEnumerable<object[]> IgnoredPercentages {
        get {
            yield return new object[] { 0m };
            yield return new object[] { 0.5m };
            yield return new object[] { 1m };
        }
    }

    [Theory]
    [MemberData(nameof(IgnoredPercentages))]
    public void NextDistinct_InnerFailsButDefaultOk(decimal percentage) {
        var inner = new FakeStructGenerator(new Int32Generator(2, 3));
        NextDistinct_InnerFailsButDefaultOkImpl(
            this.MaybeDefaultDistinct(inner, percentage)
        );
        NextDistinct_InnerFailsButDefaultOkImpl(
            this.MaybeDefaultDistinct(inner, new FakeStruct { Value = -100 }, percentage)
        );
    }

    private void NextDistinct_InnerFailsButDefaultOkImpl(
        MaybeDefaultDistinctGenerator<FakeStruct> generator) {
        for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
            // 2 is the only thing that can be returned by FakeStructGenerator,
            // but default (which is 0) is ok.
            var value = generator.NextDistinct(new FakeStruct { Value = 2 });
            Assert.Equal(generator.DefaultValue, value);
            Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
        }
    }

    [Theory]
    [MemberData(nameof(FakeGenerators))]
    public void NextDistinct_GeneratesDefaultAndNonDefault(Object inner, Object defaultValue) {
        // NOTE(review): "Change" in the target method name looks like a typo
        // for "Chance"; kept as-is because the name is resolved via nameof.
        this.InvokeGenericMethod(
            nameof(NextDistinct_WithFiftyPercentChangeOfDefault),
            inner,
            defaultValue
        );
    }

    protected void NextDistinct_WithFiftyPercentChangeOfDefault<T>(
        IComparableGenerator<T> inner,
        T defaultValue) {
        const decimal percentage = 0.5m;
        NextDistinct_WithFiftyPercentChangeOfDefaultImpl<T>(
            this.MaybeDefaultDistinct<T>(inner, percentage),
            inner.EqualityComparer,
            percentage
        );
        NextDistinct_WithFiftyPercentChangeOfDefaultImpl<T>(
            this.MaybeDefaultDistinct<T>(inner, defaultValue, percentage),
            inner.EqualityComparer,
            percentage
        );
    }

    // Statistical check: with a 50% default chance, repeated NextDistinct
    // calls must eventually produce both a default and a non-default value.
    private void NextDistinct_WithFiftyPercentChangeOfDefaultImpl<T>(
        MaybeDefaultDistinctGenerator<T> generator,
        IEqualityComparer<T> innerComparer,
        decimal percentage) {
        var hasDefault = false;
        var hasNonDefault = false;
        for (var attempt = 0; attempt < extendedNumberOfAttempts; attempt++) {
            var value = generator.NextDistinct(generator.Next());
            if (!hasDefault) {
                hasDefault = innerComparer.Equals(value, generator.DefaultValue);
            }
            if (!hasNonDefault) {
                hasNonDefault = !innerComparer.Equals(value, generator.DefaultValue);
            }
            // Stop early once both outcomes have been observed.
            if (hasDefault && hasNonDefault) {
                break;
            }
        }
        Assert.True(
            hasDefault,
            $"After {extendedNumberOfAttempts:N0} attempts with a {percentage * 100}% " +
            $"percentage chance of generating default values, the generator did not " +
            $"generate a default value. The randomization approach is unbalanced."
        );
        Assert.True(
            hasNonDefault,
            $"After {extendedNumberOfAttempts:N0} attempts with a {percentage * 100}% " +
            $"percentage chance of generating default values, the generator did not " +
            $"generate a non-default value. The randomization approach is unbalanced."
        );
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Diagnostics;
using System.Threading;
using System.Runtime.InteropServices;
public class Co9604get_IsRunning
{
    // Legacy self-hosted test for Stopwatch.IsRunning, written against a
    // pre-xUnit harness: Main returns 100 on success, 1 on failure.
    // The metadata strings below follow the historical test template.
    public static String s_strActiveBugNums = "";
    public static String s_strDtTmVer = "2003/02/12 08:43 LakshanF";
    public static String s_strClassMethod = "StopWatch.IsRunning";
    public static String s_strTFName = "Co9604get_IsRunning.cs";
    public static String s_strTFAbbrev = s_strTFName.Substring(0, 6);

    // Runs every scenario; returns true only if no scenario recorded an error.
    public Boolean runTest()
    {
        int iCountErrors = 0;
        int iCountTestcases = 0;
        // Last checkpoint reached, reported when an unexpected exception escapes.
        String strLoc = "Loc_000oo";
        Stopwatch watch;
        try
        {
            // Scenario 1: Create a new Stopwatch instance and ensure that IsRunning is false
            // Scenario 2: Start and ensure that the property is true
            // Scenario 3: Start, stop and check that property is false
            strLoc = "Loc_001oo";
            iCountTestcases++;
            try
            {
                watch = new Stopwatch();
                if(watch.IsRunning)
                {
                    iCountErrors++;
                    Console.WriteLine("Err_367sfg! Unexpected value returned: {0}", watch.IsRunning);
                }
                watch.Start();
                if(!watch.IsRunning)
                {
                    iCountErrors++;
                    Console.WriteLine("Err_3tw7sdg! Unexpected value returned: {0}", watch.IsRunning);
                }
                watch.Stop();
                if(watch.IsRunning)
                {
                    iCountErrors++;
                    Console.WriteLine("Err_24t7dg! Unexpected value returned: {0}", watch.IsRunning);
                }
            }catch(Exception ex){
                iCountErrors++;
                Console.WriteLine("Err_346gr! Unexpected exception thrown! {0}", ex);
            }
            // Scenario 4: Start multiple times and check that property is true
            // Scenario 5: Stop multiple times and check property is false
            // (Start/Stop are expected to be idempotent with respect to IsRunning.)
            strLoc = "Loc_002oo";
            iCountTestcases++;
            try
            {
                watch = new Stopwatch();
                for(int i=0; i<10; i++)
                {
                    watch.Start();
                    if(!watch.IsRunning)
                    {
                        iCountErrors++;
                        Console.WriteLine("Err_23readg_{0}! Unexpected value returned: {1}", i, watch.IsRunning);
                    }
                }
                for(int i=0; i<10; i++)
                {
                    watch.Stop();
                    if(watch.IsRunning)
                    {
                        iCountErrors++;
                        Console.WriteLine("Err_234457gd_{0}! Unexpected value returned: {1}", i, watch.IsRunning);
                    }
                }
            }catch(Exception ex){
                iCountErrors++;
                Console.WriteLine("Err_346gr! Unexpected exception thrown! {0}", ex);
            }
            // Scenario 6: Start, Reset and then check that property is false
            strLoc = "Loc_003oo";
            iCountTestcases++;
            try
            {
                watch = new Stopwatch();
                watch.Start();
                watch.Reset();
                if(watch.IsRunning)
                {
                    iCountErrors++;
                    Console.WriteLine("Err_24tsdg! Unexpected value returned: {0}", watch.IsRunning);
                }
            }catch(Exception ex){
                iCountErrors++;
                Console.WriteLine("Err_346gr! Unexpected exception thrown! {0}", ex);
            }
            // Scenario 7: Reset (once and then multiple times) and check that property is false
            strLoc = "Loc_005oo";
            iCountTestcases++;
            try
            {
                watch = new Stopwatch();
                for(int i=0; i<10; i++)
                {
                    watch.Reset();
                    if(watch.IsRunning)
                    {
                        iCountErrors++;
                        Console.WriteLine("Err_234t7g_{0}! Unexpected value returned: {1}", i, watch.IsRunning);
                    }
                }
            }catch(Exception ex){
                iCountErrors++;
                Console.WriteLine("Err_346gr! Unexpected exception thrown! {0}", ex);
            }
        }
        catch(Exception globalE)
        {
            iCountErrors++;
            Console.WriteLine("Err_9374sfg! Unexpected exception thrown: location: {0}\r\n{1}", strLoc, globalE);
        }
        //// Finish Diagnostics
        if ( iCountErrors == 0 )
        {
            return true;
        }
        else
        {
            return false;
        }
    }

    // Harness entry point: 100 signals pass, 1 signals failure.
    static int Main()
    {
        Boolean bResult = false;
        Co9604get_IsRunning cbA = new Co9604get_IsRunning();
        try
        {
            bResult = cbA.runTest();
        }
        catch (Exception exc_main)
        {
            bResult = false;
            Console.WriteLine(s_strTFAbbrev + " : FAiL! Error Err_9999zzz! Uncaught Exception in main(), exc_main=="+exc_main);
        }
        if (bResult)
        {
            Console.WriteLine("Pass");
            return 100;
        }
        else
        {
            Console.WriteLine("FAiL! " + s_strTFAbbrev);
            Console.WriteLine(" ");
            return 1;
        }
    }
}
| |
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
namespace Lucene.Net.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A <see cref="PriorityQueue{T}"/> maintains a partial ordering of its elements such that the
/// element with least priority can always be found in constant time. Put()'s and Pop()'s
/// require log(size) time.
///
/// <para/><b>NOTE</b>: this class will pre-allocate a full array of
/// length <c>maxSize+1</c> if instantiated via the
/// <see cref="PriorityQueue(int, bool)"/> constructor with
/// <c>prepopulate</c> set to <c>true</c>. That maximum
/// size can grow as we insert elements over the time.
/// <para/>
/// @lucene.internal
/// </summary>
#if FEATURE_SERIALIZABLE
[Serializable]
#endif
public abstract class PriorityQueue<T>
{
    // Number of elements currently stored. The heap array is 1-based:
    // heap[0] is never used; the least element always sits at heap[1].
    private int size = 0;
    private readonly int maxSize;
    private readonly T[] heap;

    protected PriorityQueue(int maxSize) // LUCENENET specific - made protected instead of public
        : this(maxSize, true)
    {
    }

    protected PriorityQueue(int maxSize, bool prepopulate) // LUCENENET specific - made protected instead of public
    {
        int heapSize;
        if (0 == maxSize)
        {
            // We allocate 1 extra to avoid if statement in top()
            heapSize = 2;
        }
        else
        {
            if (maxSize > ArrayUtil.MAX_ARRAY_LENGTH)
            {
                // Don't wrap heapSize to -1, in this case, which
                // causes a confusing NegativeArraySizeException.
                // Note that very likely this will simply then hit
                // an OOME, but at least that's more indicative to
                // caller that this values is too big. We don't +1
                // in this case, but it's very unlikely in practice
                // one will actually insert this many objects into
                // the PQ:
                // Throw exception to prevent confusing OOME:
                throw new ArgumentException("maxSize must be <= " + ArrayUtil.MAX_ARRAY_LENGTH + "; got: " + maxSize);
            }
            else
            {
                // NOTE: we add +1 because all access to heap is
                // 1-based not 0-based. heap[0] is unused.
                heapSize = maxSize + 1;
            }
        }
        // T is unbounded type, so this unchecked cast works always:
        T[] h = new T[heapSize];
        this.heap = h;
        this.maxSize = maxSize;
        if (prepopulate)
        {
            // If sentinel objects are supported, populate the queue with them.
            // A sentinel equal to default(T) means "not supported" and leaves
            // the queue empty.
            T sentinel = GetSentinelObject();
            if (!EqualityComparer<T>.Default.Equals(sentinel, default))
            {
                heap[1] = sentinel;
                for (int i = 2; i < heap.Length; i++)
                {
                    // Each slot gets its own sentinel instance; see the
                    // GetSentinelObject() contract below.
                    heap[i] = GetSentinelObject();
                }
                size = maxSize;
            }
        }
    }

    /// <summary>
    /// Determines the ordering of objects in this priority queue. Subclasses
    /// must define this one method. </summary>
    /// <returns> <c>true</c> if parameter <paramref name="a"/> is less than parameter <paramref name="b"/>. </returns>
    protected internal abstract bool LessThan(T a, T b); // LUCENENET: Internal for testing

    /// <summary>
    /// This method can be overridden by extending classes to return a sentinel
    /// object which will be used by the <see cref="PriorityQueue(int, bool)"/>
    /// constructor to fill the queue, so that the code which uses that queue can always
    /// assume it's full and only change the top without attempting to insert any new
    /// object.
    /// <para/>
    /// Those sentinel values should always compare worse than any non-sentinel
    /// value (i.e., <see cref="LessThan(T, T)"/> should always favor the
    /// non-sentinel values).
    /// <para/>
    /// By default, this method returns <c>default</c> (<c>null</c> for reference
    /// types), which means the queue will not be
    /// filled with sentinel values. Otherwise, the value returned will be used to
    /// pre-populate the queue with sentinel values.
    /// <para/>
    /// If this method is extended to return a non-null value, then the following
    /// usage pattern is recommended:
    ///
    /// <code>
    /// // extends GetSentinelObject() to return a non-null value.
    /// PriorityQueue&lt;MyObject&gt; pq = new MyQueue&lt;MyObject&gt;(numHits);
    /// // save the 'top' element, which is guaranteed to not be null.
    /// MyObject pqTop = pq.Top;
    /// &lt;...&gt;
    /// // now in order to add a new element, which is 'better' than top (after
    /// // you've verified it is better), it is as simple as:
    /// pqTop.Change().
    /// pqTop = pq.UpdateTop();
    /// </code>
    /// <para/>
    /// <b>NOTE:</b> if this method returns a non-<c>null</c> value, it will be called by
    /// the <see cref="PriorityQueue(int, bool)"/> constructor
    /// <see cref="Count"/> times, relying on a new object to be returned and will not
    /// check if it's <c>null</c> again. Therefore you should ensure any call to this
    /// method creates a new instance and behaves consistently, e.g., it cannot
    /// return <c>null</c> if it previously returned non-<c>null</c>.
    /// </summary>
    /// <returns> The sentinel object to use to pre-populate the queue, or <c>null</c> if
    /// sentinel objects are not supported. </returns>
    protected virtual T GetSentinelObject()
    {
        return default;
    }

    /// <summary>
    /// Adds an Object to a <see cref="PriorityQueue{T}"/> in log(size) time. If one tries to add
    /// more objects than <see cref="maxSize"/> from initialize and it is not possible to resize
    /// the heap, an <see cref="IndexOutOfRangeException"/> is thrown.
    /// </summary>
    /// <returns> The new 'top' element in the queue. </returns>
    public T Add(T element)
    {
        // Append at the bottom, then sift up to restore the heap invariant.
        size++;
        heap[size] = element;
        UpHeap();
        return heap[1];
    }

    /// <summary>
    /// Adds an Object to a <see cref="PriorityQueue{T}"/> in log(size) time.
    /// If the given <paramref name="element"/> is smaller than then full
    /// heap's minimum, it won't be added.
    /// </summary>
    public virtual void Insert(T element) // LUCENENET specific - added as a more efficient way to insert value types without reuse
    {
        if (size < maxSize)
        {
            Add(element);
        }
        else if (size > 0 && !LessThan(element, heap[1]))
        {
            // Queue is full and the new element beats the current minimum:
            // replace the top in place and sift it down.
            heap[1] = element;
            UpdateTop();
        }
    }

    /// <summary>
    /// Adds an Object to a <see cref="PriorityQueue{T}"/> in log(size) time.
    /// It returns the object (if any) that was
    /// dropped off the heap because it was full. This can be
    /// the given parameter (in case it is smaller than the
    /// full heap's minimum, and couldn't be added), or another
    /// object that was previously the smallest value in the
    /// heap and now has been replaced by a larger one, or <c>null</c>
    /// if the queue wasn't yet full with <see cref="maxSize"/> elements.
    /// </summary>
    public virtual T InsertWithOverflow(T element)
    {
        if (size < maxSize)
        {
            Add(element);
            return default;
        }
        else if (size > 0 && !LessThan(element, heap[1]))
        {
            T ret = heap[1];
            heap[1] = element;
            UpdateTop();
            return ret;
        }
        else
        {
            // Element is worse than the current minimum of a full queue.
            return element;
        }
    }

    /// <summary>
    /// Returns the least element of the <see cref="PriorityQueue{T}"/> in constant time.
    /// Returns <c>null</c> if the queue is empty. </summary>
    public T Top =>
        // We don't need to check size here: if maxSize is 0,
        // then heap is length 2 array with both entries null.
        // If size is 0 then heap[1] is already null.
        heap[1];

    /// <summary>
    /// Removes and returns the least element of the <see cref="PriorityQueue{T}"/> in log(size)
    /// time.
    /// </summary>
    public T Pop()
    {
        if (size > 0)
        {
            T result = heap[1]; // save first value
            heap[1] = heap[size]; // move last to first
            heap[size] = default; // permit GC of objects
            size--;
            DownHeap(); // adjust heap
            return result;
        }
        else
        {
            return default;
        }
    }

    /// <summary>
    /// Should be called when the Object at top changes values. Still log(n) worst
    /// case, but it's at least twice as fast to
    ///
    /// <code>
    /// pq.Top.Change();
    /// pq.UpdateTop();
    /// </code>
    ///
    /// instead of
    ///
    /// <code>
    /// o = pq.Pop();
    /// o.Change();
    /// pq.Push(o);
    /// </code>
    /// </summary>
    /// <returns> The new 'top' element. </returns>
    public T UpdateTop()
    {
        DownHeap();
        return heap[1];
    }

    /// <summary>
    /// Returns the number of elements currently stored in the <see cref="PriorityQueue{T}"/>.
    /// NOTE: This was size() in Lucene.
    /// </summary>
    public int Count => size;

    /// <summary>
    /// Removes all entries from the <see cref="PriorityQueue{T}"/>. </summary>
    public void Clear()
    {
        // Null out slots 0..size so removed elements become collectible.
        for (int i = 0; i <= size; i++)
        {
            heap[i] = default;
        }
        size = 0;
    }

    // Sifts the bottom element (heap[size]) up toward the root until its
    // parent is no longer greater. Parent of i is i/2 (unsigned shift avoids
    // sign issues and matches the upstream Java >>> operator).
    private void UpHeap()
    {
        int i = size;
        T node = heap[i]; // save bottom node
        int j = (int)((uint)i >> 1);
        while (j > 0 && LessThan(node, heap[j]))
        {
            heap[i] = heap[j]; // shift parents down
            i = j;
            j = (int)((uint)j >> 1);
        }
        heap[i] = node; // install saved node
    }

    // Sifts the root element down, repeatedly swapping with its smaller child
    // (children of i are 2i and 2i+1) until the heap invariant holds.
    private void DownHeap()
    {
        int i = 1;
        T node = heap[i]; // save top node
        int j = i << 1; // find smaller child
        int k = j + 1;
        if (k <= size && LessThan(heap[k], heap[j]))
        {
            j = k;
        }
        while (j <= size && LessThan(heap[j], node))
        {
            heap[i] = heap[j]; // shift up child
            i = j;
            j = i << 1;
            k = j + 1;
            if (k <= size && LessThan(heap[k], heap[j]))
            {
                j = k;
            }
        }
        heap[i] = node; // install saved node
    }

    /// <summary>
    /// This method returns the internal heap array as T[].
    /// <para/>
    /// @lucene.internal
    /// </summary>
    [WritableArray]
    [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
    protected T[] HeapArray => heap;
}
}
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using Microsoft.WindowsAzure.Storage.Blob.Protocol;
using Microsoft.WindowsAzure.Storage.RetryPolicies;
using MS.Test.Common.MsTestLib;
namespace StorageTestLib
{
/// <summary>
/// this is a static helper class
/// </summary>
public static class Helper
{
public static void CreateContainer()
{
    // Creates the blob container named in the test configuration, reporting
    // whether it was newly created or already existed.
    var account = CloudStorageAccount.Parse(Test.Data.Get("StorageConnectionString"));
    var helper = new CloudBlobHelper(account);
    var containerName = Test.Data.Get("containerName");

    string message = helper.CreateContainer(containerName)
        ? "Cloud Blob {0} is successfully created."
        : "Cloud Blob {0} already exists.";
    Console.WriteLine(message, containerName);
}
public static void DeleteContainer()
{
    // Deletes the blob container named in the test configuration, reporting
    // whether it was removed or was not found.
    var account = CloudStorageAccount.Parse(Test.Data.Get("StorageConnectionString"));
    var helper = new CloudBlobHelper(account);
    var containerName = Test.Data.Get("containerName");

    string message = helper.DeleteContainer(containerName)
        ? "Cloud Blob {0} is successfully deleted."
        : "Cloud Blob {0} not found.";
    Console.WriteLine(message, containerName);
}
public static void GenerateSmallFile(string filename, int sizeKB)
{
    // Writes sizeKB kilobytes of deterministic (fixed-seed) pseudo-random
    // data, so repeated runs produce identical files.
    var random = new Random(123456);
    var buffer = new byte[sizeKB * 1024];
    random.NextBytes(buffer);
    File.WriteAllBytes(filename, buffer);
}
public static void GenerateTinyFile(string filename, int sizeB)
{
    // Writes sizeB bytes of deterministic (fixed-seed) pseudo-random data.
    var random = new Random(123456);
    var buffer = new byte[sizeB];
    random.NextBytes(buffer);
    File.WriteAllBytes(filename, buffer);
}
public static void AggregateFile(string filename, int times)
{
    // Builds <paramref name="filename"/> by concatenating the contents of the
    // fixture file "abc.txt" (in the current directory) the given number of times.
    using (FileStream destination = new FileStream(filename, FileMode.Create))
    using (FileStream source = new FileStream("abc.txt", FileMode.Open))
    {
        for (int round = 0; round < times; round++)
        {
            source.CopyTo(destination);
            // Rewind so the next round re-reads the fixture from the start.
            source.Seek(0, SeekOrigin.Begin);
        }
    }
}
public static void CompressFile(string filename, int times)
{
    // Writes a gzip stream to <paramref name="filename"/> containing the
    // fixture file "abc.txt" repeated the given number of times.
    using (FileStream destination = new FileStream(filename, FileMode.Create))
    using (GZipStream gzip = new GZipStream(destination, CompressionMode.Compress))
    using (FileStream source = new FileStream("abc.txt", FileMode.Open))
    {
        for (int round = 0; round < times; round++)
        {
            source.CopyTo(gzip);
            // Rewind so the next round re-reads the fixture from the start.
            source.Seek(0, SeekOrigin.Begin);
        }
    }
}
//it takes around 74 seconds to generate a 5G file
public static void GenerateMediumFile(string filename, int sizeMB)
{
byte[] data = new byte[1024 * 1024];
Random r = new Random(123456);
using (FileStream stream = new FileStream(filename, FileMode.Create))
{
for (int i = 0; i < sizeMB; i++)
{
r.NextBytes(data);
stream.Write(data, 0, data.Length);
}
}
return;
}
// the buffer is too large, better to use GenerateMediumFile
public static void GenerateBigFile(string filename, int sizeGB)
{
byte[] data = new byte[1024 * 1024 * 1024];
Random r = new Random(123456);
using (FileStream stream = new FileStream(filename, FileMode.Create))
{
for (int i = 0; i < sizeGB; i++)
{
r.NextBytes(data);
stream.Write(data, 0, data.Length);
}
}
return;
}
//this is only for small data
public static byte[] GetMD5(byte[] data)
{
    // Computes the MD5 digest of an in-memory buffer. For large files use
    // GetFileMD5Hash/GetFileContentMD5, which stream from disk.
    // Fix: MD5.Create() returns an IDisposable; dispose it deterministically
    // instead of leaking the handle to the finalizer.
    using (MD5 md5 = MD5.Create())
    {
        return md5.ComputeHash(data);
    }
}
public static void GenerateRandomTestFile(string filename, int sizeKB)
{
    // Writes sizeKB kilobytes of non-deterministic random data (unseeded
    // Random), unlike GenerateSmallFile which is reproducible.
    var buffer = new byte[sizeKB * 1024];
    new Random().NextBytes(buffer);
    File.WriteAllBytes(filename, buffer);
}
public static void DeleteFile(string filename)
{
    // Removes the file if present; a missing file is not an error.
    if (!File.Exists(filename))
    {
        return;
    }
    File.Delete(filename);
}
public static void DeleteFolder(string foldername)
{
    // Recursively removes the directory if present; a missing directory is
    // not an error.
    if (!Directory.Exists(foldername))
    {
        return;
    }
    Directory.Delete(foldername, true);
}
public static void DeletePattern(string pathPattern)
{
    // Deletes every file and directory directly under the current working
    // directory whose name matches the given wildcard pattern.
    var currentDir = new DirectoryInfo(".");
    foreach (var file in currentDir.GetFiles(pathPattern, SearchOption.TopDirectoryOnly))
    {
        file.Delete();
    }
    foreach (var dir in currentDir.GetDirectories(pathPattern, SearchOption.TopDirectoryOnly))
    {
        dir.Delete(true);
    }
}
public static void CreateNewFolder(string foldername)
{
    // Ensures an empty directory at the given path, wiping any existing
    // contents first.
    if (Directory.Exists(foldername))
    {
        Directory.Delete(foldername, true);
    }
    Directory.CreateDirectory(foldername);
}
// for a 5G file, this can be done in 20 seconds
public static string GetFileMD5Hash(string filename)
{
using (FileStream fs = File.Open(filename, FileMode.Open))
{
MD5 md5 = MD5.Create();
byte[] md5Hash = md5.ComputeHash(fs);
StringBuilder sb = new StringBuilder();
foreach (byte b in md5Hash)
{
sb.Append(b.ToString("x2").ToLower());
}
return sb.ToString();
}
}
public static string GetFileContentMD5(string filename)
{
    // Computes the MD5 digest of a file and returns it Base64-encoded, the
    // format Azure Storage uses for the Content-MD5 header.
    // Fix: dispose the MD5 instance (it is IDisposable) instead of leaking it.
    using (FileStream fs = File.Open(filename, FileMode.Open))
    using (MD5 md5 = MD5.Create())
    {
        byte[] md5Hash = md5.ComputeHash(fs);
        return Convert.ToBase64String(md5Hash);
    }
}
// Builds a test directory tree under currentFolder: each folder receives
// <size> files named filename_i (of i KB each) and, while layer > 0,
// <size> subfolders named foldername_i, each populated recursively with
// layer-1 further levels.
public static void GenerateFixedTestTree(string filename, string foldername, string currentFolder, int size, int layer)
{
    for (int i = 0; i < size; i++)
    {
        GenerateRandomTestFile(currentFolder + "\\" + filename + "_" + i, i);
    }
    if (layer <= 0)
    {
        return;
    }
    for (int i = 0; i < size; i++)
    {
        string child = currentFolder + "\\" + foldername + "_" + i;
        Directory.CreateDirectory(child);
        GenerateFixedTestTree(filename, foldername, child, size, layer - 1);
    }
}
// Convenience overload: compares two files, identified by path, for
// identical content.
public static bool CompareTwoFiles(string filename, string filename2)
{
    return CompareTwoFiles(new FileInfo(filename), new FileInfo(filename2));
}
// Compares two files for identical content, chunk-by-chunk (200 MB per
// chunk, comparing the MD5 of each chunk pair). Returns false when either
// file is missing, the lengths differ, or any chunk differs.
public static bool CompareTwoFiles(FileInfo fi, FileInfo fi2)
{
    if (!fi.Exists || !fi2.Exists)
    {
        return false;
    }
    // BUG FIX: the original compared fi.Length against itself, so files of
    // different sizes were never rejected by this early check.
    if (fi.Length != fi2.Length)
    {
        return false;
    }
    long fileLength = fi.Length;
    // 200M a chunk
    const int ChunkSizeByte = 200 * 1024 * 1024;
    using (FileStream fs = new FileStream(fi.FullName, FileMode.Open, FileAccess.Read))
    using (FileStream fs2 = new FileStream(fi2.FullName, FileMode.Open, FileAccess.Read))
    using (BinaryReader reader = new BinaryReader(fs))
    using (BinaryReader reader2 = new BinaryReader(fs2))
    using (MD5 md5 = MD5.Create()) // one instance reused; ComputeHash resets state each call
    {
        long comparedLength = 0;
        do
        {
            byte[] bytes = reader.ReadBytes(ChunkSizeByte);
            byte[] bytes2 = reader2.ReadBytes(ChunkSizeByte);
            byte[] md5Hash = md5.ComputeHash(bytes);
            byte[] md5Hash2 = md5.ComputeHash(bytes2);
            if (!md5Hash.SequenceEqual(md5Hash2))
            {
                return false;
            }
            comparedLength += bytes.Length;
        }
        while (comparedLength < fileLength); // zero-length files pass after one iteration
    }
    return true;
}
// Compares the full recursive file listings of two folders using FileCompare
// (name + length + content). NOTE(review): SequenceEqual is order-sensitive,
// so this assumes both listings enumerate in the same order — confirm for the
// file systems the tests run on.
public static bool CompareTwoFolders(string foldername, string foldername2)
{
    var files = new DirectoryInfo(foldername).GetFiles("*.*", SearchOption.AllDirectories);
    var files2 = new DirectoryInfo(foldername2).GetFiles("*.*", SearchOption.AllDirectories);
    return files.SequenceEqual(files2, new FileCompare());
}
// NOTE(review): stub — local-folder vs. blob-container comparison is not
// implemented, so every caller currently gets an unconditional "equal".
public static bool CompareFolderWithBlob(string foldername, string containerName)
{
    return true;
}
// NOTE(review): stub — always reports "not equal" until implemented.
public static bool CompareTwoBlobs(string containerName, string containerName2)
{
    return false; //todo: implement
}
// Verifies that blobs FileName_0 .. FileName_{fileNumber-1} all exist inside
// the given blob virtual directory, checking as block or page blobs per
// blobType; reports each missing blob through Test.Error.
public static void verifyFilesExistinBlobDirectory(int fileNumber, CloudBlobDirectory blobDirectory, string FileName, String blobType)
{
    for (int index = 0; index < fileNumber; index++)
    {
        string name = FileName + "_" + index;
        if (blobType == BlobType.Block)
        {
            CloudBlockBlob blockBlob = blobDirectory.GetBlockBlobReference(name);
            if (null == blockBlob || !blockBlob.Exists())
            {
                Test.Error("the file {0}_{1} in the blob virtual directory does not exist:", FileName, index);
            }
        }
        else if (blobType == BlobType.Page)
        {
            CloudPageBlob pageBlob = blobDirectory.GetPageBlobReference(name);
            if (null == pageBlob || !pageBlob.Exists())
            {
                Test.Error("the file {0}_{1} in the blob virtual directory does not exist:", FileName, index);
            }
        }
    }
}
// Appends one line to this run's perf log, creating the file on first use.
public static void writePerfLog(string log)
{
    if (!File.Exists(perfLogName))
    {
        Test.Info("The perflog name is: {0}", perfLogName);
        // File.AppendText below would create the file too; the explicit create
        // is kept so the log name is reported exactly once, on first write.
        using (FileStream created = File.Create(perfLogName))
        {
        }
    }
    // Original leaked the writer if WriteLine threw; using guarantees the close.
    using (StreamWriter writer = File.AppendText(perfLogName))
    {
        writer.WriteLine(log);
    }
}
// Per-process perf log file name, stamped with the time the type was first
// used ('\\', '/' and ':' replaced so the timestamp is a legal file name).
private static string perfLogName = @".\perf_" + DateTime.Now.ToString().Replace('\\', '-').Replace('/', '-').Replace(':', '-') + ".csv";
// Kills every running process whose name matches processName and waits for
// each to exit. Returns true when at least one process was killed; false when
// none existed or an error occurred (errors are logged, never rethrown).
public static bool killProcess(string processName)
{
    try
    {
        Process[] matches = Process.GetProcessesByName(processName);
        if (matches.Length == 0)
        {
            Test.Info("No {0} process exist, so no process will be killed", processName);
            return false;
        }
        foreach (Process proc in matches)
        {
            Test.Info("Try to kill {0} process : {1}", processName, proc.Id);
            proc.Kill();
            proc.WaitForExit();
        }
        return true;
    }
    catch (Exception e)
    {
        Test.Warn("Exception happen when kill {0}: {1}", processName, e.ToString());
        return false;
    }
}
// Delegate so tests can parameterize *how* a child process is stopped
// (break network / ctrl+c / kill — see the StopProcessBy* methods below).
public delegate bool StopProcess(Process p);
// Stops a transfer process by simulating a network outage: releases the DHCP
// lease, sends CTRL+C to the shared console, then polls up to ~1000s for the
// process to exit, killing it as a last resort. The lease is renewed on every
// exit path. Returns true only when the process exited on its own.
public static bool StopProcessByBreakNetwork(Process p)
{
    String processName = Path.GetFileNameWithoutExtension(p.MainModule.FileName);
    Test.Info("Stop {0} by BreakNetwork.", processName);
    try
    {
        int i = 0;
        // Drop the network: release the DHCP lease for all adapters.
        Helper.StartProcess("ipconfig", "/release");
        System.Threading.Thread.Sleep(5000);
        try //Send Ctrl+c so only need to for 1 round of 900s, or need to wait for filenumber/thread *900s
        {
            Test.Info("Send ctrl+C.");
            // Handler == null with Add == true makes THIS process ignore the
            // CTRL+C it is about to raise, so only the child reacts to it.
            Test.Assert(SetConsoleCtrlHandler(null, true), "SetConsoleCtrlHandler should success");
            System.Threading.Thread.Sleep(5000);
            // Process-group id 0 == every process attached to this console.
            Test.Assert(GenerateConsoleCtrlEvent(ConsoleCtrlEvent.CTRL_C_EVENT, 0), "GenerateConsoleCtrlEvent should success");
            System.Threading.Thread.Sleep(2000);
            Test.Assert(SetConsoleCtrlHandler(null, false), "SetConsoleCtrlHandler should success");
        }
        catch (Exception e)
        {
            Test.Warn("can't send ctrl+c to {0}: {1}", processName, e.ToString());
        }
        // Poll up to 100 x 10s for a clean exit.
        for (i = 0; i < 100; i++)
        {
            if (p.HasExited)
            {
                Helper.StartProcess("ipconfig", "/renew");
                System.Threading.Thread.Sleep(5000); //wait 5s for IP to restore
                return true;
            }
            Test.Info("wait 10 s for {0} finish. Time: {1}", processName, i);
            System.Threading.Thread.Sleep(10000);//As need 900s for process to exist, so wait up to 1000s.
        }
        Test.Warn("{0} doesn't stop successfully by Break Network. it's killed", processName);
        p.Kill();
        Helper.StartProcess("ipconfig", "/renew");
        return false;
    }
    catch (Exception)
    {
        // Best effort: restore the network even when something above threw.
        Helper.StartProcess("ipconfig", "/renew");
        System.Threading.Thread.Sleep(5000);//wait 5s for IP to restore
        return false;
    }
}
// Stops a process by sending CTRL+C to the shared console, then polls up to
// ~1000s for it to exit; kills it when the signal cannot be sent or the
// process never exits. Returns true only on a clean, signal-driven exit.
public static bool StopProcessByCtrlC(Process p)
{
    String processName = Path.GetFileNameWithoutExtension(p.MainModule.FileName);
    Test.Info("Stop {0} by Ctrl+c.", processName);
    int i = 0;
    try
    {
        Test.Info("Send ctrl+C.");
        // Handler == null with Add == true makes THIS process ignore the
        // CTRL+C it is about to raise, so only the child reacts to it.
        Test.Assert(SetConsoleCtrlHandler(null, true), "SetConsoleCtrlHandler should success");
        System.Threading.Thread.Sleep(5000);
        // Process-group id 0 == every process attached to this console.
        Test.Assert(GenerateConsoleCtrlEvent(ConsoleCtrlEvent.CTRL_C_EVENT, 0), "GenerateConsoleCtrlEvent should success");
        System.Threading.Thread.Sleep(2000);
        Test.Assert(SetConsoleCtrlHandler(null, false), "SetConsoleCtrlHandler should success");
    }
    catch (Exception e)
    {
        Test.Warn("{0} doesn't stop successfully by ctrl+c. it's killed: {1}", processName, e.ToString());
        System.Threading.Thread.Sleep(10000);
        p.Kill();
        return false;
    }
    // Poll up to 100 x 10s for the child to exit on its own.
    for (i = 0; i < 100; i++)
    {
        if (p.HasExited) return true;
        Test.Info("wait 10 s for {0} finish. Time: {1}", processName, i);
        System.Threading.Thread.Sleep(10000);//As need 900s for process to exist, so wait up to 1000s.
    }
    Test.Warn("{0} doesn't stop successfully by ctrl+c. it's killed", processName);
    p.Kill();
    return false;
}
// Stops the given process immediately by killing it; always reports success.
public static bool StopProcessByKill(Process p)
{
    string name = Path.GetFileNameWithoutExtension(p.MainModule.FileName);
    Test.Info("Stop {0} by kill.", name);
    p.Kill();
    return true;
}
// Launches cmd with args in its own visible window, without shell execute,
// and returns the running process.
public static Process StartProcess(string cmd, string args)
{
    Test.Info("Running: {0} {1}", cmd, args);
    var startInfo = new ProcessStartInfo(cmd, args)
    {
        CreateNoWindow = false,
        UseShellExecute = false,
    };
    return Process.Start(startInfo);
}
// Sends a console control signal to processes sharing this console.
// dwProcessGroupId == 0 targets every process attached to the console.
[DllImport("kernel32.dll", CallingConvention = CallingConvention.StdCall)]
static extern bool GenerateConsoleCtrlEvent(ConsoleCtrlEvent sigevent, int dwProcessGroupId);
// Installs/removes a console control handler. Passing Handler == null with
// Add == true makes this process ignore CTRL+C (used by the StopProcessBy*
// helpers so only the child receives the signal).
[DllImport("kernel32.dll", CharSet = CharSet.Auto)]
public static extern bool SetConsoleCtrlHandler(HandlerRoutine Handler, bool Add);
// Signature of a console control handler routine.
public delegate bool HandlerRoutine(ConsoleCtrlEvent CtrlType);
// An enumerated type for the control messages
// sent to the handler routine.
public enum ConsoleCtrlEvent
{
    CTRL_C_EVENT = 0,
    CTRL_BREAK_EVENT,    // 1
    CTRL_CLOSE_EVENT,    // 2
    CTRL_LOGOFF_EVENT = 5,
    CTRL_SHUTDOWN_EVENT  // 6
}
// Launches cmd with args hidden (no window), with stdin/stdout/stderr all
// redirected and handed back to the caller for interactive control.
public static Process StartProcess(string cmd, string args, out StreamReader stdout, out StreamReader stderr, out StreamWriter stdin)
{
    Test.Logger.Verbose("Running: {0} {1}", cmd, args);
    var startInfo = new ProcessStartInfo(cmd, args)
    {
        CreateNoWindow = true,
        WindowStyle = ProcessWindowStyle.Hidden,
        UseShellExecute = false,
        RedirectStandardError = true,
        RedirectStandardOutput = true,
        RedirectStandardInput = true,
    };
    Process child = Process.Start(startInfo);
    stdout = child.StandardOutput;
    stderr = child.StandardError;
    stdin = child.StandardInput;
    return child;
}
}
/// <summary>
/// Equality comparer for directory listings: two files are equal when their
/// name, length and content match. Paths are deliberately ignored so files in
/// different trees can compare equal. Content is checked by full MD5 for
/// files under 200 MB and by chunked comparison otherwise.
/// </summary>
public class FileCompare : IEqualityComparer<FileInfo>
{
    public FileCompare() { }
    public bool Equals(FileInfo f1, FileInfo f2)
    {
        // IEqualityComparer<T>.Equals may legitimately receive nulls; the
        // original threw NullReferenceException in that case.
        if (ReferenceEquals(f1, f2))
        {
            return true;
        }
        if (f1 == null || f2 == null)
        {
            return false;
        }
        if (f1.Name != f2.Name)
        {
            Test.Verbose("file name {0}:{1} not equal {2}:{3}", f1.FullName, f1.Name, f2.FullName, f2.Name);
            return false;
        }
        if (f1.Length != f2.Length)
        {
            Test.Verbose("file length {0}:{1} not equal {2}:{3}", f1.FullName, f1.Length, f2.FullName, f2.Length);
            return false;
        }
        if (f1.Length < 200 * 1024 * 1024)
        {
            // Small files: hash each fully and compare the hex digests.
            string f1MD5Hash = f1.MD5Hash();
            string f2MD5Hash = f2.MD5Hash();
            if (f1MD5Hash != f2MD5Hash)
            {
                Test.Verbose("file MD5 mismatch {0}:{1} not equal {2}:{3}", f1.FullName, f1MD5Hash,f2.FullName, f2MD5Hash);
                return false;
            }
        }
        else
        {
            // Large files: chunked comparison avoids hashing 200 MB+ in one go.
            if (!Helper.CompareTwoFiles(f1, f2))
            {
                Test.Verbose("file MD5 mismatch {0} not equal {1}", f1.FullName, f2.FullName);
                return false;
            }
        }
        return true;
    }
    // Hash on name + length only (cheap; content is verified in Equals).
    public int GetHashCode(FileInfo fi)
    {
        string s = String.Format("{0}{1}", fi.Name, fi.Length);
        return s.GetHashCode();
    }
}
/// <summary>
/// File-oriented helpers: an MD5 extension, random test strings, and NTFS
/// attribute manipulation (including compression via DeviceIoControl).
/// </summary>
public static class FileOp
{
    /// <summary>Extension: lowercase hex MD5 of the file's content.</summary>
    public static string MD5Hash(this FileInfo fi)
    {
        return Helper.GetFileMD5Hash(fi.FullName);
    }
    /// <summary>Random string of random length 1..99.</summary>
    public static string NextString(Random Randomint)
    {
        int length = Randomint.Next(1, 100);
        return NextString(Randomint, length);
    }
    /// <summary>
    /// Random string of the given length. Code points are drawn from
    /// 0x20..0xD7FE (upper bound exclusive), which stays below the surrogate
    /// range, so the result is always valid UTF-16.
    /// </summary>
    public static string NextString(Random Randomint, int length)
    {
        return new String(
            Enumerable.Repeat(0, length)
            .Select(p => (char)Randomint.Next(0x20, 0xD7FF))
            .ToArray());
    }
    /// <summary>
    /// Adds the given attribute to a file using the appropriate mechanism:
    /// EFS for Encrypted, NTFS compression for Compressed, plain attribute
    /// flags otherwise. No-op when the attribute is already present.
    /// </summary>
    public static void SetFileAttribute(string Filename, FileAttributes attribute)
    {
        FileAttributes fa = File.GetAttributes(Filename);
        if ((fa & attribute) == attribute)
        {
            Test.Info("Attribute {0} is already in file{1}. Don't need to add again.", attribute.ToString(), Filename);
            return;
        }
        switch (attribute)
        {
            case FileAttributes.Encrypted:
                File.Encrypt(Filename);
                break;
            case FileAttributes.Normal:
                // Normal means "no other attributes": strip the special ones
                // first, then overwrite the whole flag set.
                RemoveFileAttribute(Filename, FileAttributes.Encrypted);
                RemoveFileAttribute(Filename, FileAttributes.Compressed);
                // The original wrote "fa & ~fa | Normal", which always
                // evaluates to Normal; say so directly.
                File.SetAttributes(Filename, FileAttributes.Normal);
                break;
            case FileAttributes.Compressed:
                compress(Filename);
                break;
            default:
                File.SetAttributes(Filename, fa | attribute);
                break;
        }
        Test.Info("Attribute {0} is added to file{1}.", attribute.ToString(), Filename);
    }
    /// <summary>
    /// Removes the given attribute from a file, undoing whatever mechanism
    /// SetFileAttribute used. No-op when the attribute is absent.
    /// </summary>
    public static void RemoveFileAttribute(string Filename, FileAttributes attribute)
    {
        FileAttributes fa = File.GetAttributes(Filename);
        if ((fa & attribute) != attribute)
        {
            Test.Info("Attribute {0} is NOT in file{1}. Don't need to remove.", attribute.ToString(), Filename);
            return;
        }
        switch (attribute)
        {
            case FileAttributes.Encrypted:
                File.Decrypt(Filename);
                break;
            case FileAttributes.Normal:
                // Normal cannot simply be cleared; setting Archive displaces it.
                File.SetAttributes(Filename, fa | FileAttributes.Archive);
                break;
            case FileAttributes.Compressed:
                uncompress(Filename);
                break;
            default:
                File.SetAttributes(Filename, fa & ~attribute);
                break;
        }
        Test.Info("Attribute {0} is removed from file{1}.", attribute.ToString(), Filename);
    }
    // Raw NTFS control call used to toggle per-file compression.
    [DllImport("kernel32.dll")]
    public static extern int DeviceIoControl(IntPtr hDevice, int
        dwIoControlCode, ref short lpInBuffer, int nInBufferSize, IntPtr
        lpOutBuffer, int nOutBufferSize, ref int lpBytesReturned, IntPtr
        lpOverlapped);
    private static int FSCTL_SET_COMPRESSION = 0x9C040;
    // Kept as mutable fields (not const) because DeviceIoControl takes them by ref.
    private static short COMPRESSION_FORMAT_DEFAULT = 1;
    private static short COMPRESSION_FORMAT_NONE = 0;
#pragma warning disable 612, 618
    /// <summary>Turns on NTFS compression for a file (decrypting it first if needed).</summary>
    public static void compress(string filename)
    {
        if ((File.GetAttributes(filename) & FileAttributes.Encrypted) == FileAttributes.Encrypted)
        {
            // NTFS cannot compress an encrypted file; decrypt first.
            Test.Info("Decrypt File {0} to prepare for compress.", filename);
            File.Decrypt(filename);
        }
        int lpBytesReturned = 0;
        // using guarantees the handle is released; the original leaked it when
        // DeviceIoControl threw before Close().
        using (FileStream f = File.Open(filename, System.IO.FileMode.Open,
            System.IO.FileAccess.ReadWrite, System.IO.FileShare.None))
        {
            DeviceIoControl(f.Handle, FSCTL_SET_COMPRESSION,
                ref COMPRESSION_FORMAT_DEFAULT, 2 /*sizeof(short)*/, IntPtr.Zero, 0,
                ref lpBytesReturned, IntPtr.Zero);
        }
    }
    /// <summary>Turns off NTFS compression for a file.</summary>
    public static void uncompress(string filename)
    {
        int lpBytesReturned = 0;
        using (FileStream f = File.Open(filename, System.IO.FileMode.Open,
            System.IO.FileAccess.ReadWrite, System.IO.FileShare.None))
        {
            DeviceIoControl(f.Handle, FSCTL_SET_COMPRESSION,
                ref COMPRESSION_FORMAT_NONE, 2 /*sizeof(short)*/, IntPtr.Zero, 0,
                ref lpBytesReturned, IntPtr.Zero);
        }
    }
#pragma warning restore 612, 618
}
/// <summary>
/// This class helps to do operations on cloud blobs
/// </summary>
public class CloudBlobHelper
{
// Backing field for Account.
private CloudStorageAccount account;
/// <summary>
/// The storage account this helper operates on (assigned once by the constructor).
/// </summary>
public CloudStorageAccount Account
{
    get { return account; }
    private set { account = value; }
}
// Backing field for BlobClient.
private CloudBlobClient blobClient;
/// <summary>
/// The blob client created from <see cref="Account"/>; the constructor
/// configures it with a zero-interval linear retry policy (3 attempts).
/// </summary>
public CloudBlobClient BlobClient
{
    get { return blobClient; }
    set { blobClient = value; }
}
/// <summary>
/// Constructs the helper around an existing storage account and creates a
/// blob client with a zero-interval linear retry policy (3 attempts).
/// </summary>
/// <param name="account">the storage account to operate on</param>
public CloudBlobHelper(CloudStorageAccount account)
{
    this.Account = account;
    CloudBlobClient client = account.CreateCloudBlobClient();
    client.RetryPolicy = new LinearRetry(TimeSpan.Zero, 3);
    this.BlobClient = client;
}
/// <summary>
/// Creates a blob container.
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <returns>true if newly created, false if it already existed; throws on error</returns>
public bool CreateContainer(string containerName)
{
    return BlobClient
        .GetContainerReference(containerName)
        .CreateIfNotExists();
}
/// <summary>
/// Deletes a blob container.
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <returns>true on success, false when the container did not exist; throws on error</returns>
public bool DeleteContainer(string containerName)
{
    return BlobClient
        .GetContainerReference(containerName)
        .DeleteIfExists();
}
/// <summary>
/// Sets the public access level on a container (creating it if necessary) and
/// returns the permissions that were in effect beforehand, so a test can
/// restore them afterwards. Returns null when the container cannot be found.
/// </summary>
/// <param name="containerName">container Name</param>
/// <param name="accesstype">the access type to apply</param>
/// <returns>the container's previous permissions, or null on not-found</returns>
public BlobContainerPermissions SetContainerAccessType(string containerName, BlobContainerPublicAccessType accesstype)
{
    try
    {
        CloudBlobContainer container = blobClient.GetContainerReference(containerName);
        container.CreateIfNotExists();
        BlobContainerPermissions previous = container.GetPermissions();
        var replacement = new BlobContainerPermissions { PublicAccess = accesstype };
        container.SetPermissions(replacement);
        return previous;
    }
    catch (StorageException e)
    {
        // Not-found (or detail-less) responses mean "nothing to change".
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return null;
        }
        throw;
    }
}
/// <summary>
/// Lists every blob in a container (flat listing, all details included).
/// </summary>
/// <param name="containerName">container to enumerate</param>
/// <param name="blobList">receives the blobs found (left empty on failure)</param>
/// <returns>true on success, false when the container does not exist</returns>
public bool ListBlobs(string containerName, out List<ICloudBlob> blobList)
{
    blobList = new List<ICloudBlob>();
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        var items = container.ListBlobs(null, true, BlobListingDetails.All);
        if (items != null)
        {
            foreach (ICloudBlob item in items)
            {
                blobList.Add(item);
            }
        }
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Validates an uploaded tree created by Helper.GenerateFixedTestTree():
/// for each expected file it checks the corresponding blob's existence and
/// content MD5 (or, when <paramref name="empty"/> is true, its absence),
/// then recurses into the expected subfolders.
/// </summary>
/// <param name="filename">the file prefix of the tree</param>
/// <param name="foldername">the folder prefix of the tree</param>
/// <param name="currentFolder">current folder to validate</param>
/// <param name="size">how many files in each folder</param>
/// <param name="layer">how many folder levels to verify</param>
/// <param name="containerName">the container holding the uploaded tree</param>
/// <param name="empty">true to verify the tree does NOT exist; false to verify it does</param>
/// <returns>true when validation passes, false otherwise</returns>
public bool ValidateFixedTestTree(string filename, string foldername, string currentFolder, int size, int layer, string containerName, bool empty = false)
{
    Test.Info("Verify the folder {0}...", currentFolder);
    for (int i = 0; i < size; i++)
    {
        string sourcefilename = currentFolder + "\\" + filename + "_" + i;
        string destblobname = currentFolder + "\\" + filename + "_" + i;
        ICloudBlob blob = this.QueryBlob(containerName, destblobname);
        if (empty)
        {
            // Expect absence: any blob with content is a failure.
            if (blob != null && blob.Properties.Length != 0)
            {
                Test.Error("Blob {0} should not exist.", destblobname);
                return false;
            }
            continue;
        }
        if (blob == null)
        {
            Test.Error("Blob {0} not exist.", destblobname);
            return false;
        }
        string source_MD5 = Helper.GetFileContentMD5(sourcefilename);
        string Dest_MD5 = blob.Properties.ContentMD5;
        if (source_MD5 != Dest_MD5)
        {
            Test.Error("sourcefile:{0}: {1} == destblob:{2}:{3}", sourcefilename, source_MD5, destblobname, Dest_MD5);
            return false;
        }
    }
    if (layer > 0)
    {
        for (int i = 0; i < size; i++)
        {
            string child = currentFolder + "\\" + foldername + "_" + i;
            if (!ValidateFixedTestTree(filename, foldername, child, size, layer - 1, containerName, empty))
            {
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Generates a SAS for a container with the given permissions and lifetime.
/// When UseSavedPolicy is true the policy is stored on the container under
/// PolicySignedIdentifier (replacing any stored policies) so it can later be
/// revoked via ClearSASPolicyofContainer; otherwise an ad-hoc SAS is issued.
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="SAB">the permissions to grant</param>
/// <param name="validatePeriod">seconds until the SAS expires</param>
/// <param name="UseSavedPolicy">store the policy on the container (revocable)</param>
/// <param name="PolicySignedIdentifier">identifier for the stored policy</param>
/// <returns>the SAS, or string.Empty when the container is not found</returns>
public string GetSASofContainer(string containerName, SharedAccessBlobPermissions SAB, int validatePeriod, bool UseSavedPolicy = true, string PolicySignedIdentifier = "PolicyIdentifier")
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        var policy = new SharedAccessBlobPolicy
        {
            Permissions = SAB,
            SharedAccessStartTime = DateTimeOffset.Now.AddMinutes(-5), // 5 min clock-skew allowance
            SharedAccessExpiryTime = DateTimeOffset.Now.AddSeconds(validatePeriod),
        };
        string SAS;
        if (UseSavedPolicy)
        {
            BlobContainerPermissions perms = container.GetPermissions();
            perms.SharedAccessPolicies.Clear();
            perms.SharedAccessPolicies.Add(PolicySignedIdentifier, policy);
            container.SetPermissions(perms);
            SAS = container.GetSharedAccessSignature(new SharedAccessBlobPolicy(), PolicySignedIdentifier);
        }
        else
        {
            SAS = container.GetSharedAccessSignature(policy);
        }
        Test.Info("The SAS is {0}", SAS);
        return SAS;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return string.Empty;
        }
        throw;
    }
}
/// <summary>
/// Clears every stored access policy on a container, revoking any SAS that
/// was issued against a stored policy (see GetSASofContainer).
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <returns>true on success, false when the container does not exist</returns>
public bool ClearSASPolicyofContainer(string containerName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        BlobContainerPermissions perms = container.GetPermissions();
        perms.SharedAccessPolicies.Clear();
        container.SetPermissions(perms);
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
// Deletes every blob (including snapshots) in a container, leaving the
// container itself in place. Returns true on success, false when the
// container does not exist.
public bool CleanupContainer(string containerName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        IEnumerable<IListBlobItem> blobs = container.ListBlobs(null, true, BlobListingDetails.All);
        if (blobs != null)
        {
            foreach (ICloudBlob blob in blobs)
            {
                if (blob == null) continue;
                if (!blob.Exists())
                {
                    // NOTE(review): a listed blob that no longer "exists" is
                    // presumably a snapshot-only or concurrently-deleted entry;
                    // deletion is attempted best-effort and failures swallowed.
                    try
                    {
                        blob.Delete(DeleteSnapshotsOption.IncludeSnapshots);
                        continue;
                    }
                    catch (Exception)
                    {
                        continue;
                    }
                }
                blob.Delete(DeleteSnapshotsOption.IncludeSnapshots);
            }
        }
        return true;
    }
    catch (StorageException e)
    {
        // Not-found (or detail-less) responses mean there is nothing to clean.
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
// Empties a container by deleting and re-creating it. Container deletion is
// asynchronous on the service side and the name stays reserved for a while,
// so Create is retried (sleeping 3s between attempts) until it succeeds.
public bool CleanupContainerByRecreateIt(string containerName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        if (container == null || !container.Exists()) return false;
        BlobRequestOptions bro = new BlobRequestOptions();
        bro.RetryPolicy = new LinearRetry(new TimeSpan(0,1,0),3);
        try
        {
            container.Delete(null, bro);
        }
        catch (StorageException e)
        {
            // NOTE(review): this condition looks inverted — it RETHROWS on
            // "container not found"/404 and swallows every other storage
            // error. Kept as-is; confirm the intent before changing.
            if (null == e ||
                null == e.RequestInformation ||
                404 == e.RequestInformation.HttpStatusCode ||
                null == e.RequestInformation.ExtendedErrorInformation ||
                BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
            {
                throw;
            }
        }
        Console.WriteLine("container deleted");
        bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 3, 0),3);
        bool createSuccess = false;
        // Keep retrying: the service answers ContainerAlreadyExists (or gives
        // no error details) while the old container is still being removed.
        while (!createSuccess)
        {
            try
            {
                container.Create(bro);
                createSuccess = true;
            }
            catch (StorageException e)
            {
                if (null == e ||
                    null == e.RequestInformation ||
                    null == e.RequestInformation.ExtendedErrorInformation ||
                    BlobErrorCodeStrings.ContainerAlreadyExists == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
                {
                    Thread.Sleep(3000);
                }
                else
                {
                    throw;
                }
            }
        }
        return true;
    }
    catch (StorageException e)
    {
        // NOTE(review): same inverted-looking pattern as above — rethrows on
        // NotFound, returns false for other storage errors. Confirm intent.
        if (null == e ||
            null == e.RequestInformation ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            throw;
        }
        return false;
    }
}
/// <summary>
/// Looks up a blob on the server and returns it with attributes populated,
/// or null when the container or blob does not exist.
/// </summary>
/// <param name="containerName">container holding the blob</param>
/// <param name="blobName">name of the blob</param>
/// <returns>the blob reference, or null when not found</returns>
public ICloudBlob QueryBlob(string containerName, string blobName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        ICloudBlob result = container.GetBlobReferenceFromServer(blobName);
        // A reference can be returned even for a missing blob, so fetch the
        // attributes to force a round-trip that proves existence.
        result.FetchAttributes();
        return result;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return null;
        }
        throw;
    }
}
// Returns the server-side properties of a blob, or null when the container
// or blob does not exist.
public BlobProperties QueryBlobProperties(string containerName, string blobName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        ICloudBlob serverBlob = container.GetBlobReferenceFromServer(blobName);
        if (serverBlob == null)
        {
            return null;
        }
        serverBlob.FetchAttributes(); // refresh properties before handing them out
        return serverBlob.Properties;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return null;
        }
        throw;
    }
}
/// <summary>
/// Gets a reference to a blob virtual directory, or null when the container
/// is missing.
/// </summary>
/// <param name="containerName">container holding the directory</param>
/// <param name="blobDirectoryName">the virtual-directory prefix</param>
/// <returns>the directory reference, or null when the container is not found</returns>
public CloudBlobDirectory QueryBlobDirectory(string containerName, string blobDirectoryName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        if (container == null || !container.Exists())
        {
            return null;
        }
        return container.GetDirectoryReference(blobDirectoryName);
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return null;
        }
        throw;
    }
}
/// <summary>
/// Creates or updates a block blob with the given string content
/// (ASCII-encoded via the platform default encoding).
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="blobName">the name of the blob</param>
/// <param name="content">the content to the blob</param>
/// <returns>Return true on success, false if unable to create, throw exception on error</returns>
public bool PutBlob(string containerName, string blobName, string content)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        if (container == null || !container.Exists()) return false;
        // BUG FIX: GetBlobReferenceFromServer throws 404 for a blob that does
        // not exist yet, so the "create" half of create-or-update never worked.
        // A local block-blob reference supports both create and overwrite.
        CloudBlockBlob blob = container.GetBlockBlobReference(blobName);
        using (MemoryStream MStream = new MemoryStream(ASCIIEncoding.Default.GetBytes(content)))
        {
            blob.UploadFromStream(MStream);
        }
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Overwrites the ContentMD5 property of an existing blob.
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="blobName">the name of the blob</param>
/// <param name="MD5Hash">the MD5 hash to set, must be a base 64 string</param>
/// <returns>true on success, false when the container or blob is missing</returns>
public bool SetMD5Hash(string containerName, string blobName, string MD5Hash)
{
    try
    {
        ICloudBlob target = BlobClient
            .GetContainerReference(containerName)
            .GetBlobReferenceFromServer(blobName);
        target.FetchAttributes();            // load current properties before editing
        target.Properties.ContentMD5 = MD5Hash;
        target.SetProperties();              // push the modified properties back
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Commits a block list on a block blob. TODO: implement this for large files
/// </summary>
/// <param name="containerName">container holding the blob</param>
/// <param name="blobName">name of the block blob</param>
/// <param name="blockIds">the block ids to commit, in order</param>
/// <returns>true on success, false when the container/blob is missing</returns>
public bool PutBlockList(string containerName, string blobName, string[] blockIds)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        if (container == null || !container.Exists()) return false;
        container.GetBlockBlobReference(blobName).PutBlockList(blockIds);
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Downloads a blob's content as text.
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="blobName">the blob to download</param>
/// <param name="content">receives the blob text (null on failure)</param>
/// <returns>true on success, false when the container/blob does not exist</returns>
public bool GetBlob(string containerName, string blobName, out string content)
{
    content = null;
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        ICloudBlob blob = container.GetBlobReferenceFromServer(blobName);
        // BUG FIX: the original round-tripped through a hard-coded "temp.txt"
        // in the working directory, which races with concurrent calls and
        // leaves the file behind on failure. Download into memory instead.
        using (MemoryStream buffer = new MemoryStream())
        {
            blob.DownloadToStream(buffer);
            buffer.Position = 0;
            // StreamReader honors a BOM and defaults to UTF-8, matching the
            // File.ReadAllText behavior the original relied on.
            using (StreamReader reader = new StreamReader(buffer))
            {
                content = reader.ReadToEnd();
            }
        }
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Delete a blob by its name
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="blobName">the name of the blob</param>
/// <returns>Return true on success, false if blob not found, throw exception on error</returns>
public bool DeleteBlob(string containerName, string blobName)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        ICloudBlob blob = container.GetBlobReferenceFromServer(blobName);
        return blob.DeleteIfExists();
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
    // BUG FIX: removed "catch (Exception e1) { throw e1; }" — it only reset
    // the stack trace; other exceptions now propagate unchanged.
}
// Deletes the blobs under a virtual directory. When recursive is true, the
// listing descends into subdirectories and includes all blob details
// (snapshots, uncommitted blobs); otherwise only the top level is listed.
// Returns true on success, false when the container/directory is missing.
public bool DeleteBlobDirectory(string containerName, string blobDirectoryName, bool recursive)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        CloudBlobDirectory blobDirectory = container.GetDirectoryReference(blobDirectoryName);
        var items = recursive
            ? blobDirectory.ListBlobs(true, BlobListingDetails.All)
            : blobDirectory.ListBlobs(false);
        foreach (ICloudBlob blob in items)
        {
            blob.Delete();
        }
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
// Deletes every blob (snapshots included) under the directory, recursively.
// Null directories are treated as a no-op.
private void deleteBlobDirRecursive(CloudBlobDirectory cbd)
{
    if (cbd == null)
    {
        return;
    }
    foreach (ICloudBlob blob in cbd.ListBlobs(true, BlobListingDetails.All))
    {
        blob.Delete();
    }
}
// Uploads a local file into a block blob, using generous timeouts (90 min)
// and a 30s-interval linear retry (3 attempts). Returns true on success,
// false when the container/blob path cannot be resolved.
public bool UploadFileToBlockBlob(string containerName, string blobName, string filePath)
{
    try
    {
        CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
        var options = new BlobRequestOptions
        {
            RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 30), 3),
            ServerTimeout = new TimeSpan(1, 30, 0),
            MaximumExecutionTime = new TimeSpan(1, 30, 0),
        };
        using (FileStream fileStream = new FileStream(Path.Combine(filePath), FileMode.Open))
        {
            blockBlob.UploadFromStream(fileStream, null, options);
        }
        return true;
    }
    catch (StorageException e)
    {
        if (null == e ||
            null == e.RequestInformation ||
            404 == e.RequestInformation.HttpStatusCode ||
            null == e.RequestInformation.ExtendedErrorInformation ||
            BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
            BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            return false;
        }
        throw;
    }
}
        /// <summary>
        /// Uploads a local file to a page blob, streaming it in chunks of up to 4 MiB
        /// and computing an MD5 of the unpadded file content, which is stored in the
        /// blob's ContentMD5 property afterwards.
        /// </summary>
        /// <param name="containerName">Name of the target container.</param>
        /// <param name="blobName">Name of the target page blob.</param>
        /// <param name="filePath">Path of the local file to upload.</param>
        /// <returns>
        /// true on success; false when the local file does not exist or the
        /// container/blob path is missing (404). Other storage failures are rethrown.
        /// </returns>
        public bool UploadFileToPageBlob(string containerName, string blobName, string filePath)
        {
            try
            {
                FileInfo fi = new FileInfo(filePath);
                if (!fi.Exists)
                {
                    return false;
                }
                long fileLength = fi.Length;
                CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
                CloudPageBlob pageBlob = container.GetPageBlobReference(blobName);
                // Generous retry/timeout settings: uploads may be large and slow.
                BlobRequestOptions bro = new BlobRequestOptions();
                bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 30),3);
                bro.ServerTimeout = new TimeSpan(1, 30, 0);
                bro.MaximumExecutionTime = new TimeSpan(1, 30, 0);
                MD5CryptoServiceProvider md5CSP = new MD5CryptoServiceProvider();
                long offset = 0;
                // Page blobs require writes aligned to 512-byte pages; the service
                // caps a single WritePages call at 4 MiB.
                const int pageBlobPageSize = 512;
                const int maxPageBlobWriteSize= 4*1024*1024;
                // Round the blob size up to the next 512-byte page boundary.
                // (int ~mask sign-extends to long, so the AND is correct for long sizes.)
                long blobSize = (fileLength + pageBlobPageSize - 1) & ~(pageBlobPageSize - 1);
                pageBlob.Create(blobSize);
                using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read))
                {
                    BinaryReader br = new BinaryReader(fs);
                    while (offset < fileLength)
                    {
                        // ReadBytes returns fewer bytes only at end of file.
                        byte[] range = br.ReadBytes(maxPageBlobWriteSize);
                        // Hash the real file bytes BEFORE padding, so ContentMD5
                        // reflects the unpadded content.
                        md5CSP.TransformBlock(range, 0, range.Length, null, 0);
                        if (range.Length % pageBlobPageSize > 0)
                        {
                            // Zero-pad the final chunk up to a page boundary.
                            int pad = pageBlobPageSize - (range.Length % pageBlobPageSize);
                            Array.Resize(ref range, range.Length + pad);
                        }
                        MemoryStream ms = new MemoryStream(range, false);
                        pageBlob.WritePages(ms, offset, null, null, bro);
                        // Padding only ever occurs on the last chunk, so advancing by
                        // the padded length still terminates the loop correctly.
                        offset += range.Length;
                    }
                    // Finalize the hash so md5CSP.Hash becomes available below.
                    md5CSP.TransformFinalBlock(new byte[0], 0, 0);
                }
                //update the page blob contentMD5
                pageBlob.Properties.ContentMD5 = Convert.ToBase64String(md5CSP.Hash);
                pageBlob.SetProperties(null, bro);
                return true;
            }
            catch (StorageException e)
            {
                // "Not found" is reported as failure; everything else is rethrown.
                if (null == e ||
                    null == e.RequestInformation ||
                    404 == e.RequestInformation.HttpStatusCode ||
                    null == e.RequestInformation.ExtendedErrorInformation ||
                    BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
                    BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
                {
                    return false;
                }
                throw;
            }
        }
public bool DownloadFile(string containerName, string blobName, string filePath)
{
try
{
CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
BlobRequestOptions bro = new BlobRequestOptions();
bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 30),3);
bro.ServerTimeout = new TimeSpan(1, 30, 0);
bro.MaximumExecutionTime = new TimeSpan(1, 30, 0);
ICloudBlob blob = container.GetBlobReferenceFromServer(blobName);
using (FileStream fileStream = new FileStream(filePath, FileMode.Create))
{
blob.DownloadToStream(fileStream, null, bro);
fileStream.Close();
}
return true;
}
catch (StorageException e)
{
if (null == e ||
null == e.RequestInformation ||
404 == e.RequestInformation.HttpStatusCode ||
null == e.RequestInformation.ExtendedErrorInformation ||
BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
{
return false;
}
throw;
}
}
/// <summary>
/// Creates a snapshot of the blob
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="blobName">the name of blob</param>
/// <returns>blob snapshot</returns>
public ICloudBlob CreateSnapshot(string containerName, string blobName)
{
try
{
CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
ICloudBlob blob = container.GetBlobReferenceFromServer(blobName);
if (blob.Properties.BlobType == Microsoft.WindowsAzure.Storage.Blob.BlobType.BlockBlob)
{
CloudBlockBlob BBlock = blob as CloudBlockBlob;
return BBlock.CreateSnapshot();
}
else
{
CloudPageBlob BBlock = blob as CloudPageBlob;
return BBlock.CreateSnapshot();
}
}
catch (StorageException e)
{
if (null == e ||
null == e.RequestInformation ||
404 == e.RequestInformation.HttpStatusCode ||
null == e.RequestInformation.ExtendedErrorInformation ||
BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
{
return null;
}
throw;
}
}
/// <summary>
/// delete snapshot of the blob (DO NOT delete blob)
/// </summary>
/// <param name="containerName">the name of the container</param>
/// <param name="blobName">the name of blob</param>
/// <returns></returns>
public void DeleteSnapshotOnly(string containerName, string blobName)
{
try
{
CloudBlobContainer container = BlobClient.GetContainerReference(containerName);
ICloudBlob blob = container.GetBlobReferenceFromServer(blobName);
//Indicate that any snapshots should be deleted.
blob.Delete(DeleteSnapshotsOption.DeleteSnapshotsOnly);
return;
}
catch (StorageException e)
{
if (null == e ||
null == e.RequestInformation ||
404 == e.RequestInformation.HttpStatusCode ||
null == e.RequestInformation.ExtendedErrorInformation ||
BlobErrorCodeStrings.ContainerNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode ||
BlobErrorCodeStrings.BlobNotFound == e.RequestInformation.ExtendedErrorInformation.ErrorCode)
{
return;
}
throw;
}
}
        /// <summary>
        /// Builds a display name for a snapshot by combining the original file name
        /// with the snapshot's timestamp, e.g. "file (2014-01-02 030405).ext".
        /// </summary>
        /// <param name="fileName">The name of the blob.</param>
        /// <param name="snapshot">A blob snapshot; its SnapshotTime must have a value.</param>
        /// <returns>The composed snapshot name.</returns>
        public string GetNameOfSnapshot(string fileName, ICloudBlob snapshot)
        {
            string fileNameNoExt = Path.GetFileNameWithoutExtension(fileName);
            string extension = Path.GetExtension(fileName);
            // "u" is the universal sortable format ("yyyy-MM-dd HH:mm:ssZ"); colons
            // and the trailing 'Z' are stripped to keep the result file-system safe.
            string timeStamp = string.Format("{0:u}", snapshot.SnapshotTime.Value);
            return string.Format("{0} ({1}){2}",
                fileNameNoExt, timeStamp.Replace(":", string.Empty).TrimEnd(new char[] { 'Z' }), extension);
        }
}
}
| |
// Copyright 2006 Alp Toker <alp@atoker.com>
// This software is made available under the MIT License
// See COPYING for details
using System;
using System.Linq;
using System.Collections.Generic;
using System.Reflection;
namespace DBus
{
using Protocol;
	// Maps between D-Bus wire-level concepts (interface names, member names,
	// argument names, signatures) and .NET reflection metadata.
	static class Mapper
	{
		//TODO: move these Get*Name helpers somewhere more appropriate

		/// <summary>
		/// Returns the D-Bus argument name for a parameter: the ArgumentAttribute
		/// name when present, otherwise the CLR parameter name ("ret" for an
		/// unnamed return value).
		/// </summary>
		public static string GetArgumentName (ParameterInfo pi)
		{
			string argName = pi.Name;

			// Return values carry no CLR name; give them a stable default.
			if (pi.IsRetval && String.IsNullOrEmpty (argName))
				argName = "ret";

			return GetArgumentName ((ICustomAttributeProvider)pi, argName);
		}

		/// <summary>
		/// Returns the ArgumentAttribute-supplied name, or defaultName when the
		/// provider has no such attribute.
		/// </summary>
		public static string GetArgumentName (ICustomAttributeProvider attrProvider, string defaultName)
		{
			string argName = defaultName;

			//TODO: no need for foreach
			// ArgumentAttribute is AllowMultiple=false, so this picks up at most one.
			foreach (ArgumentAttribute aa in attrProvider.GetCustomAttributes (typeof (ArgumentAttribute), true))
				argName = aa.Name;

			return argName;
		}

		/// <summary>
		/// Yields every publicly exported member of the type paired with the type
		/// (interface or the type itself) it is declared on.
		/// </summary>
		public static IEnumerable<KeyValuePair<Type, MemberInfo>> GetPublicMembers (Type type)
		{
			//note that Type.GetInterfaces() returns all interfaces with flattened hierarchy
			foreach (Type ifType in type.GetInterfaces ()) {
				if (!IsPublic (ifType))
					continue;
				foreach (MemberInfo mi in WalkInterfaceHierarchy (ifType))
					yield return new KeyValuePair<Type, MemberInfo> (ifType, mi);
			}
			if (IsPublic (type))
				foreach (MemberInfo mi in GetDeclaredPublicMembers (type))
					yield return new KeyValuePair<Type, MemberInfo> (type, mi);
		}

		// Yields the interface's own declared members, then recursively the members
		// of every interface it extends. Members may be yielded more than once when
		// the same base interface is reachable via several paths.
		static IEnumerable<MemberInfo> WalkInterfaceHierarchy (Type iface)
		{
			foreach (MemberInfo mi in GetDeclaredPublicMembers (iface))
				yield return mi;

			// We recurse to get the method the interface inherited from other interface
			var internalIfaces = iface.GetInterfaces ();
			foreach (var internalIface in internalIfaces)
				foreach (var mi in WalkInterfaceHierarchy (internalIface))
					yield return mi;
		}

		// Public instance members declared directly on the type (no inherited members).
		static IEnumerable<MemberInfo> GetDeclaredPublicMembers (Type type)
		{
			foreach (MemberInfo mi in type.GetMembers (BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly))
				yield return mi;
		}

		//this method walks the interface tree in an undefined manner and returns the first match, or if no matches are found, null
		//the logic needs review and cleanup
		//TODO: unify member name mapping as is already done with interfaces and args
		/// <summary>
		/// Resolves a D-Bus method call to a MethodInfo on the given type by
		/// matching interface name, member name (or Get/Set + property name) and
		/// input signature. Returns null when nothing matches.
		/// </summary>
		public static MethodInfo GetMethod (Type type, MessageContainer method_call)
		{
			var mems = Mapper.GetPublicMembers (type).ToArray ();
			foreach (var memberForType in mems) {
				//this could be made more efficient by using the given interface name earlier and avoiding walking through all public interfaces
				if (method_call.Interface != null)
					if (GetInterfaceName (memberForType.Key) != method_call.Interface)
						continue;

				MemberInfo member = memberForType.Value;
				MethodInfo meth = null;
				Type[] inTypes = null;

				if (member is PropertyInfo) {
					// Properties are exposed over D-Bus as "Get<Name>" / "Set<Name>" methods.
					PropertyInfo prop = member as PropertyInfo;

					MethodInfo getter = prop.GetGetMethod (false);
					MethodInfo setter = prop.GetSetMethod (false);

					if (getter != null && "Get" + prop.Name == method_call.Member) {
						meth = getter;
						inTypes = Type.EmptyTypes;
					} else if (setter != null && "Set" + prop.Name == method_call.Member) {
						meth = setter;
						inTypes = new Type[] {prop.PropertyType};
					}
				} else {
					meth = member as MethodInfo;

					if (meth == null)
						continue;

					if (meth.Name != method_call.Member)
						continue;

					inTypes = Mapper.GetTypes (ArgDirection.In, meth.GetParameters ());
				}

				if (meth == null || inTypes == null)
					continue;

				// Require the call's wire signature to match the method's input signature.
				Signature inSig = Signature.GetSig (inTypes);

				if (inSig != method_call.Signature)
					continue;

				return meth;
			}

			return null;
		}

		public static bool IsPublic (MemberInfo mi)
		{
			return IsPublic (mi.DeclaringType);
		}

		/// <summary>
		/// A type is exported over D-Bus when it carries [Interface], or derives
		/// from MarshalByRefObject without [ExportInterfaceMembersOnly].
		/// </summary>
		public static bool IsPublic (Type type)
		{
			//we need to have a proper look at what's really public at some point
			//this will do for now
			if (type.IsDefined (typeof (InterfaceAttribute), false))
				return true;

			if (type.IsSubclassOf (typeof (MarshalByRefObject)) &&
			    type.GetCustomAttributes (typeof (ExportInterfaceMembersOnlyAttribute), true).Length == 0)
				return true;

			return false;
		}

		public static string GetInterfaceName (MemberInfo mi)
		{
			return GetInterfaceName (mi.DeclaringType);
		}

		// [Interface] name when present, otherwise the CLR full type name.
		public static string GetInterfaceName (Type type)
		{
			return type.GetCustomAttributes (typeof (InterfaceAttribute), true)
				.Cast<InterfaceAttribute> ()
				.Select (i => i.Name)
				.DefaultIfEmpty (type.FullName)
				.FirstOrDefault ();
		}

		/// <summary>
		/// Collects the parameter types flowing in the requested direction.
		/// In: all non-out parameters. Out: the element types of out parameters.
		/// A leading DisposableList parameter is always skipped.
		/// </summary>
		public static Type[] GetTypes (ArgDirection dir, ParameterInfo[] parms)
		{
			List<Type> types = new List<Type> ();

			//TODO: consider InOut/Ref

			for (int i = 0 ; i != parms.Length ; i++) {
				if (i == 0 && parms[i].ParameterType == typeof (DisposableList))
					continue;
				switch (dir) {
				case ArgDirection.In:
					//docs say IsIn isn't reliable, and this is indeed true
					//if (parms[i].IsIn)
					if (!parms[i].IsOut)
						types.Add (parms[i].ParameterType);
					break;
				case ArgDirection.Out:
					if (parms[i].IsOut) {
						//TODO: note that IsOut is optional to the compiler, we may want to use IsByRef instead
						//eg: if (parms[i].ParameterType.IsByRef)
						types.Add (parms[i].ParameterType.GetElementType ());
					}
					break;
				}
			}

			return types.ToArray ();
		}

		// True when the member/parameter is marked [Obsolete] (inherited attributes included).
		public static bool IsDeprecated (ICustomAttributeProvider attrProvider)
		{
			return attrProvider.IsDefined (typeof (ObsoleteAttribute), true);
		}

		// Thin wrapper over MakeGenericType; kept for call-site readability.
		internal static Type GetGenericType (Type defType, Type[] parms)
		{
			Type type = defType.MakeGenericType (parms);
			return type;
		}
	}
	//TODO: this class is messy, move the methods somewhere more appropriate
	// Helpers for building reply/error messages and for marshalling argument
	// values to and from D-Bus message bodies.
	static class MessageHelper
	{
		/// <summary>
		/// Builds an org.freedesktop.DBus.Error.UnknownMethod error reply for the
		/// given call, or null when the caller did not request a reply.
		/// </summary>
		public static Message CreateUnknownMethodError (MessageContainer method_call)
		{
			if (!method_call.Message.ReplyExpected)
				return null;

			string errMsg = String.Format ("Method \"{0}\" with signature \"{1}\" on interface \"{2}\" doesn't exist",
			                               method_call.Member, method_call.Signature.Value, method_call.Interface);

			return method_call.CreateError ("org.freedesktop.DBus.Error.UnknownMethod", errMsg);
		}

		/// <summary>
		/// Writes the values of all out parameters (element type of the by-ref
		/// parameter type) into the message writer, in parameter order.
		/// </summary>
		public static void WriteDynamicValues (MessageWriter mw, ParameterInfo[] parms, object[] vals)
		{
			foreach (ParameterInfo parm in parms) {
				if (!parm.IsOut)
					continue;
				Type actualType = parm.ParameterType.GetElementType ();
				mw.Write (actualType, vals[parm.Position]);
			}
		}

		/// <summary>
		/// Reads the message body into a value array indexed by parameter position;
		/// out-parameter slots are left null for the handler to fill in.
		/// </summary>
		public static object[] GetDynamicValues (Message msg, ParameterInfo[] parms)
		{
			//TODO: this validation check should provide better information, eg. message dump or a stack trace, or at least the interface/member
			/*
			if (Protocol.Verbose) {
				Signature expected = Signature.GetSig (types);
				Signature actual = msg.Signature;
				if (actual != expected)
					Console.Error.WriteLine ("Warning: The signature of the message does not match that of the handler: " + "Expected '" + expected + "', got '" + actual + "'");
			}
			*/

			object[] vals = new object[parms.Length];

			if (msg.Body != null) {
				MessageReader reader = new MessageReader (msg);

				foreach (ParameterInfo parm in parms) {
					if (parm.IsOut)
						continue;
					vals[parm.Position] = reader.ReadValue (parm.ParameterType);
				}
			}

			return vals;
		}

		/// <summary>
		/// Reads the message body as one value per entry of types, in order.
		/// A signature mismatch only produces a warning in verbose mode.
		/// </summary>
		public static object[] GetDynamicValues (Message msg, Type[] types)
		{
			//TODO: this validation check should provide better information, eg. message dump or a stack trace, or at least the interface/member
			if (ProtocolInformation.Verbose) {
				Signature expected = Signature.GetSig (types);
				Signature actual = msg.Signature;
				if (actual != expected)
					Console.Error.WriteLine ("Warning: The signature of the message does not match that of the handler: " + "Expected '" + expected + "', got '" + actual + "'");
			}

			object[] vals = new object[types.Length];

			if (msg.Body != null) {
				MessageReader reader = new MessageReader (msg);

				for (int i = 0 ; i != types.Length ; i++)
					vals[i] = reader.ReadValue (types[i]);
			}

			return vals;
		}

		// Convenience overload: derives the value types from the message's own signature.
		public static object[] GetDynamicValues (Message msg)
		{
			Type[] types = msg.Signature.ToTypes ();
			return GetDynamicValues (msg, types);
		}

		/// <summary>
		/// Builds a MethodReturn reply for the call, writing each value with its
		/// runtime type. The body signature is derived from the values themselves.
		/// NOTE(review): Signature.GetSig (vals) is evaluated before the vals null
		/// check below — confirm callers never pass a null array explicitly.
		/// </summary>
		public static Message ConstructReply (MessageContainer method_call, params object[] vals)
		{
			var msg = method_call.Message;
			MessageContainer method_return = new MessageContainer {
				Type = MessageType.MethodReturn,
				ReplySerial = msg.Header.Serial
			};
			Message replyMsg = method_return.Message;

			Signature inSig = Signature.GetSig (vals);

			if (vals != null && vals.Length != 0) {
				MessageWriter writer = new MessageWriter (Connection.NativeEndianness);

				foreach (object arg in vals)
					writer.Write (arg.GetType (), arg);

				replyMsg.AttachBodyTo (writer);
			}

			//TODO: we should be more strict here, but this fallback was added as a quick fix for p2p
			if (method_call.Sender != null)
				replyMsg.Header[FieldCode.Destination] = method_call.Sender;

			replyMsg.Signature = inSig;

			//replyMsg.WriteHeader ();

			return replyMsg;
		}

		/// <summary>
		/// Builds a reply for a reflectively-invoked method: the return value (if
		/// any) is written first, followed by the out-parameter values.
		/// NOTE(review): unlike ConstructReply, this copies method_call.Serial into
		/// Serial rather than setting ReplySerial — confirm that is intentional.
		/// </summary>
		public static Message ConstructDynamicReply (MessageContainer method_call, MethodInfo mi, object retVal, object[] vals)
		{
			Type retType = mi.ReturnType;

			MessageContainer method_return = new MessageContainer {
				Serial = method_call.Serial,
			};
			Message replyMsg = method_return.Message;

			Signature outSig = Signature.GetSig (retType);
			outSig += Signature.GetSig (Mapper.GetTypes (ArgDirection.Out, mi.GetParameters ()));

			if (outSig != Signature.Empty) {
				MessageWriter writer = new MessageWriter (Connection.NativeEndianness);

				//first write the return value, if any
				if (retType != null && retType != typeof (void))
					writer.Write (retType, retVal);

				//then write the out args
				WriteDynamicValues (writer, mi.GetParameters (), vals);

				replyMsg.AttachBodyTo (writer);
			}

			//TODO: we should be more strict here, but this fallback was added as a quick fix for p2p
			if (method_call.Sender != null)
				replyMsg.Header[FieldCode.Destination] = method_call.Sender;

			replyMsg.Signature = outSig;

			return replyMsg;
		}
	}
	/// <summary>
	/// Marker attribute: when applied to a MarshalByRefObject subclass, only the
	/// members it declares through exported interfaces are exposed over D-Bus,
	/// not the class's own public members (see Mapper.IsPublic).
	/// </summary>
	[AttributeUsage (AttributeTargets.Class, AllowMultiple=false, Inherited=true)]
	public class ExportInterfaceMembersOnlyAttribute : Attribute
	{
	}
	/// <summary>
	/// Declares the D-Bus interface name (e.g. "org.freedesktop.DBus") under which
	/// an interface or class is exported; consumed by Mapper.GetInterfaceName.
	/// </summary>
	[AttributeUsage (AttributeTargets.Interface | AttributeTargets.Class, AllowMultiple=false, Inherited=true)]
	public class InterfaceAttribute : Attribute
	{
		// The D-Bus interface name.
		public string Name;

		public InterfaceAttribute (string name)
		{
			this.Name = name;
		}
	}
	/// <summary>
	/// Overrides the D-Bus argument name for a parameter or return value;
	/// consumed by Mapper.GetArgumentName.
	/// </summary>
	[AttributeUsage (AttributeTargets.Parameter | AttributeTargets.ReturnValue, AllowMultiple=false, Inherited=true)]
	public class ArgumentAttribute : Attribute
	{
		// The D-Bus argument name.
		public string Name;

		public ArgumentAttribute (string name)
		{
			this.Name = name;
		}

		// Returns the D-Bus signature string for a CLR type.
		public static string GetSignatureString (Type type)
		{
			return Signature.GetSig (type).Value;
		}
	}
}
| |
namespace Gu.Inject.Tests
{
using System;
using Gu.Inject.Tests.Types;
using NUnit.Framework;
    /// <summary>
    /// Verifies the exact exception types and messages the kernel throws for
    /// missing bindings, circular dependencies, ambiguous constructors, illegal
    /// rebinding, and instances created through multiple paths. The expected
    /// strings are asserted verbatim (including \r\n line endings), so any
    /// change to the kernel's error formatting must be mirrored here.
    /// </summary>
    public static class ThrowTests
    {
        // Each case checks how a type with no binding is rendered in the message
        // (keyword aliases, Nullable<T>, arrays, nested generics, nested types).
        [TestCase(typeof(IWith), "Type IWith has no binding.")]
        [TestCase(typeof(int), "Type int has no binding.")]
        [TestCase(typeof(int?), "Type Nullable<int> has no binding.")]
        [TestCase(typeof(int[]), "Type int[] has no binding.")]
        [TestCase(typeof(IWith<int>), "Type IWith<int> has no binding.")]
        [TestCase(typeof(IWith<int?>), "Type IWith<Nullable<int>> has no binding.")]
        [TestCase(typeof(IWith<IWith<int>>), "Type IWith<IWith<int>> has no binding.")]
        [TestCase(typeof(OneToMany.Abstract), "Type OneToMany.Abstract has no binding.")]
        [TestCase(typeof(OneToMany.IAbstract), "Type OneToMany.IAbstract has no binding.")]
        [TestCase(typeof(OneToMany.IConcrete), "Type OneToMany.IConcrete has no binding.")]
        public static void GetWhenNoBinding(Type type, string expected)
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                expected,
                Assert.Throws<NoBindingException>(() => kernel.Get(type))?.Message);
        }

        // A -> B -> A: the message shows the constructor chain up to the repeat.
        [Test]
        public static void GetSimpleCircular()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Circular dependency when resolving SimpleCircular.A.\r\n" +
                "\r\n" +
                "new SimpleCircular.A(\r\n" +
                "  new SimpleCircular.B(\r\n" +
                "    new SimpleCircular.A(... Circular dependency detected.",
                Assert.Throws<CircularDependencyException>(() => kernel.Get<SimpleCircular.A>())?.Message);
        }

        // The cycle is reported even when reached through an outer wrapper type.
        [Test]
        public static void GetWithSimpleCircular()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Circular dependency when resolving SimpleCircular.A.\r\n" +
                "\r\n" +
                "new With<SimpleCircular.A>(\r\n" +
                "  new SimpleCircular.A(\r\n" +
                "    new SimpleCircular.B(\r\n" +
                "      new SimpleCircular.A(... Circular dependency detected.",
                Assert.Throws<CircularDependencyException>(() => kernel.Get<With<SimpleCircular.A>>())?.Message);
        }

        // When the root comes from a resolver binding, the chain starts with "x.Get<...>".
        [Test]
        public static void GetWithSimpleCircularResolver()
        {
            using var kernel = new Kernel();
            kernel.Bind(c => new With<SimpleCircular.A>(c.Get<SimpleCircular.A>()));
            Assert.AreEqual(
                "Circular dependency when resolving SimpleCircular.A.\r\n" +
                "\r\n" +
                "x.Get<With<SimpleCircular.A>>(\r\n" +
                "  new SimpleCircular.A(\r\n" +
                "    new SimpleCircular.B(\r\n" +
                "      new SimpleCircular.A(... Circular dependency detected.",
                Assert.Throws<CircularDependencyException>(() => kernel.Get<With<SimpleCircular.A>>())?.Message);
        }

        // Only the path that actually closes the cycle is reported, not the whole graph.
        [Test]
        public static void GetComplexCircular()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Circular dependency when resolving ComplexCircular.A.\r\n" +
                "\r\n" +
                "new ComplexCircular.A(\r\n" +
                "  new ComplexCircular.E(\r\n" +
                "    new ComplexCircular.G(\r\n" +
                "      new ComplexCircular.A(... Circular dependency detected.",
                Assert.Throws<CircularDependencyException>(() => kernel.Get<ComplexCircular.A>())?.Message);
        }

        // Auto-resolution requires an unambiguous constructor.
        [Test]
        public static void GetWhenTwoConstructors()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Type Error.TwoCtors has more than one constructor.\r\n" +
                "Add a binding specifying which constructor to use.",
                Assert.Throws<ResolveException>(() => kernel.Get<Error.TwoCtors>())?.Message);
        }

        // params constructors cannot be auto-resolved.
        [Test]
        public static void GetWhenParamsCtor()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Type Error.ParamsCtor has params parameter which is not supported.\r\n" +
                "Add a binding specifying how to create an instance.",
                Assert.Throws<ResolveException>(() => kernel.Get<Error.ParamsCtor>())?.Message);
        }

        // The kernel is sealed against new bindings once resolution has started.
        [Test]
        public static void BindWhenHasResolved()
        {
            using var kernel = new Kernel();
            _ = kernel.Get<DefaultCtor>();
            Assert.AreEqual(
                "Bind not allowed after Get<T>().\r\n" +
                "This could create hard to track down graph bugs.",
                Assert.Throws<InvalidOperationException>(() => kernel.Bind<IWith, With<DefaultCtor>>())?.Message);
        }

        // Self-bindings are rejected as redundant.
        [Test]
        public static void BindBindingToSame()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Trying to bind to the same type.\r\n" +
                "This is the equivalent of kernel.Bind<C, C>().\r\n" +
                "It is not strictly wrong but redundant and could indicate a mistake and hence disallowed.",
                Assert.Throws<InvalidOperationException>(() => kernel.Bind<C, C>())?.Message);
        }

        // Re-binding a type that already maps to a type is rejected.
        [Test]
        public static void BindTypeWhenHasBinding()
        {
            using var kernel = new Kernel();
            kernel.Bind<I1, C>();
            Assert.AreEqual(
                "I1 already has a binding. It is mapped to the type C",
                Assert.Throws<InvalidOperationException>(() => kernel.Bind<I1, C>())?.Message);
        }

        // Re-binding a type that already maps to an instance is rejected
        // (note the message wording differs from the type-binding case).
        [Test]
        public static void BindTypeWhenHasInstanceBinding()
        {
            using var kernel = new Kernel();
            var instance = new C();
            kernel.Bind<I1>(instance);
            Assert.AreEqual(
                "I1 already has a binding. It is mapped to C",
                Assert.Throws<InvalidOperationException>(() => kernel.Bind<I1, C>())?.Message);
        }

        // Binding an instance over an existing type binding is rejected too.
        [Test]
        public static void BindInstanceWhenHasTypeBinding()
        {
            using var kernel = new Kernel();
            kernel.Bind<IWith, With<DefaultCtor>>();
            var instance = new With<DefaultCtor>(new DefaultCtor());
            Assert.AreEqual(
                "IWith already has a binding. It is mapped to the type With<DefaultCtor>",
                Assert.Throws<InvalidOperationException>(() => kernel.Bind<IWith>(instance))?.Message);
        }

        // Func binding for I1 plus reflection-created C produces two instances of C;
        // the kernel detects the duplicate and explains the fix.
        [Test]
        public static void BindFuncAndInterfaceThenGetTypeFirst()
        {
            using var kernel = new Kernel();
            kernel.Bind<I1>(() => new C());
            _ = kernel.Get<C>(); // This works as it resolves using reflection and constructor.

            // Next get fails as there is already an instance created. Solution is Bind<I1, C>(() => new C())
            Assert.AreEqual(
                "An instance of type C was already created.\r\n" +
                "The existing instance was created via constructor.\r\n" +
                "This can happen by doing:\r\n" +
                "1. Bind<I>(() => new C())\r\n" +
                "2. Get<C>() this creates an instance of C using the constructor.\r\n" +
                "3. Get<I>() this creates an instance of C using the bound Func<C> and then detects the instance created in 2.\r\n" +
                "\r\n" +
                "Specify explicit binding for the concrete type.\r\n" +
                "For example by:\r\n" +
                "Bind<I, C>(() => new C())\r\n" +
                "or\r\n" +
                "Bind<I, C>()\r\n" +
                "Bind<C>(() => new C())",
                Assert.Throws<ResolveException>(() => kernel.Get<I1>())?.Message);
        }

        // Same duplicate-instance detection, but for a resolver (Func<IReadOnlyKernel, T>) binding.
        [Test]
        public static void BindResolverAndInterfaceThenGetTypeFirst()
        {
            using var kernel = new Kernel();
            kernel.Bind<I1>(_ => new C());
            _ = kernel.Get<C>(); // This works as it resolves using reflection and constructor.

            // Next get fails as there is already an instance created. Solution is Bind<I1, C>(c => new C())
            Assert.AreEqual(
                "An instance of type C was already created.\r\n" +
                "The existing instance was created via constructor.\r\n" +
                "This can happen by doing:\r\n" +
                "1. Bind<I>(x => new C(...))\r\n" +
                "2. Get<C>() this creates an instance of C using the constructor.\r\n" +
                "3. Get<I>() this creates an instance of C using the bound Func<IReadOnlyKernel, C> and then detects the instance created in 2.\r\n" +
                "\r\n" +
                "Specify explicit binding for the concrete type.\r\n" +
                "For example by:\r\n" +
                "Bind<I, C>(x => new C(...))\r\n" +
                "or\r\n" +
                "Bind<I, C>()\r\n" +
                "Bind<C>(x => new C(...))",
                Assert.Throws<ResolveException>(() => kernel.Get<I1>())?.Message);
        }

        // Built-in types get the plain "no binding" message with no chain...
        [Test]
        public static void NoBindingImplicit()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Type int has no binding.",
                Assert.Throws<NoBindingException>(() => kernel.Get<int>())?.Message);
        }

        // ...and the constructor chain is appended when the failure is nested one level...
        [Test]
        public static void NoBindingImplicitOneLevel()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Type int has no binding.\r\n" +
                "\r\n" +
                "new With<int>(\r\n" +
                "  could not resolve int here.",
                Assert.Throws<NoBindingException>(() => kernel.Get<With<int>>())?.Message);
        }

        // ...or two levels deep.
        [Test]
        public static void NoBindingImplicitTwoLevels()
        {
            using var kernel = new Kernel();
            Assert.AreEqual(
                "Type int has no binding.\r\n" +
                "\r\n" +
                "new With<With<int>>(\r\n" +
                "  new With<int>(\r\n" +
                "    could not resolve int here.",
                Assert.Throws<NoBindingException>(() => kernel.Get<With<With<int>>>())?.Message);
        }

        // Resolver bindings render as "x.Get<...>" in the failure chain.
        [Test]
        public static void NoBindingResolverOneLevel()
        {
            using var kernel = new Kernel();
            kernel.Bind(x => new With<int>(x.Get<int>()));
            Assert.AreEqual(
                "Type int has no binding.\r\n" +
                "\r\n" +
                "x.Get<With<int>>(\r\n" +
                "  could not resolve int here.",
                Assert.Throws<NoBindingException>(() => kernel.Get<With<int>>())?.Message);
        }

        // Plain Func bindings render as "Func<...>.Invoke" in the failure chain.
        [Test]
        public static void NoBindingFunc()
        {
            using var kernel = new Kernel();
            //// ReSharper disable once AccessToDisposedClosure
            kernel.Bind(() => new With<int>(kernel.Get<int>()));
            Assert.AreEqual(
                "Type int has no binding.\r\n" +
                "\r\n" +
                "Func<With<int>>.Invoke(\r\n" +
                "  could not resolve int here.",
                Assert.Throws<NoBindingException>(() => kernel.Get<With<int>>())?.Message);
        }
    }
}
| |
using System;
using SubSonic.Schema;
using SubSonic.DataProviders;
using System.Data;
namespace Solution.DataAccess.DataModel {
/// <summary>
/// Table: TOOL_LIST
/// Primary Key: Id
/// </summary>
public class TOOL_LISTStructs: DatabaseTable {
public TOOL_LISTStructs(IDataProvider provider):base("TOOL_LIST",provider){
ClassName = "TOOL_LIST";
SchemaName = "dbo";
Columns.Add(new DatabaseColumn("Id", this)
{
IsPrimaryKey = true,
DataType = DbType.Int32,
IsNullable = false,
AutoIncrement = true,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "Id"
});
Columns.Add(new DatabaseColumn("TOOL_NO", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = false,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "TOOL_NO"
});
Columns.Add(new DatabaseColumn("TEXT", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "TEXT"
});
Columns.Add(new DatabaseColumn("TOOLTIP", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "TOOLTIP"
});
Columns.Add(new DatabaseColumn("IMAGE", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "IMAGE"
});
Columns.Add(new DatabaseColumn("DIVIDER", this)
{
IsPrimaryKey = false,
DataType = DbType.Int32,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "DIVIDER"
});
Columns.Add(new DatabaseColumn("DISABLED", this)
{
IsPrimaryKey = false,
DataType = DbType.Int32,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "DISABLED"
});
Columns.Add(new DatabaseColumn("DROPDOWN", this)
{
IsPrimaryKey = false,
DataType = DbType.Int32,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "DROPDOWN"
});
Columns.Add(new DatabaseColumn("DROPWHOLE", this)
{
IsPrimaryKey = false,
DataType = DbType.Int32,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "DROPWHOLE"
});
Columns.Add(new DatabaseColumn("TAGDATA", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "TAGDATA"
});
Columns.Add(new DatabaseColumn("SORT_ORDER", this)
{
IsPrimaryKey = false,
DataType = DbType.Int32,
IsNullable = false,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "SORT_ORDER"
});
Columns.Add(new DatabaseColumn("DWOBJECT", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "DWOBJECT"
});
Columns.Add(new DatabaseColumn("GROUPNO", this)
{
IsPrimaryKey = false,
DataType = DbType.Int32,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 0,
PropertyName = "GROUPNO"
});
Columns.Add(new DatabaseColumn("GROUPNAME", this)
{
IsPrimaryKey = false,
DataType = DbType.AnsiString,
IsNullable = true,
AutoIncrement = false,
IsForeignKey = false,
MaxLength = 255,
PropertyName = "GROUPNAME"
});
}
public IColumn Id{
get{
return this.GetColumn("Id");
}
}
public IColumn TOOL_NO{
get{
return this.GetColumn("TOOL_NO");
}
}
public IColumn TEXT{
get{
return this.GetColumn("TEXT");
}
}
public IColumn TOOLTIP{
get{
return this.GetColumn("TOOLTIP");
}
}
public IColumn IMAGE{
get{
return this.GetColumn("IMAGE");
}
}
public IColumn DIVIDER{
get{
return this.GetColumn("DIVIDER");
}
}
public IColumn DISABLED{
get{
return this.GetColumn("DISABLED");
}
}
public IColumn DROPDOWN{
get{
return this.GetColumn("DROPDOWN");
}
}
public IColumn DROPWHOLE{
get{
return this.GetColumn("DROPWHOLE");
}
}
public IColumn TAGDATA{
get{
return this.GetColumn("TAGDATA");
}
}
public IColumn SORT_ORDER{
get{
return this.GetColumn("SORT_ORDER");
}
}
public IColumn DWOBJECT{
get{
return this.GetColumn("DWOBJECT");
}
}
public IColumn GROUPNO{
get{
return this.GetColumn("GROUPNO");
}
}
public IColumn GROUPNAME{
get{
return this.GetColumn("GROUPNAME");
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.Azure.AcceptanceTestsAzureSpecials
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// SubscriptionInCredentialsOperations operations.
/// </summary>
internal partial class SubscriptionInCredentialsOperations : IServiceOperations<AutoRestAzureSpecialParametersTestClient>, ISubscriptionInCredentialsOperations
{
/// <summary>
/// Initializes a new instance of the SubscriptionInCredentialsOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal SubscriptionInCredentialsOperations(AutoRestAzureSpecialParametersTestClient client)
{
if (client == null)
{
throw new ArgumentNullException("client");
}
this.Client = client;
}
        /// <summary>
        /// Gets a reference to the AutoRestAzureSpecialParametersTestClient that
        /// carries the credentials, base URI and HTTP pipeline used by these operations.
        /// </summary>
        public AutoRestAzureSpecialParametersTestClient Client { get; private set; }
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returns an invalid status code.
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required client property is null.
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// On success the caller owns (and should dispose) the request and
/// response attached to the result.
/// </return>
public async Task<AzureOperationResponse> PostMethodGlobalValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (this.Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "PostMethodGlobalValid", tracingParameters);
    }
    // Construct URL. This operation has no query parameters, so no query
    // string is ever appended.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/method/string/none/path/global/1234-5678-9012-3456/{subscriptionId}").ToString();
    _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("POST");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Custom headers replace any header with the same name.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            // Best-effort deserialization of the error body; malformed JSON is ignored.
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path: this method owns the transport objects, so dispose them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to null, and client-side validation should
/// prevent you from making this call
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returns an invalid status code.
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required client property is null (the expected outcome
/// for this test operation).
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// On success the caller owns (and should dispose) the request and
/// response attached to the result.
/// </return>
public async Task<AzureOperationResponse> PostMethodGlobalNullWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (this.Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "PostMethodGlobalNull", tracingParameters);
    }
    // Construct URL. This operation has no query parameters, so no query
    // string is ever appended.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/method/string/none/path/global/null/{subscriptionId}").ToString();
    _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("POST");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Custom headers replace any header with the same name.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            // Best-effort deserialization of the error body; malformed JSON is ignored.
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path: this method owns the transport objects, so dispose them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returns an invalid status code.
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when the client's SubscriptionId or ApiVersion is null.
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// On success the caller owns (and should dispose) the request and
/// response attached to the result.
/// </return>
public async Task<AzureOperationResponse> PostMethodGlobalNotProvidedValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (this.Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    if (this.Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "PostMethodGlobalNotProvidedValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/method/string/none/path/globalNotProvided/1234-5678-9012-3456/{subscriptionId}").ToString();
    _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
    // api-version (validated non-null above) is the only query parameter
    // for this operation.
    List<string> _queryParameters = new List<string>();
    if (this.Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += "?" + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("POST");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Custom headers replace any header with the same name.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            // Best-effort deserialization of the error body; malformed JSON is ignored.
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path: this method owns the transport objects, so dispose them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returns an invalid status code.
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required client property is null.
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// On success the caller owns (and should dispose) the request and
/// response attached to the result.
/// </return>
public async Task<AzureOperationResponse> PostPathGlobalValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (this.Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "PostPathGlobalValid", tracingParameters);
    }
    // Construct URL. This operation has no query parameters, so no query
    // string is ever appended.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/path/string/none/path/global/1234-5678-9012-3456/{subscriptionId}").ToString();
    _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("POST");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Custom headers replace any header with the same name.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            // Best-effort deserialization of the error body; malformed JSON is ignored.
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path: this method owns the transport objects, so dispose them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returns an invalid status code.
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required client property is null.
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// On success the caller owns (and should dispose) the request and
/// response attached to the result.
/// </return>
public async Task<AzureOperationResponse> PostSwaggerGlobalValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (this.Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "PostSwaggerGlobalValid", tracingParameters);
    }
    // Construct URL. This operation has no query parameters, so no query
    // string is ever appended.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/swagger/string/none/path/global/1234-5678-9012-3456/{subscriptionId}").ToString();
    _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("POST");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Custom headers replace any header with the same name.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            // Best-effort deserialization of the error body; malformed JSON is ignored.
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Failure path: this method owns the transport objects, so dispose them.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.
using System;
using System.Drawing;
using System.Drawing.Drawing2D;
using NetTopologySuite.Geometries;
namespace DotSpatial.Data
{
/// <summary>
/// A new model, now that we support 3.5 framework and extension methods that are essentially
/// derived characteristics away from the IRaster interface, essentially reducing it
/// to the simplest interface possible for future implementers, while extending the most
/// easy-to-find functionality to the users.
/// </summary>
public static class RasterExt
{
#region Methods
/// <summary>
/// Determines if the shape is partially inside grid extents.
/// </summary>
/// <param name="raster">Raster that should be checked.</param>
/// <param name="shape">Shape that should be checked.</param>
/// <returns>false, if the shape is completely outside grid extents
/// true, if it's at least partially inside. </returns>
public static bool ContainsFeature(this IRaster raster, IFeature shape)
{
IRasterBounds bounds = raster.Bounds;
Extent shapeExtent = shape.Geometry.EnvelopeInternal.ToExtent();
return !(shapeExtent.MinX > bounds.Extent.MaxX) && !(shapeExtent.MinY > bounds.Extent.MaxY) && !(shapeExtent.MaxX < bounds.Extent.MinX) && !(shapeExtent.MaxY < bounds.Extent.MinY);
}
/// <summary>
/// Gets a boolean that is true if the Window extents contain are all the information for the raster.
/// In otherwords, StartRow = StartColumn = 0, EndRow = NumRowsInFile - 1, and EndColumn = NumColumnsInFile - 1.
/// </summary>
/// <param name="raster">Raster that should be checked.</param>
/// <returns>True, if the whole raster is inside the window.</returns>
public static bool IsFullyWindowed(this IRaster raster)
{
if (raster.StartRow != 0) return false;
if (raster.StartColumn != 0) return false;
if (raster.EndRow != raster.NumRowsInFile - 1) return false;
if (raster.EndColumn != raster.NumColumnsInFile - 1) return false;
return true;
}
#region GeoReference
/// <summary>
/// This doesn't change the data, but instead performs a translation where the upper left coordinate
/// is specified in world coordinates.
/// </summary>
/// <param name="raster">Moves this raster so that the upper left coordinate will match the specified position. The skew and cellsize will remain unaltered.</param>
/// <param name="position">The location to move the upper left corner of the raster to in world coordinates.</param>
public static void MoveTo(this IRaster raster, Coordinate position)
{
double[] vals = raster.Bounds.AffineCoefficients;
vals[0] = position.X;
vals[3] = position.Y;
}
/// <summary>
/// Rotates the geospatial reference points for this image by rotating the affine coordinates.
/// The center for this rotation will be the center of the image.
/// </summary>
/// <param name="raster">The raster to rotate.</param>
/// <param name="degrees">The angle in degrees to rotate the image counter clockwise.</param>
public static void Rotate(this IRaster raster, float degrees)
{
Matrix m = raster.Bounds.GetAffineMatrix();
m.Rotate(degrees);
raster.Bounds.SetAffineMatrix(m);
}
/// <summary>
/// Rotates the geospatial reference points for this image by rotating the affine coordinates.
/// The center for this rotation will be the center of the image.
/// </summary>
/// <param name="raster">The raster to rotate about the specified coordinate.</param>
/// <param name="degrees">The angle in degrees to rotate the image counterclockwise.</param>
/// <param name="center">The point that marks the center of the desired rotation in geographic coordiantes.</param>
public static void RotateAt(this IRaster raster, float degrees, Coordinate center)
{
Matrix m = raster.Bounds.GetAffineMatrix();
m.RotateAt(degrees, new PointF(Convert.ToSingle(center.X), Convert.ToSingle(center.Y)));
raster.Bounds.SetAffineMatrix(m);
}
/// <summary>
/// This method uses a matrix transform to adjust the scale. The precision of using
/// a Drawing2D transform is float precision, so some accuracy may be lost.
/// </summary>
/// <param name="raster">The raster to apply the scale transform to.</param>
/// <param name="scaleX">The multiplier to adjust the geographic extents of the raster in the X direction.</param>
/// <param name="scaleY">The multiplier to adjust the geographic extents of the raster in the Y direction.</param>
public static void Scale(this IRaster raster, float scaleX, float scaleY)
{
Matrix m = raster.Bounds.GetAffineMatrix();
m.Scale(scaleX, scaleY);
raster.Bounds.SetAffineMatrix(m);
}
/// <summary>
/// This method uses a matrix transform to adjust the shear. The precision of using
/// a Drawing2D transform is float precision, so some accuracy may be lost.
/// </summary>
/// <param name="raster">The raster to apply the transform to.</param>
/// <param name="shearX">The floating point horizontal shear factor.</param>
/// <param name="shearY">The floating ponit vertical shear factor.</param>
public static void Shear(this IRaster raster, float shearX, float shearY)
{
Matrix m = raster.Bounds.GetAffineMatrix();
m.Shear(shearX, shearY);
raster.Bounds.SetAffineMatrix(m);
}
/// <summary>
/// Applies a translation transform to the georeferenced coordinates on this raster.
/// </summary>
/// <param name="raster">The raster to apply the translation to.</param>
/// <param name="shift">An ICoordinate with shear values.</param>
public static void Translate(this IRaster raster, Coordinate shift)
{
double[] affine = raster.Bounds.AffineCoefficients;
affine[0] += shift.X;
affine[3] += shift.Y;
}
#endregion
#region Nearest Values
/// <summary>
/// Retrieves the data from the cell that is closest to the specified coordinates. This will
/// return a No-Data value if the specified coordintes are outside of the grid.
/// </summary>
/// <param name="raster">The raster to get the value from.</param>
/// <param name="location">A valid implementation of Icoordinate specifying the geographic location.</param>
/// <returns>The value of type T of the cell that has a center closest to the specified coordinates.</returns>
public static double GetNearestValue(this IRaster raster, Coordinate location)
{
RcIndex position = raster.ProjToCell(location.X, location.Y);
if (position.Row < 0 || position.Row >= raster.NumRows) return raster.NoDataValue;
if (position.Column < 0 || position.Column >= raster.NumColumns) return raster.NoDataValue;
return raster.Value[position.Row, position.Column];
}
/// <summary>
/// Retrieves the data from the cell that is closest to the specified coordinates. This will
/// return a No-Data value if the specified coordintes are outside of the grid.
/// </summary>
/// <param name="raster">The raster to get the value from.</param>
/// <param name="x">The longitude or horizontal coordinate.</param>
/// <param name="y">The latitude or vertical coordinate.</param>
/// <returns>The double value of the cell that has a center closest to the specified coordinates.</returns>
public static double GetNearestValue(this IRaster raster, double x, double y)
{
RcIndex position = raster.ProjToCell(x, y);
if (position.Row < 0 || position.Row >= raster.NumRows) return raster.NoDataValue;
if (position.Column < 0 || position.Column >= raster.NumColumns) return raster.NoDataValue;
return raster.Value[position.Row, position.Column];
}
/// <summary>
/// Retrieves the location from the cell that is closest to the specified coordinates. This will
/// do nothing if the specified coordinates are outside of the raster.
/// </summary>
/// <param name="raster">The IRaster to set the value for.</param>
/// <param name="x">The longitude or horizontal coordinate.</param>
/// <param name="y">The latitude or vertical coordinate.</param>
/// <param name="value">The value to assign to the nearest cell to the specified location.</param>
public static void SetNearestValue(this IRaster raster, double x, double y, double value)
{
RcIndex position = raster.ProjToCell(x, y);
if (position.Row < 0 || position.Row >= raster.NumRows) return;
if (position.Column < 0 || position.Column >= raster.NumColumns) return;
raster.Value[position.Row, position.Column] = value;
}
/// <summary>
/// Retrieves the location from the cell that is closest to the specified coordinates. This will
/// do nothing if the specified coordinates are outside of the raster.
/// </summary>
/// <param name="raster">The IRaster to set the value for.</param>
/// <param name="location">An Icoordinate specifying the location.</param>
/// <param name="value">The value to assign to the nearest cell to the specified location.</param>
public static void SetNearestValue(this IRaster raster, Coordinate location, double value)
{
RcIndex position = raster.ProjToCell(location.X, location.Y);
if (position.Row < 0 || position.Row >= raster.NumRows) return;
if (position.Column < 0 || position.Column >= raster.NumColumns) return;
raster.Value[position.Row, position.Column] = value;
}
#endregion
#region Projection
/// <summary>
/// Extends the IRaster interface to return the coordinate of the center of a row column position.
/// </summary>
/// <param name="raster">The raster interface to extend.</param>
/// <param name="position">The zero based integer index of the row and column of the cell to locate.</param>
/// <returns>The geographic location of the center of the specified cell.</returns>
public static Coordinate CellToProj(this IRaster raster, RcIndex position)
{
return raster?.Bounds?.CellCenterToProj(position.Row, position.Column);
}
/// <summary>
/// Extends the IRaster interface to return the coordinate of the center of a row column position.
/// </summary>
/// <param name="raster">The raster interface to extend.</param>
/// <param name="row">The zero based integer index of the row of the cell to locate.</param>
/// <param name="col">The zero based integer index of the column of the cell to locate.</param>
/// <returns>The geographic location of the center of the specified cell.</returns>
public static Coordinate CellToProj(this IRaster raster, int row, int col)
{
return raster?.Bounds?.CellCenterToProj(row, col);
}
/// <summary>
/// Extends the IRaster interface to return the zero based integer row and column indices.
/// </summary>
/// <param name="raster">The raster interface to extend.</param>
/// <param name="location">The geographic coordinate describing the latitude and longitude.</param>
/// <returns>The RcIndex that describes the zero based integer row and column indices.</returns>
public static RcIndex ProjToCell(this IRaster raster, Coordinate location)
{
    // A missing raster or missing bounds maps to the sentinel empty index.
    var bounds = raster?.Bounds;
    return bounds == null ? RcIndex.Empty : bounds.ProjToCell(location);
}
/// <summary>
/// Extends the IRaster interface to return the zero based integer row and column indices.
/// </summary>
/// <param name="raster">The raster interface to extend.</param>
/// <param name="x">A double precision floating point describing the longitude.</param>
/// <param name="y">A double precision floating point describing the latitude.</param>
/// <returns>The RcIndex that describes the zero based integer row and column indices.</returns>
public static RcIndex ProjToCell(this IRaster raster, double x, double y)
{
    // A missing raster or missing bounds maps to the sentinel empty index;
    // otherwise wrap the raw x/y pair in a Coordinate and delegate to Bounds.
    var bounds = raster?.Bounds;
    return bounds == null ? RcIndex.Empty : bounds.ProjToCell(new Coordinate(x, y));
}
#endregion
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    /// <summary>
    /// Drives every scenario of the Avx2.MaskStore (UInt64) intrinsic test and
    /// throws when any scenario reports failure. Scenarios that load operands
    /// via Avx.LoadVector256/LoadAlignedVector256 run only when AVX is available.
    /// </summary>
    private static void MaskStoreUInt64()
    {
        var test = new StoreBinaryOpTest__MaskStoreUInt64();

        if (!test.IsSupported)
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }
        else
        {
            // Avx.IsSupported is constant for the process; read it once.
            bool avxLoads = Avx.IsSupported;

            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();
            if (avxLoads)
            {
                // Validates basic functionality works, using Load and LoadAligned
                test.RunBasicScenario_Load();
                test.RunBasicScenario_LoadAligned();
            }

            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();
            if (avxLoads)
            {
                // Validates calling via reflection works, using Load and LoadAligned
                test.RunReflectionScenario_Load();
                test.RunReflectionScenario_LoadAligned();
            }

            // Validates passing a static member works
            test.RunClsVarScenario();

            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();
            if (avxLoads)
            {
                // Validates passing a local works, using Load and LoadAligned
                test.RunLclVarScenario_Load();
                test.RunLclVarScenario_LoadAligned();
            }

            // Validates passing the field of a local class works
            test.RunClassLclFldScenario();
            // Validates passing an instance member of a class works
            test.RunClassFldScenario();
            // Validates passing the field of a local struct works
            test.RunStructLclFldScenario();
            // Validates passing an instance member of a struct works
            test.RunStructFldScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
// Auto-generated test harness for Avx2.MaskStore with UInt64 elements.
// Each Run* scenario performs the masked store into _dataTable.outArrayPtr and
// then validates the stored bytes against the mask/value operands used.
public sealed unsafe class StoreBinaryOpTest__MaskStoreUInt64
{
    // Carries the two vector operands in struct fields so the JIT's
    // struct-field addressing paths are exercised by the *StructFld* scenarios.
    private struct TestStruct
    {
        public Vector256<UInt64> _fld1;
        public Vector256<UInt64> _fld2;

        // Builds a TestStruct whose vector fields are filled from the freshly
        // re-randomized static backing arrays of the enclosing class.
        public static TestStruct Create()
        {
            var testStruct = new TestStruct();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

            return testStruct;
        }

        // Runs the intrinsic using this struct's fields as mask/source and
        // validates the bytes written to the shared output buffer.
        public void RunStructFldScenario(StoreBinaryOpTest__MaskStoreUInt64 testClass)
        {
            Avx2.MaskStore((UInt64*)testClass._dataTable.outArrayPtr, _fld1, _fld2);
            testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
        }
    }

    // 32 bytes: the size of a 256-bit vector, used to size/align the data table.
    private static readonly int LargestVectorSize = 32;

    // Element counts derived from the vector width (4 UInt64 lanes each).
    private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);
    private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);
    private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);

    // Scratch arrays re-randomized before each vector is materialized.
    private static UInt64[] _data1 = new UInt64[Op1ElementCount];
    private static UInt64[] _data2 = new UInt64[Op2ElementCount];

    // Static operands for the RunClsVarScenario path.
    private static Vector256<UInt64> _clsVar1;
    private static Vector256<UInt64> _clsVar2;

    // Instance operands for the RunClassFldScenario / RunClassLclFldScenario paths.
    private Vector256<UInt64> _fld1;
    private Vector256<UInt64> _fld2;

    // Pinned input/output buffers shared by every scenario.
    private SimpleBinaryOpTest__DataTable<UInt64, UInt64, UInt64> _dataTable;

    // Initializes the static operand vectors from randomized data.
    static StoreBinaryOpTest__MaskStoreUInt64()
    {
        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _clsVar1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _clsVar2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
    }

    // Initializes the instance operand vectors and the shared data table.
    public StoreBinaryOpTest__MaskStoreUInt64()
    {
        Succeeded = true;

        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _fld1), ref Unsafe.As<UInt64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _fld2), ref Unsafe.As<UInt64, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

        // Fresh randomization for the data-table inputs used by the pointer-based scenarios.
        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt64(); }
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt64(); }
        _dataTable = new SimpleBinaryOpTest__DataTable<UInt64, UInt64, UInt64>(_data1, _data2, new UInt64[RetElementCount], LargestVectorSize);
    }

    // MaskStore requires AVX2.
    public bool IsSupported => Avx2.IsSupported;

    public bool Succeeded { get; set; }

    // Operands read from memory via Unsafe.Read.
    public void RunBasicScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

        Avx2.MaskStore(
            (UInt64*)_dataTable.outArrayPtr,
            Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray1Ptr),
            Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray2Ptr)
        );

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Operands read via the unaligned AVX load intrinsic.
    public void RunBasicScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

        Avx2.MaskStore(
            (UInt64*)_dataTable.outArrayPtr,
            Avx.LoadVector256((UInt64*)(_dataTable.inArray1Ptr)),
            Avx.LoadVector256((UInt64*)(_dataTable.inArray2Ptr))
        );

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Operands read via the aligned AVX load intrinsic.
    public void RunBasicScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

        Avx2.MaskStore(
            (UInt64*)_dataTable.outArrayPtr,
            Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray1Ptr)),
            Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray2Ptr))
        );

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Invokes the intrinsic through reflection, boxing the destination pointer.
    public void RunReflectionScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

        typeof(Avx2).GetMethod(nameof(Avx2.MaskStore), new Type[] { typeof(UInt64*), typeof(Vector256<UInt64>), typeof(Vector256<UInt64>) })
                    .Invoke(null, new object[] {
                        Pointer.Box(_dataTable.outArrayPtr, typeof(UInt64*)),
                        Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray1Ptr),
                        Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray2Ptr)
                    });

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Reflection invocation with operands loaded via Avx.LoadVector256.
    public void RunReflectionScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

        typeof(Avx2).GetMethod(nameof(Avx2.MaskStore), new Type[] { typeof(UInt64*), typeof(Vector256<UInt64>), typeof(Vector256<UInt64>) })
                    .Invoke(null, new object[] {
                        Pointer.Box(_dataTable.outArrayPtr, typeof(UInt64*)),
                        Avx.LoadVector256((UInt64*)(_dataTable.inArray1Ptr)),
                        Avx.LoadVector256((UInt64*)(_dataTable.inArray2Ptr))
                    });

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Reflection invocation with operands loaded via Avx.LoadAlignedVector256.
    public void RunReflectionScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

        typeof(Avx2).GetMethod(nameof(Avx2.MaskStore), new Type[] { typeof(UInt64*), typeof(Vector256<UInt64>), typeof(Vector256<UInt64>) })
                    .Invoke(null, new object[] {
                        Pointer.Box(_dataTable.outArrayPtr, typeof(UInt64*)),
                        Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray1Ptr)),
                        Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray2Ptr))
                    });

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Operands supplied from static fields.
    public void RunClsVarScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

        Avx2.MaskStore(
            (UInt64*)_dataTable.outArrayPtr,
            _clsVar1,
            _clsVar2
        );

        ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
    }

    // Operands supplied from locals populated via Unsafe.Read.
    public void RunLclVarScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

        var left = Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray1Ptr);
        var right = Unsafe.Read<Vector256<UInt64>>(_dataTable.inArray2Ptr);
        Avx2.MaskStore((UInt64*)_dataTable.outArrayPtr, left, right);

        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Operands supplied from locals populated via Avx.LoadVector256.
    public void RunLclVarScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

        var left = Avx.LoadVector256((UInt64*)(_dataTable.inArray1Ptr));
        var right = Avx.LoadVector256((UInt64*)(_dataTable.inArray2Ptr));
        Avx2.MaskStore((UInt64*)_dataTable.outArrayPtr, left, right);

        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Operands supplied from locals populated via Avx.LoadAlignedVector256.
    public void RunLclVarScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

        var left = Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray1Ptr));
        var right = Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArray2Ptr));
        Avx2.MaskStore((UInt64*)_dataTable.outArrayPtr, left, right);

        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Operands read from the fields of a freshly-constructed local class instance.
    public void RunClassLclFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

        var test = new StoreBinaryOpTest__MaskStoreUInt64();
        Avx2.MaskStore((UInt64*)_dataTable.outArrayPtr, test._fld1, test._fld2);

        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }

    // Operands read from this instance's fields.
    public void RunClassFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

        Avx2.MaskStore((UInt64*)_dataTable.outArrayPtr, _fld1, _fld2);
        ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
    }

    // Operands read from the fields of a local struct.
    public void RunStructLclFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

        var test = TestStruct.Create();
        Avx2.MaskStore((UInt64*)_dataTable.outArrayPtr, test._fld1, test._fld2);

        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }

    // Operands read from struct fields inside a struct instance method.
    public void RunStructFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

        var test = TestStruct.Create();
        test.RunStructFldScenario(this);
    }

    // On unsupported hardware the intrinsic must raise PlatformNotSupportedException.
    public void RunUnsupportedScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

        bool succeeded = false;

        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            succeeded = true;
        }

        if (!succeeded)
        {
            Succeeded = false;
        }
    }

    // Copies vector operands and the raw output bytes into managed arrays, then
    // delegates to the array-based validator.
    private void ValidateResult(Vector256<UInt64> left, Vector256<UInt64> right, void* result, [CallerMemberName] string method = "")
    {
        UInt64[] inArray1 = new UInt64[Op1ElementCount];
        UInt64[] inArray2 = new UInt64[Op2ElementCount];
        UInt64[] outArray = new UInt64[RetElementCount];

        Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray1[0]), left);
        Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray2[0]), right);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    // Copies raw operand and output bytes into managed arrays, then delegates
    // to the array-based validator.
    private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
    {
        UInt64[] inArray1 = new UInt64[Op1ElementCount];
        UInt64[] inArray2 = new UInt64[Op2ElementCount];
        UInt64[] outArray = new UInt64[RetElementCount];

        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    // Checks MaskStore semantics per lane: when the mask element's most
    // significant bit is set the source value must have been stored; otherwise
    // the lane is compared against itself (i.e. it may hold anything).
    private void ValidateResult(UInt64[] left, UInt64[] right, UInt64[] result, [CallerMemberName] string method = "")
    {
        bool succeeded = true;

        if (result[0] != (((left[0] & (1UL << 63)) != 0) ? right[0] : result[0]))
        {
            succeeded = false;
        }
        else
        {
            for (var i = 1; i < RetElementCount; i++)
            {
                if (result[i] != (((left[i] & (1UL << 63)) != 0) ? right[i] : result[i]))
                {
                    succeeded = false;
                    break;
                }
            }
        }

        if (!succeeded)
        {
            TestLibrary.TestFramework.LogInformation($"{nameof(Avx2)}.{nameof(Avx2.MaskStore)}<UInt64>(Vector256<UInt64>, Vector256<UInt64>): {method} failed:");
            TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
            TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
            TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
            TestLibrary.TestFramework.LogInformation(string.Empty);

            Succeeded = false;
        }
    }
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Compute.V1.Tests
{
/// <summary>Generated unit tests.</summary>
/// <summary>
/// Generated unit tests for <c>BackendServicesClient</c>. Each test wires a
/// strict Moq stub of the gRPC client, invokes one client surface overload,
/// and asserts the stub's canned response is returned by reference identity.
/// </summary>
public sealed class GeneratedBackendServicesClientTest
{
    // Get(request): the request object is forwarded verbatim to the stub.
    [xunit::FactAttribute]
    public void GetRequestObject()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetBackendServiceRequest request = new GetBackendServiceRequest
        {
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        // Generator-produced arbitrary field values; only reference identity is asserted.
        BackendService expectedResponse = new BackendService
        {
            Id = 11672635353343658936UL,
            Iap = new BackendServiceIAP(),
            ConsistentHash = new ConsistentHashLoadBalancerSettings(),
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Port = -78310000,
            CustomRequestHeaders =
            {
                "custom_request_headers3532c035",
            },
            CreationTimestamp = "creation_timestamp235e59a1",
            EdgeSecurityPolicy = "edge_security_policy85c5b8f4",
            PortName = "port_namebaaa4cd4",
            MaxStreamDuration = new Duration(),
            TimeoutSec = -1529270667,
            Protocol = "protocola08b7881",
            FailoverPolicy = new BackendServiceFailoverPolicy(),
            LocalityLbPolicy = "locality_lb_policyc8722098",
            Region = "regionedb20d96",
            ConnectionTrackingPolicy = new BackendServiceConnectionTrackingPolicy(),
            SecurityPolicy = "security_policy76596315",
            CdnPolicy = new BackendServiceCdnPolicy(),
            Network = "networkd22ce091",
            Fingerprint = "fingerprint009e6052",
            EnableCDN = false,
            LogConfig = new BackendServiceLogConfig(),
            OutlierDetection = new OutlierDetection(),
            LoadBalancingScheme = "load_balancing_scheme21346104",
            AffinityCookieTtlSec = -328985636,
            CustomResponseHeaders =
            {
                "custom_response_headersda5d431e",
            },
            CircuitBreakers = new CircuitBreakers(),
            Description = "description2cf9da67",
            HealthChecks =
            {
                "health_checksedb1f3f8",
            },
            Subsetting = new Subsetting(),
            SelfLink = "self_link7e87f12d",
            ConnectionDraining = new ConnectionDraining(),
            SessionAffinity = "session_affinitye702dadf",
            SecuritySettings = new SecuritySettings(),
            Backends = { new Backend(), },
        };
        mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendService response = client.Get(request);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetAsync(request): both the CallSettings and CancellationToken overloads
    // must return the stub's response.
    [xunit::FactAttribute]
    public async stt::Task GetRequestObjectAsync()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetBackendServiceRequest request = new GetBackendServiceRequest
        {
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        // Generator-produced arbitrary field values; only reference identity is asserted.
        BackendService expectedResponse = new BackendService
        {
            Id = 11672635353343658936UL,
            Iap = new BackendServiceIAP(),
            ConsistentHash = new ConsistentHashLoadBalancerSettings(),
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Port = -78310000,
            CustomRequestHeaders =
            {
                "custom_request_headers3532c035",
            },
            CreationTimestamp = "creation_timestamp235e59a1",
            EdgeSecurityPolicy = "edge_security_policy85c5b8f4",
            PortName = "port_namebaaa4cd4",
            MaxStreamDuration = new Duration(),
            TimeoutSec = -1529270667,
            Protocol = "protocola08b7881",
            FailoverPolicy = new BackendServiceFailoverPolicy(),
            LocalityLbPolicy = "locality_lb_policyc8722098",
            Region = "regionedb20d96",
            ConnectionTrackingPolicy = new BackendServiceConnectionTrackingPolicy(),
            SecurityPolicy = "security_policy76596315",
            CdnPolicy = new BackendServiceCdnPolicy(),
            Network = "networkd22ce091",
            Fingerprint = "fingerprint009e6052",
            EnableCDN = false,
            LogConfig = new BackendServiceLogConfig(),
            OutlierDetection = new OutlierDetection(),
            LoadBalancingScheme = "load_balancing_scheme21346104",
            AffinityCookieTtlSec = -328985636,
            CustomResponseHeaders =
            {
                "custom_response_headersda5d431e",
            },
            CircuitBreakers = new CircuitBreakers(),
            Description = "description2cf9da67",
            HealthChecks =
            {
                "health_checksedb1f3f8",
            },
            Subsetting = new Subsetting(),
            SelfLink = "self_link7e87f12d",
            ConnectionDraining = new ConnectionDraining(),
            SessionAffinity = "session_affinitye702dadf",
            SecuritySettings = new SecuritySettings(),
            Backends = { new Backend(), },
        };
        mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<BackendService>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendService responseCallSettings = await client.GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        BackendService responseCancellationToken = await client.GetAsync(request, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // Get(project, backendService): the flattened overload must build an
    // equivalent request and return the stub's response.
    [xunit::FactAttribute]
    public void Get()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetBackendServiceRequest request = new GetBackendServiceRequest
        {
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        // Generator-produced arbitrary field values; only reference identity is asserted.
        BackendService expectedResponse = new BackendService
        {
            Id = 11672635353343658936UL,
            Iap = new BackendServiceIAP(),
            ConsistentHash = new ConsistentHashLoadBalancerSettings(),
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Port = -78310000,
            CustomRequestHeaders =
            {
                "custom_request_headers3532c035",
            },
            CreationTimestamp = "creation_timestamp235e59a1",
            EdgeSecurityPolicy = "edge_security_policy85c5b8f4",
            PortName = "port_namebaaa4cd4",
            MaxStreamDuration = new Duration(),
            TimeoutSec = -1529270667,
            Protocol = "protocola08b7881",
            FailoverPolicy = new BackendServiceFailoverPolicy(),
            LocalityLbPolicy = "locality_lb_policyc8722098",
            Region = "regionedb20d96",
            ConnectionTrackingPolicy = new BackendServiceConnectionTrackingPolicy(),
            SecurityPolicy = "security_policy76596315",
            CdnPolicy = new BackendServiceCdnPolicy(),
            Network = "networkd22ce091",
            Fingerprint = "fingerprint009e6052",
            EnableCDN = false,
            LogConfig = new BackendServiceLogConfig(),
            OutlierDetection = new OutlierDetection(),
            LoadBalancingScheme = "load_balancing_scheme21346104",
            AffinityCookieTtlSec = -328985636,
            CustomResponseHeaders =
            {
                "custom_response_headersda5d431e",
            },
            CircuitBreakers = new CircuitBreakers(),
            Description = "description2cf9da67",
            HealthChecks =
            {
                "health_checksedb1f3f8",
            },
            Subsetting = new Subsetting(),
            SelfLink = "self_link7e87f12d",
            ConnectionDraining = new ConnectionDraining(),
            SessionAffinity = "session_affinitye702dadf",
            SecuritySettings = new SecuritySettings(),
            Backends = { new Backend(), },
        };
        mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendService response = client.Get(request.Project, request.BackendService);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetAsync(project, backendService): flattened async overloads, both
    // CallSettings and CancellationToken variants.
    [xunit::FactAttribute]
    public async stt::Task GetAsync()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetBackendServiceRequest request = new GetBackendServiceRequest
        {
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        // Generator-produced arbitrary field values; only reference identity is asserted.
        BackendService expectedResponse = new BackendService
        {
            Id = 11672635353343658936UL,
            Iap = new BackendServiceIAP(),
            ConsistentHash = new ConsistentHashLoadBalancerSettings(),
            Kind = "kindf7aa39d9",
            Name = "name1c9368b0",
            Port = -78310000,
            CustomRequestHeaders =
            {
                "custom_request_headers3532c035",
            },
            CreationTimestamp = "creation_timestamp235e59a1",
            EdgeSecurityPolicy = "edge_security_policy85c5b8f4",
            PortName = "port_namebaaa4cd4",
            MaxStreamDuration = new Duration(),
            TimeoutSec = -1529270667,
            Protocol = "protocola08b7881",
            FailoverPolicy = new BackendServiceFailoverPolicy(),
            LocalityLbPolicy = "locality_lb_policyc8722098",
            Region = "regionedb20d96",
            ConnectionTrackingPolicy = new BackendServiceConnectionTrackingPolicy(),
            SecurityPolicy = "security_policy76596315",
            CdnPolicy = new BackendServiceCdnPolicy(),
            Network = "networkd22ce091",
            Fingerprint = "fingerprint009e6052",
            EnableCDN = false,
            LogConfig = new BackendServiceLogConfig(),
            OutlierDetection = new OutlierDetection(),
            LoadBalancingScheme = "load_balancing_scheme21346104",
            AffinityCookieTtlSec = -328985636,
            CustomResponseHeaders =
            {
                "custom_response_headersda5d431e",
            },
            CircuitBreakers = new CircuitBreakers(),
            Description = "description2cf9da67",
            HealthChecks =
            {
                "health_checksedb1f3f8",
            },
            Subsetting = new Subsetting(),
            SelfLink = "self_link7e87f12d",
            ConnectionDraining = new ConnectionDraining(),
            SessionAffinity = "session_affinitye702dadf",
            SecuritySettings = new SecuritySettings(),
            Backends = { new Backend(), },
        };
        mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<BackendService>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendService responseCallSettings = await client.GetAsync(request.Project, request.BackendService, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        BackendService responseCancellationToken = await client.GetAsync(request.Project, request.BackendService, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // GetHealth(request): request object forwarded verbatim to the stub.
    [xunit::FactAttribute]
    public void GetHealthRequestObject()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetHealthBackendServiceRequest request = new GetHealthBackendServiceRequest
        {
            ResourceGroupReferenceResource = new ResourceGroupReference(),
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        BackendServiceGroupHealth expectedResponse = new BackendServiceGroupHealth
        {
            Kind = "kindf7aa39d9",
            Annotations =
            {
                {
                    "key8a0b6e3c",
                    "value60c16320"
                },
            },
            HealthStatus = { new HealthStatus(), },
        };
        mockGrpcClient.Setup(x => x.GetHealth(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendServiceGroupHealth response = client.GetHealth(request);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetHealthAsync(request): CallSettings and CancellationToken variants.
    [xunit::FactAttribute]
    public async stt::Task GetHealthRequestObjectAsync()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetHealthBackendServiceRequest request = new GetHealthBackendServiceRequest
        {
            ResourceGroupReferenceResource = new ResourceGroupReference(),
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        BackendServiceGroupHealth expectedResponse = new BackendServiceGroupHealth
        {
            Kind = "kindf7aa39d9",
            Annotations =
            {
                {
                    "key8a0b6e3c",
                    "value60c16320"
                },
            },
            HealthStatus = { new HealthStatus(), },
        };
        mockGrpcClient.Setup(x => x.GetHealthAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<BackendServiceGroupHealth>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendServiceGroupHealth responseCallSettings = await client.GetHealthAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        BackendServiceGroupHealth responseCancellationToken = await client.GetHealthAsync(request, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }

    // GetHealth(project, backendService, resource): flattened overload.
    [xunit::FactAttribute]
    public void GetHealth()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetHealthBackendServiceRequest request = new GetHealthBackendServiceRequest
        {
            ResourceGroupReferenceResource = new ResourceGroupReference(),
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        BackendServiceGroupHealth expectedResponse = new BackendServiceGroupHealth
        {
            Kind = "kindf7aa39d9",
            Annotations =
            {
                {
                    "key8a0b6e3c",
                    "value60c16320"
                },
            },
            HealthStatus = { new HealthStatus(), },
        };
        mockGrpcClient.Setup(x => x.GetHealth(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendServiceGroupHealth response = client.GetHealth(request.Project, request.BackendService, request.ResourceGroupReferenceResource);
        xunit::Assert.Same(expectedResponse, response);
        mockGrpcClient.VerifyAll();
    }

    // GetHealthAsync(project, backendService, resource): flattened async
    // overloads, both CallSettings and CancellationToken variants.
    [xunit::FactAttribute]
    public async stt::Task GetHealthAsync()
    {
        moq::Mock<BackendServices.BackendServicesClient> mockGrpcClient = new moq::Mock<BackendServices.BackendServicesClient>(moq::MockBehavior.Strict);
        mockGrpcClient.Setup(x => x.CreateOperationsClientForGlobalOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
        GetHealthBackendServiceRequest request = new GetHealthBackendServiceRequest
        {
            ResourceGroupReferenceResource = new ResourceGroupReference(),
            Project = "projectaa6ff846",
            BackendService = "backend_serviceed490d45",
        };
        BackendServiceGroupHealth expectedResponse = new BackendServiceGroupHealth
        {
            Kind = "kindf7aa39d9",
            Annotations =
            {
                {
                    "key8a0b6e3c",
                    "value60c16320"
                },
            },
            HealthStatus = { new HealthStatus(), },
        };
        mockGrpcClient.Setup(x => x.GetHealthAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<BackendServiceGroupHealth>(stt::Task.FromResult(expectedResponse), null, null, null, null));
        BackendServicesClient client = new BackendServicesClientImpl(mockGrpcClient.Object, null);
        BackendServiceGroupHealth responseCallSettings = await client.GetHealthAsync(request.Project, request.BackendService, request.ResourceGroupReferenceResource, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
        xunit::Assert.Same(expectedResponse, responseCallSettings);
        BackendServiceGroupHealth responseCancellationToken = await client.GetHealthAsync(request.Project, request.BackendService, request.ResourceGroupReferenceResource, st::CancellationToken.None);
        xunit::Assert.Same(expectedResponse, responseCancellationToken);
        mockGrpcClient.VerifyAll();
    }
}
}
| |
//
// Device.cs
//
// Author:
// Aaron Bockover <abockover@novell.com>
//
// Copyright (C) 2006-2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.Collections.Generic;
using NDesk.DBus;
namespace Hal
{
/// <summary>
/// Describes a single HAL property change delivered by the
/// org.freedesktop.Hal.Device PropertyModified D-Bus signal.
/// </summary>
public struct PropertyModification
{
// Name of the HAL property that changed (e.g. "info.parent").
public string Key;
// True when the property was added by this modification.
public bool Added;
// True when the property was removed by this modification.
public bool Removed;
}
// Raw D-Bus signature of the PropertyModified signal: HAL sends the
// modification count separately from the array of modifications.
internal delegate void DBusPropertyModifiedHandler(int modificationsLength,
PropertyModification [] modifications);
/// <summary>
/// D-Bus proxy interface for org.freedesktop.Hal.Device. Mirrors the HAL
/// per-device API: typed property get/set, string-list manipulation,
/// capabilities, locking, and the PropertyModified signal.
/// </summary>
[Interface("org.freedesktop.Hal.Device")]
internal interface IDevice
{
// TODO:
// Need to support the Condition event, but it has a
// variable number of arguments, not currently supported
event DBusPropertyModifiedHandler PropertyModified;
// Typed setters; HAL stores each property with an explicit wire type.
void SetPropertyString(string key, string value);
void SetPropertyInteger(string key, int value);
void SetPropertyBoolean(string key, bool value);
void SetPropertyDouble(string key, double value);
void SetPropertyStringList(string key, string [] value);
void SetProperty(string key, ulong value);
ulong GetProperty(string key); // nasty hack to get around the fact
// that HAL doesn't actually send this
// in a variant, nor does it have a
// GetPropertyUInt64
// should be object GetProperty(string key)
// In-place edits for string-list valued properties.
void StringListPrepend(string key, string value);
void StringListAppend(string key, string value);
void StringListRemove(string key, string value);
// Typed getters matching the setters above.
string GetPropertyString(string key);
int GetPropertyInteger(string key);
bool GetPropertyBoolean(string key);
double GetPropertyDouble(string key);
string [] GetPropertyStringList(string key);
IDictionary<string, object> GetAllProperties();
void RemoveProperty(string key);
PropertyType GetPropertyType(string key);
bool PropertyExists(string key);
void AddCapability(string capability);
bool QueryCapability(string capability);
// Advisory device locking with a human-readable reason.
void Lock(string reason);
void Unlock();
}
// Single-byte D-Bus wire type codes (each value is the ASCII character
// used in a D-Bus type signature, e.g. 's' for string, 'a' for array).
internal enum DType : byte
{
Invalid = (byte)'\0',
Byte = (byte)'y',
Boolean = (byte)'b',
Int16 = (byte)'n',
UInt16 = (byte)'q',
Int32 = (byte)'i',
UInt32 = (byte)'u',
Int64 = (byte)'x',
UInt64 = (byte)'t',
Single = (byte)'f',
Double = (byte)'d',
String = (byte)'s',
ObjectPath = (byte)'o',
Signature = (byte)'g',
Array = (byte)'a',
Struct = (byte)'r',
DictEntry = (byte)'e',
Variant = (byte)'v',
StructBegin = (byte)'(',
StructEnd = (byte)')',
DictEntryBegin = (byte)'{',
DictEntryEnd = (byte)'}',
}
/// <summary>
/// Public HAL property types, expressed in terms of the underlying D-Bus
/// wire codes. StrList is a synthetic code (not a real D-Bus type) built
/// from the String code shifted left plus 'l'.
/// </summary>
public enum PropertyType
{
Invalid = DType.Invalid,
Int32 = DType.Int32,
UInt64 = DType.UInt64,
Double = DType.Double,
Boolean = DType.Boolean,
String = DType.String,
StrList = ((int)(DType.String << 8) + ('l'))
}
/// <summary>
/// Event payload carrying the batch of HAL property changes that
/// triggered a <see cref="PropertyModifiedHandler"/> invocation.
/// </summary>
public class PropertyModifiedArgs : EventArgs
{
private PropertyModification [] mods;

public PropertyModifiedArgs(PropertyModification [] modifications)
{
this.mods = modifications;
}

/// <summary>The set of property modifications reported by HAL.</summary>
public PropertyModification [] Modifications {
get { return this.mods; }
}
}
// Managed-style event signature raised by Device when HAL reports property changes.
public delegate void PropertyModifiedHandler(object o, PropertyModifiedArgs args);
/// <summary>
/// Managed wrapper around a single HAL device: proxies the
/// org.freedesktop.Hal.Device D-Bus interface for typed property access,
/// capabilities, advisory locking and property-change notification.
/// Identity, equality, ordering and hashing are all based on the UDI.
/// </summary>
public class Device : IEnumerable<KeyValuePair<string, object>>, IEqualityComparer<Device>,
IEquatable<Device>, IComparer<Device>, IComparable<Device>
{
private string udi;
private IDevice device;

/// <summary>Raised when HAL reports that properties of this device changed.</summary>
public event PropertyModifiedHandler PropertyModified;

/// <summary>
/// Binds to the HAL device identified by <paramref name="udi"/> on the
/// system bus and subscribes to its property-change signal.
/// </summary>
public Device(string udi)
{
this.udi = udi;
device = CastDevice<IDevice>();
device.PropertyModified += OnPropertyModified;
}

/// <summary>
/// Maps an array of UDI strings to Device wrappers; returns an empty
/// array (never null) for null or empty input.
/// </summary>
public static Device [] UdisToDevices(string [] udis)
{
if(udis == null || udis.Length == 0) {
return new Device[0];
}
Device [] devices = new Device[udis.Length];
for(int i = 0; i < udis.Length; i++) {
devices[i] = new Device(udis[i]);
}
return devices;
}

protected virtual void OnPropertyModified(int modificationsLength, PropertyModification [] modifications)
{
// HAL sends the modification count separately from the array;
// sanity-check that the two agree before dispatching.
if(modifications.Length != modificationsLength) {
throw new ApplicationException("Number of modified properties does not match");
}
PropertyModifiedHandler handler = PropertyModified;
if(handler != null) {
handler(this, new PropertyModifiedArgs(modifications));
}
}

/// <summary>UDIs of devices whose info.parent property is this device.</summary>
public string [] GetChildren(Manager manager)
{
return manager.FindDeviceByStringMatch("info.parent", Udi);
}

/// <summary>Child devices (as Device wrappers) whose info.parent is this device.</summary>
public Device [] GetChildrenAsDevice(Manager manager)
{
return manager.FindDeviceByStringMatchAsDevice("info.parent", Udi);
}

public void Lock(string reason)
{
device.Lock(reason);
}

public void Unlock()
{
device.Unlock();
}

// --- Typed property getters: straight pass-throughs to HAL ---

public string GetPropertyString(string key)
{
return device.GetPropertyString(key);
}

public int GetPropertyInteger(string key)
{
return device.GetPropertyInteger(key);
}

public ulong GetPropertyUInt64(string key)
{
// Uses the IDevice.GetProperty hack; HAL has no GetPropertyUInt64.
return device.GetProperty(key);
}

public double GetPropertyDouble(string key)
{
return device.GetPropertyDouble(key);
}

public bool GetPropertyBoolean(string key)
{
return device.GetPropertyBoolean(key);
}

public string [] GetPropertyStringList(string key)
{
return device.GetPropertyStringList(key);
}

/// <summary>
/// Returns the wire type of a property, or PropertyType.Invalid when the
/// property does not exist (avoids a D-Bus error from HAL).
/// </summary>
public PropertyType GetPropertyType(string key)
{
return PropertyExists(key) ? device.GetPropertyType(key) : PropertyType.Invalid;
}

// --- String-list property editing ---

public void StringListPrepend(string key, string value)
{
// BUG FIX: previously this called device.SetPropertyString(key, value),
// which replaced the whole property with a plain string instead of
// prepending to the string list (copy-paste error; compare Append/Remove).
device.StringListPrepend(key, value);
}

public void StringListAppend(string key, string value)
{
device.StringListAppend(key, value);
}

public void StringListRemove(string key, string value)
{
device.StringListRemove(key, value);
}

// --- Typed property setters: straight pass-throughs to HAL ---

public void SetPropertyString(string key, string value)
{
device.SetPropertyString(key, value);
}

public void SetPropertyUInt64(string key, ulong value)
{
device.SetProperty(key, value);
}

public void SetPropertyInteger(string key, int value)
{
device.SetPropertyInteger(key, value);
}

public void SetPropertyDouble(string key, double value)
{
device.SetPropertyDouble(key, value);
}

public void SetPropertyBoolean(string key, bool value)
{
device.SetPropertyBoolean(key, value);
}

public void SetPropertyStringList(string key, string [] value)
{
device.SetPropertyStringList(key, value);
}

public void RemoveProperty(string key)
{
device.RemoveProperty(key);
}

public bool PropertyExists(string key)
{
return device.PropertyExists(key);
}

public void AddCapability(string capability)
{
device.AddCapability(capability);
}

public bool QueryCapability(string capability)
{
return device.QueryCapability(capability);
}

/// <summary>
/// Obtains a typed D-Bus proxy for this device's object path on the HAL
/// service; throws if HAL is not present on the system bus.
/// </summary>
public T CastDevice<T>()
{
if(!Bus.System.NameHasOwner("org.freedesktop.Hal")) {
throw new ApplicationException("Could not find org.freedesktop.Hal");
}
return Bus.System.GetObject<T>("org.freedesktop.Hal", new ObjectPath(Udi));
}

// Enumerates all HAL properties of the device as key/value pairs.
public IEnumerator<KeyValuePair<string, object>> GetEnumerator()
{
return device.GetAllProperties().GetEnumerator();
}

IEnumerator IEnumerable.GetEnumerator()
{
return device.GetAllProperties().GetEnumerator();
}

// Equality, ordering and hashing are all delegated to the UDI string.

public bool Equals(Device other)
{
return Udi.Equals(other.Udi);
}

public bool Equals(Device a, Device b)
{
return a.Udi.Equals(b.Udi);
}

public int CompareTo(Device other)
{
return Udi.CompareTo(other.Udi);
}

public int Compare(Device a, Device b)
{
return a.Udi.CompareTo(b.Udi);
}

public int GetHashCode(Device a)
{
return a.Udi.GetHashCode();
}

public override int GetHashCode()
{
return Udi.GetHashCode();
}

public override string ToString()
{
return udi;
}

/// <summary>
/// String-property indexer: returns null when the property is missing;
/// setting always writes a string-typed property.
/// </summary>
public string this[string property] {
get { return PropertyExists(property) ? GetPropertyString(property) : null; }
set { SetPropertyString(property, value); }
}

/// <summary>Unique Device Identifier (the HAL object path).</summary>
public string Udi {
get { return udi; }
}

/// <summary>
/// True when the device advertises the org.freedesktop.Hal.Device.Volume
/// interface in its info.interfaces property.
/// </summary>
public bool IsVolume {
get {
if(!PropertyExists("info.interfaces")) {
return false;
}
foreach(string @interface in GetPropertyStringList("info.interfaces")) {
if(@interface == "org.freedesktop.Hal.Device.Volume") {
return true;
}
}
return false;
}
}

/// <summary>Volume view of this device (caller should check IsVolume first).</summary>
public Volume Volume {
get { return new Volume(Udi); }
}

/// <summary>Parent device per info.parent, or null for a root device.</summary>
public Device Parent {
get {
if(PropertyExists("info.parent")) {
return new Device(this["info.parent"]);
}
return null;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Moq;
using NuGet.Test.Mocks;
using Xunit;
namespace NuGet.Test
{
public class PackageWalkerTest
{
[Fact]
public void ResolveDependenciesForInstallPackageWithUnknownDependencyThrows()
{
    // Arrange: A depends on B, but neither repository contains B.
    IPackage package = PackageUtility.CreatePackage("A",
                                                    "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        new PackageDependency("B")
                                                    });
    IPackageOperationResolver resolver = new InstallWalker(new MockPackageRepository(),
                                                           new MockPackageRepository(),
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert: resolution must fail because B cannot be found.
    // NOTE: removed stray lines left over from a bad merge that re-declared
    // 'resolver' and referenced undefined locals ('localRepository',
    // 'repository', 'packageA'), which broke compilation.
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(package), "Unable to resolve dependency 'B'.");
}
[Fact]
public void ReverseDependencyWalkerUsersVersionAndIdToDetermineVisited()
{
    // Arrange: two versions of A, each pinned to the matching version of B.
    IPackage a1 = PackageUtility.CreatePackage("A",
                                               "1.0",
                                               dependencies: new List<PackageDependency> {
                                                   PackageDependency.CreateDependency("B", "[1.0]")
                                               });
    IPackage a2 = PackageUtility.CreatePackage("A",
                                               "2.0",
                                               dependencies: new List<PackageDependency> {
                                                   PackageDependency.CreateDependency("B", "[2.0]")
                                               });
    IPackage b1 = PackageUtility.CreatePackage("B", "1.0");
    IPackage b2 = PackageUtility.CreatePackage("B", "2.0");

    var repository = new MockPackageRepository();
    repository.AddPackage(a1);
    repository.AddPackage(a2);
    repository.AddPackage(b1);
    repository.AddPackage(b2);

    // Act
    IDependentsResolver lookup = new DependentsWalker(repository);

    // Assert: each B version is depended on only by the matching A version,
    // proving the walker keys its visited set on (id, version), not id alone.
    Assert.Equal(0, lookup.GetDependents(a1).Count());
    Assert.Equal(0, lookup.GetDependents(a2).Count());
    Assert.Equal(1, lookup.GetDependents(b1).Count());
    Assert.Equal(1, lookup.GetDependents(b2).Count());
}
[Fact]
public void ResolveDependenciesForInstallPackageResolvesDependencyUsingDependencyProvider()
{
    // Arrange: A depends on B, but B is only reachable through the
    // repository's IDependencyResolver implementation, not GetPackages().
    IPackage packageA = PackageUtility.CreatePackage("A",
                                                     "1.0",
                                                     dependencies: new List<PackageDependency> {
                                                         new PackageDependency("B")
                                                     });
    IPackage packageB = PackageUtility.CreatePackage("B");
    var repository = new Mock<PackageRepositoryBase>();
    repository.Setup(c => c.GetPackages()).Returns(new[] { packageA }.AsQueryable());
    var dependencyProvider = repository.As<IDependencyResolver>();
    // Expect the walker to delegate resolution of "B" to the provider.
    dependencyProvider.Setup(c => c.ResolveDependency(It.Is<PackageDependency>(p => p.Id == "B"), It.IsAny<IPackageConstraintProvider>(), false, true, DependencyVersion.Lowest))
        .Returns(packageB).Verifiable();
    var localRepository = new MockPackageRepository();
    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           repository.Object,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);
    // Act
    var operations = resolver.ResolveOperations(packageA).ToList();
    // Assert: the dependency installs first, then the requested package.
    Assert.Equal(2, operations.Count);
    Assert.Equal(PackageAction.Install, operations.First().Action);
    Assert.Equal(packageB, operations.First().Package);
    Assert.Equal(PackageAction.Install, operations.Last().Action);
    Assert.Equal(packageA, operations.Last().Package);
    dependencyProvider.Verify();
}
[Fact]
public void ResolveDependenciesForInstallPackageResolvesDependencyWithConstraintsUsingDependencyResolver()
{
    // Arrange: A depends on B >= 1.1; the strict mock repository must have
    // the versioned dependency resolved through IDependencyResolver.
    var packageDependency = new PackageDependency("B", new VersionSpec { MinVersion = new SemanticVersion("1.1") });
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                     dependencies: new List<PackageDependency> { packageDependency });
    IPackage packageB12 = PackageUtility.CreatePackage("B", "1.2");
    var repository = new Mock<PackageRepositoryBase>(MockBehavior.Strict);
    repository.Setup(c => c.GetPackages()).Returns(new[] { packageA }.AsQueryable());
    var dependencyProvider = repository.As<IDependencyResolver>();
    // The provider is expected to be asked for exactly this dependency object.
    dependencyProvider.Setup(c => c.ResolveDependency(packageDependency, It.IsAny<IPackageConstraintProvider>(), false, true, DependencyVersion.Lowest))
        .Returns(packageB12).Verifiable();
    var localRepository = new MockPackageRepository();
    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           repository.Object,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);
    // Act
    var operations = resolver.ResolveOperations(packageA).ToList();
    // Assert: B 1.2 (satisfying the >= 1.1 constraint) installs before A.
    Assert.Equal(2, operations.Count);
    Assert.Equal(PackageAction.Install, operations.First().Action);
    Assert.Equal(packageB12, operations.First().Package);
    Assert.Equal(PackageAction.Install, operations.Last().Action);
    Assert.Equal(packageA, operations.Last().Package);
    dependencyProvider.Verify();
}
[Fact]
public void ResolveDependenciesForInstallCircularReferenceThrows()
{
    // Arrange: A 1.0 and B 1.0 depend on each other, forming a cycle.
    var installed = new MockPackageRepository();
    var source = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("B")
                                              });
    IPackage b = PackageUtility.CreatePackage("B", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("A")
                                              });
    source.AddPackage(a);
    source.AddPackage(b);

    var resolver = new InstallWalker(installed,
                                     source,
                                     NullLogger.Instance,
                                     ignoreDependencies: false,
                                     allowPrereleaseVersions: false,
                                     dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert: the walker must detect the A => B => A cycle.
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(a), "Circular dependency detected 'A 1.0 => B 1.0 => A 1.0'.");
}
[Fact]
public void ResolveDependenciesForInstallDiamondDependencyGraph()
{
    // Arrange: diamond graph A -> (B, C), B -> D, C -> D.
    //     A
    //    / \
    //   B   C
    //    \ /
    //     D
    var installed = new MockPackageRepository();
    var source = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("B"),
                                                  new PackageDependency("C")
                                              });
    IPackage b = PackageUtility.CreatePackage("B", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("D")
                                              });
    IPackage c = PackageUtility.CreatePackage("C", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("D")
                                              });
    IPackage d = PackageUtility.CreatePackage("D", "1.0");
    source.AddPackage(a);
    source.AddPackage(b);
    source.AddPackage(c);
    source.AddPackage(d);

    var resolver = new InstallWalker(installed,
                                     source,
                                     NullLogger.Instance,
                                     ignoreDependencies: false,
                                     allowPrereleaseVersions: false,
                                     dependencyVersion: DependencyVersion.Lowest);

    // Act
    var operations = resolver.ResolveOperations(a).ToList();

    // Assert: every node of the diamond is scheduled exactly once
    // (D is shared by B and C, so only four operations in total).
    var byId = operations.ToDictionary(op => op.Package.Id);
    Assert.Equal(4, operations.Count);
    Assert.NotNull(byId["A"]);
    Assert.NotNull(byId["B"]);
    Assert.NotNull(byId["C"]);
    Assert.NotNull(byId["D"]);
}
[Fact]
public void ResolveDependenciesForInstallDiamondDependencyGraphWithDifferntVersionOfSamePackage()
{
    // Arrange: diamond with conflicting version floors.
    // A -> [B, C]
    // B -> [D >= 1, E >= 2]
    // C -> [D >= 2, E >= 1]
    //     A
    //    / \
    //   B   C
    //  | \  | \
    // D1 E2 D2 E1
    IPackage packageA = PackageUtility.CreateProjectLevelPackage("A", "1.0",
                                                                 dependencies: new List<PackageDependency> {
                                                                     new PackageDependency("B"),
                                                                     new PackageDependency("C")
                                                                 });
    IPackage packageB = PackageUtility.CreateProjectLevelPackage("B", "1.0",
                                                                 dependencies: new List<PackageDependency> {
                                                                     PackageDependency.CreateDependency("D", "1.0"),
                                                                     PackageDependency.CreateDependency("E", "2.0")
                                                                 });
    IPackage packageC = PackageUtility.CreateProjectLevelPackage("C", "1.0",
                                                                 dependencies: new List<PackageDependency> {
                                                                     PackageDependency.CreateDependency("D", "2.0"),
                                                                     PackageDependency.CreateDependency("E", "1.0")
                                                                 });
    IPackage packageD10 = PackageUtility.CreateProjectLevelPackage("D", "1.0");
    IPackage packageD20 = PackageUtility.CreateProjectLevelPackage("D", "2.0");
    IPackage packageE10 = PackageUtility.CreateProjectLevelPackage("E", "1.0");
    IPackage packageE20 = PackageUtility.CreateProjectLevelPackage("E", "2.0");
    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);
    sourceRepository.AddPackage(packageD20);
    sourceRepository.AddPackage(packageD10);
    sourceRepository.AddPackage(packageE20);
    sourceRepository.AddPackage(packageE10);
    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);
    // Act: resolve twice to verify the result is stable across calls.
    var operations = resolver.ResolveOperations(packageA).ToList();
    var projectOperations = resolver.ResolveOperations(packageA).ToList();
    // Assert: both D and E settle on 2.0 (the tighter of the two floors),
    // and each package appears exactly once, in dependency-first order.
    Assert.Equal(5, operations.Count);
    Assert.Equal("E", operations[0].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), operations[0].Package.Version);
    Assert.Equal("B", operations[1].Package.Id);
    Assert.Equal("D", operations[2].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), operations[2].Package.Version);
    Assert.Equal("C", operations[3].Package.Id);
    Assert.Equal("A", operations[4].Package.Id);
    Assert.Equal(5, projectOperations.Count);
    Assert.Equal("E", projectOperations[0].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), projectOperations[0].Package.Version);
    Assert.Equal("B", projectOperations[1].Package.Id);
    Assert.Equal("D", projectOperations[2].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), projectOperations[2].Package.Version);
    Assert.Equal("C", projectOperations[3].Package.Id);
    Assert.Equal("A", projectOperations[4].Package.Id);
}
[Fact]
public void UninstallWalkerIgnoresMissingDependencies()
{
    // Arrange: A -> [B, C], C -> [D]; B is deliberately absent from the
    // local repository to simulate a broken install.
    var installed = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("B"),
                                                  new PackageDependency("C")
                                              });
    IPackage c = PackageUtility.CreatePackage("C", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("D")
                                              });
    IPackage d = PackageUtility.CreatePackage("D", "1.0");
    installed.AddPackage(a);
    installed.AddPackage(c);
    installed.AddPackage(d);

    IPackageOperationResolver resolver = new UninstallWalker(installed,
                                                             new DependentsWalker(installed),
                                                             NullLogger.Instance,
                                                             removeDependencies: true,
                                                             forceRemove: false);

    // Act
    var removed = resolver.ResolveOperations(a)
        .ToDictionary(op => op.Package.Id);

    // Assert: the missing B is skipped instead of failing the uninstall.
    Assert.Equal(3, removed.Count);
    Assert.NotNull(removed["A"]);
    Assert.NotNull(removed["C"]);
    Assert.NotNull(removed["D"]);
}
[Fact]
public void ResolveDependenciesForUninstallDiamondDependencyGraph()
{
    // Arrange: diamond graph A -> (B, C), B -> D, C -> D, all installed.
    //     A
    //    / \
    //   B   C
    //    \ /
    //     D
    var installed = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("B"),
                                                  new PackageDependency("C")
                                              });
    IPackage b = PackageUtility.CreatePackage("B", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("D")
                                              });
    IPackage c = PackageUtility.CreatePackage("C", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("D")
                                              });
    IPackage d = PackageUtility.CreatePackage("D", "1.0");
    installed.AddPackage(a);
    installed.AddPackage(b);
    installed.AddPackage(c);
    installed.AddPackage(d);

    IPackageOperationResolver resolver = new UninstallWalker(installed,
                                                             new DependentsWalker(installed),
                                                             NullLogger.Instance,
                                                             removeDependencies: true,
                                                             forceRemove: false);

    // Act
    var removed = resolver.ResolveOperations(a)
        .ToDictionary(op => op.Package.Id);

    // Assert: uninstalling A cascades to the whole diamond, each node once.
    Assert.Equal(4, removed.Count);
    Assert.NotNull(removed["A"]);
    Assert.NotNull(removed["B"]);
    Assert.NotNull(removed["C"]);
    Assert.NotNull(removed["D"]);
}
[Fact]
public void ResolveDependencyForInstallCircularReferenceWithDifferentVersionOfPackageReferenceThrows()
{
    // Arrange: installing A 1.0 pulls in B 1.0, which pins A [1.5] —
    // a cycle through a *different* version of A.
    var installed = new MockPackageRepository();
    var source = new MockPackageRepository();
    IPackage a10 = PackageUtility.CreatePackage("A", "1.0",
                                                dependencies: new List<PackageDependency> {
                                                    new PackageDependency("B")
                                                });
    IPackage a15 = PackageUtility.CreatePackage("A", "1.5",
                                                dependencies: new List<PackageDependency> {
                                                    new PackageDependency("B")
                                                });
    IPackage b10 = PackageUtility.CreatePackage("B", "1.0",
                                                dependencies: new List<PackageDependency> {
                                                    PackageDependency.CreateDependency("A", "[1.5]")
                                                });
    source.AddPackage(a10);
    source.AddPackage(a15);
    source.AddPackage(b10);

    var resolver = new InstallWalker(installed,
                                     source,
                                     NullLogger.Instance,
                                     ignoreDependencies: false,
                                     allowPrereleaseVersions: false,
                                     dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert: the cycle is reported across versions.
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(a10), "Circular dependency detected 'A 1.0 => B 1.0 => A 1.5'.");
}
[Fact]
public void ResolvingDependencyForUpdateWithConflictingDependents()
{
    // Arrange: A 1.0 pins B [1.0], so updating B to 1.0.1 conflicts with
    // the installed A and forces A itself to be updated to 2.0 (any B).
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    // A 1.0 -> B [1.0]
    IPackage A10 = PackageUtility.CreatePackage("A", "1.0",
                                                dependencies: new List<PackageDependency> {
                                                    PackageDependency.CreateDependency("B", "[1.0]")
                                                }, content: new[] { "a1" });
    // A 2.0 -> B (any version)
    IPackage A20 = PackageUtility.CreatePackage("A", "2.0",
                                                dependencies: new List<PackageDependency> {
                                                    new PackageDependency("B")
                                                }, content: new[] { "a2" });
    IPackage B10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "b1" });
    IPackage B101 = PackageUtility.CreatePackage("B", "1.0.1", content: new[] { "b101" });
    IPackage B20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "a2" });
    localRepository.Add(A10);
    localRepository.Add(B10);
    sourceRepository.AddPackage(A10);
    sourceRepository.AddPackage(A20);
    sourceRepository.AddPackage(B10);
    sourceRepository.AddPackage(B101);
    sourceRepository.AddPackage(B20);
    IPackageOperationResolver resolver = new UpdateWalker(localRepository,
                                                          sourceRepository,
                                                          new DependentsWalker(localRepository),
                                                          NullConstraintProvider.Instance,
                                                          NullLogger.Instance,
                                                          updateDependencies: true,
                                                          allowPrereleaseVersions: false) { AcceptedTargets = PackageTargets.Project };
    // Act
    var packages = resolver.ResolveOperations(B101).ToList();
    // Assert: old A/B are uninstalled, then A 2.0 and B 1.0.1 installed.
    Assert.Equal(4, packages.Count);
    AssertOperation("A", "1.0", PackageAction.Uninstall, packages[0]);
    AssertOperation("B", "1.0", PackageAction.Uninstall, packages[1]);
    AssertOperation("A", "2.0", PackageAction.Install, packages[2]);
    AssertOperation("B", "1.0.1", PackageAction.Install, packages[3]);
}
[Fact]
public void ResolvingDependencyForUpdateThatHasAnUnsatisfiedConstraint()
{
    // Arrange: an external constraint pins B to exactly 1.4, but updating
    // A to 2.0 requires B >= 2.0 — resolution must fail and name the
    // constraint's source ("foo").
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    var constraintProvider = new Mock<IPackageConstraintProvider>();
    constraintProvider.Setup(m => m.GetConstraint("B")).Returns(VersionUtility.ParseVersionSpec("[1.4]"));
    constraintProvider.Setup(m => m.Source).Returns("foo");
    IPackage A10 = PackageUtility.CreatePackage("A", "1.0",
                                                dependencies: new List<PackageDependency> {
                                                    PackageDependency.CreateDependency("B", "1.5")
                                                });
    IPackage A20 = PackageUtility.CreatePackage("A", "2.0",
                                                dependencies: new List<PackageDependency> {
                                                    PackageDependency.CreateDependency("B", "2.0")
                                                });
    IPackage B15 = PackageUtility.CreatePackage("B", "1.5");
    IPackage B20 = PackageUtility.CreatePackage("B", "2.0");
    localRepository.Add(A10);
    localRepository.Add(B15);
    sourceRepository.AddPackage(A10);
    sourceRepository.AddPackage(A20);
    sourceRepository.AddPackage(B15);
    sourceRepository.AddPackage(B20);
    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           constraintProvider.Object,
                                                           null,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);
    // Act & Assert ('\u2265' is the "greater than or equal" sign in the message)
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(A20), "Unable to resolve dependency 'B (\u2265 2.0)'.'B' has an additional constraint (= 1.4) defined in foo.");
}
[Fact]
public void ResolveDependencyForInstallPackageWithDependencyThatDoesntMeetMinimumVersionThrows()
{
    // Arrange: A requires B >= 1.5, but only B 1.4 is available.
    var installed = new MockPackageRepository();
    var source = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  PackageDependency.CreateDependency("B", "1.5")
                                              });
    IPackage b = PackageUtility.CreatePackage("B", "1.4");
    source.AddPackage(a);
    source.AddPackage(b);

    var resolver = new InstallWalker(installed,
                                     source,
                                     NullLogger.Instance,
                                     ignoreDependencies: false,
                                     allowPrereleaseVersions: false,
                                     dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert: no version of B satisfies the minimum, so resolution fails.
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(a), "Unable to resolve dependency 'B (\u2265 1.5)'.");
}
[Fact]
public void ResolveDependencyForInstallPackageWithDependencyThatDoesntMeetExactVersionThrows()
{
    // Arrange: A requires exactly B 1.5, but only B 1.4 is available.
    var installed = new MockPackageRepository();
    var source = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  PackageDependency.CreateDependency("B", "[1.5]")
                                              });
    source.AddPackage(a);
    IPackage b = PackageUtility.CreatePackage("B", "1.4");
    source.AddPackage(b);

    var resolver = new InstallWalker(installed,
                                     source,
                                     NullLogger.Instance,
                                     ignoreDependencies: false,
                                     allowPrereleaseVersions: false,
                                     dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert: the exact-version pin cannot be satisfied.
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(a), "Unable to resolve dependency 'B (= 1.5)'.");
}
[Fact]
public void ResolveOperationsForInstallSameDependencyAtDifferentLevelsInGraph()
{
    // Arrange: B appears at three different depths of the graph; it must
    // still be scheduled exactly once, before everything that needs it.
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    // A1 -> B1, C1
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                     dependencies: new List<PackageDependency> {
                                                         PackageDependency.CreateDependency("B", "1.0"),
                                                         PackageDependency.CreateDependency("C", "1.0")
                                                     });
    // B1
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0");
    // C1 -> B1, D1
    IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
                                                     dependencies: new List<PackageDependency> {
                                                         PackageDependency.CreateDependency("B", "1.0"),
                                                         PackageDependency.CreateDependency("D", "1.0")
                                                     });
    // D1 -> B1
    IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
                                                     dependencies: new List<PackageDependency> {
                                                         PackageDependency.CreateDependency("B", "1.0")
                                                     });
    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);
    sourceRepository.AddPackage(packageD);
    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);
    // Act & Assert: dependency-first order with B deduplicated.
    var packages = resolver.ResolveOperations(packageA).ToList();
    Assert.Equal(4, packages.Count);
    Assert.Equal("B", packages[0].Package.Id);
    Assert.Equal("D", packages[1].Package.Id);
    Assert.Equal("C", packages[2].Package.Id);
    Assert.Equal("A", packages[3].Package.Id);
}
[Fact]
public void ResolveDependenciesForInstallSameDependencyAtDifferentLevelsInGraphDuringUpdate()
{
    // Arrange: the 1.0 graph is installed; updating to A 2.0 must swap the
    // whole graph, uninstalling in dependent-first order and reinstalling
    // in dependency-first order.
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    // A1 -> B1, C1
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                     content: new[] { "A1" },
                                                     dependencies: new List<PackageDependency> {
                                                         PackageDependency.CreateDependency("B", "1.0"),
                                                         PackageDependency.CreateDependency("C", "1.0")
                                                     });
    // B1
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0", new[] { "B1" });
    // C1 -> B1, D1
    IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
                                                     content: new[] { "C1" },
                                                     dependencies: new List<PackageDependency> {
                                                         PackageDependency.CreateDependency("B", "1.0"),
                                                         PackageDependency.CreateDependency("D", "1.0")
                                                     });
    // D1 -> B1
    IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
                                                     content: new[] { "A1" },
                                                     dependencies: new List<PackageDependency> {
                                                         PackageDependency.CreateDependency("B", "1.0")
                                                     });
    // A2 -> B2, C2
    IPackage packageA2 = PackageUtility.CreatePackage("A", "2.0",
                                                      content: new[] { "A2" },
                                                      dependencies: new List<PackageDependency> {
                                                          PackageDependency.CreateDependency("B", "2.0"),
                                                          PackageDependency.CreateDependency("C", "2.0")
                                                      });
    // B2
    IPackage packageB2 = PackageUtility.CreatePackage("B", "2.0", new[] { "B2" });
    // C2 -> B2, D2
    IPackage packageC2 = PackageUtility.CreatePackage("C", "2.0",
                                                      content: new[] { "C2" },
                                                      dependencies: new List<PackageDependency> {
                                                          PackageDependency.CreateDependency("B", "2.0"),
                                                          PackageDependency.CreateDependency("D", "2.0")
                                                      });
    // D2 -> B2
    IPackage packageD2 = PackageUtility.CreatePackage("D", "2.0",
                                                      content: new[] { "D2" },
                                                      dependencies: new List<PackageDependency> {
                                                          PackageDependency.CreateDependency("B", "2.0")
                                                      });
    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);
    sourceRepository.AddPackage(packageD);
    sourceRepository.AddPackage(packageA2);
    sourceRepository.AddPackage(packageB2);
    sourceRepository.AddPackage(packageC2);
    sourceRepository.AddPackage(packageD2);
    localRepository.AddPackage(packageA);
    localRepository.AddPackage(packageB);
    localRepository.AddPackage(packageC);
    localRepository.AddPackage(packageD);
    IPackageOperationResolver resolver = new UpdateWalker(localRepository,
                                                          sourceRepository,
                                                          new DependentsWalker(localRepository),
                                                          NullConstraintProvider.Instance,
                                                          NullLogger.Instance,
                                                          updateDependencies: true,
                                                          allowPrereleaseVersions: false);
    // Act
    var operations = resolver.ResolveOperations(packageA2).ToList();
    // Assert: four uninstalls (dependents first) then four installs
    // (dependencies first).
    Assert.Equal(8, operations.Count);
    AssertOperation("A", "1.0", PackageAction.Uninstall, operations[0]);
    AssertOperation("C", "1.0", PackageAction.Uninstall, operations[1]);
    AssertOperation("D", "1.0", PackageAction.Uninstall, operations[2]);
    AssertOperation("B", "1.0", PackageAction.Uninstall, operations[3]);
    AssertOperation("B", "2.0", PackageAction.Install, operations[4]);
    AssertOperation("D", "2.0", PackageAction.Install, operations[5]);
    AssertOperation("C", "2.0", PackageAction.Install, operations[6]);
    AssertOperation("A", "2.0", PackageAction.Install, operations[7]);
}
[Fact]
public void ResolveDependenciesForInstallPackageWithDependencyReturnsPackageAndDependency()
{
    // Arrange: A -> B, both installed locally.
    var installed = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("B")
                                              });
    IPackage b = PackageUtility.CreatePackage("B", "1.0");
    installed.AddPackage(a);
    installed.AddPackage(b);

    IPackageOperationResolver resolver = new UninstallWalker(installed,
                                                             new DependentsWalker(installed),
                                                             NullLogger.Instance,
                                                             removeDependencies: true,
                                                             forceRemove: false);

    // Act
    var removed = resolver.ResolveOperations(a)
        .ToDictionary(op => op.Package.Id);

    // Assert: removing A with dependencies also removes B.
    Assert.Equal(2, removed.Count);
    Assert.NotNull(removed["A"]);
    Assert.NotNull(removed["B"]);
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithDependentThrows()
{
    // Arrange: A depends on B, so B cannot be uninstalled on its own.
    var installed = new MockPackageRepository();
    IPackage a = PackageUtility.CreatePackage("A", "1.0",
                                              dependencies: new List<PackageDependency> {
                                                  new PackageDependency("B")
                                              });
    IPackage b = PackageUtility.CreatePackage("B", "1.0");
    installed.AddPackage(a);
    installed.AddPackage(b);

    IPackageOperationResolver resolver = new UninstallWalker(installed,
                                                             new DependentsWalker(installed),
                                                             NullLogger.Instance,
                                                             removeDependencies: false,
                                                             forceRemove: false);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(b), "Unable to uninstall 'B 1.0' because 'A 1.0' depends on it.");
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithDependentAndRemoveDependenciesThrows()
{
// Arrange
var localRepository = new MockPackageRepository();
IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
dependencies: new List<PackageDependency> {
new PackageDependency("B")
});
IPackage packageB = PackageUtility.CreatePackage("B", "1.0");
localRepository.AddPackage(packageA);
localRepository.AddPackage(packageB);
IPackageOperationResolver resolver = new UninstallWalker(localRepository,
new DependentsWalker(localRepository),
NullLogger.Instance,
removeDependencies: true,
forceRemove: false);
// Act & Assert
ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(packageB), "Unable to uninstall 'B 1.0' because 'A 1.0' depends on it.");
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithDependentAndForceReturnsPackage()
{
    // Arrange: A 1.0 depends on B 1.0; both are installed locally.
    var repository = new MockPackageRepository();
    var dependent = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> { new PackageDependency("B") });
    var dependency = PackageUtility.CreatePackage("B", "1.0");
    repository.AddPackage(dependent);
    repository.AddPackage(dependency);

    IPackageOperationResolver resolver = new UninstallWalker(repository,
        new DependentsWalker(repository),
        NullLogger.Instance,
        removeDependencies: false,
        forceRemove: true);

    // Act: forceRemove bypasses the dependents check on A.
    var resolved = resolver.ResolveOperations(dependency)
        .ToDictionary(p => p.Package.Id);

    // Assert: only B itself is scheduled for uninstall.
    Assert.Equal(1, resolved.Count);
    Assert.NotNull(resolved["B"]);
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithRemoveDependenciesExcludesDependencyIfDependencyInUse()
{
    // Uninstalling A with removeDependencies: true must keep C installed,
    // because D (still installed) also depends on C.
    // Arrange
    var localRepository = new MockPackageRepository();
    // A 1.0 -> [B, C]
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        });
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0");
    IPackage packageC = PackageUtility.CreatePackage("C", "1.0");
    // D -> [C]
    IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("C"),
        });
    localRepository.AddPackage(packageD);
    localRepository.AddPackage(packageA);
    localRepository.AddPackage(packageB);
    localRepository.AddPackage(packageC);
    IPackageOperationResolver resolver = new UninstallWalker(localRepository,
        new DependentsWalker(localRepository),
        NullLogger.Instance,
        removeDependencies: true,
        forceRemove: false);

    // Act
    var packages = resolver.ResolveOperations(packageA)
        .ToDictionary(p => p.Package.Id);

    // Assert: only A and its otherwise-unused dependency B are scheduled; C survives.
    Assert.Equal(2, packages.Count);
    Assert.NotNull(packages["A"]);
    Assert.NotNull(packages["B"]);
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithRemoveDependenciesSetAndForceReturnsAllDependencies()
{
    // Uninstalling A with both removeDependencies and forceRemove set must schedule
    // A and all of its dependencies (B and C), even though D still depends on C —
    // force overrides the dependents check.
    // Arrange
    var localRepository = new MockPackageRepository();
    // A 1.0 -> [B, C]
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        });
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0");
    IPackage packageC = PackageUtility.CreatePackage("C", "1.0");
    // D -> [C]
    IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("C"),
        });
    localRepository.AddPackage(packageA);
    localRepository.AddPackage(packageB);
    localRepository.AddPackage(packageC);
    localRepository.AddPackage(packageD);
    IPackageOperationResolver resolver = new UninstallWalker(localRepository,
        new DependentsWalker(localRepository),
        NullLogger.Instance,
        removeDependencies: true,
        forceRemove: true);

    // Act
    var packages = resolver.ResolveOperations(packageA)
        .ToDictionary(p => p.Package.Id);

    // Assert
    // FIX: the count assertion was missing, so an unexpected extra operation
    // (e.g. one for D, which is not a dependency of A) would not fail the test.
    Assert.Equal(3, packages.Count);
    Assert.NotNull(packages["A"]);
    Assert.NotNull(packages["B"]);
    Assert.NotNull(packages["C"]);
}
[Fact]
public void ProjectInstallWalkerIgnoresSolutionLevelPackages()
{
    // An update walker restricted to PackageTargets.Project must skip the
    // tools-only (solution-level) dependency B and resolve only A.
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    IPackage projectPackage = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.5]")
        },
        content: new[] { "content" });
    sourceRepository.AddPackage(projectPackage);
    // B has no content, only tools -> classified as a solution-level package.
    IPackage toolsPackage = PackageUtility.CreatePackage("B", "1.5",
        content: Enumerable.Empty<string>(),
        tools: new[] { "init.ps1" });
    sourceRepository.AddPackage(toolsPackage);
    IPackageOperationResolver resolver = new UpdateWalker(localRepository,
        sourceRepository,
        new DependentsWalker(localRepository),
        NullConstraintProvider.Instance,
        NullLogger.Instance,
        updateDependencies: true,
        allowPrereleaseVersions: false) { AcceptedTargets = PackageTargets.Project };

    // Act
    var packages = resolver.ResolveOperations(projectPackage)
        .ToDictionary(p => p.Package.Id);

    // Assert: only the project-level package A is resolved.
    Assert.Equal(1, packages.Count);
    Assert.NotNull(packages["A"]);
}
[Fact]
public void AfterPackageWalkMetaPackageIsClassifiedTheSameAsDependencies()
{
    // A meta (dependency-only) package starts as PackageTargets.None and,
    // after walking, takes on its dependencies' classification (Project here).
    // Arrange
    var mockRepository = new MockPackageRepository();
    var walker = new TestWalker(mockRepository);
    IPackage metaPackage = PackageUtility.CreatePackage(
        "A", "1.0",
        content: Enumerable.Empty<string>(),
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        },
        createRealStream: false);
    IPackage projectPackageA = PackageUtility.CreatePackage("B", "1.0", content: new[] { "contentB" });
    IPackage projectPackageB = PackageUtility.CreatePackage("C", "1.0", content: new[] { "contentC" });
    mockRepository.AddPackage(projectPackageA);
    mockRepository.AddPackage(projectPackageB);

    // Sanity check: before the walk the meta package has no target yet.
    Assert.Equal(PackageTargets.None, walker.GetPackageInfo(metaPackage).Target);

    // Act
    walker.Walk(metaPackage);

    // Assert
    Assert.Equal(PackageTargets.Project, walker.GetPackageInfo(metaPackage).Target);
}
[Fact]
public void LocalizedIntelliSenseFileCountsAsProjectTarget()
{
    // Arrange: a runtime package with lib assemblies plus a fr-fr satellite
    // package that carries only a localized IntelliSense XML file.
    var repository = new MockPackageRepository();
    var walker = new TestWalker(repository);

    var runtimePackage = PackageUtility.CreatePackage("A", "1.0",
        assemblyReferences: new[] { @"lib\A.dll", @"lib\A.xml" });
    var satellitePackage = PackageUtility.CreatePackage("A.fr-fr", "1.0",
        dependencies: new[] { new PackageDependency("A") },
        satelliteAssemblies: new[] { @"lib\fr-fr\A.xml" },
        language: "fr-fr");

    repository.AddPackage(runtimePackage);
    repository.AddPackage(satellitePackage);

    // Act
    walker.Walk(satellitePackage);

    // Assert: the satellite package is classified as project-level.
    Assert.Equal(PackageTargets.Project, walker.GetPackageInfo(satellitePackage).Target);
}
[Fact]
public void AfterPackageWalkSatellitePackageIsClassifiedTheSameAsDependencies()
{
    // A satellite package (resources only, language-suffixed id) is classified
    // like its runtime package dependency after the walk.
    // Arrange
    var mockRepository = new MockPackageRepository();
    var walker = new TestWalker(mockRepository);
    IPackage runtimePackage = PackageUtility.CreatePackage("A", "1.0",
        assemblyReferences: new[] { @"lib\A.dll" });
    IPackage satellitePackage = PackageUtility.CreatePackage("A.fr-fr", "1.0",
        dependencies: new[] { new PackageDependency("A") },
        satelliteAssemblies: new[] { @"lib\fr-fr\A.resources.dll" },
        language: "fr-fr");
    mockRepository.AddPackage(runtimePackage);
    mockRepository.AddPackage(satellitePackage);

    // Act
    walker.Walk(satellitePackage);

    // Assert
    Assert.Equal(PackageTargets.Project, walker.GetPackageInfo(satellitePackage).Target);
}
[Fact]
public void MetaPackageWithMixedTargetsThrows()
{
    // A dependency-only package whose children mix a project-level package (B)
    // and a solution-level package (C) cannot be classified and must throw.
    // Arrange
    var mockRepository = new MockPackageRepository();
    var walker = new TestWalker(mockRepository);
    IPackage metaPackage = PackageUtility.CreatePackage("A", "1.0",
        content: Enumerable.Empty<string>(),
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        },
        createRealStream: false);
    IPackage projectPackageA = PackageUtility.CreatePackage("B", "1.0", content: new[] { "contentB" });
    IPackage solutionPackage = PackageUtility.CreatePackage("C", "1.0", content: Enumerable.Empty<string>(), tools: new[] { "tools" });
    mockRepository.AddPackage(projectPackageA);
    mockRepository.AddPackage(solutionPackage);

    // Act && Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => walker.Walk(metaPackage), "Child dependencies of dependency only packages cannot mix external and project packages.");
}

// NOTE(review): method name contains a typo ("Depdend"); kept to avoid churning
// test discovery/reporting history.
[Fact]
public void ExternalPackagesThatDepdendOnProjectLevelPackagesThrows()
{
    // A solution-level (tools-only) package may not depend on a
    // project-level package; the walk must throw.
    // Arrange
    var mockRepository = new MockPackageRepository();
    var walker = new TestWalker(mockRepository);
    IPackage solutionPackage = PackageUtility.CreatePackage(
        "A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        },
        content: Enumerable.Empty<string>(),
        tools: new[] { "install.ps1" });
    IPackage projectPackageA = PackageUtility.CreatePackage("B", "1.0", content: new[] { "contentB" });
    mockRepository.AddPackage(projectPackageA);
    mockRepository.AddPackage(solutionPackage);

    // Act && Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => walker.Walk(solutionPackage), "External packages cannot depend on packages that target projects.");
}
[Fact]
public void InstallWalkerResolvesLowestMajorAndMinorVersionForDependencies()
{
    // With DependencyVersion.HighestPatch each dependency resolves to the lowest
    // satisfying major.minor but the highest patch within it:
    // B -> 1.0.9 (not 1.0/1.0.1/1.1/2.0), C -> 1.1.3 (not 1.1.1/1.5.1), D -> 2.0.
    // Arrange
    // A 1.0 -> B 1.0
    // B 1.0 -> C 1.1
    // C 1.1 -> D 1.0
    var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.0") });
    var repository = new MockPackageRepository() {
        PackageUtility.CreatePackage("B", "2.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.0.1"),
        A10,
        PackageUtility.CreatePackage("D", "2.0"),
        PackageUtility.CreatePackage("C", "1.1.3", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("C", "1.1.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("C", "1.5.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("B", "1.0.9", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.1", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") })
    };
    IPackageOperationResolver resolver = new InstallWalker(
        new MockPackageRepository(),
        repository,
        NullLogger.Instance,
        ignoreDependencies: false,
        allowPrereleaseVersions: false,
        dependencyVersion: DependencyVersion.HighestPatch);

    // Act
    var packages = resolver.ResolveOperations(A10).ToList();

    // Assert: operations are emitted in dependency order (leaf D first, root A last).
    Assert.Equal(4, packages.Count);
    Assert.Equal("D", packages[0].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), packages[0].Package.Version);
    Assert.Equal("C", packages[1].Package.Id);
    Assert.Equal(new SemanticVersion("1.1.3"), packages[1].Package.Version);
    Assert.Equal("B", packages[2].Package.Id);
    Assert.Equal(new SemanticVersion("1.0.9"), packages[2].Package.Version);
    Assert.Equal("A", packages[3].Package.Id);
    Assert.Equal(new SemanticVersion("1.0"), packages[3].Package.Version);
}
// Tests that when DependencyVersion is lowest, the dependency with the lowest major minor and patch version
// is picked.
[Fact]
public void InstallWalkerResolvesLowestMajorAndMinorAndPatchVersionOfListedPackagesForDependencies()
{
    // B 1.0 is unlisted, so the lowest *listed* satisfying version is B 1.0.1,
    // which has no dependencies of its own — hence only B and A are resolved.
    // Arrange
    // A 1.0 -> B 1.0
    // B 1.0 -> C 1.1
    // C 1.1 -> D 1.0
    var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.0") });
    var repository = new MockPackageRepository() {
        PackageUtility.CreatePackage("B", "2.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }, listed: false),
        PackageUtility.CreatePackage("B", "1.0.1"),
        A10,
        PackageUtility.CreatePackage("D", "2.0"),
        PackageUtility.CreatePackage("C", "1.1.3", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("C", "1.1.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }, listed: false),
        PackageUtility.CreatePackage("C", "1.5.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("B", "1.0.9", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.1", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") })
    };
    IPackageOperationResolver resolver = new InstallWalker(new MockPackageRepository(),
        repository,
        constraintProvider: null,
        logger: NullLogger.Instance,
        targetFramework: null,
        ignoreDependencies: false,
        allowPrereleaseVersions: false,
        dependencyVersion: DependencyVersion.Lowest);

    // Act
    var packages = resolver.ResolveOperations(A10).ToList();

    // Assert
    Assert.Equal(2, packages.Count);
    Assert.Equal("B", packages[0].Package.Id);
    Assert.Equal(new SemanticVersion("1.0.1"), packages[0].Package.Version);
    Assert.Equal("A", packages[1].Package.Id);
    Assert.Equal(new SemanticVersion("1.0"), packages[1].Package.Version);
}
// Tests that when DependencyVersion is HighestPatch, the dependency with the lowest major minor and highest patch version
// is picked.
[Fact]
public void InstallWalkerResolvesLowestMajorAndMinorHighestPatchVersionOfListedPackagesForDependencies()
{
    // Unlisted B 1.0 and C 1.1.1 are skipped; the walker picks the highest patch
    // within the lowest listed major.minor: B 1.0.9, C 1.1.3, then D 2.0.
    // Arrange
    // A 1.0 -> B 1.0
    // B 1.0 -> C 1.1
    // C 1.1 -> D 1.0
    var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.0") });
    var repository = new MockPackageRepository() {
        PackageUtility.CreatePackage("B", "2.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }, listed: false),
        PackageUtility.CreatePackage("B", "1.0.1"),
        A10,
        PackageUtility.CreatePackage("D", "2.0"),
        PackageUtility.CreatePackage("C", "1.1.3", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("C", "1.1.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }, listed: false),
        PackageUtility.CreatePackage("C", "1.5.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
        PackageUtility.CreatePackage("B", "1.0.9", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
        PackageUtility.CreatePackage("B", "1.1", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") })
    };
    IPackageOperationResolver resolver = new InstallWalker(new MockPackageRepository(),
        repository,
        constraintProvider: null,
        logger: NullLogger.Instance,
        targetFramework: null,
        ignoreDependencies: false,
        allowPrereleaseVersions: false,
        dependencyVersion: DependencyVersion.HighestPatch);

    // Act
    var packages = resolver.ResolveOperations(A10).ToList();

    // Assert: dependency order, leaf first.
    Assert.Equal(4, packages.Count);
    Assert.Equal("D", packages[0].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), packages[0].Package.Version);
    Assert.Equal("C", packages[1].Package.Id);
    Assert.Equal(new SemanticVersion("1.1.3"), packages[1].Package.Version);
    Assert.Equal("B", packages[2].Package.Id);
    Assert.Equal(new SemanticVersion("1.0.9"), packages[2].Package.Version);
    Assert.Equal("A", packages[3].Package.Id);
    Assert.Equal(new SemanticVersion("1.0"), packages[3].Package.Version);
}
[Fact]
public void ResolveOperationsForPackagesWherePackagesOrderIsDifferentFromItsDependencyOrder()
{
    // Updating A, B and C together: A 2.0 requires B >= 1.8, so B 2.0 must be
    // installed before A 2.0 even though A precedes B in the request list.
    // Arrange
    // A 1.0 -> B 1.0 to 1.5
    // A 2.0 -> B 1.8
    // B 1.0
    // B 2.0
    // C 1.0
    // C 2.0
    var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "[1.0, 1.5]") });
    var A20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.8") });
    var B10 = PackageUtility.CreatePackage("B", "1.0");
    var B20 = PackageUtility.CreatePackage("B", "2.0");
    var C10 = PackageUtility.CreatePackage("C", "1.0");
    var C20 = PackageUtility.CreatePackage("C", "2.0");
    var sourceRepository = new MockPackageRepository() {
        A10,
        A20,
        B10,
        B20,
        C10,
        C20,
    };
    var localRepository = new MockPackageRepository() {
        A10,
        B10,
        C10
    };
    var resolver = new InstallWalker(localRepository,
        sourceRepository,
        constraintProvider: NullConstraintProvider.Instance,
        logger: NullLogger.Instance,
        targetFramework: null,
        ignoreDependencies: false,
        allowPrereleaseVersions: false,
        dependencyVersion: DependencyVersion.Lowest);
    var updatePackages = new List<IPackage> { A20, B20, C20 };
    IList<IPackage> allUpdatePackagesByDependencyOrder;

    // Act
    var operations = resolver.ResolveOperations(updatePackages, out allUpdatePackagesByDependencyOrder);

    // Assert: install operations and the out-list are both in dependency order (B, A, C).
    Assert.True(operations.Count == 3);
    Assert.True(operations[0].Package == B20 && operations[0].Action == PackageAction.Install);
    Assert.True(operations[1].Package == A20 && operations[1].Action == PackageAction.Install);
    Assert.True(operations[2].Package == C20 && operations[2].Action == PackageAction.Install);
    Assert.True(allUpdatePackagesByDependencyOrder[0] == B20);
    Assert.True(allUpdatePackagesByDependencyOrder[1] == A20);
    Assert.True(allUpdatePackagesByDependencyOrder[2] == C20);
}
// Asserts that a PackageOperation has the expected action, package id and version.
private void AssertOperation(string expectedId, string expectedVersion, PackageAction expectedAction, PackageOperation operation)
{
    Assert.Equal(expectedAction, operation.Action);
    Assert.Equal(expectedId, operation.Package.Id);
    Assert.Equal(new SemanticVersion(expectedVersion), operation.Package.Version);
}
// Minimal concrete PackageWalker used by these tests to observe how packages
// are classified (PackageTargets) after a walk; dependencies are resolved
// against the supplied repository.
private class TestWalker : PackageWalker
{
    private readonly IPackageRepository _repository;

    public TestWalker(IPackageRepository repository)
    {
        _repository = repository;
    }

    // Resolve a dependency from the backing repository (no unlisted filtering).
    protected override IPackage ResolveDependency(PackageDependency dependency)
    {
        return PackageRepositoryExtensions.ResolveDependency(_repository, dependency, AllowPrereleaseVersions, false);
    }
}
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.IO;
using Newtonsoft.Json.Utilities;
using System.Globalization;
#if NETFX_CORE
using IConvertible = Newtonsoft.Json.Utilities.Convertible;
#endif
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
namespace Newtonsoft.Json
{
/// <summary>
/// Represents a writer that provides a fast, non-cached, forward-only way of generating Json data.
/// </summary>
public abstract class JsonWriter : IDisposable
{
// Internal state machine positions for the writer; indexes the columns of
// the transition table below.
internal enum State
{
    Start,
    Property,
    ObjectStart,
    Object,
    ArrayStart,
    Array,
    ConstructorStart,
    Constructor,
    Bytes,
    Closed,
    Error
}

// array that gives a new state based on the current state an the token being written
private static readonly State[][] StateArray;

// Hand-written template rows; BuildStateArray() extends this to cover every
// JsonToken value. Row index = token being written, column = current state.
// NOTE(review): "Tempate" is a typo for "Template", but the field is internal
// and may be referenced outside this file — TODO confirm before renaming.
internal static readonly State[][] StateArrayTempate = new[] {
    //                        Start                    PropertyName            ObjectStart         Object            ArrayStart              Array                   ConstructorStart        Constructor             Closed          Error
    //
    /* None             */new[]{ State.Error,            State.Error,            State.Error,        State.Error,      State.Error,            State.Error,            State.Error,            State.Error,            State.Error,    State.Error },
    /* StartObject      */new[]{ State.ObjectStart,      State.ObjectStart,      State.Error,        State.Error,      State.ObjectStart,      State.ObjectStart,      State.ObjectStart,      State.ObjectStart,      State.Error,    State.Error },
    /* StartArray       */new[]{ State.ArrayStart,       State.ArrayStart,       State.Error,        State.Error,      State.ArrayStart,       State.ArrayStart,       State.ArrayStart,       State.ArrayStart,       State.Error,    State.Error },
    /* StartConstructor */new[]{ State.ConstructorStart, State.ConstructorStart, State.Error,        State.Error,      State.ConstructorStart, State.ConstructorStart, State.ConstructorStart, State.ConstructorStart, State.Error,    State.Error },
    /* StartProperty    */new[]{ State.Property,         State.Error,            State.Property,     State.Property,   State.Error,            State.Error,            State.Error,            State.Error,            State.Error,    State.Error },
    /* Comment          */new[]{ State.Start,            State.Property,         State.ObjectStart,  State.Object,     State.ArrayStart,       State.Array,            State.Constructor,      State.Constructor,      State.Error,    State.Error },
    /* Raw              */new[]{ State.Start,            State.Property,         State.ObjectStart,  State.Object,     State.ArrayStart,       State.Array,            State.Constructor,      State.Constructor,      State.Error,    State.Error },
    /* Value (this will be copied) */new[]{ State.Start, State.Object,           State.Error,        State.Error,      State.Array,            State.Array,            State.Constructor,      State.Constructor,      State.Error,    State.Error }
};
// Expands the hand-written template into a full transition table with one row
// per JsonToken value: value-like tokens reuse the shared "value" row (index 7),
// any other token beyond the template maps to the error row (index 0).
internal static State[][] BuildStateArray()
{
    var allStates = StateArrayTempate.ToList();
    var errorStates = StateArrayTempate[0];
    var valueStates = StateArrayTempate[7];

    foreach (JsonToken valueToken in EnumUtils.GetValues(typeof(JsonToken)))
    {
        // only append rows for tokens the template does not already cover
        if (allStates.Count <= (int)valueToken)
        {
            switch (valueToken)
            {
                case JsonToken.Integer:
                case JsonToken.Float:
                case JsonToken.String:
                case JsonToken.Boolean:
                case JsonToken.Null:
                case JsonToken.Undefined:
                case JsonToken.Date:
                case JsonToken.Bytes:
                    allStates.Add(valueStates);
                    break;
                default:
                    allStates.Add(errorStates);
                    break;
            }
        }
    }
    return allStates.ToArray();
}
// Build the shared transition table once per AppDomain.
static JsonWriter()
{
    StateArray = BuildStateArray();
}

private readonly List<JsonPosition> _stack;      // enclosing (completed-entry) container positions
private JsonPosition _currentPosition;           // innermost, in-progress container position
private State _currentState;                     // current state-machine position
private Formatting _formatting;                  // backing field for Formatting
/// <summary>
/// Gets or sets a value indicating whether the underlying stream or
/// <see cref="TextWriter"/> should be closed when the writer is closed.
/// </summary>
/// <value>
/// true to close the underlying stream or <see cref="TextWriter"/> when
/// the writer is closed; otherwise false. The default is true.
/// </value>
public bool CloseOutput { get; set; }
/// <summary>
/// Gets the top.
/// </summary>
/// <value>The top.</value>
protected internal int Top
{
    get
    {
        // the stack holds enclosing containers; an in-progress current
        // position (type != None) counts as one more level
        int depth = _stack.Count;
        return (Peek() == JsonContainerType.None) ? depth : depth + 1;
    }
}
// JSON path of the container currently being written. When the current
// position is itself inside a container, only the completed stack entries
// form the path; otherwise the current position is appended as well.
internal string ContainerPath
{
    get
    {
        if (_currentPosition.Type == JsonContainerType.None)
            return string.Empty;

        IEnumerable<JsonPosition> positions = (_currentPosition.InsideContainer())
            ? _stack
            : _stack.Concat(new[] { _currentPosition });

        return JsonPosition.BuildPath(positions);
    }
}
/// <summary>
/// Gets the state of the writer.
/// </summary>
public WriteState WriteState
{
    get
    {
        // Map the fine-grained internal State onto the coarser public WriteState.
        switch (_currentState)
        {
            case State.Error:
                return WriteState.Error;
            case State.Closed:
                return WriteState.Closed;
            case State.Object:
            case State.ObjectStart:
                return WriteState.Object;
            case State.Array:
            case State.ArrayStart:
                return WriteState.Array;
            case State.Constructor:
            case State.ConstructorStart:
                return WriteState.Constructor;
            case State.Property:
                return WriteState.Property;
            case State.Start:
                return WriteState.Start;
            default:
                // State.Bytes has no public mapping and falls through here.
                throw JsonWriterException.Create(this, "Invalid state: " + _currentState, null);
        }
    }
}
/// <summary>
/// Gets the path of the writer.
/// </summary>
public string Path
{
    get
    {
        if (_currentPosition.Type == JsonContainerType.None)
        {
            return string.Empty;
        }

        // completed stack entries followed by the in-progress position
        var positions = _stack.Concat(new[] { _currentPosition });
        return JsonPosition.BuildPath(positions);
    }
}
private DateFormatHandling _dateFormatHandling;        // backing field for DateFormatHandling
private DateTimeZoneHandling _dateTimeZoneHandling;    // backing field for DateTimeZoneHandling

/// <summary>
/// Indicates how JSON text output is formatted.
/// </summary>
public Formatting Formatting
{
    get { return _formatting; }
    set { _formatting = value; }
}

/// <summary>
/// Get or set how dates are written to JSON text.
/// </summary>
public DateFormatHandling DateFormatHandling
{
    get { return _dateFormatHandling; }
    set { _dateFormatHandling = value; }
}

/// <summary>
/// Get or set how <see cref="DateTime"/> time zones are handled when writing JSON.
/// </summary>
public DateTimeZoneHandling DateTimeZoneHandling
{
    get { return _dateTimeZoneHandling; }
    set { _dateTimeZoneHandling = value; }
}
/// <summary>
/// Creates an instance of the <c>JsonWriter</c> class.
/// </summary>
protected JsonWriter()
{
    _stack = new List<JsonPosition>(4);          // small initial capacity; typical nesting is shallow
    _currentState = State.Start;
    _formatting = Formatting.None;
    _dateTimeZoneHandling = DateTimeZoneHandling.RoundtripKind;
    CloseOutput = true;                          // by default the writer owns its output
}
// When directly inside an array or constructor, advance the element index;
// the first finished value moves the position from null to 0.
private void UpdateScopeWithFinishedValue()
{
    bool indexedContainer =
        _currentPosition.Type == JsonContainerType.Array
        || _currentPosition.Type == JsonContainerType.Constructor;

    if (indexedContainer)
    {
        _currentPosition.Position = (_currentPosition.Position ?? -1) + 1;
    }
}
// Opens a new container scope. The element index of the enclosing scope is
// advanced first, then the current position is either initialized (first
// container) or pushed onto the stack and replaced by a fresh one.
private void Push(JsonContainerType value)
{
    UpdateScopeWithFinishedValue();

    if (_currentPosition.Type == JsonContainerType.None)
    {
        // first container: reuse the (empty) current position
        _currentPosition.Type = value;
    }
    else
    {
        _stack.Add(_currentPosition);
        var state = new JsonPosition
        {
            Type = value
        };
        _currentPosition = state;
    }
}
// Closes the innermost container and returns its type; the previous stack
// entry (or a fresh empty position) becomes the current one.
private JsonContainerType Pop()
{
    JsonPosition oldPosition = _currentPosition;

    if (_stack.Count > 0)
    {
        int last = _stack.Count - 1;
        _currentPosition = _stack[last];
        _stack.RemoveAt(last);
    }
    else
    {
        _currentPosition = new JsonPosition();
    }

    return oldPosition.Type;
}
// Type of the innermost open container (None when nothing is open).
private JsonContainerType Peek()
{
    return _currentPosition.Type;
}

/// <summary>
/// Flushes whatever is in the buffer to the underlying streams and also flushes the underlying stream.
/// </summary>
public abstract void Flush();

/// <summary>
/// Closes this stream and the underlying stream.
/// </summary>
public virtual void Close()
{
    // close any still-open objects/arrays/constructors before shutting down
    AutoCompleteAll();
}
/// <summary>
/// Writes the beginning of a Json object.
/// </summary>
public virtual void WriteStartObject()
{
    AutoComplete(JsonToken.StartObject);
    Push(JsonContainerType.Object);
}

/// <summary>
/// Writes the end of a Json object.
/// </summary>
public virtual void WriteEndObject()
{
    AutoCompleteClose(JsonToken.EndObject);
}

/// <summary>
/// Writes the beginning of a Json array.
/// </summary>
public virtual void WriteStartArray()
{
    AutoComplete(JsonToken.StartArray);
    Push(JsonContainerType.Array);
}

/// <summary>
/// Writes the end of an array.
/// </summary>
public virtual void WriteEndArray()
{
    AutoCompleteClose(JsonToken.EndArray);
}

/// <summary>
/// Writes the start of a constructor with the given name.
/// </summary>
/// <param name="name">The name of the constructor.</param>
public virtual void WriteStartConstructor(string name)
{
    // NOTE: 'name' is not used here; presumably derived writers emit it — verify.
    AutoComplete(JsonToken.StartConstructor);
    Push(JsonContainerType.Constructor);
}

/// <summary>
/// Writes the end constructor.
/// </summary>
public virtual void WriteEndConstructor()
{
    AutoCompleteClose(JsonToken.EndConstructor);
}

/// <summary>
/// Writes the property name of a name/value pair on a Json object.
/// </summary>
/// <param name="name">The name of the property.</param>
public virtual void WritePropertyName(string name)
{
    // record the name first so Path/ContainerPath reflect it during AutoComplete
    _currentPosition.PropertyName = name;
    AutoComplete(JsonToken.PropertyName);
}

/// <summary>
/// Writes the end of the current Json object or array.
/// </summary>
public virtual void WriteEnd()
{
    WriteEnd(Peek());
}
/// <summary>
/// Writes the current <see cref="JsonReader"/> token.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> to read the token from.</param>
public void WriteToken(JsonReader reader)
{
    ValidationUtils.ArgumentNotNull(reader, "reader");

    // Choose the depth at which the copy loop stops:
    //  - unread reader: copy everything (-1),
    //  - positioned on a non-start token: copy just that token,
    //  - positioned on a start token: copy the whole container.
    int initialDepth;
    if (reader.TokenType == JsonToken.None)
        initialDepth = -1;
    else if (!IsStartToken(reader.TokenType))
        initialDepth = reader.Depth + 1;
    else
        initialDepth = reader.Depth;

    WriteToken(reader, initialDepth);
}
// Copies tokens from the reader into this writer until the reader returns
// to (or above) initialDepth. Each token type is re-emitted via the writer's
// own Write* methods so derived writers format the output.
internal void WriteToken(JsonReader reader, int initialDepth)
{
    do
    {
        switch (reader.TokenType)
        {
            case JsonToken.None:
                // read to next
                break;
            case JsonToken.StartObject:
                WriteStartObject();
                break;
            case JsonToken.StartArray:
                WriteStartArray();
                break;
            case JsonToken.StartConstructor:
                string constructorName = reader.Value.ToString();
                // write a JValue date when the constructor is for a date
                if (string.Equals(constructorName, "Date", StringComparison.Ordinal))
                    WriteConstructorDate(reader);
                else
                    WriteStartConstructor(reader.Value.ToString());
                break;
            case JsonToken.PropertyName:
                WritePropertyName(reader.Value.ToString());
                break;
            case JsonToken.Comment:
                WriteComment(reader.Value.ToString());
                break;
            case JsonToken.Integer:
                WriteValue(Convert.ToInt64(reader.Value, CultureInfo.InvariantCulture));
                break;
            case JsonToken.Float:
                WriteValue(Convert.ToDouble(reader.Value, CultureInfo.InvariantCulture));
                break;
            case JsonToken.String:
                WriteValue(reader.Value.ToString());
                break;
            case JsonToken.Boolean:
                WriteValue(Convert.ToBoolean(reader.Value, CultureInfo.InvariantCulture));
                break;
            case JsonToken.Null:
                WriteNull();
                break;
            case JsonToken.Undefined:
                WriteUndefined();
                break;
            case JsonToken.EndObject:
                WriteEndObject();
                break;
            case JsonToken.EndArray:
                WriteEndArray();
                break;
            case JsonToken.EndConstructor:
                WriteEndConstructor();
                break;
            case JsonToken.Date:
                WriteValue((DateTime)reader.Value);
                break;
            case JsonToken.Raw:
                WriteRawValue((string)reader.Value);
                break;
            case JsonToken.Bytes:
                WriteValue((byte[])reader.Value);
                break;
            default:
                throw MiscellaneousUtils.CreateArgumentOutOfRangeException("TokenType", reader.TokenType, "Unexpected token type.");
        }
    }
    while (
        // stop if we have reached the end of the token being read
        initialDepth - 1 < reader.Depth - (IsEndToken(reader.TokenType) ? 1 : 0)
        && reader.Read());
}
// Consumes a "new Date(ticks)" constructor from the reader (Integer then
// EndConstructor) and writes it as a DateTime value instead of a constructor.
private void WriteConstructorDate(JsonReader reader)
{
    if (!reader.Read())
        throw JsonWriterException.Create(this, "Unexpected end when reading date constructor.", null);
    if (reader.TokenType != JsonToken.Integer)
        throw JsonWriterException.Create(this, "Unexpected token when reading date constructor. Expected Integer, got " + reader.TokenType, null);

    // JavaScript ticks: milliseconds since the Unix epoch
    long ticks = (long)reader.Value;
    DateTime date = JsonConvert.ConvertJavaScriptTicksToDateTime(ticks);

    if (!reader.Read())
        throw JsonWriterException.Create(this, "Unexpected end when reading date constructor.", null);
    if (reader.TokenType != JsonToken.EndConstructor)
        throw JsonWriterException.Create(this, "Unexpected token when reading date constructor. Expected EndConstructor, got " + reader.TokenType, null);

    WriteValue(date);
}
// True for tokens that close a container (object, array or constructor).
private bool IsEndToken(JsonToken token)
{
    return token == JsonToken.EndObject
        || token == JsonToken.EndArray
        || token == JsonToken.EndConstructor;
}
// True for tokens that open a container (object, array or constructor).
private bool IsStartToken(JsonToken token)
{
    return token == JsonToken.StartObject
        || token == JsonToken.StartArray
        || token == JsonToken.StartConstructor;
}
private void WriteEnd(JsonContainerType type)
{
switch (type)
{
case JsonContainerType.Object:
WriteEndObject();
break;
case JsonContainerType.Array:
WriteEndArray();
break;
case JsonContainerType.Constructor:
WriteEndConstructor();
break;
default:
throw JsonWriterException.Create(this, "Unexpected type when writing end: " + type, null);
}
}
private void AutoCompleteAll()
{
while (Top > 0)
{
WriteEnd();
}
}
private JsonContainerType GetTypeForCloseToken(JsonToken token)
{
switch (token)
{
case JsonToken.EndObject:
return JsonContainerType.Object;
case JsonToken.EndArray:
return JsonContainerType.Array;
case JsonToken.EndConstructor:
return JsonContainerType.Constructor;
default:
throw JsonWriterException.Create(this, "No type for token: " + token, null);
}
}
/// <summary>
/// Maps a container type to the end token that closes it; throws for
/// types that have no closing token.
/// </summary>
private JsonToken GetCloseTokenForType(JsonContainerType type)
{
    if (type == JsonContainerType.Object)
        return JsonToken.EndObject;
    if (type == JsonContainerType.Array)
        return JsonToken.EndArray;
    if (type == JsonContainerType.Constructor)
        return JsonToken.EndConstructor;

    throw JsonWriterException.Create(this, "No close token for type: " + type, null);
}
/// <summary>
/// Pops and closes enough open scopes to satisfy the end token being
/// written, emitting a closing symbol for each level and then restoring
/// the writer state for whichever scope becomes current.
/// </summary>
private void AutoCompleteClose(JsonToken tokenBeingClosed)
{
    // write closing symbol and calculate new state
    int levelsToComplete = 0;
    JsonContainerType type = GetTypeForCloseToken(tokenBeingClosed);

    if (_currentPosition.Type == type)
    {
        // The current scope is exactly the one being closed.
        levelsToComplete = 1;
    }
    else
    {
        // Otherwise locate a scope of the requested type on the stack;
        // levelsToComplete then covers every level popped on the way out.
        int top = Top - 2;
        for (int i = top; i >= 0; i--)
        {
            int currentLevel = top - i;

            if (_stack[currentLevel].Type == type)
            {
                levelsToComplete = i + 2;
                break;
            }
        }
    }

    if (levelsToComplete == 0)
        throw JsonWriterException.Create(this, "No token to close.", null);

    for (int i = 0; i < levelsToComplete; i++)
    {
        JsonToken token = GetCloseTokenForType(Pop());

        if (_formatting == Formatting.Indented)
        {
            // Don't put the end token of an empty container on its own line.
            if (_currentState != State.ObjectStart && _currentState != State.ArrayStart)
                WriteIndent();
        }

        WriteEnd(token);

        // The scope that is now on top (if any) determines the new state.
        JsonContainerType currentLevelType = Peek();

        switch (currentLevelType)
        {
            case JsonContainerType.Object:
                _currentState = State.Object;
                break;
            case JsonContainerType.Array:
                _currentState = State.Array;
                break;
            case JsonContainerType.Constructor:
                // Constructor arguments behave like array elements.
                _currentState = State.Array;
                break;
            case JsonContainerType.None:
                _currentState = State.Start;
                break;
            default:
                throw JsonWriterException.Create(this, "Unknown JsonType: " + currentLevelType, null);
        }
    }
}
/// <summary>
/// Writes the specified end token.
/// </summary>
/// <param name="token">The end token to write.</param>
/// <remarks>No-op in this base class; writers that produce output override it.</remarks>
protected virtual void WriteEnd(JsonToken token)
{
}
/// <summary>
/// Writes indent characters.
/// </summary>
/// <remarks>No-op in this base class; writers that produce output override it.</remarks>
protected virtual void WriteIndent()
{
}
/// <summary>
/// Writes the JSON value delimiter.
/// </summary>
/// <remarks>No-op in this base class; writers that produce output override it.</remarks>
protected virtual void WriteValueDelimiter()
{
}
/// <summary>
/// Writes an indent space.
/// </summary>
/// <remarks>No-op in this base class; writers that produce output override it.</remarks>
protected virtual void WriteIndentSpace()
{
}
/// <summary>
/// Advances the writer's state machine for the token about to be written,
/// emitting any delimiter or indentation the surrounding container
/// requires, and throws if the token is invalid in the current state.
/// </summary>
internal void AutoComplete(JsonToken tokenBeingWritten)
{
    // Start tokens open a new scope; they do not complete a value in the
    // current scope, so the position is only updated for non-start tokens.
    if (tokenBeingWritten != JsonToken.StartObject
        && tokenBeingWritten != JsonToken.StartArray
        && tokenBeingWritten != JsonToken.StartConstructor)
        UpdateScopeWithFinishedValue();

    // gets new state based on the current state and what is being written
    State newState = StateArray[(int)tokenBeingWritten][(int)_currentState];

    if (newState == State.Error)
        throw JsonWriterException.Create(this, "Token {0} in state {1} would result in an invalid JSON object.".FormatWith(CultureInfo.InvariantCulture, tokenBeingWritten.ToString(), _currentState.ToString()), null);

    // A value following another value/property inside a container needs a
    // delimiter first; comments are written without one.
    if ((_currentState == State.Object || _currentState == State.Array || _currentState == State.Constructor) && tokenBeingWritten != JsonToken.Comment)
    {
        WriteValueDelimiter();
    }
    else if (_currentState == State.Property)
    {
        if (_formatting == Formatting.Indented)
            WriteIndentSpace();
    }

    if (_formatting == Formatting.Indented)
    {
        WriteState writeState = WriteState;

        // don't indent a property when it is the first token to be written (i.e. at the start)
        if ((tokenBeingWritten == JsonToken.PropertyName && writeState != WriteState.Start) ||
            writeState == WriteState.Array || writeState == WriteState.Constructor)
        {
            WriteIndent();
        }
    }

    _currentState = newState;
}
#region WriteValue methods
/// <summary>
/// Writes a null value.
/// </summary>
public virtual void WriteNull()
{
AutoComplete(JsonToken.Null);
}
/// <summary>
/// Writes an undefined value.
/// </summary>
public virtual void WriteUndefined()
{
AutoComplete(JsonToken.Undefined);
}
/// <summary>
/// Writes raw JSON without changing the writer's state.
/// </summary>
/// <param name="json">The raw JSON to write.</param>
public virtual void WriteRaw(string json)
{
}
/// <summary>
/// Writes raw JSON where a value is expected and updates the writer's state.
/// </summary>
/// <param name="json">The raw JSON to write.</param>
public virtual void WriteRawValue(string json)
{
// hack. want writer to change state as if a value had been written
AutoComplete(JsonToken.Undefined);
WriteRaw(json);
}
/// <summary>
/// Writes a <see cref="String"/> value.
/// </summary>
/// <param name="value">The <see cref="String"/> value to write.</param>
public virtual void WriteValue(string value)
{
AutoComplete(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Int32"/> value.
/// </summary>
/// <param name="value">The <see cref="Int32"/> value to write.</param>
public virtual void WriteValue(int value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="UInt32"/> value.
/// </summary>
/// <param name="value">The <see cref="UInt32"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(uint value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Int64"/> value.
/// </summary>
/// <param name="value">The <see cref="Int64"/> value to write.</param>
public virtual void WriteValue(long value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="UInt64"/> value.
/// </summary>
/// <param name="value">The <see cref="UInt64"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ulong value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Single"/> value.
/// </summary>
/// <param name="value">The <see cref="Single"/> value to write.</param>
public virtual void WriteValue(float value)
{
AutoComplete(JsonToken.Float);
}
/// <summary>
/// Writes a <see cref="Double"/> value.
/// </summary>
/// <param name="value">The <see cref="Double"/> value to write.</param>
public virtual void WriteValue(double value)
{
AutoComplete(JsonToken.Float);
}
/// <summary>
/// Writes a <see cref="Boolean"/> value.
/// </summary>
/// <param name="value">The <see cref="Boolean"/> value to write.</param>
public virtual void WriteValue(bool value)
{
AutoComplete(JsonToken.Boolean);
}
/// <summary>
/// Writes a <see cref="Int16"/> value.
/// </summary>
/// <param name="value">The <see cref="Int16"/> value to write.</param>
public virtual void WriteValue(short value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="UInt16"/> value.
/// </summary>
/// <param name="value">The <see cref="UInt16"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ushort value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Char"/> value.
/// </summary>
/// <param name="value">The <see cref="Char"/> value to write.</param>
public virtual void WriteValue(char value)
{
AutoComplete(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Byte"/> value.
/// </summary>
/// <param name="value">The <see cref="Byte"/> value to write.</param>
public virtual void WriteValue(byte value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="SByte"/> value.
/// </summary>
/// <param name="value">The <see cref="SByte"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(sbyte value)
{
AutoComplete(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Decimal"/> value.
/// </summary>
/// <param name="value">The <see cref="Decimal"/> value to write.</param>
public virtual void WriteValue(decimal value)
{
AutoComplete(JsonToken.Float);
}
/// <summary>
/// Writes a <see cref="DateTime"/> value.
/// </summary>
/// <param name="value">The <see cref="DateTime"/> value to write.</param>
public virtual void WriteValue(DateTime value)
{
AutoComplete(JsonToken.Date);
}
#if !PocketPC && !NET20
/// <summary>
/// Writes a <see cref="DateTimeOffset"/> value.
/// </summary>
/// <param name="value">The <see cref="DateTimeOffset"/> value to write.</param>
public virtual void WriteValue(DateTimeOffset value)
{
AutoComplete(JsonToken.Date);
}
#endif
/// <summary>
/// Writes a <see cref="Guid"/> value.
/// </summary>
/// <param name="value">The <see cref="Guid"/> value to write.</param>
public virtual void WriteValue(Guid value)
{
AutoComplete(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="TimeSpan"/> value.
/// </summary>
/// <param name="value">The <see cref="TimeSpan"/> value to write.</param>
public virtual void WriteValue(TimeSpan value)
{
AutoComplete(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Nullable{Int32}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Int32}"/> value to write.</param>
public virtual void WriteValue(int? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{UInt32}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{UInt32}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(uint? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Int64}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Int64}"/> value to write.</param>
public virtual void WriteValue(long? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{UInt64}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{UInt64}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ulong? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Single}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Single}"/> value to write.</param>
public virtual void WriteValue(float? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Double}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Double}"/> value to write.</param>
public virtual void WriteValue(double? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Boolean}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Boolean}"/> value to write.</param>
public virtual void WriteValue(bool? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Int16}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Int16}"/> value to write.</param>
public virtual void WriteValue(short? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{UInt16}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{UInt16}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ushort? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Char}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Char}"/> value to write.</param>
public virtual void WriteValue(char? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Byte}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Byte}"/> value to write.</param>
public virtual void WriteValue(byte? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{SByte}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{SByte}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(sbyte? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Decimal}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Decimal}"/> value to write.</param>
public virtual void WriteValue(decimal? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{DateTime}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{DateTime}"/> value to write.</param>
public virtual void WriteValue(DateTime? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
#if !PocketPC && !NET20
/// <summary>
/// Writes a <see cref="Nullable{DateTimeOffset}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{DateTimeOffset}"/> value to write.</param>
public virtual void WriteValue(DateTimeOffset? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
#endif
/// <summary>
/// Writes a <see cref="Nullable{Guid}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Guid}"/> value to write.</param>
public virtual void WriteValue(Guid? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{TimeSpan}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{TimeSpan}"/> value to write.</param>
public virtual void WriteValue(TimeSpan? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="T:Byte[]"/> value.
/// </summary>
/// <param name="value">The <see cref="T:Byte[]"/> value to write.</param>
public virtual void WriteValue(byte[] value)
{
if (value == null)
WriteNull();
else
AutoComplete(JsonToken.Bytes);
}
/// <summary>
/// Writes a <see cref="Uri"/> value.
/// </summary>
/// <param name="value">The <see cref="Uri"/> value to write.</param>
public virtual void WriteValue(Uri value)
{
if (value == null)
WriteNull();
else
AutoComplete(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Object"/> value.
/// An error will be raised if the value cannot be written as a single JSON token.
/// </summary>
/// <param name="value">The <see cref="Object"/> value to write.</param>
public virtual void WriteValue(object value)
{
    if (value == null)
    {
        WriteNull();
        return;
    }
    else if (ConvertUtils.IsConvertible(value))
    {
        // Primitive/IConvertible values: dispatch on the type code to the
        // strongly typed overload so the correct JSON token kind is emitted.
        IConvertible convertible = ConvertUtils.ToConvertible(value);

        switch (convertible.GetTypeCode())
        {
            case TypeCode.String:
                WriteValue(convertible.ToString(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Char:
                WriteValue(convertible.ToChar(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Boolean:
                WriteValue(convertible.ToBoolean(CultureInfo.InvariantCulture));
                return;
            case TypeCode.SByte:
                WriteValue(convertible.ToSByte(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Int16:
                WriteValue(convertible.ToInt16(CultureInfo.InvariantCulture));
                return;
            case TypeCode.UInt16:
                WriteValue(convertible.ToUInt16(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Int32:
                WriteValue(convertible.ToInt32(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Byte:
                WriteValue(convertible.ToByte(CultureInfo.InvariantCulture));
                return;
            case TypeCode.UInt32:
                WriteValue(convertible.ToUInt32(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Int64:
                WriteValue(convertible.ToInt64(CultureInfo.InvariantCulture));
                return;
            case TypeCode.UInt64:
                WriteValue(convertible.ToUInt64(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Single:
                WriteValue(convertible.ToSingle(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Double:
                WriteValue(convertible.ToDouble(CultureInfo.InvariantCulture));
                return;
            case TypeCode.DateTime:
                WriteValue(convertible.ToDateTime(CultureInfo.InvariantCulture));
                return;
            case TypeCode.Decimal:
                WriteValue(convertible.ToDecimal(CultureInfo.InvariantCulture));
                return;
#if !(NETFX_CORE || PORTABLE)
            case TypeCode.DBNull:
                // DBNull is written out as a JSON null.
                WriteNull();
                return;
#endif
        }
    }
#if !PocketPC && !NET20
    else if (value is DateTimeOffset)
    {
        WriteValue((DateTimeOffset)value);
        return;
    }
#endif
    else if (value is byte[])
    {
        WriteValue((byte[])value);
        return;
    }
    else if (value is Guid)
    {
        WriteValue((Guid)value);
        return;
    }
    else if (value is Uri)
    {
        WriteValue((Uri)value);
        return;
    }
    else if (value is TimeSpan)
    {
        WriteValue((TimeSpan)value);
        return;
    }

    // Anything else (arbitrary objects, collections) cannot be a single token.
    throw JsonWriterException.Create(this, "Unsupported type: {0}. Use the JsonSerializer class to get the object's JSON representation.".FormatWith(CultureInfo.InvariantCulture, value.GetType()), null);
}
#endregion
/// <summary>
/// Writes out a comment <code>/*...*/</code> containing the specified text.
/// </summary>
/// <param name="text">Text to place inside the comment.</param>
public virtual void WriteComment(string text)
{
AutoComplete(JsonToken.Comment);
}
/// <summary>
/// Writes out the given white space.
/// </summary>
/// <param name="ws">The string of white space characters.</param>
public virtual void WriteWhitespace(string ws)
{
    // A null string is allowed and simply ignored; any non-null string
    // must consist solely of white space characters.
    if (ws == null)
        return;

    if (!StringUtils.IsWhiteSpace(ws))
        throw JsonWriterException.Create(this, "Only white space characters should be used.", null);
}
/// <summary>
/// Releases the writer's resources via the standard dispose pattern.
/// </summary>
void IDisposable.Dispose()
{
    Dispose(true);
    // Suppress finalization after an explicit dispose (CA1816). Without
    // this, a derived writer that adds a finalizer would still be queued
    // for finalization even though cleanup has already run.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Closes the writer unless it has already been closed.
/// </summary>
/// <param name="disposing">True when called from an explicit Dispose;
/// unused here because Close() performs all cleanup.</param>
private void Dispose(bool disposing)
{
    if (_currentState != State.Closed)
        Close();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
using System.Text;
using System;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Globalization;
namespace System.Globalization
{
internal static class TimeSpanFormat
{
// Formats n in radix 10, left-padded with '0' to at least 'digits'
// characters (per the ParseNumbers.IntToString contract).
private static String IntToString(int n, int digits)
{
    return ParseNumbers.IntToString(n, 10, digits, '0', 0);
}
internal static readonly FormatLiterals PositiveInvariantFormatLiterals = TimeSpanFormat.FormatLiterals.InitInvariant(false /*isNegative*/);
internal static readonly FormatLiterals NegativeInvariantFormatLiterals = TimeSpanFormat.FormatLiterals.InitInvariant(true /*isNegative*/);
// Which standard layout FormatStandard produces:
//   Minimum - omits a zero day field and trims trailing fraction zeros
//   Full    - always emits the day and the full-width fraction
internal enum Pattern
{
    None = 0,
    Minimum = 1,
    Full = 2,
}
//
// Format
//
// Actions: Main method called from TimeSpan.ToString
//
internal static String Format(TimeSpan value, String format, IFormatProvider formatProvider)
{
    // A null/empty format defaults to the constant ("c") invariant format.
    if (format == null || format.Length == 0)
        format = "c";

    // standard formats
    if (format.Length == 1)
    {
        char f = format[0];

        // 'c', 't', 'T': culture-insensitive [-][d.]hh:mm:ss[.fffffff].
        if (f == 'c' || f == 't' || f == 'T')
            return FormatStandard(value, true, format, Pattern.Minimum);
        if (f == 'g' || f == 'G')
        {
            Pattern pattern;
            DateTimeFormatInfo dtfi = DateTimeFormatInfo.GetInstance(formatProvider);

            // The culture supplies distinct localized patterns for
            // negative and positive spans.
            if (value._ticks < 0)
                format = dtfi.FullTimeSpanNegativePattern;
            else
                format = dtfi.FullTimeSpanPositivePattern;
            if (f == 'g')
                pattern = Pattern.Minimum;
            else
                pattern = Pattern.Full;

            return FormatStandard(value, false, format, pattern);
        }
        throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
    }

    // Multi-character strings are treated as custom format patterns.
    return FormatCustomized(value, format, DateTimeFormatInfo.GetInstance(formatProvider));
}
//
// FormatStandard
//
// Actions: Format the TimeSpan instance using the specified format.
//
// Formats 'value' using one of the standard layouts (see Pattern).
// 'isInvariant' selects the fixed invariant separators; otherwise the
// separators/field widths are parsed from 'format' via FormatLiterals.Init.
private static String FormatStandard(TimeSpan value, bool isInvariant, String format, Pattern pattern)
{
    StringBuilder sb = StringBuilderCache.Acquire();
    int day = (int)(value._ticks / TimeSpan.TicksPerDay);
    long time = value._ticks % TimeSpan.TicksPerDay;

    // Negative spans render as a sign literal plus positive components.
    if (value._ticks < 0)
    {
        day = -day;
        time = -time;
    }
    int hours = (int)(time / TimeSpan.TicksPerHour % 24);
    int minutes = (int)(time / TimeSpan.TicksPerMinute % 60);
    int seconds = (int)(time / TimeSpan.TicksPerSecond % 60);
    int fraction = (int)(time % TimeSpan.TicksPerSecond);

    FormatLiterals literal;
    if (isInvariant)
    {
        if (value._ticks < 0)
            literal = NegativeInvariantFormatLiterals;
        else
            literal = PositiveInvariantFormatLiterals;
    }
    else
    {
        literal = new FormatLiterals();
        literal.Init(format, pattern == Pattern.Full);
    }
    if (fraction != 0)
    { // truncate the partial second to the specified length
        fraction = (int)((long)fraction / (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - literal.ff));
    }

    // Pattern.Full: [-]dd.hh:mm:ss.fffffff
    // Pattern.Minimum: [-][d.]hh:mm:ss[.fffffff]
    sb.Append(literal.Start);                       // [-]
    if (pattern == Pattern.Full || day != 0)
    {                                               //
        sb.Append(day);                             // [dd]
        sb.Append(literal.DayHourSep);              // [.]
    }                                               //
    sb.Append(IntToString(hours, literal.hh));      // hh
    sb.Append(literal.HourMinuteSep);               // :
    sb.Append(IntToString(minutes, literal.mm));    // mm
    sb.Append(literal.MinuteSecondSep);             // :
    sb.Append(IntToString(seconds, literal.ss));    // ss
    if (!isInvariant && pattern == Pattern.Minimum)
    {
        // Localized minimum pattern: drop trailing zero digits so the
        // shortest fraction representation is printed.
        int effectiveDigits = literal.ff;
        while (effectiveDigits > 0)
        {
            if (fraction % 10 == 0)
            {
                fraction = fraction / 10;
                effectiveDigits--;
            }
            else
            {
                break;
            }
        }
        if (effectiveDigits > 0)
        {
            sb.Append(literal.SecondFractionSep);           // [.FFFFFFF]
            sb.Append((fraction).ToString(DateTimeFormat.fixedNumberFormats[effectiveDigits - 1], CultureInfo.InvariantCulture));
        }
    }
    else if (pattern == Pattern.Full || fraction != 0)
    {
        sb.Append(literal.SecondFractionSep);       // [.]
        sb.Append(IntToString(fraction, literal.ff)); // [fffffff]
    }                                               //
    sb.Append(literal.End);                         //

    return StringBuilderCache.GetStringAndRelease(sb);
}
//
// FormatCustomized
//
// Actions: Format the TimeSpan instance using the specified format.
//
// Formats 'value' using a custom TimeSpan format string: the loop walks
// the pattern one token at a time ('d', 'h', 'm', 's', 'f', 'F', quoted
// literals, '%' and '\' escapes) and appends each formatted piece.
internal static String FormatCustomized(TimeSpan value, String format, DateTimeFormatInfo dtfi)
{
    Debug.Assert(dtfi != null, "dtfi == null");

    int day = (int)(value._ticks / TimeSpan.TicksPerDay);
    long time = value._ticks % TimeSpan.TicksPerDay;

    // Components are always formatted as non-negative numbers.
    if (value._ticks < 0)
    {
        day = -day;
        time = -time;
    }
    int hours = (int)(time / TimeSpan.TicksPerHour % 24);
    int minutes = (int)(time / TimeSpan.TicksPerMinute % 60);
    int seconds = (int)(time / TimeSpan.TicksPerSecond % 60);
    int fraction = (int)(time % TimeSpan.TicksPerSecond);

    long tmp = 0;
    int i = 0;
    int tokenLen;
    StringBuilder result = StringBuilderCache.Acquire();

    while (i < format.Length)
    {
        char ch = format[i];
        int nextChar;
        switch (ch)
        {
            case 'h':
                tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                if (tokenLen > 2)
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                DateTimeFormat.FormatDigits(result, hours, tokenLen);
                break;
            case 'm':
                tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                if (tokenLen > 2)
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                DateTimeFormat.FormatDigits(result, minutes, tokenLen);
                break;
            case 's':
                tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                if (tokenLen > 2)
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                DateTimeFormat.FormatDigits(result, seconds, tokenLen);
                break;
            case 'f':
                //
                // The fraction of a second in single-digit precision. The remaining digits are truncated.
                //
                tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                if (tokenLen > DateTimeFormat.MaxSecondsFractionDigits)
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));

                tmp = (long)fraction;
                tmp /= (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - tokenLen);
                result.Append((tmp).ToString(DateTimeFormat.fixedNumberFormats[tokenLen - 1], CultureInfo.InvariantCulture));
                break;
            case 'F':
                //
                // Displays the most significant digit of the seconds fraction. Nothing is displayed if the digit is zero.
                //
                tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                if (tokenLen > DateTimeFormat.MaxSecondsFractionDigits)
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));

                tmp = (long)fraction;
                tmp /= (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - tokenLen);
                // Trim trailing zero digits; emit nothing if the whole
                // truncated fraction is zero.
                int effectiveDigits = tokenLen;
                while (effectiveDigits > 0)
                {
                    if (tmp % 10 == 0)
                    {
                        tmp = tmp / 10;
                        effectiveDigits--;
                    }
                    else
                    {
                        break;
                    }
                }
                if (effectiveDigits > 0)
                {
                    result.Append((tmp).ToString(DateTimeFormat.fixedNumberFormats[effectiveDigits - 1], CultureInfo.InvariantCulture));
                }
                break;
            case 'd':
                //
                // tokenLen == 1 : Day as digits with no leading zero.
                // tokenLen == 2+: Day as digits with leading zero for single-digit days.
                //
                tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                if (tokenLen > 8)
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                DateTimeFormat.FormatDigits(result, day, tokenLen, true);
                break;
            case '\'':
            case '\"':
                // Quoted literal text is copied to the output verbatim.
                tokenLen = DateTimeFormat.ParseQuoteString(format, i, result);
                break;
            case '%':
                // Optional format character.
                // For example, format string "%d" will print day
                // Most of the cases, "%" can be ignored.
                nextChar = DateTimeFormat.ParseNextChar(format, i);
                // nextChar will be -1 if we already reach the end of the format string.
                // Besides, we will not allow "%%" appear in the pattern.
                if (nextChar >= 0 && nextChar != (int)'%')
                {
                    result.Append(TimeSpanFormat.FormatCustomized(value, ((char)nextChar).ToString(), dtfi));
                    tokenLen = 2;
                }
                else
                {
                    //
                    // This means that '%' is at the end of the format string or
                    // "%%" appears in the format string.
                    //
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                }
                break;
            case '\\':
                // Escaped character. Can be used to insert character into the format string.
                // For example, "\d" will insert the character 'd' into the string.
                //
                nextChar = DateTimeFormat.ParseNextChar(format, i);
                if (nextChar >= 0)
                {
                    result.Append(((char)nextChar));
                    tokenLen = 2;
                }
                else
                {
                    //
                    // This means that '\' is at the end of the formatting string.
                    //
                    throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                }
                break;
            default:
                throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
        }
        i += tokenLen;
    }
    return StringBuilderCache.GetStringAndRelease(result);
}
internal struct FormatLiterals
{
internal String Start
{
get
{
return literals[0];
}
}
internal String DayHourSep
{
get
{
return literals[1];
}
}
internal String HourMinuteSep
{
get
{
return literals[2];
}
}
internal String MinuteSecondSep
{
get
{
return literals[3];
}
}
internal String SecondFractionSep
{
get
{
return literals[4];
}
}
internal String End
{
get
{
return literals[5];
}
}
internal String AppCompatLiteral;
internal int dd;
internal int hh;
internal int mm;
internal int ss;
internal int ff;
private String[] literals;
/* factory method for static invariant FormatLiterals */
internal static FormatLiterals InitInvariant(bool isNegative)
{
    FormatLiterals x = new FormatLiterals();

    // Separators for the invariant [-]dd.hh:mm:ss.fffffff layout.
    x.literals = new String[6]
    {
        isNegative ? "-" : String.Empty, // Start
        ".",                             // DayHourSep
        ":",                             // HourMinuteSep
        ":",                             // MinuteSecondSep
        ".",                             // SecondFractionSep
        String.Empty                     // End
    };
    x.AppCompatLiteral = ":."; // MinuteSecondSep+SecondFractionSep;

    // Invariant field widths: two digits everywhere, full-width fraction.
    x.dd = x.hh = x.mm = x.ss = 2;
    x.ff = DateTimeFormat.MaxSecondsFractionDigits;
    return x;
}
// For the "v1" TimeSpan localized patterns, the data is simply literal field separators with
// the constants guaranteed to include DHMSF ordered greatest to least significant.
// Once the data becomes more complex than this we will need to write a proper tokenizer for
// parsing and formatting
//
// Walks the localized pattern once, counting the repeat length of each
// field (dd/hh/mm/ss/ff) and capturing the literal text between fields
// into literals[0..5] (Start..End).
internal void Init(String format, bool useInvariantFieldLengths)
{
    literals = new String[6];
    for (int i = 0; i < literals.Length; i++)
        literals[i] = String.Empty;
    dd = 0;
    hh = 0;
    mm = 0;
    ss = 0;
    ff = 0;

    StringBuilder sb = StringBuilderCache.Acquire();
    bool inQuote = false;
    char quote = '\'';
    // 'field' tracks which literal slot the next quoted text belongs to;
    // it advances as each D/H/M/S/F field is encountered.
    int field = 0;

    for (int i = 0; i < format.Length; i++)
    {
        switch (format[i])
        {
            case '\'':
            case '\"':
                if (inQuote && (quote == format[i]))
                {
                    /* we were in a quote and found a matching exit quote, so we are outside a quote now */
                    Debug.Assert(field >= 0 && field <= 5, "field >= 0 && field <= 5");
                    if (field >= 0 && field <= 5)
                    {
                        literals[field] = sb.ToString();
                        sb.Length = 0;
                        inQuote = false;
                    }
                    else
                    {
                        return; // how did we get here?
                    }
                }
                else if (!inQuote)
                {
                    /* we are at the start of a new quote block */
                    quote = format[i];
                    inQuote = true;
                }
                else
                {
                    /* we were in a quote and saw the other type of quote character, so we are still in a quote */
                }
                break;
            case '%':
                Debug.Assert(false, "Unexpected special token '%', Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                goto default;
            case '\\':
                if (!inQuote)
                {
                    i++; /* skip next character that is escaped by this backslash or percent sign */
                    break;
                }
                goto default;
            case 'd':
                if (!inQuote)
                {
                    Debug.Assert((field == 0 && sb.Length == 0) || field == 1,
                        "field == 0 || field == 1, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                    field = 1; // DayHourSep
                    dd++;
                }
                break;
            case 'h':
                if (!inQuote)
                {
                    Debug.Assert((field == 1 && sb.Length == 0) || field == 2,
                        "field == 1 || field == 2, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                    field = 2; // HourMinuteSep
                    hh++;
                }
                break;
            case 'm':
                if (!inQuote)
                {
                    Debug.Assert((field == 2 && sb.Length == 0) || field == 3,
                        "field == 2 || field == 3, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                    field = 3; // MinuteSecondSep
                    mm++;
                }
                break;
            case 's':
                if (!inQuote)
                {
                    Debug.Assert((field == 3 && sb.Length == 0) || field == 4,
                        "field == 3 || field == 4, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                    field = 4; // SecondFractionSep
                    ss++;
                }
                break;
            case 'f':
            case 'F':
                if (!inQuote)
                {
                    Debug.Assert((field == 4 && sb.Length == 0) || field == 5,
                        "field == 4 || field == 5, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                    field = 5; // End
                    ff++;
                }
                break;
            default:
                sb.Append(format[i]);
                break;
        }
    }

    Debug.Assert(field == 5);

    AppCompatLiteral = MinuteSecondSep + SecondFractionSep;

    Debug.Assert(0 < dd && dd < 3, "0 < dd && dd < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
    Debug.Assert(0 < hh && hh < 3, "0 < hh && hh < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
    Debug.Assert(0 < mm && mm < 3, "0 < mm && mm < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
    Debug.Assert(0 < ss && ss < 3, "0 < ss && ss < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
    Debug.Assert(0 < ff && ff < 8, "0 < ff && ff < 8, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");

    if (useInvariantFieldLengths)
    {
        dd = 2;
        hh = 2;
        mm = 2;
        ss = 2;
        ff = DateTimeFormat.MaxSecondsFractionDigits;
    }
    else
    {
        if (dd < 1 || dd > 2) dd = 2; // The DTFI property has a problem. let's try to make the best of the situation.
        if (hh < 1 || hh > 2) hh = 2;
        if (mm < 1 || mm > 2) mm = 2;
        if (ss < 1 || ss > 2) ss = 2;
        if (ff < 1 || ff > 7) ff = 7;
    }
    StringBuilderCache.Release(sb);
}
} //end of struct FormatLiterals
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Amazon.AWSSupport;
using Amazon.AWSSupport.Model;
using Amazon;
using System.IO;
using System.Text;
using CommonTests.Framework;
using NUnit.Framework;
namespace CommonTests.IntegrationTests
{
[TestFixture]
public class AWSSupportTests : TestBase<AmazonAWSSupportClient>
{
private static String
SUBJECT = ".NET SDK Test Case " + DateTime.UtcNow.Ticks,
CATEGORY_CODE = "apis",
SERVICE_CODE = "amazon-dynamodb",
COMMUNICATION_BODY = "This is a test case generated by the .NET SDK integration test suite",
LANGUAGE = "ja",
SEVERITY_CODE = "low",
ATTACHMENT_CONTENTS = "This is test data";
// Run these tests against us-east-1 regardless of the suite's default region.
protected override RegionEndpoint AlternateEndpoint
{
    get
    {
        return RegionEndpoint.USEast1;
    }
}
// NUnit fixture teardown: BaseClean is provided by TestBase; presumably
// it releases the shared client/test resources — confirm against TestBase.
[OneTimeTearDown]
public void ClassCleanup()
{
    BaseClean();
}
// Tests are disabled because not all accounts are subscribed to AWS Support
//[Test]
public void TestCaseOperations()
{
    string caseId = null;
    try
    {
        // Create a support case and verify it shows up in DescribeCases,
        // both unfiltered and filtered by its case id.
        caseId = Client.CreateCaseAsync(new CreateCaseRequest
        {
            Subject = SUBJECT,
            CategoryCode = CATEGORY_CODE,
            ServiceCode = SERVICE_CODE,
            Language = LANGUAGE,
            SeverityCode = SEVERITY_CODE,
            CommunicationBody = COMMUNICATION_BODY
        }).Result.CaseId;
        Assert.IsNotNull(caseId);

        var cases = Client.DescribeCasesAsync(new DescribeCasesRequest { Language = LANGUAGE }).Result.Cases;
        Assert.IsTrue(cases.Count > 0);

        cases = Client.DescribeCasesAsync(new DescribeCasesRequest { Language = LANGUAGE, CaseIdList = new List<string> { caseId } }).Result.Cases;
        Assert.AreEqual(1, cases.Count);
        Assert.AreEqual(caseId, cases[0].CaseId);
        Assert.AreEqual(CATEGORY_CODE, cases[0].CategoryCode);
        Assert.AreEqual(LANGUAGE, cases[0].Language);
        Assert.AreEqual(SERVICE_CODE, cases[0].ServiceCode);
        Assert.AreEqual(SEVERITY_CODE, cases[0].SeverityCode);
        Assert.IsTrue(cases[0].RecentCommunications.Communications.Count > 0);

        // Upload an attachment set, add a communication referencing it,
        // then verify the communication and the attachment round-trip.
        var attachmentData = new MemoryStream(Encoding.UTF8.GetBytes(ATTACHMENT_CONTENTS));
        var filename = "file1.txt";
        var attachmentSetId = Client.AddAttachmentsToSetAsync(new AddAttachmentsToSetRequest
        {
            Attachments = new List<Attachment>
            {
                new Attachment
                {
                    FileName = filename,
                    Data = attachmentData
                }
            }
        }).Result.AttachmentSetId;

        var result = Client.AddCommunicationToCaseAsync(new AddCommunicationToCaseRequest
        {
            CaseId = caseId,
            CcEmailAddresses = new List<string> { "aws-dr-tools-test@amazon.com" },
            CommunicationBody = COMMUNICATION_BODY,
            AttachmentSetId = attachmentSetId
        }).Result;
        Assert.IsNotNull(result);

        var comms = Client.DescribeCommunicationsAsync(new DescribeCommunicationsRequest { CaseId = caseId }).Result.Communications;
        Assert.IsTrue(comms.Count > 0);
        Assert.AreEqual(caseId, comms[0].CaseId);
        Assert.AreEqual(COMMUNICATION_BODY.Trim(), comms[0].Body.Trim());
        Assert.IsNotNull(comms[0].SubmittedBy);
        Assert.IsNotNull(comms[0].TimeCreated);

        string attachmentId = null;
        attachmentId = GetAttachmentId(comms, attachmentId);
        Assert.IsNotNull(attachmentId);
        VerifyAttachment(attachmentData, filename, attachmentId);

        // The same attachment must also be reachable through the case's
        // recent communications when IncludeCommunications is requested.
        cases = Client.DescribeCasesAsync(new DescribeCasesRequest { Language = LANGUAGE, CaseIdList = new List<string> { caseId }, IncludeCommunications = true }).Result.Cases;
        Assert.AreEqual(1, cases.Count);
        var communications = cases[0].RecentCommunications;
        attachmentId = GetAttachmentId(communications.Communications, attachmentId);
        VerifyAttachment(attachmentData, filename, attachmentId);
    }
    finally
    {
        // Always resolve the case so the test account is left clean.
        if (caseId != null)
        {
            Client.ResolveCaseAsync(new ResolveCaseRequest { CaseId = caseId }).Wait();
        }
    }
}
private void VerifyAttachment(MemoryStream attachmentData, string filename, string attachmentId)
{
var attachment = Client.DescribeAttachmentAsync(new DescribeAttachmentRequest
{
AttachmentId = attachmentId
}).Result.Attachment;
Assert.IsNotNull(attachment);
Assert.AreEqual(
Encoding.UTF8.GetString(attachmentData.ToArray()),
Encoding.UTF8.GetString(attachment.Data.ToArray()));
Assert.AreEqual(filename, attachment.FileName);
}
private static string GetAttachmentId(List<Communication> comms, string attachmentId)
{
foreach (var comm in comms)
{
var attachmentSet = comm.AttachmentSet;
if (attachmentSet != null && attachmentSet.Count > 0)
{
foreach (var att in attachmentSet)
{
if (!string.IsNullOrEmpty(att.AttachmentId))
attachmentId = att.AttachmentId;
}
}
}
return attachmentId;
}
// Test are disabled because not all acounts are subscribed to AWS Support
//[Test]
public void TestDescribeServices()
{
var services = Client.DescribeServicesAsync().Result.Services;
Assert.IsTrue(services.Count > 0);
Assert.IsNotNull(services[0].Code);
Assert.IsNotNull(services[0].Name);
Assert.IsTrue(services[0].Categories.Count > 0);
Assert.IsNotNull(services[0].Categories[0].Code);
Assert.IsNotNull(services[0].Categories[0].Name);
services = Client.DescribeServicesAsync(new DescribeServicesRequest { ServiceCodeList = new List<string> { SERVICE_CODE } }).Result.Services;
Assert.AreEqual(1, services.Count);
Assert.IsNotNull(services[0].Name);
Assert.AreEqual(SERVICE_CODE, services[0].Code);
}
// Test are disabled because not all acounts are subscribed to AWS Support
//[Test]
public void TestSeverityLevels()
{
var levels = Client.DescribeSeverityLevelsAsync().Result.SeverityLevels;
Assert.IsTrue(levels.Count > 0);
Assert.IsNotNull(levels[0].Name);
Assert.IsNotNull(levels[0].Code);
}
// Test are disabled because not all acounts are subscribed to AWS Support
//[Test]
public void TestTrustedAdvisorChecks()
{
var checks = Client.DescribeTrustedAdvisorChecksAsync(new DescribeTrustedAdvisorChecksRequest { Language = LANGUAGE }).Result.Checks;
Assert.IsTrue(checks.Count > 0);
var checkId = checks[0].Id;
Assert.IsNotNull(checks[0].Name);
Assert.IsNotNull(checks[0].Category);
Assert.IsNotNull(checks[0].Description);
Assert.IsTrue(checks[0].Metadata.Count > 0);
Assert.IsNotNull(checks[0].Metadata[0]);
var statuses = Client.DescribeTrustedAdvisorCheckRefreshStatusesAsync(new DescribeTrustedAdvisorCheckRefreshStatusesRequest { CheckIds = new List<string> { checkId } }).Result.Statuses;
Assert.AreEqual(1, statuses.Count);
Assert.AreEqual(checkId, statuses[0].CheckId);
Assert.IsNotNull(statuses[0].Status);
Assert.IsNotNull(statuses[0].MillisUntilNextRefreshable);
var status = Client.RefreshTrustedAdvisorCheckAsync(new RefreshTrustedAdvisorCheckRequest { CheckId = checkId }).Result.Status;
Assert.IsNotNull(status);
var summaries = Client.DescribeTrustedAdvisorCheckSummariesAsync(new DescribeTrustedAdvisorCheckSummariesRequest { CheckIds = new List<string> { checkId } })
.Result.Summaries;
Assert.AreEqual(1, summaries.Count);
Assert.AreEqual(checkId, summaries[0].CheckId);
Assert.IsNotNull(summaries[0].Status);
Assert.IsNotNull(summaries[0].Timestamp);
Assert.IsNotNull(summaries[0].ResourcesSummary);
Assert.IsNotNull(summaries[0].CategorySpecificSummary);
var resultresult = Client.DescribeTrustedAdvisorCheckResultAsync(new DescribeTrustedAdvisorCheckResultRequest { CheckId = checkId }).Result.Result;
Assert.IsNotNull(resultresult.Timestamp);
Assert.IsNotNull(resultresult.Status);
Assert.IsNotNull(resultresult.ResourcesSummary);
}
}
}
| |
using Analysis.Section;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using WosadSteel = Steel;
namespace Wosad.WebApi.Controllers
{
/// <summary>
/// Web API surface for the Steel.AISC10.Connection.AffectedElements
/// calculation nodes (Dynamo wrapper). Every action simply forwards its
/// arguments to the corresponding calculation and returns its result map.
/// </summary>
[RoutePrefix("api")]
public class SteelAISC10ConnectionAffectedElementsController : ApiController
{
    ///<summary> Computes the block shear strength of a connected element </summary>
    ///<param name="A_gv"> Gross area subject to shear </param>
    ///<param name="A_nv"> Net area subject to shear </param>
    ///<param name="A_nt"> Net area subject to tension </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="F_u"> Specified minimum tensile strength </param>
    ///<param name="StressDistibutionType"> Type of stress distribution in the connected element </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/BlockShearStrength")]
    public Dictionary<string, object> BlockShearStrength(Double A_gv, Double A_nv, Double A_nt, Double F_y, Double F_u, String StressDistibutionType)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.BlockShearStrength(A_gv, A_nv, A_nt, F_y, F_u, StressDistibutionType);
        return results;
    }

    ///<summary> Computes the strength of a bolt group for the bearing-on-base-material limit state </summary>
    ///<param name="N_BoltRowParallel"> Number of bolt rows parallel to the direction of load </param>
    ///<param name="N_BoltRowPerpendicular"> Number of bolt columns perpendicular to the direction of load </param>
    ///<param name="phiR_nFirstRow"> Bolt bearing strength for the first row of bolts </param>
    ///<param name="phiR_nInnerRow"> Bolt bearing strength for an inner row of bolts </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/BoltGroupBearingStrength")]
    public Dictionary<string, object> BoltGroupBearingStrength(Double N_BoltRowParallel, Double N_BoltRowPerpendicular, Double phiR_nFirstRow, Double phiR_nInnerRow)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.BoltGroupBearingStrength(N_BoltRowParallel, N_BoltRowPerpendicular, phiR_nFirstRow, phiR_nInnerRow);
        return results;
    }

    ///<summary> Computes net and gross shear and tension areas for block shear, shear yielding and shear rupture checks </summary>
    ///<param name="ShearAreaCaseId"> Case selection for shear area calculations in affected connection elements (block shear, shear yielding, shear rupture). Values are: StraightLine, TBlock, UBlock, Lblock </param>
    ///<param name="N_BoltRowParallel"> Number of bolt rows parallel to the direction of load (for example, number of rows when the load is vertical) </param>
    ///<param name="N_BoltRowPerpendicular"> Number of bolt columns perpendicular to the direction of load (for example, number of columns when the load is vertical) </param>
    ///<param name="p_parallel"> Bolt spacing in the direction of load </param>
    ///<param name="p_perpendicular"> Bolt spacing perpendicular to the direction of load </param>
    ///<param name="d_hole"> Bolt hole diameter </param>
    ///<param name="t_p"> Thickness of plate </param>
    ///<param name="l_edgeParallel"> Edge distance measured parallel to the direction of load (for example, vertical edge distance when the load is vertical) </param>
    ///<param name="l_edgePerpendicular"> Edge distance measured perpendicular to the direction of load (for example, horizontal edge distance when the load is vertical) </param>
    ///<returns>
    /// name="A_gv" Gross area subject to shear
    /// name="A_nv" Net area subject to shear
    /// name="A_nt" Net area subject to tension
    ///</returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/BoltGroupShearAndTensionAreas")]
    public Dictionary<string, object> BoltGroupShearAndTensionAreas(String ShearAreaCaseId, Double N_BoltRowParallel, Double N_BoltRowPerpendicular, Double p_parallel, Double p_perpendicular, Double d_hole, Double t_p, Double l_edgeParallel, Double l_edgePerpendicular)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.BoltGroupShearAndTensionAreas(ShearAreaCaseId, N_BoltRowParallel, N_BoltRowPerpendicular, p_parallel, p_perpendicular, d_hole, t_p, l_edgeParallel, l_edgePerpendicular);
        return results;
    }

    ///<summary> Computes the strength of a connected element in flexure </summary>
    ///<param name="Shape"> Cross section shape </param>
    ///<param name="L_b"> Length between points that are either braced against lateral displacement of the compression flange or braced against twist of the cross section </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="F_u"> Specified minimum tensile strength </param>
    ///<param name="HasHolesInTensionFlange"> Identifies if the member has holes in the tension flange, for checking tension rupture of the flange per F13 </param>
    ///<param name="A_fg"> Gross area of tension flange </param>
    ///<param name="A_fn"> Net area of tension flange </param>
    ///<param name="IsCompactDoublySymmetricForFlexure"> Indicates whether the shape is compact for flexure and doubly symmetric </param>
    ///<param name="C_b"> Lateral-torsional buckling modification factor for nonuniform moment diagrams </param>
    ///<returns name="phiM_n"> Moment strength </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/ConnectedElementStrengthInFlexure")]
    public Dictionary<string, object> ConnectedElementStrengthInFlexure(CustomProfile Shape, Double L_b, Double F_y, Double F_u, Boolean HasHolesInTensionFlange, Double A_fg = 0, Double A_fn = 0, Boolean IsCompactDoublySymmetricForFlexure = false, Double C_b = 1)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.ConnectedElementStrengthInFlexure(Shape, L_b, F_y, F_u, HasHolesInTensionFlange, A_fg, A_fn, IsCompactDoublySymmetricForFlexure, C_b);
        return results;
    }

    ///<summary> Computes the strength of a connected element in shear </summary>
    ///<param name="A_gv"> Gross area subject to shear </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="F_u"> Specified minimum tensile strength </param>
    ///<param name="A_nv"> Net area subject to shear </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/ConnectedElementStrengthInShear")]
    public Dictionary<string, object> ConnectedElementStrengthInShear(Double A_gv, Double F_y, Double F_u, Double A_nv)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.ConnectedElementStrengthInShear(A_gv, F_y, F_u, A_nv);
        return results;
    }

    ///<summary> Computes the strength of a connected element in tension </summary>
    ///<param name="A_g"> Gross cross-sectional area of member </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="F_u"> Specified minimum tensile strength </param>
    ///<param name="A_e"> Effective net area </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/ConnectedElementStrengthInTension")]
    public Dictionary<string, object> ConnectedElementStrengthInTension(Double A_g, Double F_y, Double F_u, Double A_e)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.ConnectedElementStrengthInTension(A_g, F_y, F_u, A_e);
        return results;
    }

    ///<summary> Computes the flexural strength of a coped section </summary>
    ///<param name="d"> Full nominal depth of the section </param>
    ///<param name="b_f"> Width of flange </param>
    ///<param name="t_f"> Thickness of flange </param>
    ///<param name="d_cope"> Depth of cope </param>
    ///<param name="c"> Length of cope </param>
    ///<param name="t_w"> Thickness of web </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="F_u"> Specified minimum tensile strength </param>
    ///<param name="BeamCopeCase"> Identifies the beam cope condition for stability calculations: single cope vs double cope </param>
    ///<returns name="phiM_n"> Moment strength </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/CopedSectionStrengthInFlexure")]
    public Dictionary<string, object> CopedSectionStrengthInFlexure(Double d, Double b_f, Double t_f, Double d_cope, Double c, Double t_w, Double F_y, Double F_u, String BeamCopeCase)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.CopedSectionStrengthInFlexure(d, b_f, t_f, d_cope, c, t_w, F_y, F_u, BeamCopeCase);
        return results;
    }

    ///<summary> Computes flange local bending under a concentrated force </summary>
    ///<param name="F_yf"> Specified minimum yield stress </param>
    ///<param name="t_f"> Thickness of flange </param>
    ///<param name="l_edge"> Edge distance </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/FlangeLocalBending")]
    public Dictionary<string, object> FlangeLocalBending(Double F_yf, Double t_f, Double l_edge)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.FlangeLocalBending(F_yf, t_f, l_edge);
        return results;
    }

    ///<summary> Determines gusset plate configuration compactness </summary>
    ///<param name="t_g"> Gusset plate thickness </param>
    ///<param name="c_Gusset"> Shortest distance between the closest bolt and the beam flange </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="E"> Modulus of elasticity of steel </param>
    ///<param name="l_1"> Gusset plate distance from beam to the nearest row of bolts </param>
    ///<returns name="IsGussetCompactConfiguration"> Distinguishes between compact and noncompact configuration for the gusset effective length factor </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/GussetPlateConfigurationCompactness")]
    public Dictionary<string, object> GussetPlateConfigurationCompactness(Double t_g, Double c_Gusset, Double F_y, Double E, Double l_1)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.GussetPlateConfigurationCompactness(t_g, c_Gusset, F_y, E, l_1);
        return results;
    }

    ///<summary> Computes the gusset plate effective compression length </summary>
    ///<param name="GussetPlateConfigurationId"> Type of gusset plate configuration for calculation of effective length </param>
    ///<param name="l_1"> Gusset plate distance from beam to the nearest row of bolts </param>
    ///<param name="l_2"> Gusset plate distance from column to the nearest row of bolts </param>
    ///<param name="IsGussetCompactConfiguration"> Indicates whether the gusset plate configuration is compact (per Design Guide 29 Appendix C) </param>
    ///<returns name="KL_gusset"> Effective length of gusset plate </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/GussetPlateEffectiveCompressionLength")]
    public Dictionary<string, object> GussetPlateEffectiveCompressionLength(String GussetPlateConfigurationId, Double l_1, Double l_2, Boolean IsGussetCompactConfiguration)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.GussetPlateEffectiveCompressionLength(GussetPlateConfigurationId, l_1, l_2, IsGussetCompactConfiguration);
        return results;
    }

    ///<summary> Computes the strength of a connected plate element in compression </summary>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="KL"> Effective length of element in compression </param>
    ///<param name="b"> Width of stiffened or unstiffened compression element </param>
    ///<param name="t"> Thickness of element plate or element wall </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/PlateStrengthInCompression")]
    public Dictionary<string, object> PlateStrengthInCompression(Double F_y, Double KL, Double b, Double t)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.PlateStrengthInCompression(F_y, KL, b, t);
        return results;
    }

    ///<summary> Computes web compression buckling under a concentrated force </summary>
    ///<param name="t_w"> Thickness of web </param>
    ///<param name="h_web"> Clear distance between flanges less the fillet or corner radius for rolled shapes </param>
    ///<param name="F_yw"> Specified minimum yield stress of the web material </param>
    ///<param name="E"> Modulus of elasticity of steel </param>
    ///<param name="d"> Full nominal depth of the section </param>
    ///<param name="l_edge"> Edge distance </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/WebCompressionBuckling")]
    public Dictionary<string, object> WebCompressionBuckling(Double t_w, Double h_web, Double F_yw, Double E, Double d, Double l_edge)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.WebCompressionBuckling(t_w, h_web, F_yw, E, d, l_edge);
        return results;
    }

    ///<summary> Computes web local crippling under a concentrated force </summary>
    ///<param name="t_w"> Thickness of web </param>
    ///<param name="t_f"> Thickness of flange </param>
    ///<param name="l_b"> Length of bearing </param>
    ///<param name="d"> Full nominal depth of the section </param>
    ///<param name="F_yw"> Specified minimum yield stress of the web material </param>
    ///<param name="l_edge"> Edge distance </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/WebLocalCrippling")]
    public Dictionary<string, object> WebLocalCrippling(Double t_w, Double t_f, Double l_b, Double d, Double F_yw, Double l_edge)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.WebLocalCrippling(t_w, t_f, l_b, d, F_yw, l_edge);
        return results;
    }

    ///<summary> Computes web local yielding under a concentrated force </summary>
    ///<param name="t_w"> Thickness of web </param>
    ///<param name="F_yw"> Specified minimum yield stress of the web material </param>
    ///<param name="k"> Distance from the outer face of the flange to the web toe of the fillet </param>
    ///<param name="l_b"> Length of bearing </param>
    ///<param name="d"> Full nominal depth of the section </param>
    ///<param name="l_edge"> Edge distance </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/WebLocalYielding")]
    public Dictionary<string, object> WebLocalYielding(Double t_w, Double F_yw, Double k, Double l_b, Double d, Double l_edge)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.WebLocalYielding(t_w, F_yw, k, l_b, d, l_edge);
        return results;
    }

    ///<summary> Computes web panel zone shear under a concentrated force </summary>
    ///<param name="t_w"> Thickness of web </param>
    ///<param name="t_cf"> Thickness of column flange </param>
    ///<param name="b_cf"> Width of column flange </param>
    ///<param name="d_b"> Nominal fastener diameter </param>
    ///<param name="d_c"> Depth of column </param>
    ///<param name="F_y"> Specified minimum yield stress </param>
    ///<param name="P_u"> Required axial strength </param>
    ///<param name="A_g"> Gross cross-sectional area of member </param>
    ///<param name="PanelDeformationConsideredInAnalysis"> Identifies whether the effect of panel-zone deformation on frame stability is considered in the analysis </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/WebPanelZoneShear")]
    public Dictionary<string, object> WebPanelZoneShear(Double t_w, Double t_cf, Double b_cf, Double d_b, Double d_c, Double F_y, Double P_u, Double A_g, Boolean PanelDeformationConsideredInAnalysis)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.WebPanelZoneShear(t_w, t_cf, b_cf, d_b, d_c, F_y, P_u, A_g, PanelDeformationConsideredInAnalysis);
        return results;
    }

    ///<summary> Computes web sidesway buckling under a concentrated force </summary>
    ///<param name="M_u"> Required flexural strength </param>
    ///<param name="M_y"> Moment at yielding of the extreme fiber </param>
    ///<param name="b_f"> Width of flange </param>
    ///<param name="t_f"> Thickness of flange </param>
    ///<param name="t_w"> Thickness of web </param>
    ///<param name="L_b_flange"> Largest laterally unbraced length along either flange at the point of load </param>
    ///<param name="h_web"> Clear distance between flanges less the fillet or corner radius for rolled shapes </param>
    ///<param name="CompressionFlangeRestrained"> Identifies whether the compression flange is restrained </param>
    ///<returns name="phiR_n"> Strength of member or connection </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/WebSideswayBuckling")]
    public Dictionary<string, object> WebSideswayBuckling(Double M_u, Double M_y, Double b_f, Double t_f, Double t_w, Double L_b_flange, Double h_web, Boolean CompressionFlangeRestrained)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.WebSideswayBuckling(M_u, M_y, b_f, t_f, t_w, L_b_flange, h_web, CompressionFlangeRestrained);
        return results;
    }

    ///<summary> Computes the width of the Whitmore section </summary>
    ///<param name="l"> Length of connection or weld </param>
    ///<param name="b_con"> Connection width </param>
    ///<returns name="b_Whitmore"> Whitmore section width </returns>
    [HttpGet]
    [Route("Steel/AISC_10/Connection/AffectedElements/WhitmoreSectionWidth")]
    public Dictionary<string, object> WhitmoreSectionWidth(Double l, Double b_con)
    {
        var results = WosadSteel.AISC_10.Connection.AffectedElements.WhitmoreSectionWidth(l, b_con);
        return results;
    }
}
}
| |
/*
'===============================================================================
' Generated From - CSharp_dOOdads_BusinessEntity.vbgen
'
' ** IMPORTANT **
' How to Generate your stored procedures:
'
' SQL = SQL_StoredProcs.vbgen
' ACCESS = Access_StoredProcs.vbgen
' ORACLE = Oracle_StoredProcs.vbgen
' FIREBIRD = FirebirdStoredProcs.vbgen
' POSTGRESQL = PostgreSQL_StoredProcs.vbgen
'
' The supporting base class SqlClientEntity is in the Architecture directory in "dOOdads".
'
' This object is 'abstract' which means you need to inherit from it to be able
' to instantiate it. This is very easily done. You can override properties and
' methods in your derived class; this allows you to regenerate this class at any
' time without worrying about overwriting custom code.
'
' NEVER EDIT THIS FILE.
'
' public class YourObject : _YourObject
' {
'
' }
'
'===============================================================================
*/
// Generated by MyGeneration Version # (1.3.0.3)
using System;
using System.Data;
using System.Data.SqlClient;
using System.Collections;
using System.Collections.Specialized;
using MyGeneration.dOOdads;
namespace nTier.Entity
{
public abstract class _Categories : SqlClientEntity
{
/// <summary>
/// Wires the entity to the Categories table for both query generation and
/// DataTable mapping.
/// </summary>
public _Categories()
{
    const string tableName = "Categories";
    QuerySource = tableName;
    MappingName = tableName;
}
//=================================================================
// public override void AddNew()
//=================================================================
// Starts a brand-new record; delegates entirely to the dOOdads base
// implementation.
//=================================================================
public override void AddNew()
{
    base.AddNew();
}
/// <summary>
/// Clears loaded data and drops the cached Where/Aggregate clause builders
/// so they are recreated lazily on next access.
/// </summary>
public override void FlushData()
{
    _aggregateClause = null;
    _whereClause = null;
    base.FlushData();
}
//=================================================================
// public bool LoadAll()
//=================================================================
// Loads every Categories row via the generated stored procedure and
// positions the entity on the first row returned.
//=================================================================
public bool LoadAll()
{
    // proc_CategoriesLoadAll takes no arguments, so the parameter list is null.
    ListDictionary noParameters = null;
    return base.LoadFromSql(string.Concat("[", this.SchemaStoredProcedure, "proc_CategoriesLoadAll]"), noParameters);
}
//=================================================================
// public virtual bool LoadByPrimaryKey(int CategoryID)
//=================================================================
// Loads the single row identified by the given primary key value.
//=================================================================
public virtual bool LoadByPrimaryKey(int CategoryID)
{
    ListDictionary keyParameters = new ListDictionary();
    keyParameters.Add(Parameters.CategoryID, CategoryID);
    return base.LoadFromSql(string.Concat("[", this.SchemaStoredProcedure, "proc_CategoriesLoadByPrimaryKey]"), keyParameters);
}
#region Parameters
/// <summary>
/// Factory properties that create SqlParameter instances matching the
/// Categories table schema. Each access returns a new instance, so a
/// parameter object is never shared between two commands.
/// </summary>
protected class Parameters
{
    /// <summary>@CategoryID, int (size 0: fixed-size type).</summary>
    public static SqlParameter CategoryID
    {
        get
        {
            return new SqlParameter("@CategoryID", SqlDbType.Int, 0);
        }
    }
    /// <summary>@CategoryName, nvarchar(15).</summary>
    public static SqlParameter CategoryName
    {
        get
        {
            return new SqlParameter("@CategoryName", SqlDbType.NVarChar, 15);
        }
    }
    /// <summary>@Description, ntext (size is the ntext maximum, 2^30 - 1).</summary>
    public static SqlParameter Description
    {
        get
        {
            return new SqlParameter("@Description", SqlDbType.NText, 1073741823);
        }
    }
    /// <summary>@Picture, image (size is the image maximum, 2^31 - 1).</summary>
    public static SqlParameter Picture
    {
        get
        {
            return new SqlParameter("@Picture", SqlDbType.Image, 2147483647);
        }
    }
}
#endregion
#region ColumnNames
/// <summary>
/// Physical column names of the Categories table, plus a lookup from
/// column name to entity property name.
/// </summary>
public class ColumnNames
{
    public const string CategoryID = "CategoryID";
    public const string CategoryName = "CategoryName";
    public const string Description = "Description";
    public const string Picture = "Picture";

    /// <summary>
    /// Maps a physical column name to its property name; returns null for
    /// unknown columns. The lookup table is built lazily on first use.
    /// </summary>
    public static string ToPropertyName(string columnName)
    {
        if(ht == null)
        {
            // Fill a local table first and publish it with a single
            // assignment, so a concurrent caller can never observe a
            // partially populated (or empty) map. The original assigned
            // the field before populating it, which made that race possible.
        	Hashtable map = new Hashtable();
            map[CategoryID] = _Categories.PropertyNames.CategoryID;
            map[CategoryName] = _Categories.PropertyNames.CategoryName;
            map[Description] = _Categories.PropertyNames.Description;
            map[Picture] = _Categories.PropertyNames.Picture;
            ht = map;
        }
        return (string)ht[columnName];
    }
    private static Hashtable ht = null;
}
#endregion
#region PropertyNames
/// <summary>
/// Property names of the entity, plus a lookup from property name to
/// physical column name. For this table the two name sets are identical.
/// </summary>
public class PropertyNames
{
    public const string CategoryID = "CategoryID";
    public const string CategoryName = "CategoryName";
    public const string Description = "Description";
    public const string Picture = "Picture";

    /// <summary>
    /// Maps a property name to its column name; returns null for unknown
    /// properties. The lookup table is built lazily on first use.
    /// </summary>
    public static string ToColumnName(string propertyName)
    {
        if(ht == null)
        {
            // Fill a local table and publish it with one assignment so a
            // concurrent reader can never see a partially populated map
            // (the original assigned the field before filling it).
            Hashtable map = new Hashtable();
            map[CategoryID] = _Categories.ColumnNames.CategoryID;
            map[CategoryName] = _Categories.ColumnNames.CategoryName;
            map[Description] = _Categories.ColumnNames.Description;
            map[Picture] = _Categories.ColumnNames.Picture;
            ht = map;
        }
        return (string)ht[propertyName];
    }
    private static Hashtable ht = null;
}
#endregion
#region StringPropertyNames
/// <summary>
/// Names of the string-typed wrapper properties (s_*). Picture has no
/// string counterpart because it is a binary (byte[]) column.
/// </summary>
public class StringPropertyNames
{
    public const string CategoryID = "s_CategoryID";
    public const string CategoryName = "s_CategoryName";
    public const string Description = "s_Description";
}
#endregion
#region Properties
// Typed accessors over the current row's columns. Reads and writes go
// through the dOOdads Get*/Set* helpers on the base class.

/// <summary>Primary key column (see LoadByPrimaryKey).</summary>
public virtual int? CategoryID
{
    get
    {
        return base.Getint(ColumnNames.CategoryID);
    }
    set
    {
        base.Setint(ColumnNames.CategoryID, value);
    }
}
/// <summary>Category name (nvarchar(15) — see Parameters.CategoryName).</summary>
public virtual string CategoryName
{
    get
    {
        return base.Getstring(ColumnNames.CategoryName);
    }
    set
    {
        base.Setstring(ColumnNames.CategoryName, value);
    }
}
/// <summary>Free-text description (ntext column).</summary>
public virtual string Description
{
    get
    {
        return base.Getstring(ColumnNames.Description);
    }
    set
    {
        base.Setstring(ColumnNames.Description, value);
    }
}
/// <summary>Raw bytes of the category picture (image column).</summary>
public virtual byte[] Picture
{
    get
    {
        return base.GetByteArray(ColumnNames.Picture);
    }
    set
    {
        base.SetByteArray(ColumnNames.Picture, value);
    }
}
#endregion
#region String Properties
// String-typed wrappers around the typed properties (presumably for UI
// binding — typical dOOdads usage). A NULL column reads as string.Empty,
// and assigning string.Empty sets the column back to NULL.

public virtual string s_CategoryID
{
    get
    {
        return this.IsColumnNull(ColumnNames.CategoryID) ? string.Empty : base.GetintAsString(ColumnNames.CategoryID);
    }
    set
    {
        if(string.Empty == value)
            this.SetColumnNull(ColumnNames.CategoryID);
        else
            this.CategoryID = base.SetintAsString(ColumnNames.CategoryID, value);
    }
}
public virtual string s_CategoryName
{
    get
    {
        return this.IsColumnNull(ColumnNames.CategoryName) ? string.Empty : base.GetstringAsString(ColumnNames.CategoryName);
    }
    set
    {
        if(string.Empty == value)
            this.SetColumnNull(ColumnNames.CategoryName);
        else
            this.CategoryName = base.SetstringAsString(ColumnNames.CategoryName, value);
    }
}
public virtual string s_Description
{
    get
    {
        return this.IsColumnNull(ColumnNames.Description) ? string.Empty : base.GetstringAsString(ColumnNames.Description);
    }
    set
    {
        if(string.Empty == value)
            this.SetColumnNull(ColumnNames.Description);
        else
            this.Description = base.SetstringAsString(ColumnNames.Description, value);
    }
}
#endregion
#region Where Clause
/// <summary>
/// Builder for the entity's WHERE criteria. Column properties lazily
/// create a WhereParameter and register it with the owning entity's query;
/// use TearOff to add the same column to the criteria more than once.
/// </summary>
public class WhereClause
{
    public WhereClause(BusinessEntity entity)
    {
        this._entity = entity;
    }
    // The tear-off object itself is cached, but each property read on it
    // creates and registers a brand-new WhereParameter.
    public TearOffWhereParameter TearOff
    {
        get
        {
            if(_tearOff == null)
            {
                _tearOff = new TearOffWhereParameter(this);
            }
            return _tearOff;
        }
    }
    #region WhereParameter TearOff's
    public class TearOffWhereParameter
    {
        public TearOffWhereParameter(WhereClause clause)
        {
            this._clause = clause;
        }
        // Each getter builds a fresh WhereParameter for its column and adds
        // it to the entity's query before handing it back.
        public WhereParameter CategoryID
        {
            get
            {
                WhereParameter where = new WhereParameter(ColumnNames.CategoryID, Parameters.CategoryID);
                this._clause._entity.Query.AddWhereParameter(where);
                return where;
            }
        }
        public WhereParameter CategoryName
        {
            get
            {
                WhereParameter where = new WhereParameter(ColumnNames.CategoryName, Parameters.CategoryName);
                this._clause._entity.Query.AddWhereParameter(where);
                return where;
            }
        }
        public WhereParameter Description
        {
            get
            {
                WhereParameter where = new WhereParameter(ColumnNames.Description, Parameters.Description);
                this._clause._entity.Query.AddWhereParameter(where);
                return where;
            }
        }
        public WhereParameter Picture
        {
            get
            {
                WhereParameter where = new WhereParameter(ColumnNames.Picture, Parameters.Picture);
                this._clause._entity.Query.AddWhereParameter(where);
                return where;
            }
        }
        private WhereClause _clause;
    }
    #endregion
    // Cached per-column parameters: first access creates and registers the
    // parameter via the tear-off; later accesses return the same instance
    // until WhereClauseReset() is called.
    public WhereParameter CategoryID
    {
        get
        {
            if(_CategoryID_W == null)
            {
                _CategoryID_W = TearOff.CategoryID;
            }
            return _CategoryID_W;
        }
    }
    public WhereParameter CategoryName
    {
        get
        {
            if(_CategoryName_W == null)
            {
                _CategoryName_W = TearOff.CategoryName;
            }
            return _CategoryName_W;
        }
    }
    public WhereParameter Description
    {
        get
        {
            if(_Description_W == null)
            {
                _Description_W = TearOff.Description;
            }
            return _Description_W;
        }
    }
    public WhereParameter Picture
    {
        get
        {
            if(_Picture_W == null)
            {
                _Picture_W = TearOff.Picture;
            }
            return _Picture_W;
        }
    }
    private WhereParameter _CategoryID_W = null;
    private WhereParameter _CategoryName_W = null;
    private WhereParameter _Description_W = null;
    private WhereParameter _Picture_W = null;
    /// <summary>
    /// Drops all cached parameters and flushes the query's WHERE parameters.
    /// </summary>
    public void WhereClauseReset()
    {
        _CategoryID_W = null;
        _CategoryName_W = null;
        _Description_W = null;
        _Picture_W = null;
        this._entity.Query.FlushWhereParameters();
    }
    private BusinessEntity _entity;
    private TearOffWhereParameter _tearOff;
}
/// <summary>
/// Entry point to the WHERE clause builder; created on first access and
/// cached until FlushData() resets it.
/// </summary>
public WhereClause Where
{
    get
    {
        if(_whereClause == null)
        {
            _whereClause = new WhereClause(this);
        }
        return _whereClause;
    }
}
private WhereClause _whereClause = null;
#endregion
#region Aggregate Clause
/// <summary>
/// Builds the aggregate (COUNT/SUM/AVG/...) portion of the entity's
/// dynamic query. The tear-off properties always create a brand-new
/// AggregateParameter and register it with the query; the outer
/// properties cache the first tear-off per column so repeated access
/// reuses the same instance until <see cref="AggregateClauseReset"/>.
/// </summary>
public class AggregateClause
{
    public AggregateClause(BusinessEntity entity)
    {
        _entity = entity;
    }

    /// <summary>Factory for fresh, query-registered aggregate parameters.</summary>
    public TearOffAggregateParameter TearOff
    {
        get { return _tearOff ?? (_tearOff = new TearOffAggregateParameter(this)); }
    }

    #region AggregateParameter TearOff's
    /// <summary>
    /// Every property access creates a new AggregateParameter for its
    /// column and adds it to the owning entity's query.
    /// </summary>
    public class TearOffAggregateParameter
    {
        public TearOffAggregateParameter(AggregateClause clause)
        {
            _clause = clause;
        }

        public AggregateParameter CategoryID
        {
            get
            {
                AggregateParameter parameter = new AggregateParameter(ColumnNames.CategoryID, Parameters.CategoryID);
                _clause._entity.Query.AddAggregateParameter(parameter);
                return parameter;
            }
        }

        public AggregateParameter CategoryName
        {
            get
            {
                AggregateParameter parameter = new AggregateParameter(ColumnNames.CategoryName, Parameters.CategoryName);
                _clause._entity.Query.AddAggregateParameter(parameter);
                return parameter;
            }
        }

        public AggregateParameter Description
        {
            get
            {
                AggregateParameter parameter = new AggregateParameter(ColumnNames.Description, Parameters.Description);
                _clause._entity.Query.AddAggregateParameter(parameter);
                return parameter;
            }
        }

        public AggregateParameter Picture
        {
            get
            {
                AggregateParameter parameter = new AggregateParameter(ColumnNames.Picture, Parameters.Picture);
                _clause._entity.Query.AddAggregateParameter(parameter);
                return parameter;
            }
        }

        private AggregateClause _clause;
    }
    #endregion

    // Cached per-column aggregate parameters, created on first access.
    public AggregateParameter CategoryID
    {
        get { return _categoryIdCache ?? (_categoryIdCache = TearOff.CategoryID); }
    }

    public AggregateParameter CategoryName
    {
        get { return _categoryNameCache ?? (_categoryNameCache = TearOff.CategoryName); }
    }

    public AggregateParameter Description
    {
        get { return _descriptionCache ?? (_descriptionCache = TearOff.Description); }
    }

    public AggregateParameter Picture
    {
        get { return _pictureCache ?? (_pictureCache = TearOff.Picture); }
    }

    private AggregateParameter _categoryIdCache = null;
    private AggregateParameter _categoryNameCache = null;
    private AggregateParameter _descriptionCache = null;
    private AggregateParameter _pictureCache = null;

    /// <summary>
    /// Drops all cached aggregate parameters and flushes those already
    /// registered with the entity's query.
    /// </summary>
    public void AggregateClauseReset()
    {
        _categoryIdCache = null;
        _categoryNameCache = null;
        _descriptionCache = null;
        _pictureCache = null;
        _entity.Query.FlushAggregateParameters();
    }

    private BusinessEntity _entity;
    private TearOffAggregateParameter _tearOff;
}
/// <summary>
/// Entry point for building this entity's aggregate clause.
/// A single AggregateClause instance is created on first use and
/// kept for the lifetime of the entity.
/// </summary>
public AggregateClause Aggregate
{
    get { return _aggregateClause ?? (_aggregateClause = new AggregateClause(this)); }
}

private AggregateClause _aggregateClause = null;
#endregion
/// <summary>
/// Builds the command for the Categories insert stored procedure.
/// CategoryID is generated by the database, so its parameter is
/// switched to Output to capture the new key after execution.
/// </summary>
protected override IDbCommand GetInsertCommand()
{
    SqlCommand command = new SqlCommand();
    command.CommandType = CommandType.StoredProcedure;
    command.CommandText = "[" + this.SchemaStoredProcedure + "proc_CategoriesInsert]";
    CreateParameters(command);
    command.Parameters[Parameters.CategoryID.ParameterName].Direction = ParameterDirection.Output;
    return command;
}
/// <summary>
/// Builds the command for the Categories update stored procedure,
/// with all column parameters bound to the current row version.
/// </summary>
protected override IDbCommand GetUpdateCommand()
{
    SqlCommand command = new SqlCommand();
    command.CommandType = CommandType.StoredProcedure;
    command.CommandText = "[" + this.SchemaStoredProcedure + "proc_CategoriesUpdate]";
    // CreateParameters returns the same command it was given.
    return CreateParameters(command);
}
/// <summary>
/// Builds the command for the Categories delete stored procedure.
/// Only the primary key (CategoryID) is passed.
/// </summary>
protected override IDbCommand GetDeleteCommand()
{
    SqlCommand command = new SqlCommand();
    command.CommandType = CommandType.StoredProcedure;
    command.CommandText = "[" + this.SchemaStoredProcedure + "proc_CategoriesDelete]";
    SqlParameter keyParameter = command.Parameters.Add(Parameters.CategoryID);
    keyParameter.SourceColumn = ColumnNames.CategoryID;
    keyParameter.SourceVersion = DataRowVersion.Current;
    return command;
}
/// <summary>
/// Adds one stored-procedure parameter to the command and binds it to
/// its source column using the current row version.
/// </summary>
private static void AddBoundParameter(SqlCommand cmd, SqlParameter parameter, string sourceColumn)
{
    SqlParameter bound = cmd.Parameters.Add(parameter);
    bound.SourceColumn = sourceColumn;
    bound.SourceVersion = DataRowVersion.Current;
}

/// <summary>
/// Registers all four Categories column parameters on the command and
/// returns that same command.
/// </summary>
private IDbCommand CreateParameters(SqlCommand cmd)
{
    AddBoundParameter(cmd, Parameters.CategoryID, ColumnNames.CategoryID);
    AddBoundParameter(cmd, Parameters.CategoryName, ColumnNames.CategoryName);
    AddBoundParameter(cmd, Parameters.Description, ColumnNames.Description);
    AddBoundParameter(cmd, Parameters.Picture, ColumnNames.Picture);
    return cmd;
}
}
}
| |
using System;
using System.Collections;
namespace Tamir.SharpSsh.jsch
{
/* -*-mode:java; c-basic-offset:2; -*- */
/*
Copyright (c) 2002,2003,2004 ymnk, JCraft,Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
3. The names of the authors may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/// <summary>
/// "publickey" user authentication (RFC 4252, section 7). For each
/// identity registered with the JSch instance, the server is first sent
/// a probe request (boolean FALSE, public key only); if the server
/// answers SSH_MSG_USERAUTH_PK_OK, the request is repeated with boolean
/// TRUE plus a signature over the session id and the request itself,
/// proving possession of the private key.
/// </summary>
internal class UserAuthPublicKey : UserAuth
{
    // Callback used to prompt for key passphrases and to display server
    // banner messages. NOTE(review): appears it may be null, in which case
    // encrypted identities cannot be unlocked — confirm against callers.
    internal UserInfo userinfo;

    internal UserAuthPublicKey(UserInfo userinfo)
    {
        this.userinfo = userinfo;
    }

    /// <summary>
    /// Runs the publickey exchange on the given session. Returns true on
    /// SSH_MSG_USERAUTH_SUCCESS; false once every identity has been tried
    /// without success.
    /// </summary>
    public override bool start(Session session)
    {
        //super.start(session);
        //Vector identities=JSch.identities;
        ArrayList identities = session.jsch.identities;
        Packet packet = session.packet;
        Buffer buf = session.buf;
        String passphrase = null;
        String username = session.username;
        byte[] _username = null;
        try
        {
            _username = Util.getBytesUTF8(username);
        }
        catch
        {
            //(java.io.UnsupportedEncodingException e){
            // Fall back to the platform default encoding.
            _username = Util.getBytes(username);
        }
        // Try each configured identity in turn.
        for (int i = 0; i < identities.Count; i++)
        {
            Identity identity = (Identity) (identities[i]);
            byte[] pubkeyblob = identity.getPublicKeyBlob();
            //System.out.println("UserAuthPublicKey: "+identity+" "+pubkeyblob);
            if (pubkeyblob != null)
            {
                // Probe phase: ask the server whether this key would be
                // acceptable, without signing anything yet.
                // send
                //   byte      SSH_MSG_USERAUTH_REQUEST(50)
                //   string    user name
                //   string    service name ("ssh-connection")
                //   string    "publickey"
                //   boolean   FALSE (probe only, no signature follows)
                //   string    public key algorithm name
                //   string    public key blob
                packet.reset();
                buf.putByte((byte) Session.SSH_MSG_USERAUTH_REQUEST);
                buf.putString(_username);
                buf.putString(Util.getBytes("ssh-connection"));
                buf.putString(Util.getBytes("publickey"));
                buf.putByte((byte) 0);
                buf.putString(Util.getBytes(identity.getAlgName()));
                buf.putString(pubkeyblob);
                session.write(packet);
                loop1:
                while (true)
                {
                    // receive
                    //   byte SSH_MSG_USERAUTH_PK_OK(60)  — key acceptable
                    // (banner messages are shown and the read is retried)
                    buf = session.read(buf);
                    //System.out.println("read: 60 ? "+ buf.buffer[5]);
                    if (buf.buffer[5] == Session.SSH_MSG_USERAUTH_PK_OK)
                    {
                        break;
                    }
                    else if (buf.buffer[5] == Session.SSH_MSG_USERAUTH_FAILURE)
                    {
                        // System.out.println("USERAUTH publickey "+session.getIdentity()+
                        //		   " is not acceptable.");
                        break;
                    }
                    else if (buf.buffer[5] == Session.SSH_MSG_USERAUTH_BANNER)
                    {
                        // Skip packet length, message type and padding, then
                        // read the banner text and language tag.
                        buf.getInt();
                        buf.getByte();
                        buf.getByte();
                        byte[] _message = buf.getString();
                        byte[] lang = buf.getString();
                        String message = null;
                        try
                        {
                            message = Util.getStringUTF8(_message);
                        }
                        catch
                        {
                            //(java.io.UnsupportedEncodingException e){
                            message = Util.getString(_message);
                        }
                        if (userinfo != null)
                        {
                            userinfo.showMessage(message);
                        }
                        goto loop1;
                    }
                    else
                    {
                        //System.out.println("USERAUTH fail ("+buf.buffer[5]+")");
                        //throw new JSchException("USERAUTH fail ("+buf.buffer[5]+")");
                        break;
                    }
                }
                // Server rejected this key: move on to the next identity.
                if (buf.buffer[5] != Session.SSH_MSG_USERAUTH_PK_OK)
                {
                    continue;
                }
            }
            //System.out.println("UserAuthPublicKey: identity.isEncrypted()="+identity.isEncrypted());
            // Passphrase phase: give the user up to 5 attempts to unlock
            // an encrypted private key.
            int count = 5;
            while (true)
            {
                if ((identity.isEncrypted() && passphrase == null))
                {
                    if (userinfo == null) throw new JSchException("USERAUTH fail");
                    if (identity.isEncrypted() &&
                        !userinfo.promptPassphrase("Passphrase for " + identity.getName()))
                    {
                        throw new JSchAuthCancelException("publickey");
                        //throw new JSchException("USERAUTH cancel");
                        //break;
                    }
                    passphrase = userinfo.getPassphrase();
                }
                if (!identity.isEncrypted() || passphrase != null)
                {
                    //System.out.println("UserAuthPublicKey: @1 "+passphrase);
                    if (identity.setPassphrase(passphrase))
                        break;
                }
                // Wrong passphrase: clear it and retry until attempts run out.
                passphrase = null;
                count--;
                if (count == 0) break;
            }
            //System.out.println("UserAuthPublicKey: identity.isEncrypted()="+identity.isEncrypted());
            // Still locked after all attempts: skip this identity.
            if (identity.isEncrypted()) continue;
            if (pubkeyblob == null) pubkeyblob = identity.getPublicKeyBlob();
            //System.out.println("UserAuthPublicKey: pubkeyblob="+pubkeyblob);
            if (pubkeyblob == null) continue;
            // Signature phase.
            // send
            //   byte      SSH_MSG_USERAUTH_REQUEST(50)
            //   string    user name
            //   string    service name ("ssh-connection")
            //   string    "publickey"
            //   boolean   TRUE (a signature follows)
            //   string    public key algorithm name
            //   string    public key blob
            packet.reset();
            buf.putByte((byte) Session.SSH_MSG_USERAUTH_REQUEST);
            buf.putString(_username);
            buf.putString(Util.getBytes("ssh-connection"));
            buf.putString(Util.getBytes("publickey"));
            buf.putByte((byte) 1);
            buf.putString(Util.getBytes(identity.getAlgName()));
            buf.putString(pubkeyblob);
            //			byte[] tmp=new byte[buf.index-5];
            //			System.arraycopy(buf.buffer, 5, tmp, 0, tmp.length);
            //			buf.putString(signature);
            // Sign (length-prefixed session id || request body) as required
            // by RFC 4252 for the publickey signature.
            byte[] sid = session.getSessionId();
            uint sidlen = (uint) sid.Length;
            byte[] tmp = new byte[4 + sidlen + buf.index - 5];
            tmp[0] = (byte) (sidlen >> 24);
            tmp[1] = (byte) (sidlen >> 16);
            tmp[2] = (byte) (sidlen >> 8);
            tmp[3] = (byte) (sidlen);
            Array.Copy(sid, 0, tmp, 4, sidlen);
            Array.Copy(buf.buffer, 5, tmp, 4 + sidlen, buf.index - 5);
            byte[] signature = identity.getSignature(session, tmp);
            if (signature == null)
            {
                // for example, too long key length.
                break;
            }
            buf.putString(signature);
            session.write(packet);
            loop2:
            while (true)
            {
                // receive
                //   byte SSH_MSG_USERAUTH_SUCCESS(52)
                // (banners shown and retried; FAILURE may carry partial success)
                buf = session.read(buf);
                //System.out.println("read: 52 ? "+ buf.buffer[5]);
                if (buf.buffer[5] == Session.SSH_MSG_USERAUTH_SUCCESS)
                {
                    return true;
                }
                else if (buf.buffer[5] == Session.SSH_MSG_USERAUTH_BANNER)
                {
                    buf.getInt();
                    buf.getByte();
                    buf.getByte();
                    byte[] _message = buf.getString();
                    byte[] lang = buf.getString();
                    String message = null;
                    try
                    {
                        message = Util.getStringUTF8(_message);
                    }
                    catch
                    {
                        //(java.io.UnsupportedEncodingException e){
                        message = Util.getString(_message);
                    }
                    if (userinfo != null)
                    {
                        userinfo.showMessage(message);
                    }
                    goto loop2;
                }
                else if (buf.buffer[5] == Session.SSH_MSG_USERAUTH_FAILURE)
                {
                    buf.getInt();
                    buf.getByte();
                    buf.getByte();
                    byte[] foo = buf.getString();
                    int partial_success = buf.getByte();
                    //System.out.println(new String(foo)+
                    //		   " partial_success:"+(partial_success!=0));
                    // Partial success: this method succeeded but more
                    // authentication is required; surface the remaining
                    // method list to the caller.
                    if (partial_success != 0)
                    {
                        throw new JSchPartialAuthException(Util.getString(foo));
                    }
                    break;
                }
                //System.out.println("USERAUTH fail ("+buf.buffer[5]+")");
                //throw new JSchException("USERAUTH fail ("+buf.buffer[5]+")");
                break;
            }
        }
        return false;
    }
}
}
| |
// ***********************************************************************
// Copyright (c) 2014 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Diagnostics;
using System.IO;
using System.Threading;
using NUnit.Framework.Constraints;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal.Execution;
#if !PORTABLE && !NETSTANDARD1_6
using System.Runtime.Remoting.Messaging;
using System.Security;
using System.Security.Principal;
using NUnit.Compatibility;
#endif
namespace NUnit.Framework.Internal
{
/// <summary>
/// Helper class used to save and restore certain static or
/// singleton settings in the environment that affect tests
/// or which might be changed by the user tests.
///
/// An internal class is used to hold settings and a stack
/// of these objects is pushed and popped as Save and Restore
/// are called.
/// </summary>
public class TestExecutionContext
#if !PORTABLE && !NETSTANDARD1_6
    : LongLivedMarshalByRefObject, ILogicalThreadAffinative
#endif
{
    // NOTE: Be very careful when modifying this class. It uses
    // conditional compilation extensively and you must give
    // thought to whether any new features will be supported
    // on each platform. In particular, instance fields,
    // properties, initialization and restoration must all
    // use the same conditions for each feature.

    #region Instance Fields

    /// <summary>
    /// Link to a prior saved context
    /// </summary>
    private TestExecutionContext _priorContext;

    /// <summary>
    /// Indicates that a stop has been requested
    /// </summary>
    private TestExecutionStatus _executionStatus;

    /// <summary>
    /// The event listener currently receiving notifications
    /// </summary>
    private ITestListener _listener = TestListener.NULL;

    /// <summary>
    /// The number of assertions for the current test
    /// </summary>
    private int _assertCount;

    private Randomizer _randomGenerator;

    /// <summary>
    /// The current culture
    /// </summary>
    private CultureInfo _currentCulture;

    /// <summary>
    /// The current UI culture
    /// </summary>
    private CultureInfo _currentUICulture;

    /// <summary>
    /// The current test result
    /// </summary>
    private TestResult _currentResult;

#if !PORTABLE && !NETSTANDARD1_6
    /// <summary>
    /// The current Principal.
    /// </summary>
    private IPrincipal _currentPrincipal;
#endif

    #endregion

    #region Constructors

    /// <summary>
    /// Initializes a new instance of the <see cref="TestExecutionContext"/> class.
    /// </summary>
    public TestExecutionContext()
    {
        _priorContext = null;
        TestCaseTimeout = 0;
        UpstreamActions = new List<ITestAction>();

        _currentCulture = CultureInfo.CurrentCulture;
        _currentUICulture = CultureInfo.CurrentUICulture;

#if !PORTABLE && !NETSTANDARD1_6
        _currentPrincipal = Thread.CurrentPrincipal;
#endif

        CurrentValueFormatter = (val) => MsgUtils.DefaultValueFormatter(val);
        IsSingleThreaded = false;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="TestExecutionContext"/> class.
    /// </summary>
    /// <param name="other">An existing instance of TestExecutionContext.</param>
    public TestExecutionContext(TestExecutionContext other)
    {
        _priorContext = other;

        CurrentTest = other.CurrentTest;
        CurrentResult = other.CurrentResult;
        TestObject = other.TestObject;
        WorkDirectory = other.WorkDirectory;
        _listener = other._listener;
        StopOnError = other.StopOnError;
        TestCaseTimeout = other.TestCaseTimeout;
        UpstreamActions = new List<ITestAction>(other.UpstreamActions);

        _currentCulture = other.CurrentCulture;
        _currentUICulture = other.CurrentUICulture;

#if !PORTABLE && !NETSTANDARD1_6
        _currentPrincipal = other.CurrentPrincipal;
#endif

        CurrentValueFormatter = other.CurrentValueFormatter;

        Dispatcher = other.Dispatcher;
        ParallelScope = other.ParallelScope;
        IsSingleThreaded = other.IsSingleThreaded;
    }

    #endregion

    #region Static Singleton Instance

    // NOTE: We use different implementations for various platforms

    // If a user creates a thread then the current context
    // will be null. This also happens when the compiler
    // automatically creates threads for async methods.
    // We create a new context, which is automatically
    // populated with values taken from the current thread.

#if NETSTANDARD1_6
    private static readonly AsyncLocal<TestExecutionContext> _currentContext = new AsyncLocal<TestExecutionContext>();

    /// <summary>
    /// Gets and sets the current context.
    /// </summary>
    public static TestExecutionContext CurrentContext
    {
        get
        {
            return _currentContext.Value ?? (_currentContext.Value = new TestExecutionContext());
        }
        private set
        {
            _currentContext.Value = value;
        }
    }

    /// <summary>
    /// Get the current context or return null if none is found.
    /// </summary>
    /// <remarks></remarks>
    public static TestExecutionContext GetTestExecutionContext()
    {
        return _currentContext.Value;
    }
#elif PORTABLE
    // In the Silverlight and portable builds, we use a ThreadStatic
    // field to hold the current TestExecutionContext.

    [ThreadStatic]
    private static TestExecutionContext _currentContext;

    /// <summary>
    /// Gets and sets the current context.
    /// </summary>
    public static TestExecutionContext CurrentContext
    {
        get
        {
            if (_currentContext == null)
                _currentContext = new TestExecutionContext();

            return _currentContext;
        }
        private set
        {
            _currentContext = value;
        }
    }

    /// <summary>
    /// Get the current context or return null if none is found.
    /// </summary>
    /// <remarks></remarks>
    public static TestExecutionContext GetTestExecutionContext()
    {
        // TODO: This will need to be reworked if we re-introduce threading in .NET Standard
        return _currentContext;
    }
#else
    // In all other builds, we use the CallContext

    private static readonly string CONTEXT_KEY = "NUnit.Framework.TestContext";

    /// <summary>
    /// Gets and sets the current context.
    /// </summary>
    public static TestExecutionContext CurrentContext
    {
        // This getter invokes security critical members on the 'System.Runtime.Remoting.Messaging.CallContext' class.
        // Callers of this method have no influence on how these methods are used so we define a 'SecuritySafeCriticalAttribute'
        // rather than a 'SecurityCriticalAttribute' to enable use by security transparent callers.
        [SecuritySafeCritical]
        get
        {
            var context = GetTestExecutionContext();
            if (context == null) // This can happen on Mono
            {
                context = new TestExecutionContext();
                CallContext.SetData(CONTEXT_KEY, context);
            }

            return context;
        }
        // This setter invokes security critical members on the 'System.Runtime.Remoting.Messaging.CallContext' class.
        // Callers of this method have no influence on how these methods are used so we define a 'SecuritySafeCriticalAttribute'
        // rather than a 'SecurityCriticalAttribute' to enable use by security transparent callers.
        [SecuritySafeCritical]
        private set
        {
            if (value == null)
                CallContext.FreeNamedDataSlot(CONTEXT_KEY);
            else
                CallContext.SetData(CONTEXT_KEY, value);
        }
    }

    /// <summary>
    /// Get the current context or return null if none is found.
    /// </summary>
    /// <remarks></remarks>
    // This setter invokes security critical members on the 'System.Runtime.Remoting.Messaging.CallContext' class.
    // Callers of this method have no influence on how these methods are used so we define a 'SecuritySafeCriticalAttribute'
    // rather than a 'SecurityCriticalAttribute' to enable use by security transparent callers.
    [SecuritySafeCritical]
    public static TestExecutionContext GetTestExecutionContext()
    {
        return CallContext.GetData(CONTEXT_KEY) as TestExecutionContext;
    }
#endif

    #endregion

    #region Static Methods

    /// <summary>
    /// Clear the current context. This is provided to
    /// prevent "leakage" of the CallContext containing
    /// the current context back to any runners.
    /// </summary>
    public static void ClearCurrentContext()
    {
        CurrentContext = null;
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets or sets the current test
    /// </summary>
    public Test CurrentTest { get; set; }

    /// <summary>
    /// The time the current test started execution
    /// </summary>
    public DateTime StartTime { get; set; }

    /// <summary>
    /// The time the current test started in Ticks
    /// </summary>
    public long StartTicks { get; set; }

    /// <summary>
    /// Gets or sets the current test result
    /// </summary>
    public TestResult CurrentResult
    {
        get { return _currentResult; }
        set
        {
            _currentResult = value;
            if (value != null)
                OutWriter = value.OutWriter;
        }
    }

    /// <summary>
    /// Gets a TextWriter that will send output to the current test result.
    /// </summary>
    public TextWriter OutWriter { get; private set; }

    /// <summary>
    /// The current test object - that is the user fixture
    /// object on which tests are being executed.
    /// </summary>
    public object TestObject { get; set; }

    /// <summary>
    /// Get or set the working directory
    /// </summary>
    public string WorkDirectory { get; set; }

    /// <summary>
    /// Get or set indicator that run should stop on the first error
    /// </summary>
    public bool StopOnError { get; set; }

    /// <summary>
    /// Gets an enum indicating whether a stop has been requested.
    /// </summary>
    public TestExecutionStatus ExecutionStatus
    {
        get
        {
            // ExecutionStatus may have been set to StopRequested or AbortRequested
            // in a prior context. If so, reflect the same setting in this context.
            if (_executionStatus == TestExecutionStatus.Running && _priorContext != null)
                _executionStatus = _priorContext.ExecutionStatus;

            return _executionStatus;
        }
        set
        {
            _executionStatus = value;

            // Push the same setting up to all prior contexts
            if (_priorContext != null)
                _priorContext.ExecutionStatus = value;
        }
    }

    /// <summary>
    /// The current test event listener
    /// </summary>
    internal ITestListener Listener
    {
        get { return _listener; }
        set { _listener = value; }
    }

    /// <summary>
    /// The current WorkItemDispatcher. Made public for
    /// use by nunitlite.tests
    /// </summary>
    public IWorkItemDispatcher Dispatcher { get; set; }

    /// <summary>
    /// The ParallelScope to be used by tests running in this context.
    /// For builds with out the parallel feature, it has no effect.
    /// </summary>
    public ParallelScope ParallelScope { get; set; }

#if PARALLEL
    /// <summary>
    /// The worker that spawned the context.
    /// For builds without the parallel feature, it is null.
    /// </summary>
    public TestWorker TestWorker {get; internal set;}
#endif

    /// <summary>
    /// Gets the RandomGenerator specific to this Test
    /// </summary>
    public Randomizer RandomGenerator
    {
        get
        {
            if (_randomGenerator == null)
                _randomGenerator = new Randomizer(CurrentTest.Seed);
            return _randomGenerator;
        }
    }

    /// <summary>
    /// Gets the assert count.
    /// </summary>
    /// <value>The assert count.</value>
    internal int AssertCount
    {
        get { return _assertCount; }
    }

    /// <summary>
    /// The current nesting level of multiple assert blocks
    /// </summary>
    internal int MultipleAssertLevel { get; set; }

    /// <summary>
    /// Gets or sets the test case timeout value
    /// </summary>
    public int TestCaseTimeout { get; set; }

    /// <summary>
    /// Gets a list of ITestActions set by upstream tests
    /// </summary>
    public List<ITestAction> UpstreamActions { get; private set; }

    // TODO: Put in checks on all of these settings
    // with side effects so we only change them
    // if the value is different

    /// <summary>
    /// Saves or restores the CurrentCulture
    /// </summary>
    public CultureInfo CurrentCulture
    {
        get { return _currentCulture; }
        set
        {
            _currentCulture = value;
#if !PORTABLE && !NETSTANDARD1_6
            Thread.CurrentThread.CurrentCulture = _currentCulture;
#endif
        }
    }

    /// <summary>
    /// Saves or restores the CurrentUICulture
    /// </summary>
    public CultureInfo CurrentUICulture
    {
        get { return _currentUICulture; }
        set
        {
            _currentUICulture = value;
#if !PORTABLE && !NETSTANDARD1_6
            Thread.CurrentThread.CurrentUICulture = _currentUICulture;
#endif
        }
    }

#if !PORTABLE && !NETSTANDARD1_6
    /// <summary>
    /// Gets or sets the current <see cref="IPrincipal"/> for the Thread.
    /// </summary>
    public IPrincipal CurrentPrincipal
    {
        get { return _currentPrincipal; }
        set
        {
            _currentPrincipal = value;
            Thread.CurrentPrincipal = _currentPrincipal;
        }
    }
#endif

    /// <summary>
    /// The current head of the ValueFormatter chain, copied from MsgUtils.ValueFormatter
    /// </summary>
    public ValueFormatter CurrentValueFormatter { get; private set; }

    /// <summary>
    /// If true, all tests must run on the same thread. No new thread may be spawned.
    /// </summary>
    public bool IsSingleThreaded { get; set; }

    #endregion

    #region Instance Methods

    /// <summary>
    /// Record any changes in the environment made by
    /// the test code in the execution context so it
    /// will be passed on to lower level tests.
    /// </summary>
    public void UpdateContextFromEnvironment()
    {
        _currentCulture = CultureInfo.CurrentCulture;
        _currentUICulture = CultureInfo.CurrentUICulture;

#if !PORTABLE && !NETSTANDARD1_6
        _currentPrincipal = Thread.CurrentPrincipal;
#endif
    }

    /// <summary>
    /// Set up the execution environment to match a context.
    /// Note that we may be running on the same thread where the
    /// context was initially created or on a different thread.
    /// </summary>
    public void EstablishExecutionEnvironment()
    {
#if !PORTABLE && !NETSTANDARD1_6
        Thread.CurrentThread.CurrentCulture = _currentCulture;
        Thread.CurrentThread.CurrentUICulture = _currentUICulture;
        Thread.CurrentPrincipal = _currentPrincipal;
#endif

        CurrentContext = this;
    }

    /// <summary>
    /// Increments the assert count by one.
    /// </summary>
    public void IncrementAssertCount()
    {
        Interlocked.Increment(ref _assertCount);
    }

    /// <summary>
    /// Increments the assert count by a specified amount.
    /// </summary>
    /// <param name="count">Number of asserts to add; non-positive values are ignored.</param>
    public void IncrementAssertCount(int count)
    {
        // A single atomic add replaces the previous loop of
        // Interlocked.Increment calls, which was O(count) and allowed
        // other threads to observe intermediate values.
        if (count > 0)
            Interlocked.Add(ref _assertCount, count);
    }

    /// <summary>
    /// Adds a new ValueFormatterFactory to the chain of formatters
    /// </summary>
    /// <param name="formatterFactory">The new factory</param>
    public void AddFormatter(ValueFormatterFactory formatterFactory)
    {
        CurrentValueFormatter = formatterFactory(CurrentValueFormatter);
    }

    // Creates a child context with a fresh result, used by IsolatedContext
    // so user code cannot disturb the real result of the current test.
    private TestExecutionContext CreateIsolatedContext()
    {
        var context = new TestExecutionContext(this);

        if (context.CurrentTest != null)
            context.CurrentResult = context.CurrentTest.MakeTestResult();

        return context;
    }

    #endregion

    #region InitializeLifetimeService

#if !PORTABLE && !NETSTANDARD1_6
    /// <summary>
    /// Obtain lifetime service object
    /// </summary>
    /// <returns></returns>
    [SecurityCritical]  // Override of security critical method must be security critical itself
    public override object InitializeLifetimeService()
    {
        return null;
    }
#endif

    #endregion

    #region Nested IsolatedContext Class

    /// <summary>
    /// An IsolatedContext is used when running code
    /// that may effect the current result in ways that
    /// should not impact the final result of the test.
    /// A new TestExecutionContext is created with an
    /// initially clear result, which is discarded on
    /// exiting the context.
    /// </summary>
    /// <example>
    /// using (new TestExecutionContext.IsolatedContext())
    /// {
    ///     // Code that should not impact the result
    /// }
    /// </example>
    public class IsolatedContext : IDisposable
    {
        private TestExecutionContext _originalContext;

        /// <summary>
        /// Save the original current TestExecutionContext and
        /// make a new isolated context current.
        /// </summary>
        public IsolatedContext()
        {
            _originalContext = CurrentContext;
            CurrentContext = _originalContext.CreateIsolatedContext();
        }

        /// <summary>
        /// Restore the original TestExecutionContext.
        /// </summary>
        public void Dispose()
        {
            CurrentContext = _originalContext;
        }
    }

    #endregion
}
}
| |
using System.Collections.Generic;
using bv.common.Configuration;
using bv.common.Resources;
#if !MONO
using System.Web;
using System.Windows.Forms;
#endif
namespace bv.common.Core
{
public class Localizer
{
// Application language identifiers. These are short ids used throughout
// the application, not full .NET culture names; SupportedLanguages maps
// each id to its culture name (e.g. "en" -> "en-US").
public const string lngEn = "en";
public const string lngRu = "ru";
public const string lngGe = "ka";
public const string lngKz = "kk";
public const string lngUzCyr = "uz-C";
public const string lngUzLat = "uz-L";
public const string lngAzLat = "az-L";
public const string lngAr = "hy";
public const string lngUk = "uk";
public const string lngIraq = "ar";
public const string lngVietnam = "vi";
public const string lngLaos = "lo";

// Resource storage used to translate language/menu names; the
// Get*LanguageName methods default it to BvMessages.Instance when null.
public static BaseStringsStorage MenuMessages { get; set; }
//public static string Language { get; set; }

// Backing field for SupportedLanguages; populated lazily by
// InitSupportedLanguages on first access.
private static Dictionary<string,string> m_SupportedLanguages;
/// <summary>
/// Map of application language id -> .NET culture name.
/// Populated lazily on first read; the setter allows the whole map
/// to be replaced (or cleared back to null to force re-initialization).
/// </summary>
public static Dictionary<string, string> SupportedLanguages
{
    get
    {
        Dictionary<string, string> languages = m_SupportedLanguages;
        if (languages == null)
        {
            InitSupportedLanguages();
            languages = m_SupportedLanguages;
        }
        return languages;
    }
    set
    {
        m_SupportedLanguages = value;
    }
}
/// <summary>
/// Registers a language id -> culture-name pair; the first
/// registration for an id wins, duplicates are ignored.
/// </summary>
private static void AddLanguage(string langID, string cultureName)
{
    if (m_SupportedLanguages.ContainsKey(langID))
        return;
    m_SupportedLanguages.Add(langID, cultureName);
}
/// <summary>
/// Populates the language-id -> culture-name map. In non-MONO desktop
/// builds running outside a web context, only cultures that match an
/// installed input language (keyboard layout) are registered; in all
/// other cases (web, MONO, or no input languages reported) the full
/// supported set is registered.
/// </summary>
private static void InitSupportedLanguages()
{
    m_SupportedLanguages = new Dictionary<string, string>();
#if !MONO
    // HttpContext.Current == null means we are not serving a web
    // request, so WinForms input languages are meaningful here.
    if (HttpContext.Current == null && InputLanguage.InstalledInputLanguages.Count >0)
    {
        foreach (InputLanguage language in InputLanguage.InstalledInputLanguages)
        {
            switch (language.Culture.Name)
            {
                case "en-US":
                    AddLanguage(lngEn, language.Culture.Name);
                    break;
                case "ru-RU":
                    AddLanguage(lngRu, language.Culture.Name);
                    break;
                case "az-Latn-AZ":
                    AddLanguage(lngAzLat, language.Culture.Name);
                    break;
                case "uz-Latn-UZ":
                    AddLanguage(lngUzLat, language.Culture.Name);
                    break;
                case "uz-Cyrl-UZ":
                    AddLanguage(lngUzCyr, language.Culture.Name);
                    break;
                case "ka-GE":
                    AddLanguage(lngGe, language.Culture.Name);
                    break;
                case "uk-UA":
                    AddLanguage(lngUk, language.Culture.Name);
                    break;
                case "kk-KZ":
                    AddLanguage(lngKz, language.Culture.Name);
                    break;
                case "hy-AM":
                    AddLanguage(lngAr, language.Culture.Name);
                    break;
                case "lo-LA":
                    AddLanguage(lngLaos, language.Culture.Name);
                    break;
                case "vi-VN":
                    AddLanguage(lngVietnam, language.Culture.Name);
                    break;
            }
        }
    }
    else
    {
#endif
        // Fallback: register every language the application supports.
        // NOTE(review): lngIraq (ar-IQ) is only registered on this path,
        // never from installed input languages — confirm this is intended.
        AddLanguage(lngEn, "en-US");
        AddLanguage(lngRu, "ru-RU");
        AddLanguage(lngAzLat, "az-Latn-AZ");
        AddLanguage(lngUzLat, "uz-Latn-UZ");
        AddLanguage(lngUzCyr, "uz-Cyrl-UZ");
        AddLanguage(lngGe, "ka-GE");
        AddLanguage(lngUk, "uk-UA");
        AddLanguage(lngKz, "kk-KZ");
        AddLanguage(lngAr, "hy-AM");
        AddLanguage(lngIraq, "ar-IQ");
        AddLanguage(lngLaos, "lo-LA");
        AddLanguage(lngVietnam, "vi-VN");
#if !MONO
    }
#endif
}
/// -----------------------------------------------------------------------------
/// <summary>
/// Converts passed <b>CultureInfo</b> object to the application language identifier
/// </summary>
/// <param name="culture">
/// <b>CultureInfo</b> object
/// </param>
/// <returns>
/// Returns application language identifier related with passed <i>culture</i>.
/// </returns>
/// <remarks>
/// Use this method to retrieve application language identifier for the specific <b>CultureInfo</b>.
/// Application language identifier is used to set/get current application language and
/// perform related application translation to this language.
/// </remarks>
/// <history>
/// [Mike] 30.03.2006 Created
/// </history>
/// -----------------------------------------------------------------------------
/// <summary>
/// Maps a <b>CultureInfo</b> to the application language identifier.
/// Uzbek is split into Cyrillic/Latin variants by inspecting the full
/// culture name; Azeri always maps to its Latin id; Arabic maps to the
/// Iraq id; everything else uses the two-letter ISO language name.
/// </summary>
/// <param name="culture">The culture to translate.</param>
/// <returns>The application language identifier for <paramref name="culture"/>.</returns>
public static string GetLanguageID(System.Globalization.CultureInfo culture)
{
    string twoLetter = culture.TwoLetterISOLanguageName;
    if (twoLetter == "uz")
    {
        bool isCyrillic = culture.Name.IndexOf("Cyrl", System.StringComparison.Ordinal) > 0;
        return isCyrillic ? lngUzCyr : lngUzLat;
    }
    if (twoLetter == "az")
        return lngAzLat;
    if (twoLetter == "ar")
        return lngIraq;
    return twoLetter;
}
/// -----------------------------------------------------------------------------
/// <summary>
/// Gets the translated human readable language name related with specific application language identifier
/// </summary>
/// <param name="LangID">
/// application language identifier for which the human readable language name should be retrieved
/// </param>
/// <param name="DisplayLangID">
/// application language identifier of language to which the language name should be translated
/// </param>
/// <remarks>
/// <b>Note:</b> only English, Russian, Georgian, Kazakh, Uzbek Cyrillic and Uzbek Latin languages are supported by the system
/// </remarks>
/// <history>
/// [Mike] 30.03.2006 Created
/// </history>
/// -----------------------------------------------------------------------------
/// <summary>
/// Returns the human-readable name of the language identified by
/// <paramref name="LangID"/>, translated into the language identified by
/// <paramref name="DisplayLangID"/>. Menu accelerator markers ("&amp;")
/// are stripped from the resource strings. Unknown ids fall back to the
/// English entry.
/// </summary>
public static string GetLanguageName(string LangID, string DisplayLangID = null)
{
    if (MenuMessages == null)
        MenuMessages = BvMessages.Instance;

    string resourceKey;
    string fallback;
    switch (LangID)
    {
        case lngRu: resourceKey = "MenuRussian"; fallback = "&Russian"; break;
        case lngGe: resourceKey = "MenuGeorgian"; fallback = "&Georgian"; break;
        case lngKz: resourceKey = "MenuKazakh"; fallback = "&Kazakh"; break;
        case lngUzCyr: resourceKey = "MenuUzbekCyr"; fallback = "Uzbeck (&Cyr)"; break;
        case lngUzLat: resourceKey = "MenuUzbekLat"; fallback = "Uzbek (&Lat)"; break;
        case lngAzLat: resourceKey = "MenuAzeriLat"; fallback = "Azeri (&Lat)"; break;
        case lngAr: resourceKey = "MenuArmenian"; fallback = "Armenian"; break;
        case lngUk: resourceKey = "MenuUkrainian"; fallback = "Ukrainian"; break;
        case lngIraq: resourceKey = "MenuIraq"; fallback = "Arabic (Iraq)"; break;
        case lngLaos: resourceKey = "MenuLaos"; fallback = "Laos"; break;
        case lngVietnam: resourceKey = "MenuVietnam"; fallback = "Vietnam"; break;
        default:
            // lngEn and any unrecognized id resolve to English.
            resourceKey = "MenuEnglish"; fallback = "&English"; break;
    }
    return MenuMessages.GetString(resourceKey, fallback, DisplayLangID).Replace("&", "");
}
/// <summary>
/// Returns the language name for menu display in the form
/// "name translated to <paramref name="displayLangID"/> (name in the language itself)".
/// Unknown language identifiers fall back to English.
/// </summary>
/// <param name="langID">application language identifier whose name should be retrieved</param>
/// <param name="displayLangID">language identifier the name should be translated to</param>
public static string GetMenuLanguageName(string langID, string displayLangID = null)
{
    if (MenuMessages == null)
        MenuMessages = BvMessages.Instance;
    switch (langID)
    {
        case lngEn:
            return BuildMenuLanguageName("MenuEnglish", "&English", displayLangID, lngEn);
        case lngRu:
            return BuildMenuLanguageName("MenuRussian", "&Russian", displayLangID, lngRu);
        case lngGe:
            return BuildMenuLanguageName("MenuGeorgian", "&Georgian", displayLangID, lngGe);
        case lngKz:
            return BuildMenuLanguageName("MenuKazakh", "&Kazakh", displayLangID, lngKz);
        case lngUzCyr:
            return BuildMenuLanguageName("MenuUzbekCyr", "Uzbeck (&Cyr)", displayLangID, lngUzCyr);
        case lngUzLat:
            return BuildMenuLanguageName("MenuUzbekLat", "Uzbek (&Lat)", displayLangID, lngUzLat);
        case lngAzLat:
            return BuildMenuLanguageName("MenuAzeriLat", "Azeri (&Lat)", displayLangID, lngAzLat);
        case lngAr:
            return BuildMenuLanguageName("MenuArmenian", "Armenian", displayLangID, lngAr);
        case lngUk:
            return BuildMenuLanguageName("MenuUkrainian", "Ukrainian", displayLangID, lngUk);
        case lngIraq:
            return BuildMenuLanguageName("MenuIraq", "Arabic (Iraq)", displayLangID, lngIraq);
        case lngLaos:
            return BuildMenuLanguageName("MenuLaos", "Laos", displayLangID, lngLaos);
        case lngVietnam:
            return BuildMenuLanguageName("MenuVietnam", "Vietnam", displayLangID, lngVietnam);
    }
    return BuildMenuLanguageName("MenuEnglish", "&English", displayLangID, lngEn);
}
/// <summary>
/// Builds "translated name (native name)" for a single language resource entry.
/// The "&amp;" menu accelerator markers are stripped from both parts.
/// </summary>
private static string BuildMenuLanguageName(string resourceKey, string defaultCaption, string displayLangID, string nativeLangID)
{
    string displayName = MenuMessages.GetString(resourceKey, defaultCaption, displayLangID).Replace("&", "");
    string nativeName = MenuMessages.GetString(resourceKey, defaultCaption, nativeLangID).Replace("&", "");
    return displayName + " (" + nativeName + ")";
}
/// <summary>
/// Identifier of the application's default language, read from the
/// "DefaultLanguage" configuration setting; falls back to "en" when the
/// setting is absent.
/// </summary>
public static string DefaultLanguage
{
    get
    {
        return Config.GetSetting("DefaultLanguage", "en");
    }
}
/// <summary>
/// Locale value mapped to <see cref="DefaultLanguage"/> in SupportedLanguages.
/// NOTE(review): assumes the configured default language is always a key of
/// SupportedLanguages — an unknown value would surface here as a lookup failure;
/// confirm against the configuration contract.
/// </summary>
public static string DefaultLanguageLocale
{
    get
    {
        return SupportedLanguages[DefaultLanguage];
    }
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace Example.API.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
    // Modify this to support more data annotation attributes.
    // Maps a data-annotation attribute type to a function that renders its
    // help-page description text from the attribute instance.
    private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
    {
        { typeof(RequiredAttribute), a => "Required" },
        { typeof(RangeAttribute), a =>
            {
                RangeAttribute range = (RangeAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
            }
        },
        { typeof(MaxLengthAttribute), a =>
            {
                MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
            }
        },
        { typeof(MinLengthAttribute), a =>
            {
                MinLengthAttribute minLength = (MinLengthAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
            }
        },
        { typeof(StringLengthAttribute), a =>
            {
                StringLengthAttribute strLength = (StringLengthAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
            }
        },
        { typeof(DataTypeAttribute), a =>
            {
                DataTypeAttribute dataType = (DataTypeAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
            }
        },
        { typeof(RegularExpressionAttribute), a =>
            {
                RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
            }
        },
    };
    // Modify this to add more default documentations.
    // Human-readable descriptions used for primitive/simple CLR types.
    private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
    {
        { typeof(Int16), "integer" },
        { typeof(Int32), "integer" },
        { typeof(Int64), "integer" },
        { typeof(UInt16), "unsigned integer" },
        { typeof(UInt32), "unsigned integer" },
        { typeof(UInt64), "unsigned integer" },
        { typeof(Byte), "byte" },
        { typeof(Char), "character" },
        { typeof(SByte), "signed byte" },
        { typeof(Uri), "URI" },
        { typeof(Single), "decimal number" },
        { typeof(Double), "decimal number" },
        { typeof(Decimal), "decimal number" },
        { typeof(String), "string" },
        { typeof(Guid), "globally unique identifier" },
        { typeof(TimeSpan), "time interval" },
        { typeof(DateTime), "date" },
        { typeof(DateTimeOffset), "date" },
        { typeof(Boolean), "boolean" },
    };
    // Lazily resolved so the documentation provider is looked up only on first use.
    // readonly: assigned exclusively in the constructor.
    private readonly Lazy<IModelDocumentationProvider> _documentationProvider;
    /// <summary>
    /// Initializes a new instance bound to the given <see cref="HttpConfiguration"/>.
    /// </summary>
    /// <param name="config">configuration whose services supply the documentation provider</param>
    /// <exception cref="ArgumentNullException">when <paramref name="config"/> is null</exception>
    public ModelDescriptionGenerator(HttpConfiguration config)
    {
        if (config == null)
        {
            throw new ArgumentNullException("config");
        }
        _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
        GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
    }
    /// <summary>
    /// Cache of all generated model descriptions, keyed case-insensitively by model name.
    /// </summary>
    public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
    private IModelDocumentationProvider DocumentationProvider
    {
        get
        {
            return _documentationProvider.Value;
        }
    }
    /// <summary>
    /// Returns the cached <see cref="ModelDescription"/> for <paramref name="modelType"/>,
    /// generating and caching one if necessary. Nullable types are unwrapped first;
    /// simple types, enums, collections, dictionaries, key/value pairs and complex
    /// types are each handled by a dedicated generator.
    /// </summary>
    /// <exception cref="ArgumentNullException">when <paramref name="modelType"/> is null</exception>
    /// <exception cref="InvalidOperationException">when two distinct types map to the same model name</exception>
    public ModelDescription GetOrCreateModelDescription(Type modelType)
    {
        if (modelType == null)
        {
            throw new ArgumentNullException("modelType");
        }
        Type underlyingType = Nullable.GetUnderlyingType(modelType);
        if (underlyingType != null)
        {
            modelType = underlyingType;
        }
        ModelDescription modelDescription;
        string modelName = ModelNameHelper.GetModelName(modelType);
        if (GeneratedModels.TryGetValue(modelName, out modelDescription))
        {
            if (modelType != modelDescription.ModelType)
            {
                throw new InvalidOperationException(
                    String.Format(
                        CultureInfo.CurrentCulture,
                        "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                        "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                        modelName,
                        modelDescription.ModelType.FullName,
                        modelType.FullName));
            }
            return modelDescription;
        }
        if (DefaultTypeDocumentation.ContainsKey(modelType))
        {
            return GenerateSimpleTypeModelDescription(modelType);
        }
        if (modelType.IsEnum)
        {
            return GenerateEnumTypeModelDescription(modelType);
        }
        if (modelType.IsGenericType)
        {
            Type[] genericArguments = modelType.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                if (enumerableType.IsAssignableFrom(modelType))
                {
                    return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                }
            }
            if (genericArguments.Length == 2)
            {
                // IDictionary<,> must be checked before KeyValuePair<,> since a
                // dictionary also enumerates key/value pairs.
                Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                if (dictionaryType.IsAssignableFrom(modelType))
                {
                    return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                }
                Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                if (keyValuePairType.IsAssignableFrom(modelType))
                {
                    return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                }
            }
        }
        if (modelType.IsArray)
        {
            Type elementType = modelType.GetElementType();
            return GenerateCollectionModelDescription(modelType, elementType);
        }
        if (modelType == typeof(NameValueCollection))
        {
            return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
        }
        if (typeof(IDictionary).IsAssignableFrom(modelType))
        {
            return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
        }
        if (typeof(IEnumerable).IsAssignableFrom(modelType))
        {
            return GenerateCollectionModelDescription(modelType, typeof(object));
        }
        return GenerateComplexTypeModelDescription(modelType);
    }
    // Change this to provide different name for the member.
    // Precedence: [JsonProperty] name, then [DataMember] name (only on data
    // contracts), then the CLR member name.
    private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
    {
        JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
        if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
        {
            return jsonProperty.PropertyName;
        }
        if (hasDataContractAttribute)
        {
            DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
            if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
            {
                return dataMember.Name;
            }
        }
        return member.Name;
    }
    private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
    {
        JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
        XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
        IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
        NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
        ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
        bool hasMemberAttribute = member.DeclaringType.IsEnum ?
            member.GetCustomAttribute<EnumMemberAttribute>() != null :
            member.GetCustomAttribute<DataMemberAttribute>() != null;
        // Display member only if all the followings are true:
        // no JsonIgnoreAttribute
        // no XmlIgnoreAttribute
        // no IgnoreDataMemberAttribute
        // no NonSerializedAttribute
        // no ApiExplorerSettingsAttribute with IgnoreApi set to true
        // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
        return jsonIgnore == null &&
            xmlIgnore == null &&
            ignoreDataMember == null &&
            nonSerialized == null &&
            (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
            (!hasDataContractAttribute || hasMemberAttribute);
    }
    // Returns the built-in description for simple types, otherwise asks the
    // configured documentation provider; may return null when neither applies.
    private string CreateDefaultDocumentation(Type type)
    {
        string documentation;
        if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
        {
            return documentation;
        }
        if (DocumentationProvider != null)
        {
            documentation = DocumentationProvider.GetDocumentation(type);
        }
        return documentation;
    }
    // Collects annotation texts for a member, placing RequiredAttribute first and
    // sorting the remainder alphabetically, then appends them to the model.
    private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
    {
        List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
        IEnumerable<Attribute> attributes = property.GetCustomAttributes();
        foreach (Attribute attribute in attributes)
        {
            Func<object, string> textGenerator;
            if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
            {
                annotations.Add(
                    new ParameterAnnotation
                    {
                        AnnotationAttribute = attribute,
                        Documentation = textGenerator(attribute)
                    });
            }
        }
        // Rearrange the annotations
        annotations.Sort((x, y) =>
        {
            // Special-case RequiredAttribute so that it shows up on top
            if (x.AnnotationAttribute is RequiredAttribute)
            {
                return -1;
            }
            if (y.AnnotationAttribute is RequiredAttribute)
            {
                return 1;
            }
            // Sort the rest based on alphabetic order of the documentation
            return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
        });
        foreach (ParameterAnnotation annotation in annotations)
        {
            propertyModel.Annotations.Add(annotation);
        }
    }
    private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
    {
        ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
        if (collectionModelDescription != null)
        {
            return new CollectionModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                ElementDescription = collectionModelDescription
            };
        }
        return null;
    }
    private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
    {
        ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            Documentation = CreateDefaultDocumentation(modelType)
        };
        // Register before recursing into members so self-referential types terminate.
        GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
        bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
        PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
        foreach (PropertyInfo property in properties)
        {
            if (ShouldDisplayMember(property, hasDataContractAttribute))
            {
                ParameterDescription propertyModel = new ParameterDescription
                {
                    Name = GetMemberName(property, hasDataContractAttribute)
                };
                if (DocumentationProvider != null)
                {
                    propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                }
                GenerateAnnotations(property, propertyModel);
                complexModelDescription.Properties.Add(propertyModel);
                propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
            }
        }
        FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
        foreach (FieldInfo field in fields)
        {
            if (ShouldDisplayMember(field, hasDataContractAttribute))
            {
                ParameterDescription propertyModel = new ParameterDescription
                {
                    Name = GetMemberName(field, hasDataContractAttribute)
                };
                if (DocumentationProvider != null)
                {
                    propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                }
                complexModelDescription.Properties.Add(propertyModel);
                propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
            }
        }
        return complexModelDescription;
    }
    private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
    {
        ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
        ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
        return new DictionaryModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            KeyModelDescription = keyModelDescription,
            ValueModelDescription = valueModelDescription
        };
    }
    private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
    {
        EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            Documentation = CreateDefaultDocumentation(modelType)
        };
        bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
        foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
        {
            if (ShouldDisplayMember(field, hasDataContractAttribute))
            {
                EnumValueDescription enumValue = new EnumValueDescription
                {
                    Name = field.Name,
                    Value = field.GetRawConstantValue().ToString()
                };
                if (DocumentationProvider != null)
                {
                    enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                }
                enumDescription.Values.Add(enumValue);
            }
        }
        GeneratedModels.Add(enumDescription.Name, enumDescription);
        return enumDescription;
    }
    private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
    {
        ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
        ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
        return new KeyValuePairModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            KeyModelDescription = keyModelDescription,
            ValueModelDescription = valueModelDescription
        };
    }
    private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
    {
        SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
        {
            Name = ModelNameHelper.GetModelName(modelType),
            ModelType = modelType,
            Documentation = CreateDefaultDocumentation(modelType)
        };
        GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
        return simpleModelDescription;
    }
}
}
| |
namespace java.nio
{
// NOTE(review): this appears to be machine-generated MonoJavaBridge binding code
// for java.nio.Buffer. Each Java method has a cached JNI MethodId (resolved once
// in InitJNI) and is invoked virtually, or non-virtually through the static class
// handle when the instance is a CLR-side override (IsClrObject). Prefer
// regenerating over hand-editing individual bindings.
[global::MonoJavaBridge.JavaClass(typeof(global::java.nio.Buffer_))]
public abstract partial class Buffer : java.lang.Object
{
    // Global JNI reference to the java/nio/Buffer class object.
    internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
    static Buffer()
    {
        InitJNI();
    }
    protected Buffer(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
    {
    }
    // Binding for java.nio.Buffer.limit() -> int.
    internal static global::MonoJavaBridge.MethodId _limit13980;
    public virtual int limit()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallIntMethod(this.JvmHandle, global::java.nio.Buffer._limit13980);
        else
            return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._limit13980);
    }
    // Binding for java.nio.Buffer.limit(int) -> Buffer.
    internal static global::MonoJavaBridge.MethodId _limit13981;
    public virtual global::java.nio.Buffer limit(int arg0)
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._limit13981, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._limit13981, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _clear13982;
    public virtual global::java.nio.Buffer clear()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._clear13982)) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._clear13982)) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _remaining13983;
    public virtual int remaining()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallIntMethod(this.JvmHandle, global::java.nio.Buffer._remaining13983);
        else
            return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._remaining13983);
    }
    // Abstract members are implemented by the Buffer_ proxy class below.
    internal static global::MonoJavaBridge.MethodId _hasArray13984;
    public abstract bool hasArray();
    internal static global::MonoJavaBridge.MethodId _array13985;
    public abstract global::java.lang.Object array();
    internal static global::MonoJavaBridge.MethodId _position13986;
    public virtual global::java.nio.Buffer position(int arg0)
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._position13986, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._position13986, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _position13987;
    public virtual int position()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallIntMethod(this.JvmHandle, global::java.nio.Buffer._position13987);
        else
            return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._position13987);
    }
    internal static global::MonoJavaBridge.MethodId _arrayOffset13988;
    public abstract int arrayOffset();
    internal static global::MonoJavaBridge.MethodId _capacity13989;
    public virtual int capacity()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallIntMethod(this.JvmHandle, global::java.nio.Buffer._capacity13989);
        else
            return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._capacity13989);
    }
    internal static global::MonoJavaBridge.MethodId _mark13990;
    public virtual global::java.nio.Buffer mark()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._mark13990)) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._mark13990)) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _reset13991;
    public virtual global::java.nio.Buffer reset()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._reset13991)) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._reset13991)) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _flip13992;
    public virtual global::java.nio.Buffer flip()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._flip13992)) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._flip13992)) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _rewind13993;
    public virtual global::java.nio.Buffer rewind()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer._rewind13993)) as java.nio.Buffer;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._rewind13993)) as java.nio.Buffer;
    }
    internal static global::MonoJavaBridge.MethodId _hasRemaining13994;
    public virtual bool hasRemaining()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallBooleanMethod(this.JvmHandle, global::java.nio.Buffer._hasRemaining13994);
        else
            return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::java.nio.Buffer.staticClass, global::java.nio.Buffer._hasRemaining13994);
    }
    internal static global::MonoJavaBridge.MethodId _isReadOnly13995;
    public abstract bool isReadOnly();
    // Resolves the JNI class handle and all method ids once, from the static constructor.
    private static void InitJNI()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        global::java.nio.Buffer.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/nio/Buffer"));
        global::java.nio.Buffer._limit13980 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "limit", "()I");
        global::java.nio.Buffer._limit13981 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "limit", "(I)Ljava/nio/Buffer;");
        global::java.nio.Buffer._clear13982 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "clear", "()Ljava/nio/Buffer;");
        global::java.nio.Buffer._remaining13983 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "remaining", "()I");
        global::java.nio.Buffer._hasArray13984 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "hasArray", "()Z");
        global::java.nio.Buffer._array13985 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "array", "()Ljava/lang/Object;");
        global::java.nio.Buffer._position13986 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "position", "(I)Ljava/nio/Buffer;");
        global::java.nio.Buffer._position13987 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "position", "()I");
        global::java.nio.Buffer._arrayOffset13988 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "arrayOffset", "()I");
        global::java.nio.Buffer._capacity13989 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "capacity", "()I");
        global::java.nio.Buffer._mark13990 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "mark", "()Ljava/nio/Buffer;");
        global::java.nio.Buffer._reset13991 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "reset", "()Ljava/nio/Buffer;");
        global::java.nio.Buffer._flip13992 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "flip", "()Ljava/nio/Buffer;");
        global::java.nio.Buffer._rewind13993 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "rewind", "()Ljava/nio/Buffer;");
        global::java.nio.Buffer._hasRemaining13994 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "hasRemaining", "()Z");
        global::java.nio.Buffer._isReadOnly13995 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer.staticClass, "isReadOnly", "()Z");
    }
}
// Concrete proxy for the abstract Buffer binding above: supplies JNI-backed
// implementations of the abstract members so JVM-owned instances can be wrapped.
// NOTE(review): appears machine-generated (MonoJavaBridge); do not hand-edit.
[global::MonoJavaBridge.JavaProxy(typeof(global::java.nio.Buffer))]
public sealed partial class Buffer_ : java.nio.Buffer
{
    // Global JNI reference to the java/nio/Buffer class object, resolved in InitJNI.
    internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
    static Buffer_()
    {
        InitJNI();
    }
    internal Buffer_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
    {
    }
    internal static global::MonoJavaBridge.MethodId _hasArray13996;
    public override bool hasArray()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallBooleanMethod(this.JvmHandle, global::java.nio.Buffer_._hasArray13996);
        else
            return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::java.nio.Buffer_.staticClass, global::java.nio.Buffer_._hasArray13996);
    }
    internal static global::MonoJavaBridge.MethodId _array13997;
    public override global::java.lang.Object array()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.nio.Buffer_._array13997)) as java.lang.Object;
        else
            return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.nio.Buffer_.staticClass, global::java.nio.Buffer_._array13997)) as java.lang.Object;
    }
    internal static global::MonoJavaBridge.MethodId _arrayOffset13998;
    public override int arrayOffset()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallIntMethod(this.JvmHandle, global::java.nio.Buffer_._arrayOffset13998);
        else
            return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.nio.Buffer_.staticClass, global::java.nio.Buffer_._arrayOffset13998);
    }
    internal static global::MonoJavaBridge.MethodId _isReadOnly13999;
    public override bool isReadOnly()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        if (!IsClrObject)
            return @__env.CallBooleanMethod(this.JvmHandle, global::java.nio.Buffer_._isReadOnly13999);
        else
            return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::java.nio.Buffer_.staticClass, global::java.nio.Buffer_._isReadOnly13999);
    }
    // Resolves the JNI class handle and method ids once, from the static constructor.
    private static void InitJNI()
    {
        global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
        global::java.nio.Buffer_.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/nio/Buffer"));
        global::java.nio.Buffer_._hasArray13996 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer_.staticClass, "hasArray", "()Z");
        global::java.nio.Buffer_._array13997 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer_.staticClass, "array", "()Ljava/lang/Object;");
        global::java.nio.Buffer_._arrayOffset13998 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer_.staticClass, "arrayOffset", "()I");
        global::java.nio.Buffer_._isReadOnly13999 = @__env.GetMethodIDNoThrow(global::java.nio.Buffer_.staticClass, "isReadOnly", "()Z");
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Trollbridge.WebApi.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class
/// with empty sample registries and the default object factory.
/// </summary>
public HelpPageSampleGenerator()
{
    ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
    ActionSamples = new Dictionary<HelpPageSampleKey, object>();
    SampleObjects = new Dictionary<Type, object>();
    // Factories are tried in order; callers may Insert(0, ...) to override or
    // Add(...) to provide a fallback after the default factory.
    SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
    {
        DefaultSampleObjectFactory,
    };
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
    Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// Convenience wrapper over <see cref="GetSample"/> with <see cref="SampleDirection.Request"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
    return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// Convenience wrapper over <see cref="GetSample"/> with <see cref="SampleDirection.Response"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
    return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// Action-level samples registered in <see cref="ActionSamples"/> take precedence;
/// remaining media types are filled in by serializing a sample object with each formatter.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="api"/> is null.</exception>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }
    string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
    string actionName = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
    var samples = new Dictionary<MediaTypeHeaderValue, object>();
    // Use the samples provided directly for actions
    var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
    foreach (var actionSample in actionSamples)
    {
        samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
    }
    // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
    // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
    if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
    {
        object sampleObject = GetSampleObject(type);
        foreach (var formatter in formatters)
        {
            foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
            {
                // Action-level samples added above win over formatter-generated ones.
                if (!samples.ContainsKey(mediaType))
                {
                    object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                    // If no sample found, try generate sample using formatter and sample object
                    if (sample == null && sampleObject != null)
                    {
                        sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                    }
                    samples.Add(mediaType, WrapSampleIfString(sample));
                }
            }
        }
    }
    return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// Probes the registered samples from the most specific key to the least specific one.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters, or null if none is registered.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
    object registeredSample;
    // Most specific: media type + direction + controller + action + exact parameter names.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out registeredSample))
    {
        return registeredSample;
    }
    // Same action, any parameter list.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out registeredSample))
    {
        return registeredSample;
    }
    // Any action, matching media type and CLR type.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out registeredSample))
    {
        return registeredSample;
    }
    // Least specific: media type only.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out registeredSample))
    {
        return registeredSample;
    }
    return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object, or null if none could be produced.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
    Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
    object result;
    if (SampleObjects.TryGetValue(type, out result))
    {
        // An explicitly registered sample object wins.
        return result;
    }
    // No specific object available; give each factory a chance, in order.
    foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
    {
        if (factory == null)
        {
            continue;
        }
        try
        {
            result = factory(this, type);
            if (result != null)
            {
                return result;
            }
        }
        catch
        {
            // A failing factory is simply skipped; the next one (if any) gets a chance.
        }
    }
    return result;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="api"/> is null.</exception>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
    // Guard added for consistency with GetSample/ResolveType: a null argument should surface
    // as ArgumentNullException, not as a NullReferenceException from api.ActionDescriptor below.
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }
    string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
    string actionName = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters that can handle the resolved type.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
    if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
    {
        throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
    }
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }
    Type resolvedType;
    // An entry in ActualHttpMessageTypes means the action uses HttpRequestMessage/HttpResponseMessage
    // and declared the actual payload type explicitly; look up by exact parameter names first, then wildcard.
    bool hasExplicitType =
        ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out resolvedType) ||
        ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out resolvedType);
    if (hasExplicitType)
    {
        // Re-compute the supported formatters based on the explicitly declared type.
        var supported = new Collection<MediaTypeFormatter>();
        foreach (MediaTypeFormatter formatter in api.ActionDescriptor.Configuration.Formatters)
        {
            if (IsFormatSupported(sampleDirection, formatter, resolvedType))
            {
                supported.Add(formatter);
            }
        }
        formatters = supported;
    }
    else if (sampleDirection == SampleDirection.Request)
    {
        // The request body type is the type of the single FromBody parameter, if any.
        ApiParameterDescription bodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
        resolvedType = bodyParameter == null ? null : bodyParameter.ParameterDescriptor.ParameterType;
        formatters = api.SupportedRequestBodyFormatters;
    }
    else
    {
        // SampleDirection.Response (the enum was validated above, so no other values reach here).
        resolvedType = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
        formatters = api.SupportedResponseFormatters;
    }
    return resolvedType;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> with the serialized (and pretty-printed, for XML/JSON) text,
/// or an <see cref="InvalidSample"/> describing why serialization failed.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="formatter"/> or <paramref name="mediaType"/> is null.</exception>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
    if (formatter == null)
    {
        throw new ArgumentNullException("formatter");
    }
    if (mediaType == null)
    {
        throw new ArgumentNullException("mediaType");
    }
    object sample = String.Empty;
    MemoryStream ms = null;
    HttpContent content = null;
    try
    {
        if (formatter.CanWriteType(type))
        {
            ms = new MemoryStream();
            content = new ObjectContent(type, value, formatter, mediaType);
            // NOTE(review): sync-over-async .Wait(); failures surface as AggregateException
            // and are unwrapped in the catch block below via UnwrapException.
            formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
            ms.Position = 0;
            // StreamReader is intentionally not disposed here; disposing it would also close ms,
            // which the finally block disposes explicitly.
            StreamReader reader = new StreamReader(ms);
            string serializedSampleString = reader.ReadToEnd();
            // Pretty-print known text formats; on parse failure the raw string is kept.
            if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
            {
                serializedSampleString = TryFormatXml(serializedSampleString);
            }
            else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
            {
                serializedSampleString = TryFormatJson(serializedSampleString);
            }
            sample = new TextSample(serializedSampleString);
        }
        else
        {
            // The formatter advertises this media type but cannot write the CLR type.
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                mediaType,
                formatter.GetType().Name,
                type.Name));
        }
    }
    catch (Exception e)
    {
        sample = new InvalidSample(String.Format(
            CultureInfo.CurrentCulture,
            "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
            formatter.GetType().Name,
            mediaType.MediaType,
            UnwrapException(e).Message));
    }
    finally
    {
        if (ms != null)
        {
            ms.Dispose();
        }
        if (content != null)
        {
            content.Dispose();
        }
    }
    return sample;
}
// An AggregateException (e.g. produced by Task.Wait) hides the real failure;
// flatten it and surface the first inner exception. Any other exception is returned as-is.
internal static Exception UnwrapException(Exception exception)
{
    var aggregate = exception as AggregateException;
    return aggregate == null ? exception : aggregate.Flatten().InnerException;
}
// Default (fallback) factory for sample objects: delegates to ObjectGenerator,
// which builds a default instance of the requested type.
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
    return new ObjectGenerator().GenerateObject(type);
}
// Pretty-prints a JSON string by round-tripping it through Json.NET with indentation.
// If the input is not valid JSON, the original string is returned unchanged.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
    try
    {
        return JsonConvert.SerializeObject(JsonConvert.DeserializeObject(str), Formatting.Indented);
    }
    catch
    {
        // Not parseable as JSON: leave the text untouched.
        return str;
    }
}
// Pretty-prints an XML string by round-tripping it through XDocument.
// If the input is not well-formed XML, the original string is returned unchanged.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
    try
    {
        return XDocument.Parse(str).ToString();
    }
    catch
    {
        // Not parseable as XML: leave the text untouched.
        return str;
    }
}
// Returns whether the formatter can handle the given type in the given direction:
// reading for requests, writing for responses; false for any other direction value.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
    if (sampleDirection == SampleDirection.Request)
    {
        return formatter.CanReadType(type);
    }
    if (sampleDirection == SampleDirection.Response)
    {
        return formatter.CanWriteType(type);
    }
    return false;
}
// Lazily yields every ActionSamples entry registered for this controller/action/direction,
// matching the parameter list exactly (case-insensitive, order-independent) or via the "*" wildcard.
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
    var requestedParameters = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
    foreach (KeyValuePair<HelpPageSampleKey, object> entry in ActionSamples)
    {
        HelpPageSampleKey key = entry.Key;
        if (String.Equals(controllerName, key.ControllerName, StringComparison.OrdinalIgnoreCase) &&
            String.Equals(actionName, key.ActionName, StringComparison.OrdinalIgnoreCase) &&
            (key.ParameterNames.SetEquals(new[] { "*" }) || requestedParameters.SetEquals(key.ParameterNames)) &&
            sampleDirection == key.SampleDirection)
        {
            yield return entry;
        }
    }
}
// Wraps raw string samples in a TextSample so the help page renders them uniformly;
// any non-string sample passes through unchanged.
private static object WrapSampleIfString(object sample)
{
    var text = sample as string;
    return text == null ? sample : new TextSample(text);
}
}
}
| |
using Orleans.Concurrency;
using Orleans.MultiCluster;
using Orleans.LogConsistency;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Orleans.Core;
using Orleans.Runtime;
using Orleans.Storage;
namespace Orleans.EventSourcing
{
/// <summary>
/// A base class for log-consistent grains using standard event-sourcing terminology.
/// All operations are reentrancy-safe.
/// Uses <see cref="object"/> as the common base class for events.
/// </summary>
/// <typeparam name="TGrainState">The type for the grain state, i.e. the aggregate view of the event log.</typeparam>
public abstract class JournaledGrain<TGrainState> : JournaledGrain<TGrainState, object>
    where TGrainState : class, new()
{
    /// <summary>
    /// Default constructor.
    /// </summary>
    protected JournaledGrain() { }

    /// <summary>
    /// This constructor is particularly useful for unit testing where test code can create a Grain and replace
    /// the IGrainIdentity, IGrainRuntime and State with test doubles (mocks/stubs).
    /// </summary>
    protected JournaledGrain(IGrainIdentity identity, IGrainRuntime runtime)
        : base(identity, runtime)
    {
    }
}
/// <summary>
/// A base class for log-consistent grains using standard event-sourcing terminology.
/// All operations are reentrancy-safe.
/// </summary>
/// <typeparam name="TGrainState">The type for the grain state, i.e. the aggregate view of the event log.</typeparam>
/// <typeparam name="TEventBase">The common base class for the events</typeparam>
public abstract class JournaledGrain<TGrainState,TEventBase> :
    LogConsistentGrain<TGrainState>,
    ILogConsistencyProtocolParticipant,
    ILogViewAdaptorHost<TGrainState, TEventBase>
    where TGrainState : class, new()
    where TEventBase: class
{
    /// <summary>
    /// Default constructor.
    /// </summary>
    protected JournaledGrain() { }

    /// <summary>
    /// This constructor is particularly useful for unit testing where test code can create a Grain and replace
    /// the IGrainIdentity, IGrainRuntime and State with test doubles (mocks/stubs).
    /// </summary>
    protected JournaledGrain(IGrainIdentity identity, IGrainRuntime runtime)
        : base(identity, runtime)
    {
    }

    /// <summary>
    /// Raise an event by submitting it to the log view adaptor.
    /// </summary>
    /// <param name="event">Event to raise</param>
    /// <exception cref="ArgumentNullException">If <paramref name="event"/> is null.</exception>
    protected virtual void RaiseEvent<TEvent>(TEvent @event)
        where TEvent : TEventBase
    {
        if (@event == null) throw new ArgumentNullException("event");

        LogViewAdaptor.Submit(@event);
    }

    /// <summary>
    /// Raise multiple events, as an atomic sequence.
    /// </summary>
    /// <param name="events">Events to raise</param>
    /// <exception cref="ArgumentNullException">If <paramref name="events"/> is null.</exception>
    protected virtual void RaiseEvents<TEvent>(IEnumerable<TEvent> events)
        where TEvent : TEventBase
    {
        if (events == null) throw new ArgumentNullException("events");

        LogViewAdaptor.SubmitRange((IEnumerable<TEventBase>) events);
    }

    /// <summary>
    /// Raise an event conditionally.
    /// Succeeds only if there are no conflicts, that is, no other events were raised in the meantime.
    /// </summary>
    /// <param name="event">Event to raise</param>
    /// <returns>true if successful, false if there was a conflict.</returns>
    /// <exception cref="ArgumentNullException">If <paramref name="event"/> is null.</exception>
    protected virtual Task<bool> RaiseConditionalEvent<TEvent>(TEvent @event)
        where TEvent : TEventBase
    {
        if (@event == null) throw new ArgumentNullException("event");

        return LogViewAdaptor.TryAppend(@event);
    }

    /// <summary>
    /// Raise multiple events, as an atomic sequence, conditionally.
    /// Succeeds only if there are no conflicts, that is, no other events were raised in the meantime.
    /// </summary>
    /// <param name="events">Events to raise</param>
    /// <returns>true if successful, false if there was a conflict.</returns>
    /// <exception cref="ArgumentNullException">If <paramref name="events"/> is null.</exception>
    protected virtual Task<bool> RaiseConditionalEvents<TEvent>(IEnumerable<TEvent> events)
        where TEvent : TEventBase
    {
        if (events == null) throw new ArgumentNullException("events");

        return LogViewAdaptor.TryAppendRange((IEnumerable<TEventBase>) events);
    }

    /// <summary>
    /// The current confirmed state.
    /// Includes only confirmed events.
    /// </summary>
    protected TGrainState State
    {
        get { return this.LogViewAdaptor.ConfirmedView; }
    }

    /// <summary>
    /// The version of the current confirmed state.
    /// Equals the total number of confirmed events.
    /// </summary>
    protected int Version
    {
        get { return this.LogViewAdaptor.ConfirmedVersion; }
    }

    /// <summary>
    /// Called whenever the tentative state may have changed due to local or remote events.
    /// <para>Override this to react to changes of the state.</para>
    /// </summary>
    protected virtual void OnTentativeStateChanged()
    {
    }

    /// <summary>
    /// The current tentative state.
    /// Includes both confirmed and unconfirmed events.
    /// </summary>
    protected TGrainState TentativeState
    {
        get { return this.LogViewAdaptor.TentativeView; }
    }

    /// <summary>
    /// Called after the confirmed state may have changed (i.e. the confirmed version number is larger).
    /// <para>Override this to react to changes of the confirmed state.</para>
    /// </summary>
    protected virtual void OnStateChanged()
    {
        // overridden by journaled grains that want to react to state changes
    }

    /// <summary>
    /// Waits until all previously raised events have been confirmed.
    /// <para>await this after raising one or more events, to ensure events are persisted before proceeding, or to guarantee strong consistency (linearizability) even if there are multiple instances of this grain</para>
    /// </summary>
    /// <returns>a task that completes once the events have been confirmed.</returns>
    protected Task ConfirmEvents()
    {
        return LogViewAdaptor.ConfirmSubmittedEntries();
    }

    /// <summary>
    /// Retrieves the latest state now, and confirms all previously raised events.
    /// Effectively, this enforces synchronization with the global state.
    /// <para>Await this before reading the state to ensure strong consistency (linearizability) even if there are multiple instances of this grain</para>
    /// </summary>
    /// <returns>a task that completes once the log has been refreshed and the events have been confirmed.</returns>
    protected Task RefreshNow()
    {
        return LogViewAdaptor.Synchronize();
    }

    /// <summary>
    /// Returns the current queue of unconfirmed events.
    /// </summary>
    public IEnumerable<TEventBase> UnconfirmedEvents
    {
        get { return LogViewAdaptor.UnconfirmedSuffix; }
    }

    /// <summary>
    /// By default, upon activation, the journaled grain waits until it has loaded the latest
    /// view from storage. Subclasses can override this behavior,
    /// and skip the wait if desired.
    /// </summary>
    public override Task OnActivateAsync()
    {
        return LogViewAdaptor.Synchronize();
    }

    /// <summary>
    /// Retrieves a segment of the confirmed event sequence, possibly from storage.
    /// Throws <see cref="NotSupportedException"/> if the events are not available to read.
    /// Whether events are available, and for how long, depends on the providers used and how they are configured.
    /// </summary>
    /// <param name="fromVersion">the position of the event sequence from which to start</param>
    /// <param name="toVersion">the position of the event sequence on which to end</param>
    /// <returns>a task which returns the sequence of events between the two versions</returns>
    /// <exception cref="ArgumentException">If the range is negative, inverted, or extends past the confirmed version.</exception>
    protected Task<IReadOnlyList<TEventBase>> RetrieveConfirmedEvents(int fromVersion, int toVersion)
    {
        if (fromVersion < 0)
            throw new ArgumentException("invalid range", nameof(fromVersion));
        if (toVersion < fromVersion || toVersion > LogViewAdaptor.ConfirmedVersion)
            throw new ArgumentException("invalid range", nameof(toVersion));

        return LogViewAdaptor.RetrieveLogSegment(fromVersion, toVersion);
    }

    /// <summary>
    /// Called when the underlying persistence or replication protocol is running into some sort of connection trouble.
    /// <para>Override this to monitor the health of the log-consistency protocol and/or
    /// to customize retry delays.
    /// Any exceptions thrown are caught and logged by the <see cref="ILogViewAdaptorFactory"/>.</para>
    /// </summary>
    protected virtual void OnConnectionIssue(ConnectionIssue issue)
    {
    }

    /// <summary>
    /// Called when a previously reported connection issue has been resolved.
    /// <para>Override this to monitor the health of the log-consistency protocol.
    /// Any exceptions thrown are caught and logged by the <see cref="ILogViewAdaptorFactory"/>.</para>
    /// </summary>
    protected virtual void OnConnectionIssueResolved(ConnectionIssue issue)
    {
    }

    /// <summary>
    /// Gets the connection issues reported by the adaptor that are not yet resolved.
    /// </summary>
    protected IEnumerable<ConnectionIssue> UnresolvedConnectionIssues
    {
        get
        {
            return LogViewAdaptor.UnresolvedConnectionIssues;
        }
    }

    /// <summary>
    /// Turns on the collection of log-consistency statistics in the adaptor.
    /// </summary>
    protected void EnableStatsCollection()
    {
        LogViewAdaptor.EnableStatsCollection();
    }

    /// <summary>
    /// Turns off the collection of log-consistency statistics in the adaptor.
    /// </summary>
    protected void DisableStatsCollection()
    {
        LogViewAdaptor.DisableStatsCollection();
    }

    /// <summary>
    /// Gets the collected log-consistency statistics from the adaptor.
    /// </summary>
    protected LogConsistencyStatistics GetStats()
    {
        return LogViewAdaptor.GetStats();
    }

    /// <summary>
    /// Defines how to apply events to the state. Unless it is overridden in the subclass, it calls
    /// a dynamic "Apply" function on the state, with the event as a parameter.
    /// All exceptions thrown by this method are caught and logged by the log view provider.
    /// <para>Override this to customize how to transition the state for a given event.</para>
    /// </summary>
    /// <param name="state">The state to mutate.</param>
    /// <param name="event">The event to apply.</param>
    protected virtual void TransitionState(TGrainState state, TEventBase @event)
    {
        // Dynamic dispatch picks the most specific Apply overload on the state for the runtime event type.
        dynamic s = state;
        dynamic e = @event;
        s.Apply(e);
    }

    #region internal plumbing

    /// <summary>
    /// Adaptor for log consistency protocol.
    /// Is installed by the log-consistency provider.
    /// </summary>
    internal ILogViewAdaptor<TGrainState, TEventBase> LogViewAdaptor { get; private set; }

    /// <summary>
    /// Called right after grain is constructed, to install the adaptor.
    /// The log-consistency provider contains a factory method that constructs the adaptor with chosen types for this grain
    /// </summary>
    protected override void InstallAdaptor(ILogViewAdaptorFactory factory, object initialState, string graintypename, IGrainStorage grainStorage, ILogConsistencyProtocolServices services)
    {
        // call the log consistency provider to construct the adaptor, passing the type argument
        LogViewAdaptor = factory.MakeLogViewAdaptor<TGrainState, TEventBase>(this, (TGrainState)initialState, graintypename, grainStorage, services);
    }

    /// <summary>
    /// If there is no log-consistency provider specified, store versioned state using default storage provider
    /// </summary>
    protected override ILogViewAdaptorFactory DefaultAdaptorFactory
    {
        get
        {
            return new StateStorage.DefaultAdaptorFactory();
        }
    }

    /// <summary>
    /// called by adaptor to update the view when entries are appended.
    /// </summary>
    /// <param name="view">log view</param>
    /// <param name="entry">log entry</param>
    void ILogViewAdaptorHost<TGrainState, TEventBase>.UpdateView(TGrainState view, TEventBase entry)
    {
        TransitionState(view, entry);
    }

    /// <summary>
    /// Notify log view adaptor of activation (called before user-level OnActivate)
    /// </summary>
    async Task ILogConsistencyProtocolParticipant.PreActivateProtocolParticipant()
    {
        await LogViewAdaptor.PreOnActivate();
    }

    /// <summary>
    /// Notify log view adaptor of activation (called after user-level OnActivate)
    /// </summary>
    async Task ILogConsistencyProtocolParticipant.PostActivateProtocolParticipant()
    {
        await LogViewAdaptor.PostOnActivate();
    }

    /// <summary>
    /// Notify log view adaptor of deactivation
    /// </summary>
    Task ILogConsistencyProtocolParticipant.DeactivateProtocolParticipant()
    {
        return LogViewAdaptor.PostOnDeactivate();
    }

    /// <summary>
    /// Receive a protocol message from other clusters, passed on to log view adaptor.
    /// </summary>
    [AlwaysInterleave]
    Task<ILogConsistencyProtocolMessage> ILogConsistencyProtocolParticipant.OnProtocolMessageReceived(ILogConsistencyProtocolMessage payload)
    {
        return LogViewAdaptor.OnProtocolMessageReceived(payload);
    }

    /// <summary>
    /// Receive a configuration change, pass on to log view adaptor.
    /// </summary>
    [AlwaysInterleave]
    Task ILogConsistencyProtocolParticipant.OnMultiClusterConfigurationChange(MultiCluster.MultiClusterConfiguration next)
    {
        return LogViewAdaptor.OnMultiClusterConfigurationChange(next);
    }

    /// <summary>
    /// called by adaptor on state change.
    /// </summary>
    void ILogViewAdaptorHost<TGrainState, TEventBase>.OnViewChanged(bool tentative, bool confirmed)
    {
        if (tentative)
            OnTentativeStateChanged();
        if (confirmed)
            OnStateChanged();
    }

    /// <summary>
    /// called by adaptor on connection issues.
    /// </summary>
    void IConnectionIssueListener.OnConnectionIssue(ConnectionIssue connectionIssue)
    {
        OnConnectionIssue(connectionIssue);
    }

    /// <summary>
    /// called by adaptor when a connection issue is resolved.
    /// </summary>
    void IConnectionIssueListener.OnConnectionIssueResolved(ConnectionIssue connectionIssue)
    {
        OnConnectionIssueResolved(connectionIssue);
    }

    #endregion
}
}
| |
using Abp.Auditing;
using Abp.Authorization;
using Abp.Authorization.Roles;
using Abp.Authorization.Users;
using Abp.Configuration;
using Abp.EntityFrameworkCore;
using Abp.EntityHistory;
using Abp.Localization;
using Abp.Notifications;
using Abp.Organizations;
using Microsoft.EntityFrameworkCore;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Abp.Webhooks;
namespace Abp.Zero.EntityFrameworkCore
{
public abstract class AbpZeroCommonDbContext<TRole, TUser, TSelf> : AbpDbContext
    where TRole : AbpRole<TUser>
    where TUser : AbpUser<TUser>
    where TSelf : AbpZeroCommonDbContext<TRole, TUser, TSelf>
{
    /// <summary>
    /// Roles.
    /// </summary>
    public virtual DbSet<TRole> Roles { get; set; }

    /// <summary>
    /// Users.
    /// </summary>
    public virtual DbSet<TUser> Users { get; set; }

    /// <summary>
    /// User logins.
    /// </summary>
    public virtual DbSet<UserLogin> UserLogins { get; set; }

    /// <summary>
    /// User login attempts.
    /// </summary>
    public virtual DbSet<UserLoginAttempt> UserLoginAttempts { get; set; }

    /// <summary>
    /// User roles.
    /// </summary>
    public virtual DbSet<UserRole> UserRoles { get; set; }

    /// <summary>
    /// User claims.
    /// </summary>
    public virtual DbSet<UserClaim> UserClaims { get; set; }

    /// <summary>
    /// User tokens.
    /// </summary>
    public virtual DbSet<UserToken> UserTokens { get; set; }

    /// <summary>
    /// Role claims.
    /// </summary>
    public virtual DbSet<RoleClaim> RoleClaims { get; set; }

    /// <summary>
    /// Permissions.
    /// </summary>
    public virtual DbSet<PermissionSetting> Permissions { get; set; }

    /// <summary>
    /// Role permissions.
    /// </summary>
    public virtual DbSet<RolePermissionSetting> RolePermissions { get; set; }

    /// <summary>
    /// User permissions.
    /// </summary>
    public virtual DbSet<UserPermissionSetting> UserPermissions { get; set; }

    /// <summary>
    /// Settings.
    /// </summary>
    public virtual DbSet<Setting> Settings { get; set; }

    /// <summary>
    /// Audit logs.
    /// </summary>
    public virtual DbSet<AuditLog> AuditLogs { get; set; }

    /// <summary>
    /// Languages.
    /// </summary>
    public virtual DbSet<ApplicationLanguage> Languages { get; set; }

    /// <summary>
    /// LanguageTexts.
    /// </summary>
    public virtual DbSet<ApplicationLanguageText> LanguageTexts { get; set; }

    /// <summary>
    /// OrganizationUnits.
    /// </summary>
    public virtual DbSet<OrganizationUnit> OrganizationUnits { get; set; }

    /// <summary>
    /// UserOrganizationUnits.
    /// </summary>
    public virtual DbSet<UserOrganizationUnit> UserOrganizationUnits { get; set; }

    /// <summary>
    /// OrganizationUnitRoles.
    /// </summary>
    public virtual DbSet<OrganizationUnitRole> OrganizationUnitRoles { get; set; }

    /// <summary>
    /// Tenant notifications.
    /// </summary>
    public virtual DbSet<TenantNotificationInfo> TenantNotifications { get; set; }

    /// <summary>
    /// User notifications.
    /// </summary>
    public virtual DbSet<UserNotificationInfo> UserNotifications { get; set; }

    /// <summary>
    /// Notification subscriptions.
    /// </summary>
    public virtual DbSet<NotificationSubscriptionInfo> NotificationSubscriptions { get; set; }

    /// <summary>
    /// Entity changes.
    /// </summary>
    public virtual DbSet<EntityChange> EntityChanges { get; set; }

    /// <summary>
    /// Entity change sets.
    /// </summary>
    public virtual DbSet<EntityChangeSet> EntityChangeSets { get; set; }

    /// <summary>
    /// Entity property changes.
    /// </summary>
    public virtual DbSet<EntityPropertyChange> EntityPropertyChanges { get; set; }

    /// <summary>
    /// Webhook events.
    /// </summary>
    public virtual DbSet<WebhookEvent> WebhookEvents { get; set; }

    /// <summary>
    /// Webhook subscriptions.
    /// </summary>
    public virtual DbSet<WebhookSubscriptionInfo> WebhookSubscriptions { get; set; }

    /// <summary>
    /// Webhook send attempts.
    /// </summary>
    public virtual DbSet<WebhookSendAttempt> WebhookSendAttempts { get; set; }

    // Property-injected helper for entity history tracking; may be null when history is disabled.
    public IEntityHistoryHelper EntityHistoryHelper { get; set; }

    /// <summary>
    /// Creates the context.
    /// </summary>
    /// <param name="options">The EF Core options for the concrete context type.</param>
    protected AbpZeroCommonDbContext(DbContextOptions<TSelf> options)
        : base(options)
    {
    }

    /// <summary>
    /// Saves changes and, if entity history is enabled, records the entity change set.
    /// The change set is captured from the change tracker before saving and persisted after the save completes.
    /// </summary>
    public override int SaveChanges()
    {
        var changeSet = EntityHistoryHelper?.CreateEntityChangeSet(ChangeTracker.Entries().ToList());
        var result = base.SaveChanges();
        EntityHistoryHelper?.Save(changeSet);
        return result;
    }

    /// <summary>
    /// Saves changes and, if entity history is enabled, records the entity change set.
    /// The change set is captured from the change tracker before saving and persisted after the save completes.
    /// </summary>
    public override async Task<int> SaveChangesAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        var changeSet = EntityHistoryHelper?.CreateEntityChangeSet(ChangeTracker.Entries().ToList());
        var result = await base.SaveChangesAsync(cancellationToken);
        if (EntityHistoryHelper != null)
        {
            await EntityHistoryHelper.SaveAsync(changeSet);
        }
        return result;
    }

    /// <summary>
    /// Configures the ABP Zero common model: concurrency tokens, self-referencing user audit
    /// relationships, entity-history relationships, and indexes (mostly tenant-scoped) for lookups.
    /// </summary>
    /// <param name="modelBuilder">The builder used to construct the model.</param>
    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);

        modelBuilder.Entity<TUser>(b =>
        {
            b.Property(u => u.ConcurrencyStamp).IsConcurrencyToken();

            // Self-referencing audit relationships (creator/modifier/deleter are users themselves).
            b.HasOne(p => p.DeleterUser)
                .WithMany()
                .HasForeignKey(p => p.DeleterUserId);

            b.HasOne(p => p.CreatorUser)
                .WithMany()
                .HasForeignKey(p => p.CreatorUserId);

            b.HasOne(p => p.LastModifierUser)
                .WithMany()
                .HasForeignKey(p => p.LastModifierUserId);
        });

        modelBuilder.Entity<TRole>(b =>
        {
            b.Property(r => r.ConcurrencyStamp).IsConcurrencyToken();
        });

        modelBuilder.Entity<AuditLog>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.UserId });
            b.HasIndex(e => new { e.TenantId, e.ExecutionTime });
            b.HasIndex(e => new { e.TenantId, e.ExecutionDuration });
        });

        modelBuilder.Entity<ApplicationLanguage>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.Name });
        });

        modelBuilder.Entity<ApplicationLanguageText>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.Source, e.LanguageName, e.Key });
        });

        modelBuilder.Entity<EntityChange>(b =>
        {
            b.HasMany(p => p.PropertyChanges)
                .WithOne()
                .HasForeignKey(p => p.EntityChangeId);

            b.HasIndex(e => new { e.EntityChangeSetId });
            b.HasIndex(e => new { e.EntityTypeFullName, e.EntityId });
        });

        modelBuilder.Entity<EntityChangeSet>(b =>
        {
            b.HasMany(p => p.EntityChanges)
                .WithOne()
                .HasForeignKey(p => p.EntityChangeSetId);

            b.HasIndex(e => new { e.TenantId, e.UserId });
            b.HasIndex(e => new { e.TenantId, e.CreationTime });
            b.HasIndex(e => new { e.TenantId, e.Reason });
        });

        modelBuilder.Entity<EntityPropertyChange>(b =>
        {
            b.HasIndex(e => e.EntityChangeId);
        });

        modelBuilder.Entity<NotificationSubscriptionInfo>(b =>
        {
            b.HasIndex(e => new { e.NotificationName, e.EntityTypeName, e.EntityId, e.UserId });
            b.HasIndex(e => new { e.TenantId, e.NotificationName, e.EntityTypeName, e.EntityId, e.UserId });
        });

        modelBuilder.Entity<OrganizationUnit>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.Code });
        });

        modelBuilder.Entity<PermissionSetting>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.Name });
        });

        modelBuilder.Entity<RoleClaim>(b =>
        {
            b.HasIndex(e => new { e.RoleId });
            b.HasIndex(e => new { e.TenantId, e.ClaimType });
        });

        modelBuilder.Entity<TRole>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.NormalizedName });
        });

        modelBuilder.Entity<Setting>(b =>
        {
            // Unique per tenant/name/user; HasFilter(null) removes the provider's default
            // filtered-index behavior so nulls participate in the unique constraint.
            b.HasIndex(e => new { e.TenantId, e.Name, e.UserId }).IsUnique().HasFilter(null);
        });

        modelBuilder.Entity<TenantNotificationInfo>(b =>
        {
            b.HasIndex(e => new { e.TenantId });
        });

        modelBuilder.Entity<UserClaim>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.ClaimType });
        });

        modelBuilder.Entity<UserLoginAttempt>(b =>
        {
            b.HasIndex(e => new { e.TenancyName, e.UserNameOrEmailAddress, e.Result });
            b.HasIndex(ula => new { ula.UserId, ula.TenantId });
        });

        modelBuilder.Entity<UserLogin>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.LoginProvider, e.ProviderKey });
            b.HasIndex(e => new { e.TenantId, e.UserId });
        });

        modelBuilder.Entity<UserNotificationInfo>(b =>
        {
            b.HasIndex(e => new { e.UserId, e.State, e.CreationTime });
        });

        modelBuilder.Entity<UserOrganizationUnit>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.UserId });
            b.HasIndex(e => new { e.TenantId, e.OrganizationUnitId });
        });

        modelBuilder.Entity<OrganizationUnitRole>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.RoleId });
            b.HasIndex(e => new { e.TenantId, e.OrganizationUnitId });
        });

        modelBuilder.Entity<UserRole>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.UserId });
            b.HasIndex(e => new { e.TenantId, e.RoleId });
        });

        modelBuilder.Entity<TUser>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.NormalizedUserName });
            b.HasIndex(e => new { e.TenantId, e.NormalizedEmailAddress });
        });

        modelBuilder.Entity<UserToken>(b =>
        {
            b.HasIndex(e => new { e.TenantId, e.UserId });
        });
    }
}
}
| |
//-----------------------------------------------------------------------
// This file is part of Microsoft Robotics Developer Studio Code Samples.
//
// Copyright (C) Microsoft Corporation. All rights reserved.
//
// $File: Drive.cs $ $Revision: 1 $
//-----------------------------------------------------------------------
using Microsoft.Ccr.Core;
using Microsoft.Dss.Core;
using Microsoft.Dss.Core.Attributes;
using Microsoft.Dss.ServiceModel.Dssp;
using Microsoft.Dss.ServiceModel.DsspServiceBase;
using System;
using System.Collections.Generic;
using System.Security.Permissions;
using W3C.Soap;
//for XSLT
using Microsoft.Dss.Core.DsspHttpUtilities;
using Microsoft.Dss.Core.DsspHttp;
using System.Net;
using System.Collections.Specialized;
//for construction
using cons = Microsoft.Dss.Services.Constructor;
using System.ComponentModel;
using submgr = Microsoft.Dss.Services.SubscriptionManager;
using coord = Microsoft.Robotics.Services.Coordination.Proxy;
using drive = Microsoft.Robotics.Services.Drive.Proxy;
using encoder = Microsoft.Robotics.Services.Encoder.Proxy;
using motor = Microsoft.Robotics.Services.Motor.Proxy;
//using bumper = Microsoft.Robotics.Services.ContactSensor.Proxy;
using trackroamerbot = TrackRoamer.Robotics.Services.TrackRoamerBot.Proxy;
namespace TrackRoamer.Robotics.Services.TrackRoamerServices.Drive
{
/// <summary>
/// TrackRoamer Drive Service - Drive Differential Two Wheel Service Implementation
/// </summary>
[Contract(Contract.Identifier)]
//[AlternateContract(drive.Contract.Identifier)]
[DisplayName("TrackRoamer Differential Drive")]
[Description("Provides access to an TrackRoamer differential motor drive - coordinates two motors that function together.\n(Uses the Generic Differential Drive contract.)\n(Partner with the 'TrackRoamerBot' service.)")]
[DssServiceDescription("http://msdn.microsoft.com/library/dd145254.aspx")]
public class TrackRoamerDriveService : DsspServiceBase
{
/// <summary>
/// Default Left Motor Name
/// </summary>
public const string DefaultLeftMotorName = "/LeftMotor";
/// <summary>
/// Default Right Motor Name
/// </summary>
public const string DefaultRightMotorName = "/RightMotor";
[InitialStatePartner(Optional = true)]
private TrackRoamerDriveState _state = new TrackRoamerDriveState();
[ServicePort("/trackroamer/drive", AllowMultipleInstances = true)]
private DriveOperations _mainPort = new DriveOperations();
// This is the internal drive port for excuting the drive operations:
// driveDistance, and rotateDegrees.
private PortSet<drive.DriveDistance, drive.RotateDegrees> _internalDriveOperationsPort = new PortSet<drive.DriveDistance, drive.RotateDegrees>();
// Port used for canceling a driveDistance or RotateDegrees operation.
private Port<drive.CancelPendingDriveOperation> _internalDriveCancalOperationPort = new Port<drive.CancelPendingDriveOperation>();
[Partner("SubMgr", Contract = submgr.Contract.Identifier, CreationPolicy = PartnerCreationPolicy.CreateAlways)]
submgr.SubscriptionManagerPort _subMgrPort = new submgr.SubscriptionManagerPort();
[Partner(Partners.LeftMotor,
Contract = motor.Contract.Identifier,
CreationPolicy = PartnerCreationPolicy.UsePartnerListEntry)]
motor.MotorOperations _leftMotorPort = new motor.MotorOperations();
[Partner(Partners.RightMotor,
Contract = motor.Contract.Identifier,
CreationPolicy = PartnerCreationPolicy.UsePartnerListEntry)]
motor.MotorOperations _rightMotorPort = new motor.MotorOperations();
[Partner(Partners.LeftEncoder,
Optional = true,
Contract = encoder.Contract.Identifier,
CreationPolicy = PartnerCreationPolicy.UsePartnerListEntry)]
encoder.EncoderOperations _leftEncoderCmdPort = new encoder.EncoderOperations();
encoder.EncoderOperations _leftEncoderTickPort = new encoder.EncoderOperations();
bool _leftEncoderTickEnabled = false;
[Partner(Partners.RightEncoder,
Optional = true,
Contract = encoder.Contract.Identifier,
CreationPolicy = PartnerCreationPolicy.UsePartnerListEntry)]
encoder.EncoderOperations _rightEncoderCmdPort = new encoder.EncoderOperations();
encoder.EncoderOperations _rightEncoderTickPort = new encoder.EncoderOperations();
bool _rightEncoderTickEnabled = false;
//For XSLT
DsspHttpUtilitiesPort _httpUtilities = new DsspHttpUtilitiesPort();
[EmbeddedResource("TrackRoamer.Robotics.Services.TrackRoamerServices.TrackRoamerGenericDriveState.xslt")]
string _transform = null;
/// <summary>
/// Default Service Constructor.
/// </summary>
/// <param name="creationPort">Service creation port supplied by the DSS runtime.</param>
public TrackRoamerDriveService(DsspServiceCreationPort creationPort) :
    base(creationPort)
{
    // Construction only logs; all real initialization happens in Start()/InitState().
    LogInfo("TrackRoamerDriveService:TrackRoamerDriveService() -- port: " + creationPort.ToString());
}
/// <summary>
/// Service Startup Handler: initializes state, configures partner motors/encoders,
/// and wires up the internal drive-operation interleave.
/// </summary>
protected override void Start()
{
    LogInfo("TrackRoamerDriveService:: Start() ");
    InitState();
    // send configuration commands to partner services
    // (runs as an iterator so partner responses can be awaited without blocking)
    SpawnIterator(ConfigureDrive);
    _state.TimeStamp = DateTime.Now;
    //needed for HttpPost
    _httpUtilities = DsspHttpUtilitiesService.Create(Environment);
    // Listen for each operation type and call its Service Handler
    //ActivateDsspOperationHandlers();
    // Publish the service to the local Node Directory
    //DirectoryInsert();
    base.Start();
    // display HTTP service Uri
    LogInfo("TrackRoamerDriveService:: Service uri: ");
    //for XSLT and images
    MountEmbeddedResources("/RoboticsCommon");
    // Interleave to manage internal drive operations (driveDistance and RotateDegrees).
    // Both receivers are in the exclusive group, so only one internal drive
    // operation message is processed at a time.
    Activate(
        new Interleave(
            new ExclusiveReceiverGroup(
                Arbiter.ReceiveWithIteratorFromPortSet<drive.DriveDistance>(true, _internalDriveOperationsPort, InternalDriveDistanceHandler),
                Arbiter.ReceiveWithIteratorFromPortSet<drive.RotateDegrees>(true, _internalDriveOperationsPort, InternalRotateDegreesHandler)
            ),
            new ConcurrentReceiverGroup())
        );
}
/// <summary>
/// Ensures the service state exists: builds and persists a default TrackRoamer
/// configuration when no initial state partner supplied one, then stamps the state.
/// </summary>
private void InitState()
{
    LogInfo("TrackRoamerDriveService:: InitState() _state=" + _state);

    if (_state == null)
    {
        LogInfo("TrackRoamerDriveService:: InitState() (null)");

        // No initial state was provided - construct the default hardware profile.
        _state = new TrackRoamerDriveState();
        _state.DistanceBetweenWheels = 0.5715;

        _state.LeftWheel = new motor.WheeledMotorState
        {
            Radius = 0.1805,
            GearRatio = 0.136,
            MotorState = new motor.MotorState
            {
                HardwareIdentifier = 1,
                Name = "Left Motor",
                PowerScalingFactor = 30,
                ReversePolarity = false
            },
            EncoderState = new encoder.EncoderState
            {
                HardwareIdentifier = 1,
                TicksPerRevolution = 2993
            }
        };

        _state.RightWheel = new motor.WheeledMotorState
        {
            Radius = 0.1805,
            GearRatio = 0.136,
            MotorState = new motor.MotorState
            {
                HardwareIdentifier = 2,
                Name = "Right Motor",
                PowerScalingFactor = 30,
                ReversePolarity = false
            },
            EncoderState = new encoder.EncoderState
            {
                HardwareIdentifier = 2,
                TicksPerRevolution = 2993
            }
        };

        _state.IsEnabled = true;
        _state.TimeStamp = DateTime.Now;

        LogInfo("TrackRoamerDriveService:: InitState(): saving state");
        SaveState(_state);
    }
    else
    {
        LogInfo("TrackRoamerDriveService:: InitState() (not null) _state.DistanceBetweenWheels=" + _state.DistanceBetweenWheels);
    }

    // Whether loaded or freshly built, no drive operation is pending at startup.
    _state.InternalPendingDriveOperation = drive.DriveRequestOperation.NotSpecified;
    _state.TimeStamp = DateTime.Now;
}
/// <summary>
/// Sends configuration (Replace) requests to the partner motor and encoder
/// services, and subscribes to encoder tick notifications so the wheels can be
/// stopped at a target tick count. Enables the drive if no partner faulted.
/// </summary>
private IEnumerator<ITask> ConfigureDrive()
{
    LogInfo("TrackRoamerDriveService:: ConfigureDrive()");
    bool noError = true;
    // Configure motor connections
    motor.Replace configureLeftMotor = new motor.Replace();
    configureLeftMotor.Body = _state.LeftWheel.MotorState;
    _leftMotorPort.Post(configureLeftMotor);
    motor.Replace configureRightMotor = new motor.Replace();
    configureRightMotor.Body = _state.RightWheel.MotorState;
    _rightMotorPort.Post(configureRightMotor);
    yield return Arbiter.Choice(configureLeftMotor.ResponsePort,
        delegate(DefaultReplaceResponseType success) { LogInfo(" Left Motor Port set"); },
        delegate(Fault fault) { LogError(fault); noError = false; });
    yield return Arbiter.Choice(configureRightMotor.ResponsePort,
        delegate(DefaultReplaceResponseType success) { LogInfo(" Right Motor Port set"); },
        delegate(Fault fault) { LogError(fault); noError = false; });
    // Configure encoder connections
    if (_leftEncoderCmdPort != null)
    {
        encoder.Replace configureLeftEncoder = new encoder.Replace();
        configureLeftEncoder.Body = _state.LeftWheel.EncoderState;
        _leftEncoderCmdPort.Post(configureLeftEncoder);
        yield return Arbiter.Choice(configureLeftEncoder.ResponsePort,
            delegate(DefaultReplaceResponseType success) { LogInfo(" Left Encoder Port set"); },
            delegate(Fault fault) { LogError(fault); noError = false; });
        encoder.Subscribe op = new encoder.Subscribe();
        op.Body = new SubscribeRequestType();
        op.NotificationPort = _leftEncoderTickPort;
        _leftEncoderCmdPort.Post(op);
        yield return (Arbiter.Choice(op.ResponsePort,
            delegate(SubscribeResponseType response)
            {
                //subscription was successful, start listening for encoder replace messages
                Activate(Arbiter.Receive<encoder.UpdateTickCount>(true, _leftEncoderTickPort,
                    delegate(encoder.UpdateTickCount update)
                    {
                        StopMotorWithEncoderHandler(_leftEncoderTickPort, "left", update, _leftMotorPort);
                    }));
            },
            delegate(Fault fault) { LogError(fault); }
        ));
    }
    if (_rightEncoderCmdPort != null)
    {
        encoder.Replace configureRightEncoder = new encoder.Replace();
        configureRightEncoder.Body = _state.RightWheel.EncoderState;
        _rightEncoderCmdPort.Post(configureRightEncoder);
        yield return Arbiter.Choice(configureRightEncoder.ResponsePort,
            delegate(DefaultReplaceResponseType success) { LogInfo(" Right Encoder Port set"); },
            delegate(Fault fault) { LogError(fault); noError = false; });
        encoder.Subscribe op2 = new encoder.Subscribe();
        op2.Body = new SubscribeRequestType();
        op2.NotificationPort = _rightEncoderTickPort;
        // BUGFIX: the right-encoder subscription was previously posted to
        // _leftEncoderCmdPort, so the right encoder never delivered tick
        // notifications. Post it to the right encoder's command port.
        _rightEncoderCmdPort.Post(op2);
        yield return (Arbiter.Choice(op2.ResponsePort,
            delegate(SubscribeResponseType response)
            {
                //subscription was successful, start listening for encoder replace messages
                Activate(Arbiter.Receive<encoder.UpdateTickCount>(true, _rightEncoderTickPort,
                    delegate(encoder.UpdateTickCount update)
                    {
                        StopMotorWithEncoderHandler(_rightEncoderTickPort, "right", update, _rightMotorPort);
                    }
                ));
            },
            delegate(Fault fault) { LogError(fault); }
        ));
    }
    if (noError)
    {
        LogInfo("TrackRoamerDriveService:: ConfigureDrive() - success");
        _state.IsEnabled = true;
    }
    yield break;
}
#region Operation Handlers
/// <summary>
/// Get Handler - returns the current drive state to the requester.
/// </summary>
/// <param name="get">Get request whose response port receives the state.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Concurrent)]
public IEnumerator<ITask> GetHandler(drive.Get get)
{
    get.ResponsePort.Post(_state);
    yield break;
}
/// <summary>
/// Update Handler - replaces the whole service state with the request body
/// and refreshes the timestamp.
/// </summary>
/// <param name="update">Update request carrying the new state.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> UpdateHandler(Update update)
{
    _state = update.Body;
    _state.TimeStamp = DateTime.Now;
    update.ResponsePort.Post(new DefaultUpdateResponseType());
    yield break;
}
/// <summary>
/// Subscribe Handler - registers a subscriber with the subscription manager
/// and immediately sends it the current state as the first notification.
/// </summary>
/// <param name="subscribe">Subscription request.</param>
/// <returns>CCR task iterator.</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> SubscribeHandler(drive.Subscribe subscribe)
{
    yield return Arbiter.Choice(
        SubscribeHelper(_subMgrPort, subscribe.Body, subscribe.ResponsePort),
        delegate(SuccessResult success)
        {
            // Push the current state so the new subscriber starts from a
            // consistent snapshot rather than waiting for the next change.
            Update update = new Update(_state);
            SendNotificationToTarget<Update>(subscribe.Body.Subscriber, _subMgrPort, update);
        },
        delegate(Exception ex)
        {
            LogError(ex);
            throw ex;
        }
    );
}
/// <summary>
/// Reliable Subscribe Handler - same as SubscribeHandler but for reliable
/// subscriptions; sends the current state as the initial notification.
/// </summary>
/// <param name="subscribe">Reliable subscription request.</param>
/// <returns>CCR task iterator.</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> ReliableSubscribeHandler(drive.ReliableSubscribe subscribe)
{
    yield return Arbiter.Choice(
        SubscribeHelper(_subMgrPort, subscribe.Body, subscribe.ResponsePort),
        delegate(SuccessResult success)
        {
            // Seed the subscriber with the current state snapshot.
            Update update = new Update(_state);
            SendNotificationToTarget<Update>(subscribe.Body.Subscriber, _subMgrPort, update);
        },
        delegate(Exception ex)
        {
            LogError(ex);
            throw ex;
        }
    );
}
/// <summary>
/// Drop Handler - tears the service down via the default DSS drop behavior.
/// </summary>
/// <param name="drop">Drop request from the runtime.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Teardown)]
public virtual IEnumerator<ITask> DropHandler(DsspDefaultDrop drop)
{
    LogInfo("TrackRoamerDriveService:DropHandler()");
    base.DefaultDropHandler(drop);
    yield break;
}
/// <summary>
/// Enable Drive Handler - enables or disables the drive. When enabling,
/// the motor configuration is validated first; a failed validation forces
/// the drive back to disabled and the exception propagates (converted to a
/// fault by the runtime). Subscribers are notified of the state change.
/// </summary>
/// <param name="enableDrive">Enable/disable request.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> EnableDriveHandler(drive.EnableDrive enableDrive)
{
    _state.IsEnabled = enableDrive.Body.Enable;
    _state.TimeStamp = DateTime.Now;
    // if we are enabling the drive, validate that the motors are configured.
    if (enableDrive.Body.Enable)
    {
        try
        {
            ValidateDriveConfiguration(false);
        }
        catch (InvalidOperationException)
        {
            // If validation fails,
            // force the state to not be enabled.
            _state.IsEnabled = false;
            // rethrow the fault
            throw;
        }
    }
    // send notification to subscription manager
    Update update = new Update(_state);
    SendNotification<Update>(_subMgrPort, update);
    enableDrive.ResponsePort.Post(DefaultUpdateResponseType.Instance);
    yield break;
}
#region Drive Power
/// <summary>
/// Set Drive Power Handler - sends coordinated SetMotorPower requests to both
/// wheel motors and responds once both motor services have replied. The
/// response to the caller is posted asynchronously from the Activate
/// continuation, after both motors answer.
/// </summary>
/// <param name="setDrivePower">Request carrying left/right wheel power.</param>
/// <returns>CCR task iterator.</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> SetDrivePowerHandler(drive.SetDrivePower setDrivePower)
{
    ValidateDriveConfiguration(false);
    _state.TimeStamp = DateTime.Now;
    // Both motor requests share one response port so completion can be
    // detected with a single MultipleItemReceive below.
    PortSet<DefaultUpdateResponseType, Fault> responsePort = new PortSet<DefaultUpdateResponseType, Fault>();
    // Add a coordination header to our motor requests
    // so that advanced motor implementations can
    // coordinate the individual motor reqests.
    coord.ActuatorCoordination coordination = new coord.ActuatorCoordination();
    motor.SetMotorPower leftPower = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = setDrivePower.Body.LeftWheelPower } );
    leftPower.ResponsePort = responsePort;
    leftPower.AddHeader(coordination);
    _leftMotorPort.Post(leftPower);
    motor.SetMotorPower rightPower = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = setDrivePower.Body.RightWheelPower } );
    rightPower.ResponsePort = responsePort;
    rightPower.AddHeader(coordination);
    _rightMotorPort.Post(rightPower);
    // send notification to subscription manager
    Update update = new Update(_state);
    SendNotification<Update>(_subMgrPort, update);
    // Success is reported only if both motors succeeded; otherwise the first
    // fault is forwarded to the caller.
    Activate(Arbiter.MultipleItemReceive<DefaultUpdateResponseType, Fault>(responsePort, 2,
        delegate(ICollection<DefaultUpdateResponseType> successList, ICollection<Fault> failureList)
        {
            if (successList.Count == 2)
                setDrivePower.ResponsePort.Post(new DefaultUpdateResponseType());
            foreach (Fault fault in failureList)
            {
                setDrivePower.ResponsePort.Post(fault);
                break;
            }
        }));
    yield break;
}
#endregion
#region Drive Speed
/// <summary>
/// Set Drive Speed Handler - closed-loop speed control is not supported by
/// this service; the request is rejected after validation.
/// </summary>
/// <param name="setDriveSpeed">Speed request (rejected).</param>
/// <returns>CCR task iterator; always throws NotImplementedException.</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> SetDriveSpeedHandler(drive.SetDriveSpeed setDriveSpeed)
{
    ValidateDriveConfiguration(true);
    _state.TimeStamp = DateTime.Now;
    LogError("Drive speed is not implemented");
    // The runtime converts this exception into a fault for the caller.
    throw new NotImplementedException();
}
#endregion
#region Rotate Degrees
/// <summary>
/// Rotate Degrees Handler (positive degrees turn counterclockwise).
/// Validates configuration, acknowledges the caller immediately, then queues
/// the rotation on the internal drive-operations port where it is executed
/// exclusively by InternalRotateDegreesHandler.
/// </summary>
/// <param name="rotateDegrees">Rotation request (degrees and power).</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> RotateDegreesHandler(drive.RotateDegrees rotateDegrees)
{
    ValidateDriveConfiguration(true);
    if (_state.DistanceBetweenWheels <= 0)
    {
        // NOTE(review): posting a Fault AND throwing sends two fault
        // responses to the caller; kept as-is to preserve behavior - verify
        // whether one of the two should be removed.
        rotateDegrees.ResponsePort.Post(new Fault());
        throw new InvalidOperationException("The wheel encoders are not properly configured");
    }
    else
    {
        _state.TimeStamp = DateTime.Now;
        // send immediate response
        // BUGFIX: the acknowledgement was previously posted twice, delivering
        // a duplicate message to the caller's response port. Post it once.
        rotateDegrees.ResponsePort.Post(DefaultUpdateResponseType.Instance);
        // post request to internal port.
        _internalDriveOperationsPort.Post(rotateDegrees);
    }
    yield break;
}
// Encoder tick counts at which each wheel motor must be stopped. Written by
// DriveUntilDistance()/RotateUntilDegrees() before motion starts and read by
// StopMotorWithEncoderHandler() on every encoder tick notification.
int stopLeftWheelAt;
int stopRightWheelAt;
/// <summary>
/// Rotate the the drive (positive degrees turn counterclockwise).
/// Resets both encoders, computes the tick count at which each wheel must
/// stop, spins the wheels in opposite directions, and waits until both
/// encoder-driven stop events have fired before reporting completion.
/// </summary>
/// <param name="degrees">(positive degrees turn counterclockwise)</param>
/// <param name="power">(-1.0 to 1.0)</param>
IEnumerator<ITask> RotateUntilDegrees(double degrees, double power)
{
    LogInfo("^^^^^^^^^^^^^^^^^^^^^ TrackRoamerDriveService:: RotateUntilDegrees(degrees=" + degrees + ", power=" + power + ")");
    // Disable tick handling while the encoders are being reset so stale
    // counts cannot trigger a premature stop.
    _leftEncoderTickEnabled = false;
    _rightEncoderTickEnabled = false;
    //reset encoders
    encoder.Reset Lreset = new encoder.Reset();
    _leftEncoderCmdPort.Post(Lreset);
    yield return (Arbiter.Choice(Lreset.ResponsePort,
        delegate(DefaultUpdateResponseType response) { },
        delegate(Fault fault) { LogError(fault); }
    ));
    encoder.Reset Rreset = new encoder.Reset();
    _rightEncoderCmdPort.Post(Rreset);
    yield return (Arbiter.Choice(Rreset.ResponsePort,
        delegate(DefaultUpdateResponseType response) { },
        delegate(Fault fault) { LogError(fault); }
    ));
    // Arc each wheel travels while turning in place: the wheels sit on a
    // circle of diameter DistanceBetweenWheels, so arc = pi * D * deg / 360.
    double arcDistance = Math.Abs(degrees) * _state.DistanceBetweenWheels * Math.PI / 360.0d;
    //compute tick to stop at (arc length divided by distance-per-tick)
    stopLeftWheelAt = (int)Math.Round(arcDistance / (2.0d * Math.PI * _state.LeftWheel.Radius / _state.LeftWheel.EncoderState.TicksPerRevolution));
    stopRightWheelAt = (int)Math.Round(arcDistance / (2.0d * Math.PI * _state.RightWheel.Radius / _state.RightWheel.EncoderState.TicksPerRevolution));
    _leftEncoderTickEnabled = true;
    _rightEncoderTickEnabled = true;
    //start moving
    // start rotate operation - the Started-stage message is routed through
    // the internal port so subscribers get a notification.
    _state.RotateDegreesStage = drive.DriveStage.Started;
    drive.RotateDegrees rotateUpdate = new drive.RotateDegrees();
    rotateUpdate.Body.RotateDegreesStage = drive.DriveStage.Started;
    _internalDriveOperationsPort.Post(rotateUpdate);
    PortSet<DefaultUpdateResponseType, Fault> responsePort = new PortSet<DefaultUpdateResponseType, Fault>();
    double rightPow;
    double leftPow;
    // Positive degrees = counterclockwise: right wheel forward, left reversed.
    if (degrees > 0)
    {
        rightPow = power;
        leftPow = -power;
    }
    else
    {
        rightPow = -power;
        leftPow = power;
    }
    motor.SetMotorPower leftPower = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = leftPow } );
    leftPower.ResponsePort = responsePort;
    _leftMotorPort.Post(leftPower);
    motor.SetMotorPower rightPower = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = rightPow } );
    rightPower.ResponsePort = responsePort;
    _rightMotorPort.Post(rightPower);
    LogInfo("=============== TrackRoamerDriveService:: RotateUntilDegrees() start moving: degrees=" + degrees);
    LogInfo("=============== TrackRoamerDriveService:: RotateUntilDegrees() will stop wheels at: Left=" + stopLeftWheelAt + " Right=" + stopRightWheelAt);
    // Motor replies are only checked for faults; success needs no action.
    Activate(Arbiter.MultipleItemReceive<DefaultUpdateResponseType, Fault>(responsePort, 2,
        delegate(ICollection<DefaultUpdateResponseType> successList, ICollection<Fault> failureList)
        {
            foreach (Fault fault in failureList)
            {
                LogError(fault);
            }
        }
    ));
    // Each wheel posts one message to completionPort when its stop tick is
    // reached, so two waits = both wheels stopped.
    LogInfo("=============== TrackRoamerDriveService:: RotateUntilDegrees() calling WaitForCompletion() - waiting for the first side to complete...");
    yield return WaitForCompletion();
    LogInfo("=============== TrackRoamerDriveService:: RotateUntilDegrees() calling WaitForCompletion() - other side should complete too...");
    yield return WaitForCompletion();
    LogInfo("=============== TrackRoamerDriveService:: RotateUntilDegrees() - both sides completed, send notification of RotateDegrees complete to subscription manager");
    // send notification of RotateDegrees complete to subscription manager
    rotateUpdate.Body.RotateDegreesStage = drive.DriveStage.Completed;
    _internalDriveOperationsPort.Post(rotateUpdate);
    _state.RotateDegreesStage = drive.DriveStage.Completed;
}
#endregion
#region Drive Distance
/// <summary>
/// Drive Distance Handler - validates configuration, acknowledges the caller
/// immediately, and queues the request on the internal drive-operations port
/// where it is executed exclusively by InternalDriveDistanceHandler.
/// </summary>
/// <param name="driveDistance">Request carrying distance (meters) and power.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> DriveDistanceHandler(drive.DriveDistance driveDistance)
{
    // If configuration is invalid, an InvalidException is thrown.
    ValidateDriveConfiguration(true);
    _state.TimeStamp = DateTime.Now;
    // send immediate response
    driveDistance.ResponsePort.Post(DefaultUpdateResponseType.Instance);
    // post request to internal port.
    _internalDriveOperationsPort.Post(driveDistance);
    yield break;
}
/// <summary>
/// Drives a specified number of meters: resets both encoders, computes the
/// tick count at which each wheel must stop, applies equal power to both
/// wheels (negated for negative distance), and waits for both encoder-driven
/// stop events before reporting completion.
/// </summary>
/// <param name="distance">Distance in meters; sign selects direction.</param>
/// <param name="power">Motor power magnitude (-1.0 to 1.0).</param>
IEnumerator<ITask> DriveUntilDistance(double distance, double power)
{
    LogInfo("=============== TrackRoamerDriveService:: DriveUntilDistance(distance=" + distance + " meters, power=" + power + ")");
    // Disable tick handling while the encoders are being reset so stale
    // counts cannot trigger a premature stop.
    _leftEncoderTickEnabled = false;
    _rightEncoderTickEnabled = false;
    //reset encoders
    encoder.Reset Lreset = new encoder.Reset();
    _leftEncoderCmdPort.Post(Lreset);
    yield return (Arbiter.Choice(Lreset.ResponsePort,
        delegate(DefaultUpdateResponseType response) { },
        delegate(Fault fault) { LogError(fault); }
    ));
    encoder.Reset Rreset = new encoder.Reset();
    _rightEncoderCmdPort.Post(Rreset);
    yield return (Arbiter.Choice(Rreset.ResponsePort,
        delegate(DefaultUpdateResponseType response) { },
        delegate(Fault fault) { LogError(fault); }
    ));
    //compute tick to stop at (distance divided by distance-per-tick).
    // Use Math.PI rather than the hard-coded 3.14159 previously here, for
    // precision and consistency with RotateUntilDegrees().
    stopLeftWheelAt = (int)Math.Round(Math.Abs(distance) / (2.0 * Math.PI * _state.LeftWheel.Radius / _state.LeftWheel.EncoderState.TicksPerRevolution));
    stopRightWheelAt = (int)Math.Round(Math.Abs(distance) / (2.0 * Math.PI * _state.RightWheel.Radius / _state.RightWheel.EncoderState.TicksPerRevolution));
    _leftEncoderTickEnabled = true;
    _rightEncoderTickEnabled = true;
    //start moving
    double Pow;
    if (distance > 0)
        Pow = power;
    else
        Pow = -power;
    PortSet<DefaultUpdateResponseType, Fault> responsePort = new PortSet<DefaultUpdateResponseType, Fault>();
    // send notification of driveDistance start to subscription manager
    _state.DriveDistanceStage = drive.DriveStage.Started;
    drive.DriveDistance driveUpdate = new drive.DriveDistance();
    driveUpdate.Body.DriveDistanceStage = drive.DriveStage.Started;
    _internalDriveOperationsPort.Post(driveUpdate);
    motor.SetMotorPower leftPower = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = Pow } );
    leftPower.ResponsePort = responsePort;
    _leftMotorPort.Post(leftPower);
    motor.SetMotorPower rightPower = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = Pow } );
    rightPower.ResponsePort = responsePort;
    _rightMotorPort.Post(rightPower);
    LogInfo("=============== TrackRoamerDriveService:: DriveUntilDistance() start moving: distance=" + distance + " meters");
    LogInfo("=============== TrackRoamerDriveService:: DriveUntilDistance() will stop wheel at: Left=" + stopLeftWheelAt + " Right=" + stopRightWheelAt);
    // Motor replies are only checked for faults; success needs no action.
    Activate(Arbiter.MultipleItemReceive<DefaultUpdateResponseType, Fault>(responsePort, 2,
        delegate(ICollection<DefaultUpdateResponseType> successList, ICollection<Fault> failureList)
        {
            foreach (Fault fault in failureList)
            {
                LogError(fault);
            }
        }
    ));
    // Each wheel posts one message to completionPort when its stop tick is
    // reached, so two waits = both wheels stopped.
    LogInfo("=============== TrackRoamerDriveService:: DriveUntilDistance() calling WaitForCompletion() - waiting for the first side to complete...");
    yield return WaitForCompletion();
    LogInfo("=============== TrackRoamerDriveService:: DriveUntilDistance() calling WaitForCompletion() - other side should complete too...");
    yield return WaitForCompletion();
    LogInfo("=============== TrackRoamerDriveService:: DriveUntilDistance() - both sides completed, send notification of driveDistance complete to subscription manager");
    // send notification of driveDistance complete to subscription manager
    driveUpdate.Body.DriveDistanceStage = drive.DriveStage.Completed;
    _internalDriveOperationsPort.Post(driveUpdate);
    _state.DriveDistanceStage = drive.DriveStage.Completed;
}
#endregion
// This port is sent a message every time that there is a
// Canceled or Complete message from the Drive, so it can
// be used to wait for completion.
private Port<drive.DriveStage> completionPort = new Port<drive.DriveStage>();
/// <summary>
/// WaitForCompletion - Helper function to wait on Completion Port.
/// One message arrives on completionPort per wheel that reaches its stop
/// tick, so callers wait twice to cover both wheels.
/// </summary>
/// <returns>Receiver suitable for waiting on</returns>
private Receiver<drive.DriveStage> WaitForCompletion()
{
    // Note that this method does nothing with the drive status
    return Arbiter.Receive(false, completionPort, EmptyHandler<drive.DriveStage>);
}
/// <summary>
/// Encoder tick notification handler: when the enabled side's tick count
/// reaches its precomputed stop threshold, cuts that motor's power, disables
/// further tick handling for the side, and posts a Completed message to
/// completionPort (consumed by WaitForCompletion).
/// </summary>
/// <param name="encoderNotificationPort">Notification port the tick arrived on (currently unused).</param>
/// <param name="side">"left" or "right"; any other value is treated as "right" via the shared default label.</param>
/// <param name="update">Tick-count notification from the encoder service.</param>
/// <param name="motorPort">Motor port of the same side, used to stop the wheel.</param>
void StopMotorWithEncoderHandler(encoder.EncoderOperations encoderNotificationPort, string side, encoder.UpdateTickCount update, motor.MotorOperations motorPort)
{
    //LogInfo("^^^^^^^^^^^^^^^^^^^^^ TrackRoamerDriveService:: StopMotorWithEncoderHandler() " + side + " encoder at=" + update.Body.Count + " will stop wheel at=" + stopWheelAt);
    int stopWheelAt;
    bool ignore;
    switch (side)
    {
        case "left":
            stopWheelAt = stopLeftWheelAt;
            ignore = !_leftEncoderTickEnabled;
            break;
        // default shares the "right" body: unknown side strings fall through
        // to the right-wheel handling.
        default:
        case "right":
            stopWheelAt = stopRightWheelAt;
            ignore = !_rightEncoderTickEnabled;
            break;
    }
    if (!ignore && update.Body.Count >= stopWheelAt)
    {
        // Disable this side first so further in-flight ticks don't re-enter.
        switch (side)
        {
            case "left":
                _leftEncoderTickEnabled = false;
                break;
            default:
            case "right":
                _rightEncoderTickEnabled = false;
                break;
        }
        // whatever else got stuck there, we are not interested. Keep the port clear.
        //Port<encoder.UpdateTickCount> port = (Port<encoder.UpdateTickCount>)encoderNotificationPort[typeof(encoder.UpdateTickCount)];
        //port.Clear();
        motor.SetMotorPower stop = new motor.SetMotorPower(new motor.SetMotorPowerRequest() { TargetPower = 0 } );
        motorPort.Post(stop);
        Arbiter.Choice(stop.ResponsePort,
            delegate(DefaultUpdateResponseType resp) {
                LogInfo("^^^^^^^^^^^^^^^^^^^^^ TrackRoamerDriveService:: StopMotorWithEncoderHandler() " + side + " - motor stopped !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
            },
            delegate(Fault fault) { LogError(fault); }
        );
        LogInfo("^^^^^^^^^^^^^^^^^^^^^ TrackRoamerDriveService:: StopMotorWithEncoderHandler() " + side + " - Sending internal DriveStage.Completed !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
        completionPort.Post(drive.DriveStage.Completed);
    }
}
/// <summary>
/// All Stop Handler - stops both wheels by re-posting a SetDrivePower(0, 0)
/// request to the service's own main port; the caller's response port is
/// reused so the SetDrivePower outcome answers the AllStop request directly.
/// </summary>
/// <param name="allStop">All-stop request.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public IEnumerator<ITask> AllStopHandler(drive.AllStop allStop)
{
    drive.SetDrivePower zeroPower = new drive.SetDrivePower();
    zeroPower.Body = new drive.SetDrivePowerRequest(0.0d, 0.0d);
    zeroPower.ResponsePort = allStop.ResponsePort;
    _mainPort.Post(zeroPower);
    yield break;
}
#region HTTP Get / Post
/// <summary>
/// Http Get Handler. Needed for XSLT transform - responds with the current
/// state rendered through the embedded XSLT stylesheet.
/// </summary>
/// <param name="httpGet">HTTP GET request context.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
[ServiceHandler(ServiceHandlerBehavior.Concurrent)]
public virtual IEnumerator<ITask> HttpGetHandler(HttpGet httpGet)
{
    // The previous request/response/path locals were never used - removed.
    HttpResponseType rsp = new HttpResponseType(HttpStatusCode.OK, _state, _transform);
    httpGet.ResponsePort.Post(rsp);
    yield break;
}
/// <summary>
/// Http Post Handler. Handles http form inputs: DSSP operations translated
/// from the form are forwarded to the main port; otherwise the raw form
/// parameters are inspected for "StartDashboard" and "DrivePower" commands.
/// Any failure produces an HTTP 400 response.
/// </summary>
/// <param name="httpPost">HTTP POST request carrying the form data.</param>
[ServiceHandler(ServiceHandlerBehavior.Exclusive)]
public void HttpPostHandler(HttpPost httpPost)
{
    HttpPostRequestData formData = httpPost.GetHeader<HttpPostRequestData>();
    try
    {
        DsspOperation operation = formData.TranslatedOperation;
        if (operation is drive.DriveDistance)
        {
            _mainPort.Post((drive.DriveDistance)operation);
        }
        else if (operation is drive.SetDrivePower)
        {
            _mainPort.Post((drive.SetDrivePower)operation);
        }
        else if (operation is drive.RotateDegrees)
        {
            _mainPort.Post((drive.RotateDegrees)operation);
        }
        else if (operation is drive.EnableDrive)
        {
            _mainPort.Post((drive.EnableDrive)operation);
        }
        else if (operation is drive.AllStop)
        {
            _mainPort.Post((drive.AllStop)operation);
        }
        else
        {
            NameValueCollection parameters = formData.Parameters;
            if (parameters["StartDashboard"] != null)
            {
                string Dashboardcontract = "http://schemas.microsoft.com/robotics/2006/01/simpledashboard.html";
                ServiceInfoType info = new ServiceInfoType(Dashboardcontract);
                cons.Create create = new cons.Create(info);
                create.TimeSpan = DsspOperation.DefaultShortTimeSpan;
                ConstructorPort.Post(create);
                Arbiter.Choice(
                    create.ResponsePort,
                    delegate(CreateResponse createResponse) { },
                    delegate(Fault f) { LogError(f); }
                );
            }
            else if (parameters["DrivePower"] != null)
            {
                // BUGFIX: parse with the invariant culture - form data is
                // machine-generated, and the previous culture-sensitive
                // Parse broke on locales where ',' is the decimal separator.
                double power = double.Parse(parameters["Power"], System.Globalization.CultureInfo.InvariantCulture);
                drive.SetDrivePowerRequest drivepower = new drive.SetDrivePowerRequest();
                drivepower.LeftWheelPower = power;
                drivepower.RightWheelPower = power;
                _mainPort.Post(new drive.SetDrivePower(drivepower));
            }
            else
            {
                // Unrecognized form - handled by the catch below as a 400.
                throw new InvalidOperationException();
            }
        }
        HttpPostSuccess(httpPost);
    }
    catch
    {
        // Deliberate catch-all: any malformed form input answers HTTP 400.
        HttpPostFailure(httpPost);
    }
}
/// <summary>
/// Send Http Post Success Response: replies HTTP 200 with the current state
/// rendered via the embedded XSLT transform.
/// </summary>
/// <param name="httpPost">The POST request being answered.</param>
private void HttpPostSuccess(HttpPost httpPost)
{
    httpPost.ResponsePort.Post(
        new HttpResponseType(HttpStatusCode.OK, _state, _transform));
}
/// <summary>
/// Send Http Post Failure Response: replies HTTP 400 with the current state
/// rendered via the embedded XSLT transform.
/// </summary>
/// <param name="httpPost">The POST request being answered.</param>
private void HttpPostFailure(HttpPost httpPost)
{
    httpPost.ResponsePort.Post(
        new HttpResponseType(HttpStatusCode.BadRequest, _state, _transform));
}
#endregion
#endregion
/// <summary>
/// Validate that the motors are configured and the drive is enabled.
/// <remarks>Throws an exception (converted to fault)
/// when the service is not properly configured.</remarks>
/// </summary>
/// <param name="requireEncoders">validate encoder configuration</param>
private void ValidateDriveConfiguration(bool requireEncoders)
{
    if (_leftMotorPort == null || _rightMotorPort == null)
    {
        LogError("The motors are not connected.");
        throw new InvalidOperationException();
    }
    if (!_state.IsEnabled)
    {
        LogError("The differential drive is not enabled.");
        throw new InvalidOperationException();
    }
    if (requireEncoders)
    {
        // ROBUSTNESS: use <= 0 rather than == 0 so that negative radii or
        // tick counts (equally unusable for distance math) are also rejected.
        if (_state.LeftWheel == null
            || _state.LeftWheel.Radius <= 0.0
            || _state.LeftWheel.EncoderState == null
            || _state.LeftWheel.EncoderState.TicksPerRevolution <= 0
            || _state.RightWheel == null
            || _state.RightWheel.Radius <= 0.0
            || _state.RightWheel.EncoderState == null
            || _state.RightWheel.EncoderState.TicksPerRevolution <= 0
            )
        {
            LogError("The wheel encoders are not properly configured.");
            throw new InvalidOperationException();
        }
    }
}
#region Internal Drive Handlers
/// <summary>
/// Internal drive distance operation handler. Dispatches on the request's
/// stage: an initial request spawns the DriveUntilDistance iterator; every
/// other stage is forwarded to subscribers, with Completed/Canceled also
/// clearing the pending-operation flag.
/// </summary>
/// <param name="driveDistance">Internal drive-distance message.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
public virtual IEnumerator<ITask> InternalDriveDistanceHandler(drive.DriveDistance driveDistance)
{
    switch (driveDistance.Body.DriveDistanceStage)
    {
        case drive.DriveStage.InitialRequest:
            _state.InternalPendingDriveOperation = drive.DriveRequestOperation.DriveDistance;
            SpawnIterator<double, double>(driveDistance.Body.Distance, driveDistance.Body.Power, DriveUntilDistance);
            break;

        case drive.DriveStage.Started:
            SendNotification<drive.DriveDistance>(_subMgrPort, driveDistance.Body);
            break;

        // Completed and Canceled share identical handling.
        case drive.DriveStage.Completed:
        case drive.DriveStage.Canceled:
            _state.InternalPendingDriveOperation = drive.DriveRequestOperation.NotSpecified;
            SendNotification<drive.DriveDistance>(_subMgrPort, driveDistance.Body);
            break;
    }
    yield break;
}
/// <summary>
/// Internal rotate degrees handler. Dispatches on the request's stage: an
/// initial request spawns the RotateUntilDegrees iterator; every other stage
/// is forwarded to subscribers, with Completed/Canceled also clearing the
/// pending-operation flag.
/// </summary>
/// <param name="rotateDegrees">Internal rotate-degrees message.</param>
/// <returns>CCR task iterator (completes immediately).</returns>
public virtual IEnumerator<ITask> InternalRotateDegreesHandler(drive.RotateDegrees rotateDegrees)
{
    switch (rotateDegrees.Body.RotateDegreesStage)
    {
        case drive.DriveStage.InitialRequest:
            _state.InternalPendingDriveOperation = drive.DriveRequestOperation.RotateDegrees;
            SpawnIterator<double, double>(rotateDegrees.Body.Degrees, rotateDegrees.Body.Power, RotateUntilDegrees);
            break;

        case drive.DriveStage.Started:
            SendNotification<drive.RotateDegrees>(_subMgrPort, rotateDegrees.Body);
            break;

        // Completed and Canceled share identical handling.
        case drive.DriveStage.Completed:
        case drive.DriveStage.Canceled:
            _state.InternalPendingDriveOperation = drive.DriveRequestOperation.NotSpecified;
            SendNotification<drive.RotateDegrees>(_subMgrPort, rotateDegrees.Body);
            break;
    }
    yield break;
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Threading;
using System.Collections.Generic;
// TODO when we upgrade to C# V6 you can remove this.
// warning CS0420: 'P.x': a reference to a volatile field will not be treated as volatile
// This happens when you pass a _subcribers (a volatile field) to interlocked operations (which are byref).
// This was fixed in C# V6.
#pragma warning disable 0420
namespace System.Diagnostics
{
/// <summary>
/// A DiagnosticListener is something that forwards on events written with DiagnosticSource.
/// It is an IObservable (has Subscribe method), and it also has a Subscribe overloads that
/// lets you specify a 'IsEnabled' predicate that users of DiagnosticSource will use for
/// 'quick checks'.
///
/// The item in the stream is a KeyValuePair[string, object] where the string is the name
/// of the diagnostic item and the object is the payload (typically an anonymous type).
///
/// There may be many DiagnosticListeners in the system, but we encourage the use of
/// The DiagnosticSource.DefaultSource which goes to the DiagnosticListener.DefaultListener.
///
/// If you need to see 'everything' you can subscribe to the 'AllListeners' event that
/// will fire for every live DiagnosticListener in the appdomain (past or present).
///
/// Please See the DiagnosticSource Users Guide
/// https://github.com/dotnet/corefx/blob/master/src/System.Diagnostics.DiagnosticSource/src/DiagnosticSourceUsersGuide.md
/// for instructions on its use.
/// </summary>
public class DiagnosticListener : DiagnosticSource, IObservable<KeyValuePair<string, object>>, IDisposable
{
/// <summary>
/// When you subscribe to this you get callbacks for all NotificationListeners in the appdomain
/// as well as those that occurred in the past, and all future Listeners created in the future.
/// </summary>
public static IObservable<DiagnosticListener> AllListeners
{
    get
    {
        if (s_allListenerObservable == null)
        {
            // Lazily initialize, but guard against two racing threads each
            // publishing their own observable: that would silently split
            // AllListeners subscribers between two instances, so some would
            // never hear about new listeners. CompareExchange guarantees a
            // single winner; losers observe and return the published instance.
            Interlocked.CompareExchange(ref s_allListenerObservable, new AllListenerObservable(), null);
        }
        return s_allListenerObservable;
    }
}
// Subscription implementation
/// <summary>
/// Add a subscriber (Observer). If 'IsEnabled' == null (or not present), then the Source's IsEnabled
/// will always return true.
/// </summary>
public virtual IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer, Predicate<string> isEnabled)
{
    if (isEnabled == null)
    {
        // No filter supplied: the subscriber receives every event.
        return SubscribeInternal(observer, null, null);
    }

    // The subscriber supplied a name-only predicate; adapt it so the
    // three-argument IsEnabled path simply ignores the context arguments.
    return SubscribeInternal(observer, isEnabled, (name, arg1, arg2) => isEnabled(name));
}
/// <summary>
/// Add a subscriber (Observer). If 'IsEnabled' == null (or not present), then the Source's IsEnabled
/// will always return true.
/// </summary>
/// <param name="observer">Subscriber (IObserver)</param>
/// <param name="isEnabled">Filters events based on their name (string) and context objects that could be null.
/// Note that producer may first call filter with event name only and null context arguments and filter should
/// return true if consumer is interested in any of such events. Producers that support
/// context-based filtering will invoke isEnabled again with context for more precise filtering.
/// Use Subscribe overload with name-based filtering if producer does NOT support context-based filtering</param>
public virtual IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer, Func<string, object, object, bool> isEnabled)
{
    if (isEnabled == null)
    {
        // No filter supplied: the subscriber receives every event.
        return SubscribeInternal(observer, null, null);
    }

    // Adapt the three-argument filter for the name-only IsEnabled path by
    // re-dispatching through IsEnabled with null context arguments.
    return SubscribeInternal(observer, name => IsEnabled(name, null, null), isEnabled);
}
/// <summary>
/// Same as other Subscribe overload where the predicate is assumed to always return true.
/// </summary>
public IDisposable Subscribe(IObserver<KeyValuePair<string, object>> observer)
{
    // Unfiltered subscription: no IsEnabled callbacks are installed, so the
    // listener reports enabled for every event while this subscriber lives.
    IDisposable subscription = SubscribeInternal(observer, null, null);
    return subscription;
}
/// <summary>
/// Make a new DiagnosticListener, it is a NotificationSource, which means the returned result can be used to
/// log notifications, but it also has a Subscribe method so notifications can be forwarded
/// arbitrarily. Thus its job is to forward things from the producer to all the listeners
/// (multi-casting). Generally you should not be making your own DiagnosticListener but use the
/// DiagnosticListener.Default, so that notifications are as 'public' as possible.
/// </summary>
public DiagnosticListener(string name)
{
    Name = name;

    // Insert myself into the list of all Listeners.
    lock (s_lock)
    {
        // Issue the callback for this new diagnostic listener.
        // OnNewDiagnosticListener asserts that s_lock is held, so this must
        // happen inside the lock, before the listener is linked into the list.
        var allListenerObservable = s_allListenerObservable;
        if (allListenerObservable != null)
            allListenerObservable.OnNewDiagnosticListener(this);

        // And add it to the list of all past listeners (head insertion).
        _next = s_allListeners;
        s_allListeners = this;
    }

    // Call IsEnabled just so we ensure that the DiagnosticSourceEventSource has been
    // constructed (and thus is responsive to ETW requests to be enabled).
    DiagnosticSourceEventSource.Logger.IsEnabled();
}
/// <summary>
/// Clean up the NotificationListeners. Notification listeners do NOT DIE ON THEIR OWN
/// because they are in a global list (for discoverability). You must dispose them explicitly.
/// Note that we do not do the Dispose(bool) pattern because we frankly don't want to support
/// subclasses that have non-managed state.
/// </summary>
virtual public void Dispose()
{
    // Remove myself from the list of all listeners.
    lock (s_lock)
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        if (s_allListeners == this)
            s_allListeners = s_allListeners._next;
        else
        {
            var cur = s_allListeners;
            while (cur != null)
            {
                if (cur._next == this)
                {
                    cur._next = _next;
                    break;
                }
                cur = cur._next;
            }
        }
        _next = null;
    }

    // Indicate completion to all subscribers.
    //
    // BUG FIX: the previous code did Interlocked.Exchange(ref subscriber, _subscriptions),
    // which exchanged into the LOCAL variable and left the _subscriptions field untouched.
    // The listener therefore still reported IsEnabled() == true and kept delivering events
    // to observers even after they had been given OnCompleted. Atomically detach the list
    // from the field itself and walk the detached snapshot.
    DiagnosticSubscription subscriber = Interlocked.Exchange(ref _subscriptions, null);
    while (subscriber != null)
    {
        subscriber.Observer.OnCompleted();
        subscriber = subscriber.Next;
    }
    // The exchange above also nulled out all subscriptions.
}
/// <summary>
/// When a DiagnosticListener is created it is given a name. Return this.
/// </summary>
public string Name { get; private set; }

/// <summary>
/// Return the name for the ToString() to aid in debugging.
/// </summary>
/// <returns>The listener's name.</returns>
public override string ToString()
{
    return Name;
}

#region private
// NotificationSource implementation
/// <summary>
/// Determines whether there are any registered subscribers
/// </summary>
/// <remarks> If there is an expensive setup for the notification,
/// you may call IsEnabled() as the first and most efficient check before doing this setup.
/// Producers may optionally use this check before IsEnabled(string) in the most performance-critical parts of the system
/// to ensure somebody listens to the DiagnosticListener at all.</remarks>
public bool IsEnabled()
{
    // A non-null subscription list means at least one observer is currently attached.
    return _subscriptions != null;
}
/// <summary>
/// Override abstract method
/// </summary>
public override bool IsEnabled(string name)
{
    // Enabled if any subscriber either installed no name filter at all, or
    // installed one that accepts this event name.
    DiagnosticSubscription cur = _subscriptions;
    while (cur != null)
    {
        if (cur.IsEnabled1Arg == null || cur.IsEnabled1Arg(name))
            return true;
        cur = cur.Next;
    }
    return false;
}

// NotificationSource implementation
/// <summary>
/// Override abstract method
/// </summary>
public override bool IsEnabled(string name, object arg1, object arg2 = null)
{
    // Enabled if any subscriber either installed no context filter at all, or
    // installed one that accepts this event name and context objects.
    DiagnosticSubscription cur = _subscriptions;
    while (cur != null)
    {
        if (cur.IsEnabled3Arg == null || cur.IsEnabled3Arg(name, arg1, arg2))
            return true;
        cur = cur.Next;
    }
    return false;
}

/// <summary>
/// Override abstract method
/// </summary>
public override void Write(string name, object value)
{
    // Deliver the event to every current subscriber. The subscription list is
    // copy-on-remove (nodes are never mutated), so walking it without a lock is safe.
    var payload = new KeyValuePair<string, object>(name, value);
    DiagnosticSubscription cur = _subscriptions;
    while (cur != null)
    {
        cur.Observer.OnNext(payload);
        cur = cur.Next;
    }
}
// Note that Subscriptions are READ ONLY. This means you never update any fields (even on removal!).
// Removal is done by rebuilding the list without the removed node (copy-on-remove), which is what
// makes lock-free enumeration in IsEnabled/Write safe.
private class DiagnosticSubscription : IDisposable
{
    internal IObserver<KeyValuePair<string, object>> Observer;

    // IsEnabled1Arg and IsEnabled3Arg represent IsEnabled callbacks.
    // - IsEnabled1Arg invoked for DiagnosticSource.IsEnabled(string)
    // - IsEnabled3Arg invoked for DiagnosticSource.IsEnabled(string, obj, obj)
    // Subscriber MUST set both IsEnabled1Arg and IsEnabled3Arg or none of them:
    // when Predicate<string> is provided in DiagosticListener.Subscribe,
    // - IsEnabled1Arg is set to predicate
    // - IsEnabled3Arg falls back to predicate ignoring extra arguments.
    // similarly, when Func<string, obj, obj, bool> is provided,
    // IsEnabled1Arg falls back to IsEnabled3Arg with null context
    // Thus, dispatching is very efficient when producer and consumer agree on number of IsEnabled arguments
    // Argument number mismatch between producer/consumer adds extra cost of adding or omitting context parameters
    internal Predicate<string> IsEnabled1Arg;
    internal Func<string, object, object, bool> IsEnabled3Arg;

    internal DiagnosticListener Owner; // The DiagnosticListener this is a subscription for.
    internal DiagnosticSubscription Next; // Linked list of subscribers

    public void Dispose()
    {
        // TO keep this lock free and easy to analyze, the linked list is READ ONLY. Thus we copy
        // the list with ourselves removed, then CAS the new head in; if another thread changed
        // the list concurrently the CAS fails and we rebuild from the fresh head and retry.
        for (;;)
        {
            DiagnosticSubscription subscriptions = Owner._subscriptions;
            DiagnosticSubscription newSubscriptions = Remove(subscriptions, this); // Make a new list, with myself removed.

            // try to update, but if someone beat us to it, then retry.
            if (Interlocked.CompareExchange(ref Owner._subscriptions, newSubscriptions, subscriptions) == subscriptions)
            {
#if DEBUG
                // Sanity check (debug builds only): the rebuilt list must no longer contain us.
                var cur = newSubscriptions;
                while (cur != null)
                {
                    Debug.Assert(!(cur.Observer == Observer && cur.IsEnabled1Arg == IsEnabled1Arg && cur.IsEnabled3Arg == IsEnabled3Arg), "Did not remove subscription!");
                    cur = cur.Next;
                }
#endif
                break;
            }
        }
    }

    // Create a new linked list where 'subscription' has been removed from the linked list of 'subscriptions'.
    // Nodes before the match are duplicated (copy-on-remove); nodes after it are shared.
    private static DiagnosticSubscription Remove(DiagnosticSubscription subscriptions, DiagnosticSubscription subscription)
    {
        if (subscriptions == null)
        {
            // May happen if the IDisposable returned from Subscribe is Dispose'd again
            return null;
        }

        // Matching is by delegate identity, not node identity: the node holding
        // this observer/filter combination may itself be a copy made by an earlier removal.
        if (subscriptions.Observer == subscription.Observer &&
            subscriptions.IsEnabled1Arg == subscription.IsEnabled1Arg &&
            subscriptions.IsEnabled3Arg == subscription.IsEnabled3Arg)
            return subscriptions.Next;
#if DEBUG
        // Delay a bit. This makes it more likely that races will happen.
        for (int i = 0; i < 100; i++)
            GC.KeepAlive("");
#endif
        return new DiagnosticSubscription() { Observer = subscriptions.Observer, Owner = subscriptions.Owner, IsEnabled1Arg = subscriptions.IsEnabled1Arg, IsEnabled3Arg = subscriptions.IsEnabled3Arg, Next = Remove(subscriptions.Next, subscription) };
    }
}
#region AllListenerObservable
/// <summary>
/// Logically AllListenerObservable has a very simple task. It has a linked list of subscribers that want
/// a callback when a new listener gets created. When a new DiagnosticListener gets created it should call
/// OnNewDiagnosticListener so that AllListenerObservable can forward it on to all the subscribers.
/// All mutation of its subscriber list happens under the shared s_lock.
/// </summary>
private class AllListenerObservable : IObservable<DiagnosticListener>
{
    public IDisposable Subscribe(IObserver<DiagnosticListener> observer)
    {
        lock (s_lock)
        {
            // Call back for each existing listener on the new callback (catch-up).
            for (DiagnosticListener cur = s_allListeners; cur != null; cur = cur._next)
                observer.OnNext(cur);

            // Add the observer to the list of subscribers (head insertion).
            _subscriptions = new AllListenerSubscription(this, observer, _subscriptions);
            return _subscriptions;
        }
    }

    /// <summary>
    /// Called when a new DiagnosticListener gets created to tell anyone who subscribed that this happened.
    /// </summary>
    /// <param name="diagnosticListener">The newly created listener.</param>
    internal void OnNewDiagnosticListener(DiagnosticListener diagnosticListener)
    {
        Debug.Assert(Monitor.IsEntered(s_lock)); // We should only be called when we hold this lock

        // Simply send a callback to every subscriber that we have a new listener
        for (var cur = _subscriptions; cur != null; cur = cur.Next)
            cur.Subscriber.OnNext(diagnosticListener);
    }

    #region private
    /// <summary>
    /// Remove 'subscription' from the list of subscriptions that the observable has. Called when
    /// subscriptions are disposed. Returns true if the subscription was removed.
    /// </summary>
    private bool Remove(AllListenerSubscription subscription)
    {
        lock (s_lock)
        {
            if (_subscriptions == subscription)
            {
                // The subscription is the head of the list: advance the head.
                _subscriptions = subscription.Next;
                return true;
            }
            else if (_subscriptions != null)
            {
                // Find the predecessor node and unlink the subscription.
                for (var cur = _subscriptions; cur.Next != null; cur = cur.Next)
                {
                    if (cur.Next == subscription)
                    {
                        cur.Next = cur.Next.Next;
                        return true;
                    }
                }
            }

            // Subscriber likely disposed multiple times
            return false;
        }
    }

    /// <summary>
    /// One node in the linked list of subscriptions that AllListenerObservable keeps. It is
    /// IDisposable, and when that is called it removes itself from the list.
    /// </summary>
    internal class AllListenerSubscription : IDisposable
    {
        internal AllListenerSubscription(AllListenerObservable owner, IObserver<DiagnosticListener> subscriber, AllListenerSubscription next)
        {
            this._owner = owner;
            this.Subscriber = subscriber;
            this.Next = next;
        }

        public void Dispose()
        {
            // Only the first successful removal signals completion; repeated
            // Dispose calls find nothing to remove and are no-ops.
            if (_owner.Remove(this))
            {
                Subscriber.OnCompleted(); // Called outside of a lock
            }
        }

        private readonly AllListenerObservable _owner; // the list this is a member of.
        internal readonly IObserver<DiagnosticListener> Subscriber;
        internal AllListenerSubscription Next;
    }

    private AllListenerSubscription _subscriptions;
    #endregion
}
#endregion
// Creates and publishes a subscription node carrying the observer and its optional
// IsEnabled callbacks, using a lock-free CAS loop against the copy-on-remove list.
private IDisposable SubscribeInternal(IObserver<KeyValuePair<string, object>> observer, Predicate<string> isEnabled1Arg, Func<string, object, object, bool> isEnabled3Arg)
{
    // If we have been disposed, we silently ignore any subscriptions.
    if (_disposed)
    {
        // Return a dummy subscription that is never linked into the list;
        // disposing it is effectively a no-op.
        return new DiagnosticSubscription() { Owner = this };
    }
    DiagnosticSubscription newSubscription = new DiagnosticSubscription()
    {
        Observer = observer,
        IsEnabled1Arg = isEnabled1Arg,
        IsEnabled3Arg = isEnabled3Arg,
        Owner = this,
        Next = _subscriptions
    };

    // Publish the new head atomically; if another thread changed the list in
    // the meantime, refresh Next from the current head and retry the CAS.
    while (Interlocked.CompareExchange(ref _subscriptions, newSubscription, newSubscription.Next) != newSubscription.Next)
        newSubscription.Next = _subscriptions;
    return newSubscription;
}
// Head of the copy-on-remove linked list of subscriptions; null when there are no subscribers.
private volatile DiagnosticSubscription _subscriptions;
private DiagnosticListener _next; // We keep a linked list of all NotificationListeners (s_allListeners)
private bool _disposed; // Has Dispose been called?

private static DiagnosticListener s_allListeners; // linked list of all instances of DiagnosticListeners.
private static AllListenerObservable s_allListenerObservable; // to make callbacks to this object when listeners come into existence.
private static object s_lock = new object(); // A lock protecting s_allListeners and the AllListenerObservable subscriber list.
#if false
private static readonly DiagnosticListener s_default = new DiagnosticListener("DiagnosticListener.DefaultListener");
#endif
#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net.Http
{
internal partial class CurlHandler : HttpMessageHandler
{
// HttpResponseMessage whose content is fed incrementally by libcurl through
// the associated CurlResponseStream.
private sealed class CurlResponseMessage : HttpResponseMessage
{
    // The request this response belongs to; also used by the response stream
    // to request unpausing of the underlying libcurl connection.
    internal readonly EasyRequest _easy;
    private readonly CurlResponseStream _responseStream;

    internal CurlResponseMessage(EasyRequest easy)
    {
        Debug.Assert(easy != null, "Expected non-null EasyRequest");
        _easy = easy;
        _responseStream = new CurlResponseStream(easy);
        RequestMessage = easy._requestMessage;
        // Expose the libcurl-fed stream as the response content.
        Content = new StreamContent(_responseStream);
    }

    // The stream into which libcurl transfers received response-body data.
    internal CurlResponseStream ResponseStream
    {
        get { return _responseStream; }
    }
}
/// <summary>
/// Provides a response stream that allows libcurl to transfer data asynchronously to a reader.
/// When writing data to the response stream, either all or none of the data will be transferred,
/// and if none, libcurl will pause the connection until a reader is waiting to consume the data.
/// Readers consume the data via ReadAsync, which registers a read state with the stream, to be
/// filled in by a pending write. Read is just a thin wrapper around ReadAsync, since the underlying
/// mechanism must be asynchronous to prevent blocking libcurl's processing.
/// </summary>
private sealed class CurlResponseStream : Stream
{
/// <summary>A cached task storing the Int32 value 0.</summary>
private static readonly Task<int> s_zeroTask = Task.FromResult(0);
/// <summary>
/// A sentinel object used in the <see cref="_completed"/> field to indicate that
/// the stream completed successfully.
/// </summary>
private static readonly Exception s_completionSentinel = new Exception("s_completionSentinel");
/// <summary>A object used to synchronize all access to state on this response stream.</summary>
private readonly object _lockObject = new object();
/// <summary>The associated EasyRequest.</summary>
private readonly EasyRequest _easy;
/// <summary>Stores whether Dispose has been called on the stream.</summary>
private bool _disposed = false;
/// <summary>
/// Null if the Stream has not been completed, non-null if it has been completed.
/// If non-null, it'll either be the <see cref="s_completionSentinel"/> object, meaning the stream completed
/// successfully, or it'll be an Exception object representing the error that caused completion.
/// That error will be transferred to any subsequent read requests.
/// </summary>
private Exception _completed;
/// <summary>
/// The state associated with a pending read request. When a reader requests data, it puts
/// its state here for the writer to fill in when data is available.
/// </summary>
private ReadState _pendingReadRequest;
/// <summary>
/// When data is provided by libcurl, it must be consumed all or nothing: either all of the data is consumed, or
/// we must pause the connection. Since a read could need to be satisfied with only some of the data provided,
/// we store the rest here until all reads can consume it. If a subsequent write callback comes in to provide
/// more data, the connection will then be paused until this buffer is entirely consumed.
/// </summary>
private byte[] _remainingData;
/// <summary>
/// The offset into <see cref="_remainingData"/> from which the next read should occur.
/// </summary>
private int _remainingDataOffset;
/// <summary>
/// The remaining number of bytes in <see cref="_remainingData"/> available to be read.
/// </summary>
private int _remainingDataCount;
internal CurlResponseStream(EasyRequest easy)
{
    Debug.Assert(easy != null, "Expected non-null associated EasyRequest");
    _easy = easy;
}

// Readable until disposed; never writable or seekable from the consumer side
// (libcurl pushes data in via TransferDataToStream instead of Stream.Write).
public override bool CanRead { get { return !_disposed; } }
public override bool CanWrite { get { return false; } }
public override bool CanSeek { get { return false; } }

// Length, Position, Seek, SetLength and Write are unsupported on this
// forward-only read stream; each throws ObjectDisposedException first if the
// stream has already been disposed, then NotSupportedException.
public override long Length
{
    get
    {
        CheckDisposed();
        throw new NotSupportedException();
    }
}

public override long Position
{
    get
    {
        CheckDisposed();
        throw new NotSupportedException();
    }

    set
    {
        CheckDisposed();
        throw new NotSupportedException();
    }
}

public override long Seek(long offset, SeekOrigin origin)
{
    CheckDisposed();
    throw new NotSupportedException();
}

public override void SetLength(long value)
{
    CheckDisposed();
    throw new NotSupportedException();
}

public override void Write(byte[] buffer, int offset, int count)
{
    CheckDisposed();
    throw new NotSupportedException();
}

public override void Flush()
{
    // Nothing to do.
}
/// <summary>
/// Writes the <paramref name="length"/> bytes starting at <paramref name="pointer"/> to the stream.
/// </summary>
/// <returns>
/// <paramref name="length"/> if all of the data was written, or
/// <see cref="Interop.Http.CURL_WRITEFUNC_PAUSE"/> if the data wasn't copied and the connection
/// should be paused until a reader is available.
/// </returns>
internal ulong TransferDataToStream(IntPtr pointer, long length)
{
    Debug.Assert(pointer != IntPtr.Zero, "Expected a non-null pointer");
    Debug.Assert(length >= 0, "Expected a non-negative length");
    VerboseTrace("length: " + length);
    CheckDisposed();

    // If there's no data to write, consider everything transferred.
    if (length == 0)
    {
        return 0;
    }

    lock (_lockObject)
    {
        VerifyInvariants();

        // If there's existing data in the remaining data buffer, or if there's no pending read request,
        // we need to pause until the existing data is consumed or until there's a waiting read.
        if (_remainingDataCount > 0 || _pendingReadRequest == null)
        {
            VerboseTrace("Pausing due to _remainingDataCount: " + _remainingDataCount + ", _pendingReadRequest: " + (_pendingReadRequest != null));
            return Interop.Http.CURL_WRITEFUNC_PAUSE;
        }

        // There's no data in the buffer and there is a pending read request.
        // Transfer as much data as we can to the read request, completing it.
        int numBytesForTask = (int)Math.Min(length, _pendingReadRequest._count);
        Debug.Assert(numBytesForTask > 0, "We must be copying a positive amount.");
        Marshal.Copy(pointer, _pendingReadRequest._buffer, _pendingReadRequest._offset, numBytesForTask);
        _pendingReadRequest.SetResult(numBytesForTask);
        ClearPendingReadRequest();
        VerboseTrace("Copied to task: " + numBytesForTask);

        // If there's any data left, transfer it to our remaining buffer. libcurl does not support
        // partial transfers of data, so since we just took some of it to satisfy the read request
        // we must take the rest of it. (If libcurl then comes back to us subsequently with more data
        // before this buffered data has been consumed, at that point we won't consume any of the
        // subsequent offering and will ask libcurl to pause.)
        if (numBytesForTask < length)
        {
            IntPtr remainingPointer = pointer + numBytesForTask;
            _remainingDataCount = checked((int)(length - numBytesForTask));
            _remainingDataOffset = 0;

            // Make sure our remaining data buffer exists and is big enough to hold the data.
            // Grow at least 2x to amortize reallocation across successive writes.
            if (_remainingData == null)
            {
                _remainingData = new byte[_remainingDataCount];
            }
            else if (_remainingData.Length < _remainingDataCount)
            {
                _remainingData = new byte[Math.Max(_remainingData.Length * 2, _remainingDataCount)];
            }
            VerboseTrace("Allocated new remainingData array of length: " + _remainingData.Length);

            // Copy the remaining data to the buffer
            Marshal.Copy(remainingPointer, _remainingData, 0, _remainingDataCount);
            VerboseTrace("Copied to buffer: " + _remainingDataCount);
        }

        // All of the data from libcurl was consumed.
        return (ulong)length;
    }
}

// Synchronous Read is a thin blocking wrapper around ReadAsync, since the
// underlying mechanism must be asynchronous to avoid blocking libcurl's processing.
public override int Read(byte[] buffer, int offset, int count)
{
    return ReadAsync(buffer, offset, count, CancellationToken.None).GetAwaiter().GetResult();
}
// Registers a read request to be satisfied either by buffered leftover data or by
// the next write callback from libcurl. Only one read may be pending at a time.
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
    // Standard Stream.ReadAsync argument validation.
    if (buffer == null) throw new ArgumentNullException("buffer");
    if (offset < 0) throw new ArgumentOutOfRangeException("offset");
    if (count < 0) throw new ArgumentOutOfRangeException("count");
    if (offset > buffer.Length - count) throw new ArgumentException("buffer");
    CheckDisposed();

    VerboseTrace("buffer: " + buffer.Length + ", offset: " + offset + ", count: " + count);

    // Check for cancellation
    if (cancellationToken.IsCancellationRequested)
    {
        VerboseTrace("Canceled");
        return Task.FromCanceled<int>(cancellationToken);
    }

    lock (_lockObject)
    {
        VerifyInvariants();

        // If there's currently a pending read, fail this read, as we don't support concurrent reads.
        if (_pendingReadRequest != null)
        {
            VerboseTrace("Existing pending read");
            return Task.FromException<int>(new InvalidOperationException(SR.net_http_content_no_concurrent_reads));
        }

        // If the stream was already completed with failure, complete the read as a failure.
        if (_completed != null && _completed != s_completionSentinel)
        {
            VerboseTrace("Failing read with " + _completed);

            // Surface a cancellation-driven completion as a canceled task rather than a faulted one.
            OperationCanceledException oce = _completed as OperationCanceledException;
            return (oce != null && oce.CancellationToken.IsCancellationRequested) ?
                Task.FromCanceled<int>(oce.CancellationToken) :
                Task.FromException<int>(_completed);
        }

        // Quick check for if no data was actually requested. We do this after the check
        // for errors so that we can still fail the read and transfer the exception if we should.
        if (count == 0)
        {
            VerboseTrace("Zero count");
            return s_zeroTask;
        }

        // If there's any data left over from a previous call, grab as much as we can.
        if (_remainingDataCount > 0)
        {
            int bytesToCopy = Math.Min(count, _remainingDataCount);
            Array.Copy(_remainingData, _remainingDataOffset, buffer, offset, bytesToCopy);

            _remainingDataOffset += bytesToCopy;
            _remainingDataCount -= bytesToCopy;
            Debug.Assert(_remainingDataCount >= 0, "The remaining count should never go negative");
            Debug.Assert(_remainingDataOffset <= _remainingData.Length, "The remaining offset should never exceed the buffer size");

            VerboseTrace("Copied to task: " + bytesToCopy);
            return Task.FromResult(bytesToCopy);
        }

        // If the stream has already been completed, complete the read immediately.
        if (_completed == s_completionSentinel)
        {
            VerboseTrace("Completed successfully after stream completion");
            return s_zeroTask;
        }

        // Finally, the stream is still alive, and we want to read some data, but there's no data
        // in the buffer so we need to register ourself to get the next write.
        if (cancellationToken.CanBeCanceled)
        {
            // If the cancellation token is cancelable, then we need to register for cancellation.
            // We create a special CancelableReadState that carries with it additional info:
            // the cancellation token and the registration with that token. When cancellation
            // is requested, we schedule a work item that tries to remove the read state
            // from being pending, canceling it in the process. This needs to happen under the
            // lock, which is why we schedule the operation to run asynchronously: if it ran
            // synchronously, it could deadlock due to code on another thread holding the lock
            // and calling Dispose on the registration concurrently with the call to Cancel
            // the cancellation token. Dispose on the registration won't return until the action
            // associated with the registration has completed, but if that action is currently
            // executing and is blocked on the lock that's held while calling Dispose... deadlock.
            var crs = new CancelableReadState(buffer, offset, count, this, cancellationToken);
            crs._registration = cancellationToken.Register(s1 =>
            {
                ((CancelableReadState)s1)._stream.VerboseTrace("Cancellation invoked. Queueing work item to cancel read state.");
                Task.Factory.StartNew(s2 =>
                {
                    var crsRef = (CancelableReadState)s2;
                    Debug.Assert(crsRef._token.IsCancellationRequested, "We should only be here if cancellation was requested.");

                    // Only cancel the read if it is still the pending one; a concurrent
                    // write may already have satisfied and cleared it.
                    lock (crsRef._stream._lockObject)
                    {
                        if (crsRef._stream._pendingReadRequest == crsRef)
                        {
                            crsRef.TrySetCanceled(crsRef._token);
                            crsRef._stream.ClearPendingReadRequest();
                        }
                    }
                }, s1, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default);
            }, crs);
            _pendingReadRequest = crs;
            VerboseTrace("Created pending cancelable read");
        }
        else
        {
            // The token isn't cancelable. Just create a normal read state.
            _pendingReadRequest = new ReadState(buffer, offset, count);
            VerboseTrace("Created pending read");
        }

        // Ask libcurl to resume the (possibly paused) connection so data can flow to the new read.
        _easy._associatedMultiAgent.RequestUnpause(_easy);
        return _pendingReadRequest.Task;
    }
}
/// <summary>Notifies the stream that no more data will be written.</summary>
/// <param name="error">Null for successful completion; otherwise the failure to
/// transfer to subsequent read requests.</param>
internal void SignalComplete(Exception error = null)
{
    lock (_lockObject)
    {
        VerifyInvariants();

        // If we already completed, nothing more to do
        if (_completed != null)
        {
            return;
        }

        // Mark ourselves as being completed
        _completed = error != null ?
            error :
            s_completionSentinel;

        // If there's a pending read request, complete it, either with 0 bytes for success
        // or with the exception/CancellationToken for failure.
        if (_pendingReadRequest != null)
        {
            if (_completed == s_completionSentinel)
            {
                VerboseTrace("Completed pending read task with 0 bytes.");
                _pendingReadRequest.TrySetResult(0);
            }
            else
            {
                VerboseTrace("Completed pending read task with " + _completed);

                // Prefer cancellation semantics when the failure is an OCE so
                // the awaiting task observes Canceled rather than Faulted.
                OperationCanceledException oce = _completed as OperationCanceledException;
                if (oce != null)
                {
                    _pendingReadRequest.TrySetCanceled(oce.CancellationToken);
                }
                else
                {
                    _pendingReadRequest.TrySetException(_completed);
                }
            }
            ClearPendingReadRequest();
        }
    }
}
/// <summary>Clears a pending read request, making sure any cancellation registration is unregistered.</summary>
private void ClearPendingReadRequest()
{
    Debug.Assert(Monitor.IsEntered(_lockObject), "Lock object must be held to manipulate _pendingReadRequest");
    Debug.Assert(_pendingReadRequest != null, "Should only be clearing the pending read request if there is one");

    // If the pending request registered for cancellation, unregister so the
    // cancellation callback can no longer fire for this now-satisfied read.
    CancelableReadState cancelable = _pendingReadRequest as CancelableReadState;
    if (cancelable != null)
    {
        cancelable._registration.Dispose();
    }

    _pendingReadRequest = null;
}

protected override void Dispose(bool disposing)
{
    // On the first managed dispose, mark the stream dead and signal successful
    // completion so any pending reader is released.
    if (disposing)
    {
        if (!_disposed)
        {
            _disposed = true;
            SignalComplete();
        }
    }
    base.Dispose(disposing);
}

private void CheckDisposed()
{
    if (!_disposed)
    {
        return;
    }
    throw new ObjectDisposedException(GetType().FullName);
}
// Compiled out unless the verbose-debugging conditional symbol is defined.
[Conditional(VerboseDebuggingConditional)]
private void VerboseTrace(string text = null, [CallerMemberName] string memberName = null)
{
    // Forward to the handler-level trace, tagging output with this request.
    CurlHandler.VerboseTrace(text, memberName, _easy);
}

/// <summary>Verifies various invariants that must be true about our state.</summary>
[Conditional("DEBUG")]
private void VerifyInvariants()
{
    Debug.Assert(Monitor.IsEntered(_lockObject), "Can only verify invariants while holding the lock");

    Debug.Assert(_remainingDataCount >= 0, "Remaining data count should never be negative.");
    Debug.Assert(_remainingDataCount == 0 || _remainingData != null, "If there's remaining data, there must be a buffer to store it.");
    Debug.Assert(_remainingData == null || _remainingDataCount <= _remainingData.Length, "The buffer must be large enough for the data length.");
    Debug.Assert(_remainingData == null || _remainingDataOffset <= _remainingData.Length, "The offset must not walk off the buffer.");

    // Buffered data, a pending read, and completion are mutually exclusive states.
    Debug.Assert(!((_remainingDataCount > 0) && (_pendingReadRequest != null)), "We can't have both remaining data and a pending request.");
    Debug.Assert(!((_completed != null) && (_pendingReadRequest != null)), "We can't both be completed and have a pending request.");
    Debug.Assert(_pendingReadRequest == null || !_pendingReadRequest.Task.IsCompleted, "A pending read request must not have been completed yet.");
}
/// <summary>State associated with a pending read request.</summary>
private class ReadState : TaskCompletionSource<int>
{
internal readonly byte[] _buffer;
internal readonly int _offset;
internal readonly int _count;
internal ReadState(byte[] buffer, int offset, int count) :
base(TaskCreationOptions.RunContinuationsAsynchronously)
{
Debug.Assert(buffer != null, "Need non-null buffer");
Debug.Assert(offset >= 0, "Need non-negative offset");
Debug.Assert(count > 0, "Need positive count");
_buffer = buffer;
_offset = offset;
_count = count;
}
}
/// <summary>State associated with a pending read request that's cancelable.</summary>
private sealed class CancelableReadState : ReadState
{
    internal readonly CurlResponseStream _stream;  // stream the read was issued against
    internal readonly CancellationToken _token;    // caller's cancellation token
    // Registration for _token's cancellation callback. Intentionally not assigned
    // here — the creator registers the callback after construction so the callback
    // can capture this fully-constructed instance.
    internal CancellationTokenRegistration _registration;

    internal CancelableReadState(byte[] buffer, int offset, int count,
        CurlResponseStream responseStream, CancellationToken cancellationToken) :
        base(buffer, offset, count)
    {
        _stream = responseStream;
        _token = cancellationToken;
    }
}
}
}
}
| |
//
// Community.CsharpSqlite.SQLiteClient.SqliteParameterCollection.cs
//
// Represents a collection of parameters relevant to a SqliteCommand as well as
// their respective mappings to columns in a DataSet.
//
//Author(s): Vladimir Vukicevic <vladimir@pobox.com>
// Everaldo Canuto <everaldo_canuto@yahoo.com.br>
// Chris Turchin <chris@turchin.net>
// Jeroen Zwartepoorte <jeroen@xs4all.nl>
// Thomas Zoechling <thomas.zoechling@gmx.at>
// Alex West <alxwest@gmail.com>
//
// Copyright (C) 2002 Vladimir Vukicevic
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Data;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic ;
namespace Community.CsharpSqlite.SQLiteClient
{
public class SqliteParameterCollection : DbParameterCollection
{
    #region Fields
    // Parameters in positional order. named_param_hash maps ParameterName -> index
    // into numeric_param_list; every mutating member must keep the two in sync.
    List<SqliteParameter> numeric_param_list = new List<SqliteParameter>();
    Dictionary<string, int> named_param_hash = new Dictionary<string, int>();
    #endregion

    #region Private Methods
    // Ensures value is a SqliteParameter, and assigns a generated name when it has
    // none so every stored parameter can be tracked by name.
    private void CheckSqliteParam (object value)
    {
        if (!(value is SqliteParameter))
            throw new InvalidCastException ("Can only use SqliteParameter objects");
        SqliteParameter sqlp = value as SqliteParameter;
        if (sqlp.ParameterName == null || sqlp.ParameterName.Length == 0)
            sqlp.ParameterName = this.GenerateParameterName();
    }

    // Rebuilds the name -> index map after a positional change (Insert/RemoveAt)
    // invalidates the stored indices. O(n), and overwrites every entry.
    private void RecreateNamedHash ()
    {
        for (int i = 0; i < numeric_param_list.Count; i++)
        {
            named_param_hash[((SqliteParameter) numeric_param_list[i]).ParameterName] = i;
        }
    }

    //FIXME: if the user is calling Insert at various locations with unnamed parameters, this is not going to work....
    // Generates a unique placeholder name of the form ":N", probing upward from
    // Count+1. The loop terminates by setting index to -1 once a free name is found.
    private string GenerateParameterName()
    {
        int index = this.Count + 1;
        string name = String.Empty;
        while (index > 0)
        {
            name = ":" + index.ToString();
            if (this.IndexOf(name) == -1)
                index = -1;
            else
                index++;
        }
        return name;
    }
    #endregion

    #region Properties
    // True when the name carries a ':' or '$' prefix (SQLite named-parameter
    // syntax); such names are also looked up with the prefix stripped.
    private bool isPrefixed (string parameterName)
    {
        return parameterName.Length > 1 && (parameterName[0] == ':' || parameterName[0] == '$');
    }

    // Returns the parameter at the given position.
    // NOTE(review): only the upper bound is checked; a negative index surfaces as
    // an ArgumentOutOfRangeException from the underlying list instead.
    protected override DbParameter GetParameter (int parameterIndex)
    {
        if (this.Count >= parameterIndex+1)
            return (SqliteParameter) numeric_param_list[parameterIndex];
        else
            throw new IndexOutOfRangeException("The specified parameter index does not exist: " + parameterIndex.ToString());
    }

    // Returns the named parameter, accepting either the bare name or a
    // ':'/'$'-prefixed form.
    protected override DbParameter GetParameter (string parameterName)
    {
        if (this.Contains(parameterName))
            return this[(int) named_param_hash[parameterName]];
        else if (isPrefixed(parameterName) && this.Contains(parameterName.Substring(1)))
            return this[(int) named_param_hash[parameterName.Substring(1)]];
        else
            throw new IndexOutOfRangeException("The specified name does not exist: " + parameterName);
    }

    // Replaces the parameter at the given position.
    // NOTE(review): named_param_hash is not updated, so if the replacement has a
    // different ParameterName the name lookup goes stale — confirm callers.
    protected override void SetParameter (int parameterIndex, DbParameter parameter)
    {
        if (this.Count >= parameterIndex+1)
            numeric_param_list[parameterIndex] = (SqliteParameter)parameter;
        else
            throw new IndexOutOfRangeException("The specified parameter index does not exist: " + parameterIndex.ToString());
    }

    // Replaces the named parameter (bare or prefixed name).
    // NOTE(review): same stale-hash caveat as the positional overload; also the
    // prefix test here only checks Length > 1 rather than calling isPrefixed.
    protected override void SetParameter (string parameterName, DbParameter parameter)
    {
        if (this.Contains(parameterName))
            numeric_param_list[(int)named_param_hash[parameterName]] = (SqliteParameter)parameter;
        else if (parameterName.Length > 1 && this.Contains(parameterName.Substring(1)))
            numeric_param_list[(int) named_param_hash[parameterName.Substring(1)]] = (SqliteParameter)parameter;
        else
            throw new IndexOutOfRangeException("The specified name does not exist: " + parameterName);
    }

    /// <summary>Number of parameters currently in the collection.</summary>
    public override int Count
    {
        get
        {
            return this.numeric_param_list.Count;
        }
    }

    // The following properties simply delegate to the backing list's IList view.
    public override bool IsSynchronized
    {
        get { return ((IList)this.numeric_param_list).IsSynchronized ; }
    }
    public override bool IsFixedSize
    {
        get { return ((IList)this.numeric_param_list).IsFixedSize; }
    }
    public override bool IsReadOnly
    {
        get { return ((IList)this.numeric_param_list).IsReadOnly; }
    }
    public override object SyncRoot
    {
        get { return ((IList)this.numeric_param_list).SyncRoot ; }
    }
    #endregion

    #region Public Methods
    /// <summary>Adds each element of <paramref name="values"/> via <see cref="Add(object)"/>.</summary>
    public override void AddRange (Array values)
    {
        if (values == null || values.Length == 0)
            return;
        foreach (object value in values)
            Add (value);
    }

    /// <summary>Adds a parameter and returns its index in the collection.</summary>
    // NOTE(review): the exception message contains a typo ("contains the a");
    // also IndexOf after Add is an O(n) way of saying Count - 1.
    public override int Add (object value)
    {
        CheckSqliteParam (value);
        SqliteParameter sqlp = value as SqliteParameter;
        if (named_param_hash.ContainsKey(sqlp.ParameterName))
            throw new DuplicateNameException ("Parameter collection already contains the a SqliteParameter with the given ParameterName.");
        numeric_param_list.Add(sqlp);
        named_param_hash.Add(sqlp.ParameterName, numeric_param_list.IndexOf(sqlp));
        return (int) named_param_hash[sqlp.ParameterName];
    }

    /// <summary>Adds a parameter and returns it (fluent convenience overload).</summary>
    public SqliteParameter Add (SqliteParameter param)
    {
        Add ((object)param);
        return param;
    }

    /// <summary>Creates and adds a parameter with the given name and value.</summary>
    public SqliteParameter Add (string name, object value)
    {
        return Add (new SqliteParameter (name, value));
    }

    /// <summary>Creates and adds a parameter with the given name and DbType.</summary>
    public SqliteParameter Add (string name, DbType type)
    {
        return Add (new SqliteParameter (name, type));
    }

    /// <summary>Removes all parameters; both backing structures are cleared together.</summary>
    public override void Clear ()
    {
        numeric_param_list.Clear ();
        named_param_hash.Clear ();
    }

    // NOTE(review): the cast requires the destination to be SqliteParameter[];
    // passing e.g. object[] throws InvalidCastException rather than copying.
    public override void CopyTo (Array array, int index)
    {
        this.numeric_param_list.CopyTo((SqliteParameter[])array, index);
    }

    /// <summary>True when a parameter with the same ParameterName exists.</summary>
    public override bool Contains (object value)
    {
        return Contains ((SqliteParameter) value);
    }

    /// <summary>True when the exact (unprefixed) name is present.</summary>
    public override bool Contains (string parameterName)
    {
        return named_param_hash.ContainsKey(parameterName);
    }

    // Membership is by name only, not by reference identity.
    public bool Contains (SqliteParameter param)
    {
        return Contains (param.ParameterName);
    }

    public override IEnumerator GetEnumerator()
    {
        return this.numeric_param_list.GetEnumerator();
    }

    public override int IndexOf (object param)
    {
        return IndexOf ((SqliteParameter) param);
    }

    /// <summary>Index of the named parameter, trying the prefix-stripped name first; -1 if absent.</summary>
    public override int IndexOf (string parameterName)
    {
        if (isPrefixed (parameterName)){
            string sub = parameterName.Substring (1);
            if (named_param_hash.ContainsKey(sub))
                return (int) named_param_hash [sub];
        }
        if (named_param_hash.ContainsKey(parameterName))
            return (int) named_param_hash[parameterName];
        else
            return -1;
    }

    public int IndexOf (SqliteParameter param)
    {
        return IndexOf (param.ParameterName);
    }

    /// <summary>Inserts a parameter at the given position and reindexes the name map.</summary>
    public override void Insert (int index, object value)
    {
        CheckSqliteParam (value);
        if (numeric_param_list.Count == index)
        {
            Add (value);
            return;
        }
        numeric_param_list.Insert(index,(SqliteParameter) value);
        RecreateNamedHash ();
    }

    public override void Remove (object value)
    {
        CheckSqliteParam (value);
        RemoveAt ((SqliteParameter) value);
    }

    // Positional removal is routed through name-based removal.
    public override void RemoveAt (int index)
    {
        RemoveAt (((SqliteParameter) numeric_param_list[index]).ParameterName);
    }

    /// <summary>Removes the named parameter and reindexes the remaining entries.</summary>
    public override void RemoveAt (string parameterName)
    {
        if (!named_param_hash.ContainsKey (parameterName))
            throw new ApplicationException ("Parameter " + parameterName + " not found");
        numeric_param_list.RemoveAt((int) named_param_hash[parameterName]);
        named_param_hash.Remove (parameterName);
        RecreateNamedHash ();
    }

    public void RemoveAt (SqliteParameter param)
    {
        RemoveAt (param.ParameterName);
    }
    #endregion
}
}
| |
using Apache.NMS.Util;
using System;
using System.Threading;
using Lucene.Net.Documents;
namespace Lucene.Net.Index
{
using Lucene.Net.Randomized.Generators;
using Lucene.Net.Support;
using NUnit.Framework;
using System.IO;
using BytesRef = Lucene.Net.Util.BytesRef;
using Directory = Lucene.Net.Store.Directory;
using Document = Documents.Document;
using Field = Field;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using TextField = TextField;
[TestFixture]
public class TestIndexWriterNRTIsCurrent : LuceneTestCase
{
    // Shared cell between the writer thread and the reader threads. Both fields
    // are volatile: Reader is handed off writer -> readers, Stop readers -> writer.
    public class ReaderHolder
    {
        internal volatile DirectoryReader Reader;
        internal volatile bool Stop = false;
    }

    // One writer mutates the index while several readers repeatedly assert that
    // the published NRT reader is no longer current. Any exception captured by a
    // thread fails the test.
    [Test]
    public virtual void TestIsCurrentWithThreads()
    {
        Directory dir = NewDirectory();
        IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
        IndexWriter writer = new IndexWriter(dir, conf);
        ReaderHolder holder = new ReaderHolder();
        ReaderThread[] threads = new ReaderThread[AtLeast(3)];
        // Readers block on this latch until the writer publishes a reader.
        CountDownLatch latch = new CountDownLatch(1);
        WriterThread writerThread = new WriterThread(holder, writer, AtLeast(500), Random(), latch);
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i] = new ReaderThread(holder, latch);
            threads[i].Start();
        }
        writerThread.Start();
        writerThread.Join();
        bool failed = writerThread.Failed != null;
        if (failed)
        {
            Console.WriteLine(writerThread.Failed.ToString());
            Console.Write(writerThread.Failed.StackTrace);
        }
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Join();
            if (threads[i].Failed != null)
            {
                Console.WriteLine(threads[i].Failed.ToString());
                Console.Write(threads[i].Failed.StackTrace);
                failed = true;
            }
        }
        Assert.IsFalse(failed);
        writer.Dispose();
        dir.Dispose();
    }

    public class WriterThread : ThreadClass
    {
        internal readonly ReaderHolder Holder;
        internal readonly IndexWriter Writer;
        internal readonly int NumOps;
        internal bool Countdown = true;     // ensures the latch is released exactly once
        internal readonly CountDownLatch Latch;
        internal Exception Failed;          // first exception observed in Run, if any

        // NOTE(review): the 'random' parameter is accepted but never stored or
        // used — Run() fetches LuceneTestCase.Random() itself.
        internal WriterThread(ReaderHolder holder, IndexWriter writer, int numOps, Random random, CountDownLatch latch)
            : base()
        {
            this.Holder = holder;
            this.Writer = writer;
            this.NumOps = numOps;
            this.Latch = latch;
        }

        public override void Run()
        {
            DirectoryReader currentReader = null;
            Random random = LuceneTestCase.Random();
            try
            {
                Document doc = new Document();
                doc.Add(new TextField("id", "1", Field.Store.NO));
                Writer.AddDocument(doc);
                // Publish the initial NRT reader; readers asserting !IsCurrent
                // rely on the subsequent index mutations below.
                Holder.Reader = currentReader = Writer.GetReader(true);
                Term term = new Term("id");
                for (int i = 0; i < NumOps && !Holder.Stop; i++)
                {
                    // Randomly update (~30%), add (~20%), or delete (~50%).
                    float nextOp = (float)random.NextDouble();
                    if (nextOp < 0.3)
                    {
                        term.Set("id", new BytesRef("1"));
                        Writer.UpdateDocument(term, doc);
                    }
                    else if (nextOp < 0.5)
                    {
                        Writer.AddDocument(doc);
                    }
                    else
                    {
                        term.Set("id", new BytesRef("1"));
                        Writer.DeleteDocuments(term);
                    }
                    // NOTE(review): Holder.Reader is only ever written by this
                    // thread, so this inequality looks unreachable inside the
                    // loop and the latch is normally released in the finally
                    // block — confirm against the upstream Java test.
                    if (Holder.Reader != currentReader)
                    {
                        Holder.Reader = currentReader;
                        if (Countdown)
                        {
                            Countdown = false;
                            Latch.countDown();
                        }
                    }
                    if (random.NextBoolean())
                    {
                        Writer.Commit();
                        DirectoryReader newReader = DirectoryReader.OpenIfChanged(currentReader);
                        if (newReader != null)
                        {
                            currentReader.DecRef();
                            currentReader = newReader;
                        }
                        // Keep the index non-empty so there is always work for readers.
                        if (currentReader.NumDocs == 0)
                        {
                            Writer.AddDocument(doc);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Failed = e;
            }
            finally
            {
                // Signal readers to exit their loop, release the latch if still
                // held, and drop this thread's reference on the reader.
                Holder.Reader = null;
                if (Countdown)
                {
                    Latch.countDown();
                }
                if (currentReader != null)
                {
                    try
                    {
                        currentReader.DecRef();
                    }
                    catch (IOException e)
                    {
                        // NOTE(review): deliberately swallowed during cleanup;
                        // the exception variable is unused.
                    }
                }
            }
            if (VERBOSE)
            {
                Console.WriteLine("writer stopped - forced by reader: " + Holder.Stop);
            }
        }
    }

    public sealed class ReaderThread : ThreadClass
    {
        internal readonly ReaderHolder Holder;
        internal readonly CountDownLatch Latch;
        internal Exception Failed;  // first exception observed in Run, if any

        internal ReaderThread(ReaderHolder holder, CountDownLatch latch)
            : base()
        {
            this.Holder = holder;
            this.Latch = latch;
        }

        public override void Run()
        {
            // Wait until the writer has published a reader (or given up).
            try
            {
                Latch.@await();
            }
            catch (ThreadInterruptedException e)
            {
                Failed = e;
                return;
            }
            // Grab the published reader; a successful IncRef pins it so we can
            // safely inspect it even while the writer refreshes its own copy.
            DirectoryReader reader;
            while ((reader = Holder.Reader) != null)
            {
                if (reader.TryIncRef())
                {
                    try
                    {
                        bool current = reader.Current;
                        if (VERBOSE)
                        {
                            Console.WriteLine("Thread: " + Thread.CurrentThread + " Reader: " + reader + " isCurrent:" + current);
                        }
                        // The writer has mutated the index since publishing, so
                        // the reader must report stale.
                        Assert.IsFalse(current);
                    }
                    catch (Exception e)
                    {
                        if (VERBOSE)
                        {
                            Console.WriteLine("FAILED Thread: " + Thread.CurrentThread + " Reader: " + reader + " isCurrent: false");
                        }
                        Failed = e;
                        Holder.Stop = true;  // tell the writer to stop early
                        return;
                    }
                    finally
                    {
                        try
                        {
                            reader.DecRef();
                        }
                        catch (IOException e)
                        {
                            if (Failed == null)
                            {
                                Failed = e;
                            }
                        }
                    }
                    // One successful check per reader thread is enough.
                    return;
                }
            }
        }
    }
}
}
| |
#if UNITY_WINRT && !UNITY_EDITOR && !UNITY_WP8
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace Newtonsoft.Json.Linq
{
/// <summary>
/// An ordered collection of <see cref="JProperty"/> tokens (stored as JToken)
/// with an auxiliary name lookup. The dictionary is created lazily on the first
/// keyed insert and kept in sync by the Collection&lt;T&gt; mutation overrides.
/// </summary>
internal class JPropertyKeyedCollection : Collection<JToken>
{
    // Ordinal comparison: JSON property names are case-sensitive identifiers.
    private static readonly IEqualityComparer<string> Comparer = StringComparer.Ordinal;

    // Lazily-created name -> property map; null until the first key is added.
    private Dictionary<string, JToken> _dictionary;

    // Adds/overwrites the key mapping for an item.
    private void AddKey(string key, JToken item)
    {
        EnsureDictionary();
        _dictionary[key] = item;
    }

    /// <summary>Re-keys an existing item from its current key to <paramref name="newKey"/>.</summary>
    protected void ChangeItemKey(JToken item, string newKey)
    {
        if (!ContainsItem(item))
            throw new ArgumentException("The specified item does not exist in this KeyedCollection.");

        string keyForItem = GetKeyForItem(item);
        if (!Comparer.Equals(keyForItem, newKey))
        {
            // Add first, then remove, so the item is never unreachable by key.
            if (newKey != null)
                AddKey(newKey, item);

            if (keyForItem != null)
                RemoveKey(keyForItem);
        }
    }

    protected override void ClearItems()
    {
        base.ClearItems();

        if (_dictionary != null)
            _dictionary.Clear();
    }

    /// <summary>True when a property with the given name exists.</summary>
    public bool Contains(string key)
    {
        if (key == null)
            throw new ArgumentNullException("key");

        if (_dictionary != null)
            return _dictionary.ContainsKey(key);

        return false;
    }

    // Membership is by key; a keyless dictionary means nothing is contained.
    private bool ContainsItem(JToken item)
    {
        if (_dictionary == null)
            return false;

        string key = GetKeyForItem(item);
        JToken value;
        return _dictionary.TryGetValue(key, out value);
    }

    private void EnsureDictionary()
    {
        if (_dictionary == null)
            _dictionary = new Dictionary<string, JToken>(Comparer);
    }

    // The key of an item is its property name; items must be JProperty instances.
    private string GetKeyForItem(JToken item)
    {
        return ((JProperty)item).Name;
    }

    protected override void InsertItem(int index, JToken item)
    {
        AddKey(GetKeyForItem(item), item);
        base.InsertItem(index, item);
    }

    /// <summary>Removes the property with the given name; returns whether anything was removed.</summary>
    public bool Remove(string key)
    {
        if (key == null)
            throw new ArgumentNullException("key");

        if (_dictionary != null)
        {
            // Single lookup via TryGetValue (was ContainsKey + indexer, a double
            // lookup). Collection<T>.Remove routes through RemoveItem below,
            // which also drops the key from the dictionary.
            JToken value;
            return _dictionary.TryGetValue(key, out value) && Remove(value);
        }

        return false;
    }

    protected override void RemoveItem(int index)
    {
        string keyForItem = GetKeyForItem(Items[index]);
        RemoveKey(keyForItem);
        base.RemoveItem(index);
    }

    private void RemoveKey(string key)
    {
        if (_dictionary != null)
            _dictionary.Remove(key);
    }

    protected override void SetItem(int index, JToken item)
    {
        string keyForItem = GetKeyForItem(item);
        string keyAtIndex = GetKeyForItem(Items[index]);

        if (Comparer.Equals(keyAtIndex, keyForItem))
        {
            // Same name: just overwrite the mapped value.
            if (_dictionary != null)
                _dictionary[keyForItem] = item;
        }
        else
        {
            // Different name: map the new key, then retire the old one.
            AddKey(keyForItem, item);

            if (keyAtIndex != null)
                RemoveKey(keyAtIndex);
        }
        base.SetItem(index, item);
    }

    /// <summary>Keyed access; throws <see cref="KeyNotFoundException"/> when absent or empty.</summary>
    public JToken this[string key]
    {
        get
        {
            if (key == null)
                throw new ArgumentNullException("key");

            if (_dictionary != null)
                return _dictionary[key];

            throw new KeyNotFoundException();
        }
    }

    public bool TryGetValue(string key, out JToken value)
    {
        if (_dictionary == null)
        {
            value = null;
            return false;
        }

        return _dictionary.TryGetValue(key, out value);
    }

    public ICollection<string> Keys
    {
        get
        {
            EnsureDictionary();
            return _dictionary.Keys;
        }
    }

    public ICollection<JToken> Values
    {
        get
        {
            EnsureDictionary();
            return _dictionary.Values;
        }
    }

    /// <summary>
    /// Order-insensitive deep comparison of two property collections.
    /// </summary>
    public bool Compare(JPropertyKeyedCollection other)
    {
        if (this == other)
            return true;

        // dictionaries in JavaScript aren't ordered
        // ignore order when comparing properties
        Dictionary<string, JToken> d1 = _dictionary;
        Dictionary<string, JToken> d2 = other._dictionary;

        if (d1 == null && d2 == null)
            return true;
        if (d1 == null)
            return (d2.Count == 0);
        if (d2 == null)
            return (d1.Count == 0);

        if (d1.Count != d2.Count)
            return false;

        foreach (KeyValuePair<string, JToken> keyAndProperty in d1)
        {
            JToken secondValue;
            if (!d2.TryGetValue(keyAndProperty.Key, out secondValue))
                return false;

            JProperty p1 = (JProperty)keyAndProperty.Value;
            JProperty p2 = (JProperty)secondValue;

            if (!p1.Value.DeepEquals(p2.Value))
                return false;
        }

        return true;
    }
}
}
#endif
| |
using System.Linq;
using System.Threading.Tasks;
using OmniSharp.Models;
using OmniSharp.Models.FindSymbols;
using OmniSharp.Roslyn.CSharp.Services.Navigation;
using Microsoft.CodeAnalysis;
using TestUtility;
using Xunit;
using Xunit.Abstractions;
namespace OmniSharp.Roslyn.CSharp.Tests
{
// End-to-end tests for the FindSymbols endpoint, run against both regular C#
// (.cs) and C# script (.csx) files. The embedded code snippets are only parsed
// for symbol discovery, so they need not compile — presumably intentional
// (e.g. "private string _field = 0;"); confirm if a snippet is changed.
public class FindSymbolsFacts : AbstractSingleRequestHandlerTestFixture<FindSymbolsService>
{
    public FindSymbolsFacts(ITestOutputHelper output, SharedOmniSharpHostFixture sharedOmniSharpHostFixture)
        : base(output, sharedOmniSharpHostFixture)
    {
    }

    protected override string EndpointName => OmniSharpEndpoints.FindSymbols;

    // All member kinds (fields, properties, method overloads, nested types)
    // are reported, in declaration order, with parameter lists on methods.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Can_find_symbols(string filename)
    {
        string code = @"
public class Foo
{
private string _field = 0;
private string AutoProperty { get; }
private string Property
{
get { return _field; }
set { _field = value; }
}
private string Method() {}
private string Method(string param) {}
private class Nested
{
private string NestedMethod() {}
}
}";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        var expected = new[]
        {
            "Foo",
            "_field",
            "AutoProperty",
            "Property",
            "Method()",
            "Method(string param)",
            "Nested",
            "NestedMethod()"
        };
        Assert.Equal(expected, symbols);
    }

    // The event declaration should yield the event's name, not the keyword.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Does_not_return_event_keyword(string filename)
    {
        const string code = @"
public static class Game
{
public static event GameEvent GameResumed;
}";
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        var expected = new[]
        {
            "Game",
            "GameResumed"
        };
        Assert.Equal(expected, symbols);
    }

    // Same snippet as Can_find_symbols, but asserting the reported Kind strings.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Can_find_symbols_kinds(string filename)
    {
        string code = @"
public class Foo
{
private string _field = 0;
private string AutoProperty { get; }
private string Property
{
get { return _field; }
set { _field = value; }
}
private string Method() {}
private string Method(string param) {}
private class Nested
{
private string NestedMethod() {}
}
}";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Cast<SymbolLocation>().Select(q => q.Kind);
        var expected = new[]
        {
            "Class",
            "Field",
            "Property",
            "Property",
            "Method",
            "Method",
            "Class",
            "Method"
        };
        Assert.Equal(expected, symbols);
    }

    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Returns_interface_kind(string filename)
    {
        const string code = @"public interface Foo {}";
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Cast<SymbolLocation>().Select(q => q.Kind);
        Assert.Equal("Interface", symbols.First());
    }

    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Returns_enum_kind(string filename)
    {
        const string code = @"public enum Foo {}";
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Cast<SymbolLocation>().Select(q => q.Kind);
        Assert.Equal("Enum", symbols.First());
    }

    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Returns_struct_kind(string filename)
    {
        const string code = @"public struct Foo {}";
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Cast<SymbolLocation>().Select(q => q.Kind);
        Assert.Equal("Struct", symbols.First());
    }

    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Returns_delegate_kind(string filename)
    {
        const string code = @"public delegate void Foo();";
        var usages = await FindSymbolsAsync(code, filename);
        var symbols = usages.QuickFixes.Cast<SymbolLocation>().Select(q => q.Kind);
        Assert.Equal("Delegate", symbols.First());
    }

    // For partial methods, only the defining occurrence with a body should be
    // reported (line 8 of the snippet), not the bodiless declaration.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Finds_partial_method_with_body(string filename)
    {
        const string code = @"
public partial class MyClass
{
partial void Method();
}
public partial class MyClass
{
partial void Method()
{
// do stuff
}
}";
        var usages = await FindSymbolsAsync(code, filename);
        var methodSymbol = usages.QuickFixes.Cast<SymbolLocation>().First(x => x.Kind == SymbolKind.Method.ToString());
        // should find the occurrance with body
        Assert.Equal(8, methodSymbol.Line);
    }

    // A filter string matches symbols by substring/camel-hump style matching;
    // here "meth" selects all three *Method* symbols.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Can_find_symbols_using_filter(string filename)
    {
        string code = @"
public class Foo
{
private string _field = 0;
private string AutoProperty { get; }
private string Property
{
get { return _field; }
set { _field = value; }
}
private void Method() {}
private string Method(string param) {}
private class Nested
{
private string NestedMethod() {}
}
}";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsWithFilterAsync(code, filename, "meth", minFilterLength: null, maxItemsToReturn: null);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        var expected = new[]
        {
            "Method()",
            "Method(string param)",
            "NestedMethod()"
        };
        Assert.Equal(expected, symbols);
    }

    // "opti" matches "Options" anywhere in the name, but not "Opossum".
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task Can_find_symbols_using_filter_with_subset_match(string filename)
    {
        string code = @"
public class Options {}
public class Opossum {}
public interface IConfigurationOptions { }
public class ConfigurationOptions : IConfigurationOptions { }";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsWithFilterAsync(code, filename, "opti", minFilterLength: 0, maxItemsToReturn: 0);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        var expected = new[]
        {
            "Options",
            "IConfigurationOptions",
            "ConfigurationOptions"
        };
        Assert.Equal(expected, symbols);
    }

    // A filter shorter than MinFilterLength must yield no results at all.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task no_symbols_returned_when_filter_too_short(string filename)
    {
        string code = @"
public class Options {}";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsWithFilterAsync(code, filename, "op", minFilterLength: 3, maxItemsToReturn: 0);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        Assert.Empty(symbols);
    }

    // MaxItemsToReturn caps the result count (three matches, two returned).
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task limit_number_of_returned_symbols(string filename)
    {
        string code = @"
public class Options1 {}
public class Options2 {}
public class Options3 {}";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsWithFilterAsync(code, filename, "op", minFilterLength: 0, maxItemsToReturn: 2);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        Assert.Equal(2, symbols.Count());
    }

    // Camel-hump fuzzy matching: "ProjMana" hits *Proj*ect*Mana*ger names only.
    [Theory]
    [InlineData("dummy.cs")]
    [InlineData("dummy.csx")]
    public async Task fuzzy_search(string filename)
    {
        string code = @"
public class ProjectManager {}
public class CoolProjectManager {}
public class ProbabilityManager {}";
        code = WrapInNamespaceIfNeeded(code, filename);
        var usages = await FindSymbolsWithFilterAsync(code, filename, "ProjMana", minFilterLength: 0, maxItemsToReturn: 0);
        var symbols = usages.QuickFixes.Select(q => q.Text);
        Assert.Contains("ProjectManager", symbols);
        Assert.Contains("CoolProjectManager", symbols);
        Assert.DoesNotContain("ProbabilityManager", symbols);
    }

    // Runs the endpoint with no request object at all — a null request means
    // "no filter, return everything" (presumably; the handler must tolerate null).
    private async Task<QuickFixResponse> FindSymbolsAsync(string code, string filename)
    {
        var testFile = new TestFile(filename, code);
        SharedOmniSharpTestHost.AddFilesToWorkspace(testFile);
        var requestHandler = GetRequestHandler(SharedOmniSharpTestHost);
        return await requestHandler.Handle(null);
    }

    // Runs the endpoint with an explicit filter/min-length/limit request.
    private async Task<QuickFixResponse> FindSymbolsWithFilterAsync(string code, string filename, string filter, int? minFilterLength, int? maxItemsToReturn)
    {
        var testFile = new TestFile(filename, code);
        SharedOmniSharpTestHost.AddFilesToWorkspace(testFile);
        var requestHandler = GetRequestHandler(SharedOmniSharpTestHost);
        return await requestHandler.Handle(new FindSymbolsRequest {
            Filter = filter,
            MinFilterLength = minFilterLength,
            MaxItemsToReturn = maxItemsToReturn
        });
    }

    // .csx scripts cannot contain namespace declarations, so only .cs snippets
    // are wrapped; this also exercises symbol discovery inside a namespace.
    private string WrapInNamespaceIfNeeded(string code, string filename)
    {
        if (filename.EndsWith(".cs"))
        {
            code = @"
namespace Some.Long.Namespace
{
" + code + @"
}";
        }
        return code;
    }
}
}
| |
#region License
/*
* Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
using System;
using System.Diagnostics;
using Microsoft.Practices.EnterpriseLibrary.Logging;
using Microsoft.Practices.EnterpriseLibrary.Logging.Formatters;
using Microsoft.Practices.EnterpriseLibrary.Logging.TraceListeners;
namespace Common.Logging.EntLib
{
/// <summary>
/// Use this <see cref="TraceListener"/> implementation to route all Entlib logging events to the
/// Common.Logging infrastructure.
/// </summary>
/// <remarks>
/// See <see cref="CommonLoggingEntlibTraceListenerData"/> for a list of properties to configure.
/// </remarks>
/// <example>
/// To route all <see cref="Logger"/> events to Common.Logging, configure <see cref="CommonLoggingEntlibTraceListener"/>:
/// <code lang="XML">
/// <?xml version="1.0" encoding="utf-8"?>
/// <configuration>
/// <configSections>
/// <section name="loggingConfiguration"
/// type="Microsoft.Practices.EnterpriseLibrary.Logging.Configuration.LoggingSettings,
/// Microsoft.Practices.EnterpriseLibrary.Logging, Version=4.1.0.0, Culture=neutral,
/// PublicKeyToken=b03f5f7f11d50a3a" />
/// </configSections>
/// <loggingConfiguration name="Logging Application Block" tracingEnabled="true"
/// defaultCategory="General" logWarningsWhenNoCategoriesMatch="true">
/// <listeners>
/// <add name="Common.Logging Listener"
/// type="Common.Logging.EntLib.CommonLoggingEntlibTraceListener, Common.Logging.EntLib"
/// listenerDataType="Common.Logging.EntLib.CommonLoggingEntlibTraceListenerData,
/// Common.Logging.EntLib"
/// loggerNameFormat="{listenerName}.{sourceName}"
/// formatter="Text Formatter"
/// />
/// </listeners>
/// <formatters>
/// <add template="Timestamp: {timestamp}&#xD;&#xA;Message: {message}&#xD;&#xA;Category:
/// {category}&#xD;&#xA;Priority: {priority}&#xD;&#xA;EventId: {eventid}&#xD;&#xA;Severity:
/// {severity}&#xD;&#xA;Title:{title}&#xD;&#xA;Machine: {machine}&#xD;&#xA;Application Domain:
/// {appDomain}&#xD;&#xA;Process Id: {processId}&#xD;&#xA;Process Name: {processName}&#xD;&#xA;Win32
/// Thread Id: {win32ThreadId}&#xD;&#xA;Thread Name: {threadName}&#xD;&#xA;Extended Properties:
/// {dictionary({key} - {value}&#xD;&#xA;)}"
/// type="Microsoft.Practices.EnterpriseLibrary.Logging.Formatters.TextFormatter,
/// Microsoft.Practices.EnterpriseLibrary.Logging, Version=4.1.0.0, Culture=neutral,
/// PublicKeyToken=b03f5f7f11d50a3a"
/// name="Text Formatter" />
/// </formatters>
/// <specialSources>
/// <allEvents switchValue="All" name="All Events">
/// <listeners>
/// <add name="Test Capturing Listener" />
/// </listeners>
/// </allEvents>
/// </specialSources>
/// </loggingConfiguration>
/// </configuration>
/// </code>
/// </example>
/// <author>Erich Eichinger</author>
public class CommonLoggingEntlibTraceListener : FormattedTraceListenerBase
{
/// <summary>
/// The message object to be logged. Overrides <see cref="ToString"/> to format
/// the associated <see cref="LogEntry"/>
/// </summary>
/// <remarks>
/// Formatting is deferred until ToString() is first called and the result is
/// cached, so the (possibly expensive) formatter runs at most once per entry.
/// </remarks>
private class LogEntryMessage
{
    private readonly ILogFormatter _logFormatter;
    private readonly LogEntry _logEntry;
    private string _cachedResult;

    public LogEntryMessage(ILogFormatter logFormatter, LogEntry logEntry)
    {
        _logFormatter = logFormatter;
        _logEntry = logEntry;
    }

    public override string ToString()
    {
        if (_cachedResult != null)
        {
            return _cachedResult;
        }
        // No formatter configured -> fall back to the entry's own ToString().
        _cachedResult = (_logFormatter == null)
            ? _logEntry.ToString()
            : _logFormatter.Format(_logEntry);
        return _cachedResult;
    }
}
// used to format the loggername from listener + source names
private readonly string _loggerNameFormat = "{listenerName}.{sourceName}";
// configured name of this listener instance, substituted for {listenerName}
private string _loggerName;

/// <summary>
/// Initializes this instance from <see cref="CommonLoggingEntlibTraceListenerData"/> configuration
/// information.
/// </summary>
/// <param name="data">listener configuration; a non-null LoggerNameFormat overrides the default</param>
/// <param name="formatter">formatter passed through to the EntLib base listener</param>
public CommonLoggingEntlibTraceListener(CommonLoggingEntlibTraceListenerData data, ILogFormatter formatter)
    :base(formatter)
{
    if (data.LoggerNameFormat != null)
    {
        _loggerNameFormat = data.LoggerNameFormat;
    }
    _loggerName = data.Name;
}
/// <summary>
/// Format to use for creating the logger name. Defaults to "{listenerName}.{sourceName}".
/// </summary>
/// <remarks>
/// Available placeholders are:
/// <list type="bullet">
/// <item>{listenerName}: the configured name of this listener instance.</item>
/// <item>{sourceName}: the trace source name an event originates from (see e.g. <see cref="TraceListener.TraceEvent(System.Diagnostics.TraceEventCache,string,System.Diagnostics.TraceEventType,int,string,object[])"/>.</item>
/// </list>
/// Read-only at runtime; the value is fixed from configuration in the constructor.
/// </remarks>
public string LoggerNameFormat
{
    get { return _loggerNameFormat; }
}
/// <summary>
/// NOT USED BY ENTLIB
/// </summary>
/// <remarks>EntLib routes all events through <see cref="TraceData"/>, so the plain
/// TraceListener write path is intentionally unsupported.</remarks>
/// <exception cref="NotImplementedException"/>
public override void Write(string message)
{
    throw new NotImplementedException();
}
/// <summary>
/// NOT USED BY ENTLIB
/// </summary>
/// <exception cref="NotImplementedException"/>
public override void WriteLine(string message)
{
throw new NotImplementedException();
}
/// <summary>
/// Overridden to redirect to call <see cref="Log"/>.
/// </summary>
public override void TraceData(TraceEventCache eventCache, string source, TraceEventType eventType, int id, object data)
{
if ((this.Filter == null) || this.Filter.ShouldTrace(eventCache, source, eventType, id, null, null, data, null))
{
if (data is LogEntry)
{
data = new LogEntryMessage(base.Formatter, (LogEntry)data);
}
Log(eventType, source, id, "{0}", data);
}
}
/// <summary>
/// Logs the given message to the Common.Logging infrastructure
/// </summary>
protected virtual void Log(TraceEventType eventType, string source, int id, string format, params object[] args)
{
if (!string.IsNullOrEmpty(source))
{
source = this.LoggerNameFormat.Replace("{listenerName}", _loggerName).Replace("{sourceName}", source);
}
ILog log = LogManager.GetLogger(source);
LogLevel logLevel = MapLogLevel(eventType);
switch (logLevel)
{
case LogLevel.Trace:
log.TraceFormat(format, args);
break;
case LogLevel.Debug:
log.DebugFormat(format, args);
break;
case LogLevel.Info:
log.InfoFormat(format, args);
break;
case LogLevel.Warn:
log.WarnFormat(format, args);
break;
case LogLevel.Error:
log.ErrorFormat(format, args);
break;
case LogLevel.Fatal:
log.FatalFormat(format, args);
break;
case LogLevel.Off:
break;
default:
throw new ArgumentOutOfRangeException("eventType", eventType, "invalid TraceEventType value");
}
}
private LogLevel MapLogLevel(TraceEventType eventType)
{
switch (eventType)
{
case TraceEventType.Start:
case TraceEventType.Stop:
case TraceEventType.Suspend:
case TraceEventType.Resume:
case TraceEventType.Transfer:
return LogLevel.Trace;
case TraceEventType.Verbose:
return LogLevel.Debug;
case TraceEventType.Information:
return LogLevel.Info;
case TraceEventType.Warning:
return LogLevel.Warn;
case TraceEventType.Error:
return LogLevel.Error;
case TraceEventType.Critical:
return LogLevel.Fatal;
default:
return LogLevel.Trace;
}
}
}
}
| |
/*
Copyright (c) 2006 Ladislav Prosek.
The use and distribution terms for this software are contained in the file named License.txt,
which can be found in the root of the Phalanger distribution. By using this software
in any fashion, you are agreeing to be bound by the terms of this license.
You must not remove this notice from this software.
*/
using System;
using System.IO;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Schema;
using System.Text;
using System.ComponentModel;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using PHP.Core;
using System.Diagnostics;
namespace PHP.Library.Xml
{
/// <summary>
/// DOM document.
/// </summary>
[ImplementsType]
public partial class DOMDocument : DOMNode
{
    #region Fields and Properties

    protected internal XmlDocument XmlDocument
    {
        get
        { return (XmlDocument)XmlNode; }
        set
        { XmlNode = value; }
    }

    private bool _formatOutput;
    private bool _validateOnParse;
    internal bool _isHtmlDocument;

    /// <summary>
    /// Returns "#document".
    /// </summary>
    [PhpVisible]
    public override string nodeName
    {
        get
        { return "#document"; }
    }

    /// <summary>
    /// Returns <B>null</B>.
    /// </summary>
    [PhpVisible]
    public override object nodeValue
    {
        get
        { return null; }
        set
        { }
    }

    /// <summary>
    /// Returns the type of the node (<see cref="NodeType.Document"/>).
    /// </summary>
    [PhpVisible]
    public override object nodeType
    {
        get
        { return (int)NodeType.Document; }
    }

    /// <summary>
    /// Returns the node containing the DOCTYPE declaration.
    /// </summary>
    [PhpVisible]
    public object doctype
    {
        get
        {
            XmlDocumentType doc_type = XmlDocument.DocumentType;
            return (doc_type == null ? null : DOMNode.Create(doc_type));
        }
    }

    /// <summary>
    /// Returns the DOM implementation.
    /// </summary>
    [PhpVisible]
    public object implementation
    {
        get
        { return new DOMImplementation(); }
    }

    /// <summary>
    /// Returns the root element of this document.
    /// </summary>
    [PhpVisible]
    public object documentElement
    {
        get
        {
            XmlElement root = XmlDocument.DocumentElement;
            return (root == null ? null : DOMNode.Create(root));
        }
    }

    /// <summary>
    /// Returns the encoding of this document.
    /// </summary>
    [PhpVisible]
    public object actualEncoding
    {
        get
        { return this.encoding; }
    }

    /// <summary>
    /// Returns the encoding of this document.
    /// </summary>
    [PhpVisible]
    public object xmlEncoding
    {
        get
        { return this.encoding; }
    }

    /// <summary>
    /// Returns or sets the encoding of this document.
    /// </summary>
    [PhpVisible]
    public object encoding
    {
        get
        {
            XmlDeclaration decl = GetXmlDeclaration();
            if (decl != null) return decl.Encoding;
            return null;
        }
        set
        {
            string enc = PHP.Core.Convert.ObjectToString(value);
            XmlDeclaration decl = GetXmlDeclaration();
            if (decl != null) decl.Encoding = enc;
            else
            {
                // no declaration yet - create one carrying the encoding
                decl = XmlDocument.CreateXmlDeclaration("1.0", enc, null);
                XmlDocument.InsertBefore(decl, XmlDocument.FirstChild);
            }
        }
    }

    /// <summary>
    /// Returns or sets the standalone flag of this document.
    /// </summary>
    [PhpVisible]
    public object xmlStandalone
    {
        get
        { return this.standalone; }
        set
        { this.standalone = value; }
    }

    /// <summary>
    /// Returns or sets the standalone flag of this document.
    /// </summary>
    [PhpVisible]
    public object standalone
    {
        get
        {
            // anything other than an explicit standalone="no" counts as standalone
            XmlDeclaration decl = GetXmlDeclaration();
            return (decl == null || (decl.Standalone != "no"));
        }
        set
        {
            string stand = (PHP.Core.Convert.ObjectToBoolean(value) ? "yes" : "no");
            XmlDeclaration decl = GetXmlDeclaration();
            if (decl != null) decl.Standalone = stand;
            else
            {
                decl = XmlDocument.CreateXmlDeclaration("1.0", null, stand);
                XmlDocument.InsertBefore(decl, XmlDocument.FirstChild);
            }
        }
    }

    /// <summary>
    /// Returns or sets the XML version of this document.
    /// </summary>
    [PhpVisible]
    public object xmlVersion
    {
        get
        { return this.version; }
        set
        { this.version = value; }
    }

    /// <summary>
    /// Returns or sets the XML version of this document.
    /// </summary>
    [PhpVisible]
    public object version
    {
        get
        {
            XmlDeclaration decl = GetXmlDeclaration();
            return (decl == null ? "1.0" : decl.Version);
        }
        set
        {
            string ver = PHP.Core.Convert.ObjectToString(value);
            XmlDeclaration decl = GetXmlDeclaration();
            if (decl != null)
            {
                // XmlDeclaration.Version has no setter - replace the whole declaration node
                XmlDeclaration new_decl = XmlDocument.CreateXmlDeclaration(ver, decl.Encoding, decl.Standalone);
                XmlDocument.ReplaceChild(new_decl, decl);
            }
            else
            {
                decl = XmlDocument.CreateXmlDeclaration(ver, null, null);
                XmlDocument.InsertBefore(decl, XmlDocument.FirstChild);
            }
        }
    }

    /// <summary>
    /// Returns <B>true</B>.
    /// </summary>
    [PhpVisible]
    public object strictErrorChecking
    {
        get
        { return true; }
        set
        { }
    }

    /// <summary>
    /// Returns the base URI of this document.
    /// </summary>
    [PhpVisible]
    public object documentURI
    {
        get
        { return XmlDocument.BaseURI; }
        set
        { }
    }

    /// <summary>
    /// Returns <B>null</B>.
    /// </summary>
    [PhpVisible]
    public object config
    {
        get
        { return null; }
    }

    /// <summary>
    /// Returns or sets whether XML is formatted by <see cref="save(string,int)"/> and <see cref="saveXML(DOMNode)"/>.
    /// </summary>
    [PhpVisible]
    public object formatOutput
    {
        get
        { return _formatOutput; }
        set
        { _formatOutput = PHP.Core.Convert.ObjectToBoolean(value); }
    }

    /// <summary>
    /// Returns or sets whether XML is validated against schema by <see cref="load(DOMDocument,string,int)"/> and
    /// <see cref="loadXML(DOMDocument,string,int)"/>.
    /// </summary>
    [PhpVisible]
    public object validateOnParse
    {
        get
        { return _validateOnParse; }
        set
        { _validateOnParse = PHP.Core.Convert.ObjectToBoolean(value); }
    }

    /// <summary>
    /// Returns <B>false</B>.
    /// </summary>
    [PhpVisible]
    public object resolveExternals
    {
        get
        { return false; }
        set
        { }
    }

    /// <summary>
    /// Returns or sets whether whitespace should be preserved by this XML document.
    /// </summary>
    [PhpVisible]
    public object preserveWhiteSpace
    {
        get
        { return XmlDocument.PreserveWhitespace; }
        set
        { XmlDocument.PreserveWhitespace = PHP.Core.Convert.ObjectToBoolean(value); }
    }

    /// <summary>
    /// Returns <B>false</B>.
    /// </summary>
    [PhpVisible]
    public object recover
    {
        get
        { return false; }
        set
        { }
    }

    /// <summary>
    /// Returns <B>false</B>.
    /// </summary>
    [PhpVisible]
    public object substituteEntities
    {
        get
        { return false; }
        set
        { }
    }

    #endregion

    #region Construction

    public DOMDocument()
        : base(ScriptContext.CurrentContext, true)
    {
        this.XmlDocument = new XmlDocument();
        this.XmlDocument.PreserveWhitespace = true;
    }

    internal DOMDocument(XmlDocument/*!*/ xmlDocument)
        : base(ScriptContext.CurrentContext, true)
    {
        this.XmlDocument = xmlDocument;
    }

    protected override PHP.Core.Reflection.DObject CloneObjectInternal(PHP.Core.Reflection.DTypeDesc caller, ScriptContext context, bool deepCopyFields)
    {
        return new DOMDocument(XmlDocument);
    }

    [PhpVisible]
    public virtual void __construct([Optional] string version, [Optional] string encoding)
    {
        // append the corresponding XML declaration to the document
        if (version == null) version = "1.0";
        // use the requested version (previously hard-coded to "1.0", silently ignoring the argument)
        XmlDocument.AppendChild(XmlDocument.CreateXmlDeclaration(version, encoding, String.Empty));
    }

    #endregion

    #region Node factory

    /// <summary>
    /// Creates an element with the specified name and inner text.
    /// </summary>
    /// <param name="tagName">The qualified name of the element.</param>
    /// <param name="value">The inner text (value) of the element.</param>
    /// <returns>A new <see cref="DOMElement"/>.</returns>
    [PhpVisible]
    public object createElement(string tagName, [Optional] string value)
    {
        XmlElement element = XmlDocument.CreateElement(tagName);
        if (value != null) element.InnerText = value;
        return new DOMElement(element);
    }

    /// <summary>
    /// Creates a new document fragment.
    /// </summary>
    /// <returns>A new <see cref="DOMDocumentFragment"/>.</returns>
    [PhpVisible]
    public object createDocumentFragment()
    {
        XmlDocumentFragment fragment = XmlDocument.CreateDocumentFragment();
        return new DOMDocumentFragment(fragment);
    }

    /// <summary>
    /// Creates a new text node with the specified text.
    /// </summary>
    /// <param name="data">The text for the text node.</param>
    /// <returns>A new <see cref="DOMText"/>.</returns>
    [PhpVisible]
    public object createTextNode(string data)
    {
        XmlText text = XmlDocument.CreateTextNode(data);
        return new DOMText(text);
    }

    /// <summary>
    /// Creates a comment node containing the specified data.
    /// </summary>
    /// <param name="data">The comment data.</param>
    /// <returns>A new <see cref="DOMComment"/>.</returns>
    [PhpVisible]
    public object createComment(string data)
    {
        XmlComment comment = XmlDocument.CreateComment(data);
        return new DOMComment(comment);
    }

    /// <summary>
    /// Creates a CDATA section containing the specified data.
    /// </summary>
    /// <param name="data">The content of the new CDATA section.</param>
    /// <returns>A new <see cref="DOMCdataSection"/>.</returns>
    [PhpVisible]
    public object createCDATASection(string data)
    {
        XmlCDataSection cdata = XmlDocument.CreateCDataSection(data);
        return new DOMCdataSection(cdata);
    }

    /// <summary>
    /// Creates a processing instruction with the specified name and data.
    /// </summary>
    /// <param name="target">The name of the processing instruction.</param>
    /// <param name="data">The data for the processing instruction.</param>
    /// <returns>A new <see cref="DOMProcessingInstruction"/>.</returns>
    [PhpVisible]
    public object createProcessingInstruction(string target, string data)
    {
        XmlProcessingInstruction pi = XmlDocument.CreateProcessingInstruction(target, data);
        return new DOMProcessingInstruction(pi);
    }

    /// <summary>
    /// Creates an attribute with the specified name.
    /// </summary>
    /// <param name="name">The qualified name of the attribute.</param>
    /// <returns>A new <see cref="DOMAttr"/>.</returns>
    [PhpVisible]
    public object createAttribute(string name)
    {
        XmlAttribute attribute = XmlDocument.CreateAttribute(name);
        return new DOMAttr(attribute);
    }

    /// <summary>
    /// Creates an entity reference with the specified name.
    /// </summary>
    /// <param name="name">The name of the entity reference.</param>
    /// <returns>A new <see cref="DOMEntityReference"/>.</returns>
    [PhpVisible]
    public object createEntityReference(string name)
    {
        XmlEntityReference entref = XmlDocument.CreateEntityReference(name);
        return new DOMEntityReference(entref);
    }

    /// <summary>
    /// Creates an element with the specified namespace URI and qualified name.
    /// </summary>
    /// <param name="namespaceUri">The namespace URI of the element.</param>
    /// <param name="qualifiedName">The qualified name of the element.</param>
    /// <param name="value">The inner text (value) of the element.</param>
    /// <returns>A new <see cref="DOMElement"/>.</returns>
    [PhpVisible]
    public object createElementNS(string namespaceUri, string qualifiedName, [Optional] string value)
    {
        XmlElement element = XmlDocument.CreateElement(qualifiedName, namespaceUri);
        if (value != null) element.InnerText = value;
        return new DOMElement(element);
    }

    /// <summary>
    /// Creates an attribute with the specified namespace URI and qualified name.
    /// </summary>
    /// <param name="namespaceUri">The namespace URI of the attribute.</param>
    /// <param name="qualifiedName">The qualified name of the attribute.</param>
    /// <returns>A new <see cref="DOMAttr"/>.</returns>
    [PhpVisible]
    public object createAttributeNS(string namespaceUri, string qualifiedName)
    {
        XmlAttribute attribute = XmlDocument.CreateAttribute(qualifiedName, namespaceUri);
        return new DOMAttr(attribute);
    }

    #endregion

    #region Child elements

    /// <summary>
    /// Gets all descendant elements with the matching tag name.
    /// </summary>
    /// <param name="name">The tag name. Use <B>*</B> to return all elements within the element tree.</param>
    /// <returns>A <see cref="DOMNodeList"/>.</returns>
    [PhpVisible]
    public object getElementsByTagName(string name)
    {
        DOMNodeList list = new DOMNodeList();

        // enumerate elements in the default namespace
        foreach (XmlNode node in XmlDocument.GetElementsByTagName(name))
        {
            IXmlDomNode dom_node = DOMNode.Create(node);
            if (dom_node != null) list.AppendNode(dom_node);
        }

        // enumerate all namespaces
        XPathNavigator navigator = XmlDocument.CreateNavigator();
        XPathNodeIterator iterator = navigator.Select("//namespace::*[not(. = ../../namespace::*)]");

        while (iterator.MoveNext())
        {
            string prefix = iterator.Current.Name;
            if (!String.IsNullOrEmpty(prefix) && prefix != "xml")
            {
                // enumerate elements in this namespace
                foreach (XmlNode node in XmlDocument.GetElementsByTagName(name, iterator.Current.Value))
                {
                    IXmlDomNode dom_node = DOMNode.Create(node);
                    if (dom_node != null) list.AppendNode(dom_node);
                }
            }
        }

        return list;
    }

    /// <summary>
    /// Gets all descendant elements with the matching namespace URI and local name.
    /// </summary>
    /// <param name="namespaceUri">The namespace URI.</param>
    /// <param name="localName">The local name. Use <B>*</B> to return all elements within the element tree.</param>
    /// <returns>A <see cref="DOMNodeList"/>.</returns>
    [PhpVisible]
    public object getElementsByTagNameNS(string namespaceUri, string localName)
    {
        DOMNodeList list = new DOMNodeList();

        foreach (XmlNode node in XmlDocument.GetElementsByTagName(localName, namespaceUri))
        {
            IXmlDomNode dom_node = DOMNode.Create(node);
            if (dom_node != null) list.AppendNode(dom_node);
        }

        return list;
    }

    /// <summary>
    /// Not yet implemented.
    /// </summary>
    [PhpVisible]
    public void getElementById(string elementId)
    {
        PhpException.Throw(PhpError.Warning, Resources.NotYetImplemented);
    }

    #endregion

    #region Hierarchy

    /// <summary>
    /// Imports a node from another document to the current document.
    /// </summary>
    /// <param name="importedNode">The node being imported.</param>
    /// <param name="deep"><B>True</B> to perform deep clone; otherwise <B>false</B>.</param>
    /// <returns>The imported <see cref="DOMNode"/>.</returns>
    [PhpVisible]
    public object importNode(DOMNode importedNode, bool deep)
    {
        if (importedNode.IsAssociated)
        {
            return DOMNode.Create(XmlDocument.ImportNode(importedNode.XmlNode, deep));
        }
        else
        {
            // the node is not yet part of any document - just associate it with this one
            importedNode.Associate(XmlDocument);
            return importedNode;
        }
    }

    /// <summary>
    /// Not implemented in PHP 5.1.6.
    /// </summary>
    [PhpVisible]
    public void adoptNode(DOMNode source)
    {
        PhpException.Throw(PhpError.Warning, Resources.NotYetImplemented);
    }

    /// <summary>
    /// Puts the entire XML document into a "normal" form.
    /// </summary>
    [PhpVisible]
    public void normalizeDocument()
    {
        XmlDocument.Normalize();
    }

    /// <summary>
    /// Not implemented in PHP 5.1.6.
    /// </summary>
    [PhpVisible]
    public void renameNode(DOMNode node, string namespaceUri, string qualifiedName)
    {
        PhpException.Throw(PhpError.Warning, Resources.NotYetImplemented);
    }

    private XmlDeclaration GetXmlDeclaration()
    {
        return (XmlNode.FirstChild as XmlDeclaration);
    }

    #endregion

    #region Load and Save

    /// <summary>
    /// Loads the XML document from the specified URL.
    /// </summary>
    /// <param name="instance">The <see cref="DOMDocument"/> instance or <B>null</B>.</param>
    /// <param name="fileName">URL for the file containing the XML document to load.</param>
    /// <param name="options">Undocumented.</param>
    /// <returns>A new <see cref="DOMDocument"/> if <paramref name="instance"/> is <B>null</B>;
    /// otherwise <B>true</B> or <B>false</B>.</returns>
    [PhpVisible]
    public static object load([This] DOMDocument instance, string fileName, [Optional] int options)
    {
        // this method can be called both statically and via an instance
        bool static_call;
        if (instance == null)
        {
            static_call = true;
            instance = new DOMDocument();
        }
        else static_call = false;

        instance._isHtmlDocument = false;

        using (PhpStream stream = PhpStream.Open(fileName, "rt"))
        {
            if (stream == null) return false;

            try
            {
                if (instance._validateOnParse)
                {
                    // create a validating XML reader
                    XmlReaderSettings settings = new XmlReaderSettings();
#pragma warning disable 618
                    settings.ValidationType = ValidationType.Auto;
#pragma warning restore 618

                    instance.XmlDocument.Load(XmlReader.Create(stream.RawStream, settings));
                }
                else instance.XmlDocument.Load(stream.RawStream);
            }
            catch (XmlException e)
            {
                PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, fileName));
                return false;
            }
            catch (IOException e)
            {
                PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, fileName));
                return false;
            }
        }

        return (static_call ? instance : (object)true);
    }

    /// <summary>
    /// Loads the XML document from the specified string.
    /// </summary>
    /// <param name="instance">The <see cref="DOMDocument"/> instance or <B>null</B>.</param>
    /// <param name="xmlString">The XML string.</param>
    /// <param name="options">Undocumented.</param>
    /// <returns>A new <see cref="DOMDocument"/> if <paramref name="instance"/> is <B>null</B>;
    /// otherwise <B>true</B> or <B>false</B>.</returns>
    [PhpVisible]
    public static object loadXML([This] DOMDocument instance, string xmlString, [Optional] int options)
    {
        // this method can be called both statically and via an instance
        bool static_call;
        if (instance == null)
        {
            static_call = true;
            instance = new DOMDocument();
        }
        else static_call = false;

        var result = instance.loadXMLInternal(xmlString, options, false);

        return static_call ? instance : (object)result;
    }

    /// <summary>
    /// Loads provided XML string into this <see cref="DOMDocument"/>.
    /// </summary>
    /// <param name="xmlString">String representing XML document.</param>
    /// <param name="options">PHP options.</param>
    /// <param name="isHtml">Whether the <paramref name="xmlString"/> represents XML generated from HTML document (then it may contain some invalid XML characters).</param>
    /// <returns><B>True</B> on success, <B>false</B> on a parse or I/O error.</returns>
    private bool loadXMLInternal(string xmlString, int options, bool isHtml)
    {
        this._isHtmlDocument = isHtml;

        var stream = new StringReader(xmlString);

        try
        {
            XmlReaderSettings settings = new XmlReaderSettings();

            // validating XML reader
            if (this._validateOnParse)
#pragma warning disable 618
                settings.ValidationType = ValidationType.Auto;
#pragma warning restore 618

            // do not check invalid characters in HTML (XML)
            if (isHtml)
                settings.CheckCharacters = false;

            // load the document
            this.XmlDocument.Load(XmlReader.Create(stream, settings));

            // done
            return true;
        }
        catch (XmlException e)
        {
            PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, null));
            return false;
        }
        catch (IOException e)
        {
            PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, null));
            return false;
        }
    }

    /// <summary>
    /// Saves the XML document to the specified stream.
    /// </summary>
    /// <param name="fileName">The location of the file where the document should be saved.</param>
    /// <param name="options">Unsupported.</param>
    /// <returns>The number of bytes written or <B>false</B> on error.</returns>
    [PhpVisible]
    public object save(string fileName, [Optional] int options)
    {
        using (PhpStream stream = PhpStream.Open(fileName, "wt"))
        {
            if (stream == null) return false;

            try
            {
                // direct stream write indents
                if (_formatOutput) XmlDocument.Save(stream.RawStream);
                else
                {
                    Encoding encoding = XmlDom.GetNodeEncoding(XmlNode);

                    using (XmlTextWriter writer = new XmlTextWriter(stream.RawStream, encoding))
                    {
                        XmlDocument.Save(writer);
                    }
                }
            }
            catch (XmlException e)
            {
                PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, fileName));
                // return FALSE per the documented contract (was null, inconsistent with the IOException path)
                return false;
            }
            catch (IOException e)
            {
                PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, fileName));
                return false;
            }

            // TODO:
            return (stream.RawStream.CanSeek ? stream.RawStream.Position : 1);
        }
    }

    /// <summary>
    /// Returns the string representation of this document.
    /// </summary>
    /// <param name="node">The node to dump (the entire document if <B>null</B>).</param>
    /// <returns>The string representation of the document / the specified node or <B>false</B>.</returns>
    [PhpVisible]
    public object saveXML([Optional] DOMNode node)
    {
        XmlNode xml_node;
        if (node == null) xml_node = XmlNode;
        else
        {
            xml_node = node.XmlNode;
            if (xml_node.OwnerDocument != XmlDocument && xml_node != XmlNode)
            {
                DOMException.Throw(ExceptionCode.WrongDocument);
                return false;
            }
        }

        // determine output encoding
        Encoding encoding = XmlDom.GetNodeEncoding(xml_node);

        using (MemoryStream stream = new MemoryStream())
        {
            // use a XML writer and set its Formatting property to Formatting.Indented
            using (XmlTextWriter writer = new XmlTextWriter(stream, encoding))
            {
                writer.Formatting = (_formatOutput ? Formatting.Indented : Formatting.None);
                xml_node.WriteTo(writer);
            }

            return new PhpBytes(stream.ToArray());
        }
    }

    /// <summary>
    /// Processes HTML errors, if any.
    /// </summary>
    /// <param name="htmlDoc"><see cref="HtmlAgilityPack.HtmlDocument"/> instance to process errors from.</param>
    /// <param name="filename">HTML file name or <c>null</c> if HTML has been loaded from a string.</param>
    private void CheckHtmlErrors(HtmlAgilityPack.HtmlDocument/*!*/htmlDoc, string filename)
    {
        Debug.Assert(htmlDoc != null);

        foreach (var error in htmlDoc.ParseErrors)
        {
            switch (error.Code)
            {
                case HtmlAgilityPack.HtmlParseErrorCode.EndTagNotRequired:
                case HtmlAgilityPack.HtmlParseErrorCode.TagNotOpened:
                    break;
                default:
                    PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, error.Line, error.LinePosition, "(" + error.Code.ToString() + ")" + error.Reason, filename));
                    break;
            }
        }
    }

    /// <summary>
    /// Loads HTML from a string.
    /// </summary>
    /// <param name="source">
    /// String containing HTML document.
    /// </param>
    [PhpVisible]
    public object loadHTML(string source)
    {
        if (string.IsNullOrEmpty(source))
            return false;

        return loadHTML(new StringReader(source), null);
    }

    /// <summary>
    /// Loads HTML from a file.
    /// </summary>
    /// <param name="sourceFile">
    /// Path to a file containing HTML document.
    /// </param>
    [PhpVisible]
    public object loadHTMLFile(string sourceFile)
    {
        using (PhpStream stream = PhpStream.Open(sourceFile, "rt"))
        {
            if (stream == null) return false;

            return loadHTML(new StreamReader(stream.RawStream), sourceFile);
        }
    }

    /// <summary>
    /// Load HTML DOM from given <paramref name="stream"/>.
    /// </summary>
    private object loadHTML(TextReader stream, string filename)
    {
        HtmlAgilityPack.HtmlDocument htmlDoc = new HtmlAgilityPack.HtmlDocument();

        // setup HTML parser
        htmlDoc.OptionOutputAsXml = true;
        //htmlDoc.OptionOutputOriginalCase = true;  // NOTE: we need lower-cased names because of XPath queries
        //htmlDoc.OptionFixNestedTags = true;
        htmlDoc.OptionCheckSyntax = false;
        htmlDoc.OptionUseIdAttribute = false;   // only needed when XPath navigator is used on htmlDoc
        htmlDoc.OptionWriteEmptyNodes = true;

        // load HTML (from string or a stream)
        htmlDoc.Load(stream);

        CheckHtmlErrors(htmlDoc, filename);

        // save to string as XML
        using (StringWriter sw = new StringWriter())
        {
            htmlDoc.Save(sw);

            // load as XML
            return this.loadXMLInternal(sw.ToString(), 0, true);
        }
    }

    /// <summary>
    /// Not implemented (TODO: need an HTML parser for this).
    /// </summary>
    [PhpVisible]
    public object saveHTML()
    {
        //TODO: use the HTML parser to save HTML
        return saveXML(null);
    }

    /// <summary>
    /// Not implemented (TODO: need an HTML parser for this).
    /// </summary>
    [PhpVisible]
    public object saveHTMLFile(string file)
    {
        //TODO: use the HTML parser to save HTML
        return save(file, 0);
    }

    #endregion

    #region XInclude

    /// <summary>
    /// Not implemented (TODO: need a XInclude implementation for this).
    /// </summary>
    [PhpVisible]
    public void xinclude([Optional] int options)
    {
        PhpException.Throw(PhpError.Warning, Resources.NotYetImplemented);
    }

    #endregion

    #region Validation

    /// <summary>
    /// Not implemented (System.Xml does not support post-load DTD validation).
    /// </summary>
    [PhpVisible]
    public void validate()
    {
        PhpException.Throw(PhpError.Warning, Resources.PostLoadDtdUnsupported);
    }

    /// <summary>
    /// Validates the document against the specified XML schema.
    /// </summary>
    /// <param name="schemaFile">URL for the file containing the XML schema to load.</param>
    /// <returns><B>True</B> or <B>false</B>.</returns>
    [PhpVisible]
    public object schemaValidate(string schemaFile)
    {
        XmlSchema schema;

        using (PhpStream stream = PhpStream.Open(schemaFile, "rt"))
        {
            if (stream == null) return false;

            try
            {
                schema = XmlSchema.Read(stream.RawStream, null);
            }
            catch (XmlException e)
            {
                PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_WARNING, 0, 0, 0, e.Message, schemaFile));
                return false;
            }
            catch (IOException e)
            {
                PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_ERROR, 0, 0, 0, e.Message, schemaFile));
                return false;
            }
        }

        XmlDocument.Schemas.Add(schema);
        try
        {
            XmlDocument.Validate(null);
        }
        catch (XmlException)
        {
            return false;
        }
        finally
        {
            // never leave the temporary schema registered on the document
            XmlDocument.Schemas.Remove(schema);
        }
        return true;
    }

    /// <summary>
    /// Validates the document against the specified XML schema.
    /// </summary>
    /// <param name="schemaString">The XML schema string.</param>
    /// <returns><B>True</B> or <B>false</B>.</returns>
    [PhpVisible]
    public object schemaValidateSource(string schemaString)
    {
        XmlSchema schema;

        try
        {
            schema = XmlSchema.Read(new System.IO.StringReader(schemaString), null);
        }
        catch (XmlException e)
        {
            PhpLibXml.IssueXmlError(new PhpLibXml.XmlError(PhpLibXml.LIBXML_ERR_WARNING, 0, 0, 0, e.Message, null));
            return false;
        }

        XmlDocument.Schemas.Add(schema);
        try
        {
            XmlDocument.Validate(null);
        }
        catch (XmlException)
        {
            return false;
        }
        finally
        {
            // never leave the temporary schema registered on the document
            XmlDocument.Schemas.Remove(schema);
        }
        return true;
    }

    /// <summary>
    /// Not implemented (TODO: will need a Relax NG validator for this).
    /// </summary>
    [PhpVisible]
    public void relaxNGValidate(string schemaFile)
    {
        PhpException.Throw(PhpError.Warning, Resources.RelaxNGUnsupported);
    }

    /// <summary>
    /// Not implemented (TODO: will need a Relax NG validator for this).
    /// </summary>
    [PhpVisible]
    public void relaxNGValidateSource(string schema)
    {
        PhpException.Throw(PhpError.Warning, Resources.RelaxNGUnsupported);
    }

    #endregion
}
/// <summary>
/// DOM document fragment.
/// </summary>
[ImplementsType]
public partial class DOMDocumentFragment : DOMNode
{
    #region Fields and Properties

    // Typed view of the wrapped XmlNode.
    protected internal XmlDocumentFragment XmlDocumentFragment
    {
        get { return (XmlDocumentFragment)XmlNode; }
        set { XmlNode = value; }
    }

    /// <summary>
    /// The node name; always "#document-fragment" for this node type.
    /// </summary>
    [PhpVisible]
    public override string nodeName
    {
        get { return "#document-fragment"; }
    }

    /// <summary>
    /// Document fragments carry no value; always <B>null</B> (writes are ignored).
    /// </summary>
    [PhpVisible]
    public override object nodeValue
    {
        get { return null; }
        set { }
    }

    /// <summary>
    /// The namespace URI of the node, or <B>null</B> while the fragment is not
    /// yet associated with a document.
    /// </summary>
    [PhpVisible]
    public override string namespaceURI
    {
        get { return (IsAssociated ? base.namespaceURI : null); }
    }

    /// <summary>
    /// The node type (<see cref="NodeType.DocumentFragment"/>).
    /// </summary>
    [PhpVisible]
    public override object nodeType
    {
        get { return (int)NodeType.DocumentFragment; }
    }

    #endregion

    #region Construction

    public DOMDocumentFragment()
        : base(ScriptContext.CurrentContext, true)
    { }

    internal DOMDocumentFragment(XmlDocumentFragment/*!*/ xmlDocumentFragment)
        : base(ScriptContext.CurrentContext, true)
    {
        this.XmlDocumentFragment = xmlDocumentFragment;
    }

    protected override PHP.Core.Reflection.DObject CloneObjectInternal(PHP.Core.Reflection.DTypeDesc caller, ScriptContext context, bool deepCopyFields)
    {
        return new DOMDocumentFragment(XmlDocumentFragment);
    }

    [PhpVisible]
    public void __construct()
    { }

    #endregion

    #region Hierarchy

    // Binds an unassociated fragment to the given document by creating the
    // underlying XmlDocumentFragment; a no-op when already associated.
    protected internal override void Associate(XmlDocument document)
    {
        if (IsAssociated) return;
        XmlDocumentFragment = document.CreateDocumentFragment();
    }

    #endregion

    #region Operations

    /// <summary>
    /// Appends (well-formed) XML data to this document fragment.
    /// </summary>
    /// <param name="data">The data to append.</param>
    /// <returns><B>True</B> on success, <B>false</B> when the data is not well-formed XML.</returns>
    [PhpVisible]
    public object appendXML(string data)
    {
        bool succeeded = true;
        try
        {
            XmlDocumentFragment.InnerXml += data;
        }
        catch (XmlException)
        {
            succeeded = false;
        }
        return succeeded;
    }

    #endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
namespace System.Text.Tests
{
public class EncodingGetChars1
{
#region Positive Testcases
[Fact]
public void PosTest1()
{
    // UTF-8: plain ASCII bytes are expected to decode to "TestString" (case id "00A"; helper defined elsewhere in this class).
    PositiveTestString(Encoding.UTF8, "TestString", new byte[] { 84, 101, 115, 116, 83, 116, 114, 105, 110, 103 }, "00A");
}
[Fact]
public void PosTest2()
{
    // UTF-8: an empty byte array is expected to decode to the empty string (case id "00B").
    PositiveTestString(Encoding.UTF8, "", new byte[] { }, "00B");
}
[Fact]
public void PosTest3()
{
    // UTF-8: the two-byte sequence 208,128 is expected to decode to U+0400 amid ASCII (case id "00C").
    PositiveTestString(Encoding.UTF8, "FooBA\u0400R", new byte[] { 70, 111, 111, 66, 65, 208, 128, 82 }, "00C");
}
[Fact]
public void PosTest4()
{
    // UTF-8: 195,128 -> U+00C0 and 204,128 -> combining U+0300; expected string keeps the combining mark separate (case id "00D").
    PositiveTestString(Encoding.UTF8, "\u00C0nima\u0300l", new byte[] { 195, 128, 110, 105, 109, 97, 204, 128, 108 }, "00D");
}
[Fact]
public void PosTest5()
{
    // UTF-8: the four-byte sequence 240,144,181,181 is expected to decode to the surrogate pair \uD803\uDD75 (case id "00E").
    PositiveTestString(Encoding.UTF8, "Test\uD803\uDD75Test", new byte[] { 84, 101, 115, 116, 240, 144, 181, 181, 84, 101, 115, 116 }, "00E");
}
[Fact]
public void PosTest6()
{
    // UTF-8: NUL and other control bytes are expected to pass through unchanged (case id "00F").
    PositiveTestString(Encoding.UTF8, "\0Te\nst\0\t\0T\u000Fest\0", new byte[] { 0, 84, 101, 10, 115, 116, 0, 9, 0, 84, 15, 101, 115, 116, 0 }, "00F");
}
[Fact]
public void PosTest7()
{
    // UTF-8: stray 196/176 bytes are expected to become U+FFFD, while the valid pair 196,176 decodes to U+0130 (case id "00G").
    PositiveTestString(Encoding.UTF8, "\uFFFDTest\uFFFD\uFFFD\u0130\uFFFDTest\uFFFD", new byte[] { 196, 84, 101, 115, 116, 196, 196, 196, 176, 176, 84, 101, 115, 116, 176 }, "00G");
}
[Fact]
public void PosTest8()
{
    // Same as the ASCII case but obtaining the encoding via name lookup GetEncoding("utf-8") (case id "00H").
    PositiveTestString(Encoding.GetEncoding("utf-8"), "TestTest", new byte[] { 84, 101, 115, 116, 84, 101, 115, 116 }, "00H");
}
[Fact]
public void PosTest9()
{
    // UTF-8: a lone continuation byte (176) is expected to decode to the replacement char U+FFFD (case id "00I").
    PositiveTestString(Encoding.GetEncoding("utf-8"), "\uFFFD", new byte[] { 176 }, "00I");
}
[Fact]
public void PosTest10()
{
    // UTF-8: a truncated lead byte (196) is expected to decode to the replacement char U+FFFD (case id "00J").
    PositiveTestString(Encoding.GetEncoding("utf-8"), "\uFFFD", new byte[] { 196 }, "00J");
}
[Fact]
public void PosTest11()
{
    // UTF-8: three consecutive four-byte sequences are expected to decode to three surrogate pairs (case id "00K").
    PositiveTestString(Encoding.GetEncoding("utf-8"), "\uD803\uDD75\uD803\uDD75\uD803\uDD75", new byte[] { 240, 144, 181, 181, 240, 144, 181, 181, 240, 144, 181, 181 }, "00K");
}
[Fact]
public void PosTest12()
{
    // UTF-8: the two-byte sequence 196,176 is expected to decode to U+0130 (case id "00L").
    PositiveTestString(Encoding.GetEncoding("utf-8"), "\u0130", new byte[] { 196, 176 }, "00L");
}
[Fact]
public void PosTest13()
{
    // UTF-8: malformed four-byte sequences surrounding valid ones are expected to yield U+FFFD around two decoded surrogate pairs (case id "0A2").
    PositiveTestString(Encoding.GetEncoding("utf-8"), "\uFFFD\uD803\uDD75\uD803\uDD75\uFFFD\uFFFD", new byte[] { 240, 240, 144, 181, 181, 240, 144, 181, 181, 240, 144, 240 }, "0A2");
}
[Fact]
public void PosTest14()
{
    // UTF-16LE: a trailing odd byte (45) after a valid sequence is expected to decode to U+FFFD (case id "00A3").
    PositiveTestString(Encoding.Unicode, "TestString\uFFFD", new byte[] { 84, 0, 101, 0, 115, 0, 116, 0, 83, 0, 116, 0, 114, 0, 105, 0, 110, 0, 103, 0, 45 }, "00A3");
}
[Fact]
public void PosTest15()
{
PositiveTestString(Encoding.Unicode, "", new byte[] { }, "00B3");
}
[Fact]
public void PosTest16()
{
PositiveTestString(Encoding.Unicode, "FooBA\u0400R", new byte[] { 70, 0, 111, 0, 111, 0, 66, 0, 65, 0, 0, 4, 82, 0 }, "00C3");
}
[Fact]
public void PosTest17()
{
PositiveTestString(Encoding.Unicode, "\u00C0nima\u0300l", new byte[] { 192, 0, 110, 0, 105, 0, 109, 0, 97, 0, 0, 3, 108, 0 }, "00D3");
}
[Fact]
public void PosTest18()
{
PositiveTestString(Encoding.Unicode, "Test\uD803\uDD75Test", new byte[] { 84, 0, 101, 0, 115, 0, 116, 0, 3, 216, 117, 221, 84, 0, 101, 0, 115, 0, 116, 0 }, "00E3");
}
[Fact]
public void PosTest19()
{
PositiveTestString(Encoding.Unicode, "\0Te\nst\0\t\0T\u000Fest\0", new byte[] { 0, 0, 84, 0, 101, 0, 10, 0, 115, 0, 116, 0, 0, 0, 9, 0, 0, 0, 84, 0, 15, 0, 101, 0, 115, 0, 116, 0, 0, 0 }, "00F3");
}
[Fact]
public void PosTest20()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "TestTest", new byte[] { 84, 0, 101, 0, 115, 0, 116, 0, 84, 0, 101, 0, 115, 0, 116, 0 }, "00G3");
}
[Fact]
public void PosTest21()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "TestTest\uFFFD", new byte[] { 84, 0, 101, 0, 115, 0, 116, 0, 84, 0, 101, 0, 115, 0, 116, 0, 117, 221 }, "00H3");
}
[Fact]
public void PosTest22()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "TestTest\uFFFD", new byte[] { 84, 0, 101, 0, 115, 0, 116, 0, 84, 0, 101, 0, 115, 0, 116, 0, 3, 216 }, "00I3");
}
[Fact]
public void PosTest23()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "\uFFFD\uFFFD", new byte[] { 3, 216, 84 }, "00J3");
}
[Fact]
public void PosTest24()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "\uD803\uDD75\uD803\uDD75\uD803\uDD75", new byte[] { 3, 216, 117, 221, 3, 216, 117, 221, 3, 216, 117, 221 }, "00K3");
}
[Fact]
public void PosTest25()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "\u0130", new byte[] { 48, 1 }, "00L3");
}
[Fact]
public void PosTest26()
{
PositiveTestString(Encoding.GetEncoding("utf-16"), "\uD803\uDD75\uD803\uDD75", new byte[] { 3, 216, 117, 221, 3, 216, 117, 221 }, "0A23");
}
[Fact]
public void PosTest27()
{
PositiveTestString(Encoding.BigEndianUnicode, "TestString\uFFFD", new byte[] { 0, 84, 0, 101, 0, 115, 0, 116, 0, 83, 0, 116, 0, 114, 0, 105, 0, 110, 0, 103, 0 }, "00A4");
}
[Fact]
public void PosTest28()
{
PositiveTestString(Encoding.BigEndianUnicode, "", new byte[] { }, "00B4");
}
[Fact]
public void PosTest29()
{
PositiveTestString(Encoding.BigEndianUnicode, "FooBA\u0400R\uFFFD", new byte[] { 0, 70, 0, 111, 0, 111, 0, 66, 0, 65, 4, 0, 0, 82, 70 }, "00C4");
}
[Fact]
public void PosTest30()
{
PositiveTestString(Encoding.BigEndianUnicode, "\u00C0nima\u0300l", new byte[] { 0, 192, 0, 110, 0, 105, 0, 109, 0, 97, 3, 0, 0, 108 }, "00D4");
}
[Fact]
public void PosTest31()
{
PositiveTestString(Encoding.BigEndianUnicode, "Test\uD803\uDD75Test", new byte[] { 0, 84, 0, 101, 0, 115, 0, 116, 216, 3, 221, 117, 0, 84, 0, 101, 0, 115, 0, 116 }, "00E4");
}
[Fact]
public void PosTest32()
{
PositiveTestString(Encoding.BigEndianUnicode, "\0Te\nst\0\t\0T\u000Fest\0\uFFFD", new byte[] { 0, 0, 0, 84, 0, 101, 0, 10, 0, 115, 0, 116, 0, 0, 0, 9, 0, 0, 0, 84, 0, 15, 0, 101, 0, 115, 0, 116, 0, 0, 0 }, "00F4");
}
[Fact]
public void PosTest33()
{
PositiveTestString(Encoding.BigEndianUnicode, "TestTest", new byte[] { 0, 84, 0, 101, 0, 115, 0, 116, 0, 84, 0, 101, 0, 115, 0, 116 }, "00G4");
}
[Fact]
public void PosTest34()
{
PositiveTestString(Encoding.BigEndianUnicode, "TestTest\uFFFD", new byte[] { 0, 84, 0, 101, 0, 115, 0, 116, 0, 84, 0, 101, 0, 115, 0, 116, 221, 117 }, "00H4");
}
[Fact]
public void PosTest35()
{
PositiveTestString(Encoding.GetEncoding("UTF-16BE"), "TestTest\uFFFD", new byte[] { 0, 84, 0, 101, 0, 115, 0, 116, 0, 84, 0, 101, 0, 115, 0, 116, 216, 3 }, "00I4");
}
[Fact]
public void PosTest36()
{
PositiveTestString(Encoding.GetEncoding("UTF-16BE"), "\uFFFD\uFFFD", new byte[] { 216, 3, 48 }, "00J4");
}
[Fact]
public void PosTest37()
{
PositiveTestString(Encoding.GetEncoding("UTF-16BE"), "\uD803\uDD75\uD803\uDD75\uD803\uDD75", new byte[] { 216, 3, 221, 117, 216, 3, 221, 117, 216, 3, 221, 117 }, "00K4");
}
[Fact]
public void PosTest38()
{
PositiveTestString(Encoding.GetEncoding("UTF-16BE"), "\u0130", new byte[] { 1, 48 }, "00L4");
}
[Fact]
public void PosTest39()
{
PositiveTestString(Encoding.GetEncoding("UTF-16BE"), "\uD803\uDD75\uD803\uDD75", new byte[] { 216, 3, 221, 117, 216, 3, 221, 117 }, "0A24");
}
#endregion
#region Negative Testcases
// Argument validation for GetChars(byte[]) and GetChars(byte[], int, int):
// a null byte array throws ArgumentNullException; negative or out-of-range
// index/count combinations throw ArgumentOutOfRangeException. The same cases
// are repeated for UTF8Encoding, UnicodeEncoding (LE) and UnicodeEncoding (BE).
[Fact]
public void NegTest1()
{
NegativeTestChars<ArgumentNullException>(new UTF8Encoding(), null, "00O");
}
[Fact]
public void NegTest2()
{
NegativeTestChars<ArgumentNullException>(new UnicodeEncoding(), null, "00O3");
}
[Fact]
public void NegTest3()
{
NegativeTestChars<ArgumentNullException>(new UnicodeEncoding(true, false), null, "00O4");
}
[Fact]
public void NegTest4()
{
NegativeTestChars2<ArgumentNullException>(new UTF8Encoding(), null, 0, 0, "00P");
}
[Fact]
public void NegTest5()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UTF8Encoding(), new byte[] { 0, 0 }, -1, 1, "00P");
}
[Fact]
public void NegTest6()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UTF8Encoding(), new byte[] { 0, 0 }, 1, -1, "00Q");
}
[Fact]
public void NegTest7()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UTF8Encoding(), new byte[] { 0, 0 }, 0, 10, "00R");
}
[Fact]
public void NegTest8()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UTF8Encoding(), new byte[] { 0, 0 }, 3, 0, "00S");
}
[Fact]
public void NegTest9()
{
NegativeTestChars2<ArgumentNullException>(new UnicodeEncoding(), null, 0, 0, "00P3");
}
[Fact]
public void NegTest10()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(), new byte[] { 0, 0 }, -1, 1, "00P3");
}
[Fact]
public void NegTest11()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(), new byte[] { 0, 0 }, 1, -1, "00Q3");
}
[Fact]
public void NegTest12()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(), new byte[] { 0, 0 }, 0, 10, "00R3");
}
[Fact]
public void NegTest13()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(), new byte[] { 0, 0 }, 3, 0, "00S3");
}
[Fact]
public void NegTest14()
{
NegativeTestChars2<ArgumentNullException>(new UnicodeEncoding(true, false), null, 0, 0, "00P4");
}
[Fact]
public void NegTest15()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(true, false), new byte[] { 0, 0 }, -1, 1, "00P4");
}
[Fact]
public void NegTest16()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(true, false), new byte[] { 0, 0 }, 1, -1, "00Q4");
}
[Fact]
public void NegTest17()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(true, false), new byte[] { 0, 0 }, 0, 10, "00R4");
}
[Fact]
public void NegTest18()
{
NegativeTestChars2<ArgumentOutOfRangeException>(new UnicodeEncoding(true, false), new byte[] { 0, 0 }, 3, 0, "00S4");
}
// Shared destination buffer for the GetChars(byte[], int, int, char[], int) overload tests.
private static char[] s_output = new char[20];
// Argument validation for GetChars(byte[], int, int, char[], int): null source or
// destination arrays throw ArgumentNullException; negative or out-of-range
// index/count/charIndex values throw ArgumentOutOfRangeException; a charIndex that
// leaves insufficient room in the 20-char destination throws ArgumentException.
[Fact]
public void NegTest19()
{
NegativeTestChars3<ArgumentNullException>(Encoding.UTF8, null, 0, 0, s_output, 0, "00T");
}
[Fact]
public void NegTest20()
{
NegativeTestChars3<ArgumentNullException>(Encoding.UTF8, new byte[] { 0, 0 }, 0, 0, null, 0, "00U");
}
[Fact]
public void NegTest21()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.UTF8, new byte[] { 0, 0 }, -1, 0, s_output, 0, "00V");
}
[Fact]
public void NegTest22()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.UTF8, new byte[] { 0, 0 }, 0, 0, s_output, -1, "00W");
}
[Fact]
public void NegTest23()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.UTF8, new byte[] { 0, 0 }, 3, 0, s_output, 0, "00X");
}
[Fact]
public void NegTest24()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.UTF8, new byte[] { 0, 0 }, 0, 0, s_output, 21, "00Y");
}
[Fact]
public void NegTest25()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.UTF8, new byte[] { 0, 0 }, 0, 10, s_output, 0, "00Z");
}
[Fact]
public void NegTest26()
{
// charIndex 20 is the very end of the buffer, so there is no room for the decoded chars.
NegativeTestChars3<ArgumentException>(Encoding.UTF8, new byte[] { 0, 0 }, 0, 2, s_output, 20, "0A0");
}
[Fact]
public void NegTest27()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.UTF8, new byte[] { 0, 0 }, 0, -1, s_output, 0, "0A1");
}
[Fact]
public void NegTest28()
{
NegativeTestChars3<ArgumentNullException>(Encoding.Unicode, null, 0, 0, s_output, 0, "00T3");
}
[Fact]
public void NegTest29()
{
NegativeTestChars3<ArgumentNullException>(Encoding.Unicode, new byte[] { 0, 0 }, 0, 0, null, 0, "00U3");
}
[Fact]
public void NegTest30()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.Unicode, new byte[] { 0, 0 }, -1, 0, s_output, 0, "00V3");
}
[Fact]
public void NegTest31()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.Unicode, new byte[] { 0, 0 }, 0, 0, s_output, -1, "00W3");
}
[Fact]
public void NegTest32()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.Unicode, new byte[] { 0, 0 }, 3, 0, s_output, 0, "00X3");
}
[Fact]
public void NegTest33()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.Unicode, new byte[] { 0, 0 }, 0, 0, s_output, 21, "00Y3");
}
[Fact]
public void NegTest34()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.Unicode, new byte[] { 0, 0 }, 0, 10, s_output, 0, "00Z3");
}
[Fact]
public void NegTest35()
{
NegativeTestChars3<ArgumentException>(Encoding.Unicode, new byte[] { 0, 0 }, 0, 2, s_output, 20, "0A03");
}
[Fact]
public void NegTest36()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.Unicode, new byte[] { 0, 0 }, 0, -1, s_output, 0, "0A13");
}
[Fact]
public void NegTest37()
{
NegativeTestChars3<ArgumentNullException>(Encoding.BigEndianUnicode, null, 0, 0, s_output, 0, "00T4");
}
[Fact]
public void NegTest38()
{
NegativeTestChars3<ArgumentNullException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 0, 0, null, 0, "00U4");
}
[Fact]
public void NegTest39()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, -1, 0, s_output, 0, "00V4");
}
[Fact]
public void NegTest40()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 0, 0, s_output, -1, "00W4");
}
[Fact]
public void NegTest41()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 3, 0, s_output, 0, "00X4");
}
[Fact]
public void NegTest42()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 0, 0, s_output, 21, "00Y4");
}
[Fact]
public void NegTest43()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 0, 10, s_output, 0, "00Z4");
}
[Fact]
public void NegTest44()
{
NegativeTestChars3<ArgumentException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 0, 2, s_output, 20, "0A04");
}
[Fact]
public void NegTest45()
{
NegativeTestChars3<ArgumentOutOfRangeException>(Encoding.BigEndianUnicode, new byte[] { 0, 0 }, 0, -1, s_output, 0, "0A14");
}
#endregion
// Decodes the given bytes with the supplied encoding and asserts that the
// resulting string equals the expected value. The id parameter is retained
// for call-site bookkeeping only.
public void PositiveTestString(Encoding enc, string expected, byte[] bytes, string id)
{
    string actual = new string(enc.GetChars(bytes));
    Assert.Equal(expected, actual);
}
// Asserts that decoding the given bytes via GetChars(byte[]) throws T.
// GetChars is expected to throw before the string is ever materialized.
public void NegativeTestChars<T>(Encoding enc, byte[] bytes, string id) where T : Exception
{
    Assert.Throws<T>(() =>
    {
        string result = new string(enc.GetChars(bytes));
    });
}
// Asserts that decoding via GetChars(byte[], int, int) with the given
// index/count throws T.
public void NegativeTestChars2<T>(Encoding enc, byte[] bytes, int index, int count, string id) where T : Exception
{
    Assert.Throws<T>(() =>
    {
        string result = new string(enc.GetChars(bytes, index, count));
    });
}
// Asserts that decoding via GetChars(byte[], int, int, char[], int) into the
// supplied destination buffer throws T.
public void NegativeTestChars3<T>(Encoding enc, byte[] bytes, int index, int count, char[] chars, int bIndex, string id)
    where T : Exception
{
    Assert.Throws<T>(() =>
    {
        int written = enc.GetChars(bytes, index, count, chars, bIndex);
        string result = new string(chars);
    });
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Hyak.Common.Internals;
using Microsoft.Azure;
using Microsoft.Azure.Management.ApiManagement;
using Microsoft.Azure.Management.ApiManagement.SmapiModels;
using Newtonsoft.Json.Linq;
namespace Microsoft.Azure.Management.ApiManagement
{
/// <summary>
/// Operations for managing Loggers.
/// </summary>
internal partial class LoggerOperations : IServiceOperations<ApiManagementClient>, ILoggerOperations
{
/// <summary>
/// Initializes a new instance of the LoggerOperations class, binding it to
/// the service client it will issue requests through.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal LoggerOperations(ApiManagementClient client)
{
    _client = client;
}
// Backing service client supplied at construction time.
private ApiManagementClient _client;

/// <summary>
/// Gets a reference to the
/// Microsoft.Azure.Management.ApiManagement.ApiManagementClient.
/// </summary>
public ApiManagementClient Client
{
    get { return _client; }
}
/// <summary>
/// Creates new logger.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='loggerid'>
/// Required. Identifier of the logger.
/// </param>
/// <param name='parameters'>
/// Required. Create parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> CreateAsync(string resourceGroupName, string serviceName, string loggerid, LoggerCreateParameters parameters, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (serviceName == null)
{
throw new ArgumentNullException("serviceName");
}
if (loggerid == null)
{
throw new ArgumentNullException("loggerid");
}
if (parameters == null)
{
throw new ArgumentNullException("parameters");
}
if (parameters.Credentials == null)
{
throw new ArgumentNullException("parameters.Credentials");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serviceName", serviceName);
tracingParameters.Add("loggerid", loggerid);
tracingParameters.Add("parameters", parameters);
TracingAdapter.Enter(invocationId, this, "CreateAsync", tracingParameters);
}
// Construct URL
// PUT .../service/{serviceName}/loggers/{loggerid}?api-version=2016-10-10
string url = "";
url = url + "/subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/resourceGroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
url = url + "/providers/";
url = url + "Microsoft.ApiManagement";
url = url + "/service/";
url = url + Uri.EscapeDataString(serviceName);
url = url + "/loggers/";
url = url + Uri.EscapeDataString(loggerid);
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2016-10-10");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Put;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Serialize Request
// The body is built by hand as a JObject (Hyak-generated serialization pattern).
string requestContent = null;
JToken requestDoc = null;
JObject loggerCreateParametersValue = new JObject();
requestDoc = loggerCreateParametersValue;
loggerCreateParametersValue["type"] = parameters.Type.ToString();
if (parameters.Description != null)
{
loggerCreateParametersValue["description"] = parameters.Description;
}
// NOTE(review): this null check is redundant — parameters.Credentials was already
// guarded above — but is kept as emitted by the code generator.
if (parameters.Credentials != null)
{
if (parameters.Credentials is ILazyCollection == false || ((ILazyCollection)parameters.Credentials).IsInitialized)
{
JObject credentialsDictionary = new JObject();
foreach (KeyValuePair<string, string> pair in parameters.Credentials)
{
string credentialsKey = pair.Key;
string credentialsValue = pair.Value;
credentialsDictionary[credentialsKey] = credentialsValue;
}
loggerCreateParametersValue["credentials"] = credentialsDictionary;
}
}
if (parameters.IsBuffered != null)
{
loggerCreateParametersValue["isBuffered"] = parameters.IsBuffered.Value;
}
requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
// Replace the StringContent default content type with plain application/json.
httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json");
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Only 201 Created and 204 NoContent are treated as success for this PUT;
// anything else is surfaced as a CloudException built from the response body.
if (statusCode != HttpStatusCode.Created && statusCode != HttpStatusCode.NoContent)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
AzureOperationResponse result = null;
// Deserialize Response
result = new AzureOperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Deletes specific logger of the Api Management service instance.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='loggerid'>
/// Required. Identifier of the logger.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> DeleteAsync(string resourceGroupName, string serviceName, string loggerid, string etag, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (serviceName == null)
{
throw new ArgumentNullException("serviceName");
}
if (loggerid == null)
{
throw new ArgumentNullException("loggerid");
}
if (etag == null)
{
throw new ArgumentNullException("etag");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serviceName", serviceName);
tracingParameters.Add("loggerid", loggerid);
tracingParameters.Add("etag", etag);
TracingAdapter.Enter(invocationId, this, "DeleteAsync", tracingParameters);
}
// Construct URL
// DELETE .../service/{serviceName}/loggers/{loggerid}?api-version=2016-10-10
string url = "";
url = url + "/subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/resourceGroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
url = url + "/providers/";
url = url + "Microsoft.ApiManagement";
url = url + "/service/";
url = url + Uri.EscapeDataString(serviceName);
url = url + "/loggers/";
url = url + Uri.EscapeDataString(loggerid);
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2016-10-10");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Delete;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// The caller-supplied ETag is sent as If-Match, so the delete only applies
// to the resource version the caller last observed.
httpRequest.Headers.TryAddWithoutValidation("If-Match", etag);
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Only 200 OK and 204 NoContent are treated as success; anything else is
// surfaced as a CloudException built from the response body.
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.NoContent)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
AzureOperationResponse result = null;
// Deserialize Response
result = new AzureOperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Gets specific logger.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='loggerid'>
/// Required. Identifier of the logger.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Get Logger operation response details.
/// </returns>
public async Task<LoggerGetResponse> GetAsync(string resourceGroupName, string serviceName, string loggerid, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (serviceName == null)
{
throw new ArgumentNullException("serviceName");
}
if (loggerid == null)
{
throw new ArgumentNullException("loggerid");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serviceName", serviceName);
tracingParameters.Add("loggerid", loggerid);
TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
}
// Construct URL
// GET .../service/{serviceName}/loggers/{loggerid}?api-version=2016-10-10
string url = "";
url = url + "/subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/resourceGroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
url = url + "/providers/";
url = url + "Microsoft.ApiManagement";
url = url + "/service/";
url = url + Uri.EscapeDataString(serviceName);
url = url + "/loggers/";
url = url + Uri.EscapeDataString(loggerid);
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2016-10-10");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Only 200 OK is treated as success for this GET.
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
LoggerGetResponse result = null;
// Deserialize Response
// Manually maps the JSON fields id/type/description/isBuffered/credentials
// onto a LoggerGetContract (Hyak-generated deserialization pattern).
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new LoggerGetResponse();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
LoggerGetContract valueInstance = new LoggerGetContract();
result.Value = valueInstance;
JToken idValue = responseDoc["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
valueInstance.IdPath = idInstance;
}
JToken typeValue = responseDoc["type"];
if (typeValue != null && typeValue.Type != JTokenType.Null)
{
// Case-insensitive parse of the logger type enum value.
LoggerTypeContract typeInstance = ((LoggerTypeContract)Enum.Parse(typeof(LoggerTypeContract), ((string)typeValue), true));
valueInstance.Type = typeInstance;
}
JToken descriptionValue = responseDoc["description"];
if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
{
string descriptionInstance = ((string)descriptionValue);
valueInstance.Description = descriptionInstance;
}
JToken isBufferedValue = responseDoc["isBuffered"];
if (isBufferedValue != null && isBufferedValue.Type != JTokenType.Null)
{
bool isBufferedInstance = ((bool)isBufferedValue);
valueInstance.IsBuffered = isBufferedInstance;
}
JToken credentialsSequenceElement = ((JToken)responseDoc["credentials"]);
if (credentialsSequenceElement != null && credentialsSequenceElement.Type != JTokenType.Null)
{
foreach (JProperty property in credentialsSequenceElement)
{
string credentialsKey = ((string)property.Name);
string credentialsValue = ((string)property.Value);
valueInstance.Credentials.Add(credentialsKey, credentialsValue);
}
}
}
}
result.StatusCode = statusCode;
// Capture the resource ETag so callers can issue conditional updates/deletes.
if (httpResponse.Headers.Contains("ETag"))
{
result.ETag = httpResponse.Headers.GetValues("ETag").FirstOrDefault();
}
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// List all loggers.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='query'>
/// Optional.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List Logger operation response details.
/// </returns>
public async Task<LoggerListResponse> ListAsync(string resourceGroupName, string serviceName, QueryParameters query, CancellationToken cancellationToken)
{
    // Validate the required identifiers; 'query' is optional and may be null.
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    if (serviceName == null)
    {
        throw new ArgumentNullException("serviceName");
    }
    // Tracing: record the invocation and its parameters when tracing is enabled.
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("serviceName", serviceName);
        tracingParameters.Add("query", query);
        TracingAdapter.Enter(invocationId, this, "ListAsync", tracingParameters);
    }
    // Construct URL:
    // /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.ApiManagement/service/{svc}/loggers
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourceGroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    url = url + "/providers/";
    url = url + "Microsoft.ApiManagement";
    url = url + "/service/";
    url = url + Uri.EscapeDataString(serviceName);
    url = url + "/loggers";
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2016-10-10");
    // Optional OData paging/filter parameters from 'query'.
    List<string> odataFilter = new List<string>();
    if (query != null && query.Filter != null)
    {
        odataFilter.Add(Uri.EscapeDataString(query.Filter));
    }
    if (odataFilter.Count > 0)
    {
        // string.Join(null, ...) joins with no separator; at most one filter is added above.
        queryParameters.Add("$filter=" + string.Join(null, odataFilter));
    }
    if (query != null && query.Top != null)
    {
        queryParameters.Add("$top=" + Uri.EscapeDataString(query.Top.Value.ToString()));
    }
    if (query != null && query.Skip != null)
    {
        queryParameters.Add("$skip=" + Uri.EscapeDataString(query.Skip.Value.ToString()));
    }
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials (e.g. bearer token) via the configured credentials object.
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            // Anything other than 200 OK is surfaced as a CloudException.
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            LoggerListResponse result = null;
            // Deserialize Response: { "value": [ logger... ], "count": n, "nextLink": "..." }
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new LoggerListResponse();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    LoggerPaged resultInstance = new LoggerPaged();
                    result.Result = resultInstance;
                    JToken valueArray = responseDoc["value"];
                    if (valueArray != null && valueArray.Type != JTokenType.Null)
                    {
                        // Each element of "value" becomes one LoggerGetContract.
                        foreach (JToken valueValue in ((JArray)valueArray))
                        {
                            LoggerGetContract loggerGetContractInstance = new LoggerGetContract();
                            resultInstance.Values.Add(loggerGetContractInstance);
                            JToken idValue = valueValue["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                loggerGetContractInstance.IdPath = idInstance;
                            }
                            JToken typeValue = valueValue["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                // Case-insensitive parse of the logger type enum.
                                LoggerTypeContract typeInstance = ((LoggerTypeContract)Enum.Parse(typeof(LoggerTypeContract), ((string)typeValue), true));
                                loggerGetContractInstance.Type = typeInstance;
                            }
                            JToken descriptionValue = valueValue["description"];
                            if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
                            {
                                string descriptionInstance = ((string)descriptionValue);
                                loggerGetContractInstance.Description = descriptionInstance;
                            }
                            JToken isBufferedValue = valueValue["isBuffered"];
                            if (isBufferedValue != null && isBufferedValue.Type != JTokenType.Null)
                            {
                                bool isBufferedInstance = ((bool)isBufferedValue);
                                loggerGetContractInstance.IsBuffered = isBufferedInstance;
                            }
                            // "credentials" is a free-form string-to-string map.
                            JToken credentialsSequenceElement = ((JToken)valueValue["credentials"]);
                            if (credentialsSequenceElement != null && credentialsSequenceElement.Type != JTokenType.Null)
                            {
                                foreach (JProperty property in credentialsSequenceElement)
                                {
                                    string credentialsKey = ((string)property.Name);
                                    string credentialsValue = ((string)property.Value);
                                    loggerGetContractInstance.Credentials.Add(credentialsKey, credentialsValue);
                                }
                            }
                        }
                    }
                    JToken countValue = responseDoc["count"];
                    if (countValue != null && countValue.Type != JTokenType.Null)
                    {
                        long countInstance = ((long)countValue);
                        resultInstance.TotalCount = countInstance;
                    }
                    // "nextLink" is passed to ListNextAsync to fetch the next page.
                    JToken nextLinkValue = responseDoc["nextLink"];
                    if (nextLinkValue != null && nextLinkValue.Type != JTokenType.Null)
                    {
                        string nextLinkInstance = ((string)nextLinkValue);
                        resultInstance.NextLink = nextLinkInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always dispose the response, even when an exception is thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// List next logger page.
/// </summary>
/// <param name='nextLink'>
/// Required. NextLink from the previous successful call to List
/// operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List Logger operation response details.
/// </returns>
public async Task<LoggerListResponse> ListNextAsync(string nextLink, CancellationToken cancellationToken)
{
    // Validate: 'nextLink' is the absolute continuation URL returned by ListAsync.
    if (nextLink == null)
    {
        throw new ArgumentNullException("nextLink");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextLink", nextLink);
        TracingAdapter.Enter(invocationId, this, "ListNextAsync", tracingParameters);
    }
    // Construct URL: the next link is used as-is (it already contains query parameters).
    string url = "";
    url = url + nextLink;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            // Anything other than 200 OK is surfaced as a CloudException.
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            LoggerListResponse result = null;
            // Deserialize Response: same paged shape as ListAsync
            // ({ "value": [...], "count": n, "nextLink": "..." }).
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new LoggerListResponse();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    LoggerPaged resultInstance = new LoggerPaged();
                    result.Result = resultInstance;
                    JToken valueArray = responseDoc["value"];
                    if (valueArray != null && valueArray.Type != JTokenType.Null)
                    {
                        foreach (JToken valueValue in ((JArray)valueArray))
                        {
                            LoggerGetContract loggerGetContractInstance = new LoggerGetContract();
                            resultInstance.Values.Add(loggerGetContractInstance);
                            JToken idValue = valueValue["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                loggerGetContractInstance.IdPath = idInstance;
                            }
                            JToken typeValue = valueValue["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                // Case-insensitive parse of the logger type enum.
                                LoggerTypeContract typeInstance = ((LoggerTypeContract)Enum.Parse(typeof(LoggerTypeContract), ((string)typeValue), true));
                                loggerGetContractInstance.Type = typeInstance;
                            }
                            JToken descriptionValue = valueValue["description"];
                            if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
                            {
                                string descriptionInstance = ((string)descriptionValue);
                                loggerGetContractInstance.Description = descriptionInstance;
                            }
                            JToken isBufferedValue = valueValue["isBuffered"];
                            if (isBufferedValue != null && isBufferedValue.Type != JTokenType.Null)
                            {
                                bool isBufferedInstance = ((bool)isBufferedValue);
                                loggerGetContractInstance.IsBuffered = isBufferedInstance;
                            }
                            // "credentials" is a free-form string-to-string map.
                            JToken credentialsSequenceElement = ((JToken)valueValue["credentials"]);
                            if (credentialsSequenceElement != null && credentialsSequenceElement.Type != JTokenType.Null)
                            {
                                foreach (JProperty property in credentialsSequenceElement)
                                {
                                    string credentialsKey = ((string)property.Name);
                                    string credentialsValue = ((string)property.Value);
                                    loggerGetContractInstance.Credentials.Add(credentialsKey, credentialsValue);
                                }
                            }
                        }
                    }
                    JToken countValue = responseDoc["count"];
                    if (countValue != null && countValue.Type != JTokenType.Null)
                    {
                        long countInstance = ((long)countValue);
                        resultInstance.TotalCount = countInstance;
                    }
                    JToken nextLinkValue = responseDoc["nextLink"];
                    if (nextLinkValue != null && nextLinkValue.Type != JTokenType.Null)
                    {
                        string nextLinkInstance = ((string)nextLinkValue);
                        resultInstance.NextLink = nextLinkInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always dispose the response, even when an exception is thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Patches specific logger.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='loggerid'>
/// Required. Identifier of the logger.
/// </param>
/// <param name='parameters'>
/// Required. Update parameters.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> UpdateAsync(string resourceGroupName, string serviceName, string loggerid, LoggerUpdateParameters parameters, string etag, CancellationToken cancellationToken)
{
    // Validate: all parameters are required; 'etag' is sent as If-Match for
    // optimistic concurrency.
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    if (serviceName == null)
    {
        throw new ArgumentNullException("serviceName");
    }
    if (loggerid == null)
    {
        throw new ArgumentNullException("loggerid");
    }
    if (parameters == null)
    {
        throw new ArgumentNullException("parameters");
    }
    if (etag == null)
    {
        throw new ArgumentNullException("etag");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("serviceName", serviceName);
        tracingParameters.Add("loggerid", loggerid);
        tracingParameters.Add("parameters", parameters);
        tracingParameters.Add("etag", etag);
        TracingAdapter.Enter(invocationId, this, "UpdateAsync", tracingParameters);
    }
    // Construct URL:
    // .../providers/Microsoft.ApiManagement/service/{svc}/loggers/{loggerid}
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourceGroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    url = url + "/providers/";
    url = url + "Microsoft.ApiManagement";
    url = url + "/service/";
    url = url + Uri.EscapeDataString(serviceName);
    url = url + "/loggers/";
    url = url + Uri.EscapeDataString(loggerid);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2016-10-10");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        // PATCH is not a member of HttpMethod in older frameworks, so build it by name.
        httpRequest.Method = new HttpMethod("PATCH");
        httpRequest.RequestUri = new Uri(url);
        // Set Headers: conditional update keyed on the caller-supplied ETag.
        httpRequest.Headers.TryAddWithoutValidation("If-Match", etag);
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Serialize Request: only non-null optional properties are emitted.
        string requestContent = null;
        JToken requestDoc = null;
        JObject loggerUpdateParametersValue = new JObject();
        requestDoc = loggerUpdateParametersValue;
        loggerUpdateParametersValue["type"] = parameters.Type.ToString();
        if (parameters.Description != null)
        {
            loggerUpdateParametersValue["description"] = parameters.Description;
        }
        if (parameters.Credentials != null)
        {
            // Skip lazily-loaded credential collections that were never materialized.
            if (parameters.Credentials is ILazyCollection == false || ((ILazyCollection)parameters.Credentials).IsInitialized)
            {
                JObject credentialsDictionary = new JObject();
                foreach (KeyValuePair<string, string> pair in parameters.Credentials)
                {
                    string credentialsKey = pair.Key;
                    string credentialsValue = pair.Value;
                    credentialsDictionary[credentialsKey] = credentialsValue;
                }
                loggerUpdateParametersValue["credentials"] = credentialsDictionary;
            }
        }
        if (parameters.IsBuffered != null)
        {
            loggerUpdateParametersValue["isBuffered"] = parameters.IsBuffered.Value;
        }
        requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json");
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            // 200 OK and 204 NoContent are both success for PATCH; anything else throws.
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.NoContent)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result: no body to deserialize, just status code and request id.
            AzureOperationResponse result = null;
            // Deserialize Response
            result = new AzureOperationResponse();
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always dispose the response, even when an exception is thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Diagnostics.Log;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Options;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Versions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Diagnostics.EngineV1
{
internal partial class DiagnosticIncrementalAnalyzer : BaseDiagnosticIncrementalAnalyzer
{
// Identifies this analyzer instance in logging/telemetry (passed in by the owner).
private readonly int _correlationId;
// Per-document map of method-level member spans, used for incremental body analysis.
private readonly MemberRangeMap _memberRangeMap;
// Executes analyzers and returns cached-or-fresh AnalysisData per state set.
private readonly AnalyzerExecutor _executor;
// Tracks analyzer StateSets per project and raises analyzer-reference change events.
private readonly StateManager _stateManager;
/// <summary>
/// PERF: Always run analyzers sequentially for background analysis.
/// </summary>
private const bool ConcurrentAnalysis = false;
/// <summary>
/// Always compute suppressed diagnostics - diagnostic clients may or may not request for suppressed diagnostics.
/// </summary>
private const bool ReportSuppressedDiagnostics = true;
/// <summary>
/// Creates the V1 incremental diagnostic analyzer and wires it to analyzer
/// reference changes so stale project diagnostics can be cleared.
/// </summary>
public DiagnosticIncrementalAnalyzer(
    DiagnosticAnalyzerService owner,
    int correlationId,
    Workspace workspace,
    HostAnalyzerManager analyzerManager,
    AbstractHostDiagnosticUpdateSource hostDiagnosticUpdateSource)
    : base(owner, workspace, analyzerManager, hostDiagnosticUpdateSource)
{
    _correlationId = correlationId;
    _memberRangeMap = new MemberRangeMap();
    _executor = new AnalyzerExecutor(this);
    _stateManager = new StateManager(analyzerManager);
    // Clear cached project states when analyzer references are removed from a project.
    _stateManager.ProjectAnalyzerReferenceChanged += OnProjectAnalyzerReferenceChanged;
}
/// <summary>
/// Handles analyzer-reference changes on a project. Only removals need work:
/// diagnostics produced by removed analyzers must be cleared from the caches.
/// </summary>
private void OnProjectAnalyzerReferenceChanged(object sender, ProjectAnalyzerReferenceChangedEventArgs e)
{
    // Additions require no refresh here; only act when analyzers were removed.
    if (e.Removed.Length > 0)
    {
        // events will be automatically serialized.
        ClearProjectStatesAsync(e.Project, e.Removed, CancellationToken.None);
    }
}
/// <summary>
/// Called when a document is opened: drops document-level cached diagnostics
/// (project-level state is kept) so they are recomputed for the open document.
/// </summary>
public override Task DocumentOpenAsync(Document document, CancellationToken cancellationToken)
{
    using (Logger.LogBlock(FunctionId.Diagnostics_DocumentOpen, GetOpenLogMessage, document, cancellationToken))
    {
        return ClearOnlyDocumentStates(document, raiseEvent: true, cancellationToken: cancellationToken);
    }
}
/// <summary>
/// Called when a document is closed: drops the member-range info and the
/// document-level cached diagnostics for it.
/// </summary>
public override Task DocumentCloseAsync(Document document, CancellationToken cancellationToken)
{
    using (Logger.LogBlock(FunctionId.Diagnostics_DocumentClose, GetResetLogMessage, document, cancellationToken))
    {
        // we don't need the info for closed file
        _memberRangeMap.Remove(document.Id);
        return ClearOnlyDocumentStates(document, raiseEvent: true, cancellationToken: cancellationToken);
    }
}
/// <summary>
/// Called when a document is reset: clears document-level states and raises
/// events so listeners see the removal before re-analysis.
/// </summary>
public override Task DocumentResetAsync(Document document, CancellationToken cancellationToken)
{
    using (Logger.LogBlock(FunctionId.Diagnostics_DocumentReset, GetResetLogMessage, document, cancellationToken))
    {
        // clear states for re-analysis and raise events about it. otherwise, some states might not updated on re-analysis
        // due to our build-live de-duplication logic where we put all state in Documents state.
        return ClearOnlyDocumentStates(document, raiseEvent: true, cancellationToken: cancellationToken);
    }
}
/// <summary>
/// Clears only the document-scoped diagnostic states for <paramref name="document"/>
/// (project state untouched), optionally raising removal events.
/// </summary>
private Task ClearOnlyDocumentStates(Document document, bool raiseEvent, CancellationToken cancellationToken)
{
    // we remove whatever information we used to have on document open/close and re-calculate diagnostics
    // we had to do this since some diagnostic analyzer changes its behavior based on whether the document is opened or not.
    // so we can't use cached information.
    ClearDocumentStates(document, _stateManager.GetStateSets(document.Project), raiseEvent, includeProjectState: false, cancellationToken: cancellationToken);
    // The clearing itself is synchronous; return a completed task for the async contract.
    return SpecializedTasks.EmptyTask;
}
/// <summary>
/// Decides whether background analysis should run for the given language:
/// true when both closed-file diagnostics and full-solution analysis are
/// enabled, or when analysis is explicitly forced (e.g. the document is open).
/// </summary>
private bool CheckOption(Workspace workspace, string language, bool forceAnalysis)
{
    // Evaluate the workspace options first (same order as before), then the force flag.
    bool closedFileAnalysisEnabled =
        workspace.Options.GetOption(ServiceFeatureOnOffOptions.ClosedFileDiagnostic, language) &&
        workspace.Options.GetOption(RuntimeOptions.FullSolutionAnalysis);

    return closedFileAnalysisEnabled || forceAnalysis;
}
/// <summary>
/// Builds a <see cref="CompilationWithAnalyzers"/> for the project using the
/// analyzers tracked by the state manager, filtering out analyzers suppressed
/// by compilation options. Returns null when no analyzer survives filtering.
/// </summary>
internal CompilationWithAnalyzers GetCompilationWithAnalyzers(Project project, Compilation compilation, bool concurrentAnalysis, bool reportSuppressedDiagnostics)
{
    Contract.ThrowIfFalse(project.SupportsCompilation);
    Contract.ThrowIfNull(compilation);
    Func<Exception, bool> analyzerExceptionFilter = ex =>
    {
        if (project.Solution.Workspace.Options.GetOption(InternalDiagnosticsOptions.CrashOnAnalyzerException))
        {
            // if option is on, crash the host to get crash dump.
            FatalError.ReportUnlessCanceled(ex);
        }
        // Returning true means the exception is handled (analyzer failure does not crash analysis).
        return true;
    };
    var analysisOptions = new CompilationWithAnalyzersOptions(
        new WorkspaceAnalyzerOptions(project.AnalyzerOptions, project.Solution.Workspace),
        GetOnAnalyzerException(project.Id),
        analyzerExceptionFilter,
        concurrentAnalysis,
        logAnalyzerExecutionTime: true,
        reportSuppressedDiagnostics: reportSuppressedDiagnostics);
    // Drop analyzers fully suppressed by the compilation's diagnostic options;
    // Distinct guards against the same analyzer instance being registered twice.
    var analyzers = _stateManager.GetAnalyzers(project);
    var filteredAnalyzers = analyzers
        .Where(a => !CompilationWithAnalyzers.IsDiagnosticAnalyzerSuppressed(a, compilation.Options, analysisOptions.OnAnalyzerException))
        .Distinct()
        .ToImmutableArray();
    if (filteredAnalyzers.IsEmpty)
    {
        return null;
    }
    return new CompilationWithAnalyzers(compilation, filteredAnalyzers, analysisOptions);
}
/// <summary>
/// Public entry point for syntax analysis of a document; runs all diagnostics
/// (no id filter) honoring the closed-file options.
/// </summary>
public override async Task AnalyzeSyntaxAsync(Document document, CancellationToken cancellationToken)
{
    await AnalyzeSyntaxAsync(document, diagnosticIds: null, skipClosedFileChecks: false, cancellationToken: cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Runs syntax-level analyzers for <paramref name="document"/>, persisting
/// results and raising update events. <paramref name="diagnosticIds"/> limits
/// which analyzers run; null means all.
/// </summary>
private async Task AnalyzeSyntaxAsync(Document document, ImmutableHashSet<string> diagnosticIds, bool skipClosedFileChecks, CancellationToken cancellationToken)
{
    try
    {
        // Respect closed-file/background-analysis options unless the caller opted out.
        if (!skipClosedFileChecks && !CheckOption(document.Project.Solution.Workspace, document.Project.Language, document.IsOpen()))
        {
            return;
        }
        var textVersion = await document.GetTextVersionAsync(cancellationToken).ConfigureAwait(false);
        var dataVersion = await document.GetSyntaxVersionAsync(cancellationToken).ConfigureAwait(false);
        var versions = new VersionArgument(textVersion, dataVersion);
        var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
        // root can be null (e.g. no syntax tree support); span stays null in that case.
        var fullSpan = root == null ? null : (TextSpan?)root.FullSpan;
        var userDiagnosticDriver = new DiagnosticAnalyzerDriver(document, fullSpan, root, this, ConcurrentAnalysis, ReportSuppressedDiagnostics, cancellationToken);
        var openedDocument = document.IsOpen();
        foreach (var stateSet in _stateManager.GetOrUpdateStateSets(document.Project))
        {
            // Skipped analyzers get their stale syntax diagnostics cleared.
            if (SkipRunningAnalyzer(document.Project.CompilationOptions, userDiagnosticDriver, openedDocument, skipClosedFileChecks, stateSet))
            {
                await ClearExistingDiagnostics(document, stateSet, StateType.Syntax, cancellationToken).ConfigureAwait(false);
                continue;
            }
            if (ShouldRunAnalyzerForStateType(stateSet.Analyzer, StateType.Syntax, diagnosticIds))
            {
                var data = await _executor.GetSyntaxAnalysisDataAsync(userDiagnosticDriver, stateSet, versions).ConfigureAwait(false);
                // Cached results need no persistence; just re-raise if needed.
                if (data.FromCache)
                {
                    RaiseDiagnosticsCreatedFromCacheIfNeeded(StateType.Syntax, document, stateSet, data.Items);
                    continue;
                }
                var state = stateSet.GetState(StateType.Syntax);
                await state.PersistAsync(document, data.ToPersistData(), cancellationToken).ConfigureAwait(false);
                RaiseDocumentDiagnosticsUpdatedIfNeeded(StateType.Syntax, document, stateSet, data.OldItems, data.Items);
            }
        }
    }
    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
    {
        // ReportUnlessCanceled returns true only for non-cancellation failures,
        // which are fatal; cancellation propagates normally.
        throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Public entry point for semantic analysis of a document; <paramref name="bodyOpt"/>
/// (when non-null) restricts analysis to a single method body.
/// </summary>
public override async Task AnalyzeDocumentAsync(Document document, SyntaxNode bodyOpt, CancellationToken cancellationToken)
{
    await AnalyzeDocumentAsync(document, bodyOpt, diagnosticIds: null, skipClosedFileChecks: false, cancellationToken: cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Dispatches document analysis either to whole-document analysis or, when a
/// method body is supplied (open files only), to incremental body analysis.
/// </summary>
private async Task AnalyzeDocumentAsync(Document document, SyntaxNode bodyOpt, ImmutableHashSet<string> diagnosticIds, bool skipClosedFileChecks, CancellationToken cancellationToken)
{
    try
    {
        // Respect closed-file/background-analysis options unless the caller opted out.
        if (!skipClosedFileChecks && !CheckOption(document.Project.Solution.Workspace, document.Project.Language, document.IsOpen()))
        {
            return;
        }
        var textVersion = await document.GetTextVersionAsync(cancellationToken).ConfigureAwait(false);
        var projectVersion = await document.Project.GetDependentVersionAsync(cancellationToken).ConfigureAwait(false);
        var dataVersion = await document.Project.GetDependentSemanticVersionAsync(cancellationToken).ConfigureAwait(false);
        var versions = new VersionArgument(textVersion, dataVersion, projectVersion);
        if (bodyOpt == null)
        {
            await AnalyzeDocumentAsync(document, versions, diagnosticIds, skipClosedFileChecks, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            // only open file can go this route
            await AnalyzeBodyDocumentAsync(document, bodyOpt, versions, cancellationToken).ConfigureAwait(false);
        }
    }
    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
    {
        throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Incrementally analyzes a single method body of an open document. Analyzers
/// that support span-based semantic analysis get a driver scoped to the member;
/// others get a whole-document driver.
/// </summary>
private async Task AnalyzeBodyDocumentAsync(Document document, SyntaxNode member, VersionArgument versions, CancellationToken cancellationToken)
{
    try
    {
        // syntax facts service must exist, otherwise, this method won't have called.
        var syntaxFacts = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
        var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
        var memberId = syntaxFacts.GetMethodLevelMemberId(root, member);
        var spanBasedDriver = new DiagnosticAnalyzerDriver(document, member.FullSpan, root, this, ConcurrentAnalysis, ReportSuppressedDiagnostics, cancellationToken);
        var documentBasedDriver = new DiagnosticAnalyzerDriver(document, root.FullSpan, root, this, ConcurrentAnalysis, ReportSuppressedDiagnostics, cancellationToken);
        foreach (var stateSet in _stateManager.GetOrUpdateStateSets(document.Project))
        {
            // Suppressed analyzers lose their stale document diagnostics.
            if (Owner.IsAnalyzerSuppressed(stateSet.Analyzer, document.Project))
            {
                await ClearExistingDiagnostics(document, stateSet, StateType.Document, cancellationToken).ConfigureAwait(false);
                continue;
            }
            if (ShouldRunAnalyzerForStateType(stateSet.Analyzer, StateType.Document))
            {
                var supportsSemanticInSpan = stateSet.Analyzer.SupportsSpanBasedSemanticDiagnosticAnalysis();
                var userDiagnosticDriver = supportsSemanticInSpan ? spanBasedDriver : documentBasedDriver;
                var ranges = _memberRangeMap.GetSavedMemberRange(stateSet.Analyzer, document);
                var data = await _executor.GetDocumentBodyAnalysisDataAsync(
                    stateSet, versions, userDiagnosticDriver, root, member, memberId, supportsSemanticInSpan, ranges).ConfigureAwait(false);
                // Keep the member-range bookkeeping current for the next incremental pass.
                _memberRangeMap.UpdateMemberRange(stateSet.Analyzer, document, versions.TextVersion, memberId, member.FullSpan, ranges);
                var state = stateSet.GetState(StateType.Document);
                await state.PersistAsync(document, data.ToPersistData(), cancellationToken).ConfigureAwait(false);
                if (data.FromCache)
                {
                    RaiseDiagnosticsCreatedFromCacheIfNeeded(StateType.Document, document, stateSet, data.Items);
                    continue;
                }
                RaiseDocumentDiagnosticsUpdatedIfNeeded(StateType.Document, document, stateSet, data.OldItems, data.Items);
            }
        }
    }
    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
    {
        throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Runs document-level (semantic) analyzers over the whole document, persisting
/// results and raising update events.
/// </summary>
private async Task AnalyzeDocumentAsync(Document document, VersionArgument versions, ImmutableHashSet<string> diagnosticIds, bool skipClosedFileChecks, CancellationToken cancellationToken)
{
    try
    {
        var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
        // root can be null; span stays null in that case.
        var fullSpan = root == null ? null : (TextSpan?)root.FullSpan;
        var userDiagnosticDriver = new DiagnosticAnalyzerDriver(document, fullSpan, root, this, ConcurrentAnalysis, ReportSuppressedDiagnostics, cancellationToken);
        bool openedDocument = document.IsOpen();
        foreach (var stateSet in _stateManager.GetOrUpdateStateSets(document.Project))
        {
            if (SkipRunningAnalyzer(document.Project.CompilationOptions, userDiagnosticDriver, openedDocument, skipClosedFileChecks, stateSet))
            {
                await ClearExistingDiagnostics(document, stateSet, StateType.Document, cancellationToken).ConfigureAwait(false);
                continue;
            }
            if (ShouldRunAnalyzerForStateType(stateSet.Analyzer, StateType.Document, diagnosticIds))
            {
                var data = await _executor.GetDocumentAnalysisDataAsync(userDiagnosticDriver, stateSet, versions).ConfigureAwait(false);
                if (data.FromCache)
                {
                    RaiseDiagnosticsCreatedFromCacheIfNeeded(StateType.Document, document, stateSet, data.Items);
                    continue;
                }
                // For open documents, refresh the member-range map's text version.
                if (openedDocument)
                {
                    _memberRangeMap.Touch(stateSet.Analyzer, document, versions.TextVersion);
                }
                var state = stateSet.GetState(StateType.Document);
                await state.PersistAsync(document, data.ToPersistData(), cancellationToken).ConfigureAwait(false);
                RaiseDocumentDiagnosticsUpdatedIfNeeded(StateType.Document, document, stateSet, data.OldItems, data.Items);
            }
        }
    }
    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
    {
        throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Public entry point for project-level analysis. Note: <paramref name="semanticsChanged"/>
/// is not consulted here; the private overload always runs the full project pass.
/// </summary>
public override async Task AnalyzeProjectAsync(Project project, bool semanticsChanged, CancellationToken cancellationToken)
{
    await AnalyzeProjectAsync(project, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Runs project-scoped analyzers (compilation end actions etc.) for the
/// project, persisting the results and raising project diagnostic events.
/// </summary>
private async Task AnalyzeProjectAsync(Project project, CancellationToken cancellationToken)
{
    try
    {
        // Compilation actions can report diagnostics on open files, so "documentOpened = true"
        if (!CheckOption(project.Solution.Workspace, project.Language, forceAnalysis: false))
        {
            return;
        }
        var projectTextVersion = await project.GetLatestDocumentVersionAsync(cancellationToken).ConfigureAwait(false);
        var semanticVersion = await project.GetDependentSemanticVersionAsync(cancellationToken).ConfigureAwait(false);
        var projectVersion = await project.GetDependentVersionAsync(cancellationToken).ConfigureAwait(false);
        var analyzerDriver = new DiagnosticAnalyzerDriver(project, this, ConcurrentAnalysis, ReportSuppressedDiagnostics, cancellationToken);
        var versions = new VersionArgument(projectTextVersion, semanticVersion, projectVersion);
        foreach (var stateSet in _stateManager.GetOrUpdateStateSets(project))
        {
            // Compilation actions can report diagnostics on open files, so we skipClosedFileChecks.
            if (SkipRunningAnalyzer(project.CompilationOptions, analyzerDriver, openedDocument: true, skipClosedFileChecks: true, stateSet: stateSet))
            {
                await ClearExistingDiagnostics(project, stateSet, cancellationToken).ConfigureAwait(false);
                continue;
            }
            if (ShouldRunAnalyzerForStateType(stateSet.Analyzer, StateType.Project, diagnosticIds: null))
            {
                var data = await _executor.GetProjectAnalysisDataAsync(analyzerDriver, stateSet, versions).ConfigureAwait(false);
                if (data.FromCache)
                {
                    // Cached results: raise with an empty "old items" set rather than re-persist.
                    RaiseProjectDiagnosticsUpdatedIfNeeded(project, stateSet, ImmutableArray<DiagnosticData>.Empty, data.Items);
                    continue;
                }
                var state = stateSet.GetState(StateType.Project);
                await PersistProjectData(project, state, data).ConfigureAwait(false);
                RaiseProjectDiagnosticsUpdatedIfNeeded(project, stateSet, data.OldItems, data.Items);
            }
        }
    }
    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
    {
        throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Decides whether the analyzer in <paramref name="stateSet"/> should be
/// skipped for the current analysis pass. Suppressed analyzers are always
/// skipped; otherwise closed-file policy applies unless the caller opted out.
/// </summary>
private bool SkipRunningAnalyzer(
    CompilationOptions compilationOptions,
    DiagnosticAnalyzerDriver userDiagnosticDriver,
    bool openedDocument,
    bool skipClosedFileChecks,
    StateSet stateSet)
{
    // A suppressed analyzer never runs.
    if (Owner.IsAnalyzerSuppressed(stateSet.Analyzer, userDiagnosticDriver.Project))
    {
        return true;
    }

    // Caller explicitly bypassed closed-file policy: always run.
    if (skipClosedFileChecks)
    {
        return false;
    }

    // Otherwise skip exactly when the analyzer should not run for a closed file.
    return !ShouldRunAnalyzerForClosedFile(compilationOptions, openedDocument, stateSet.Analyzer);
}
/// <summary>
/// Replaces all persisted project-scope data with the freshly computed
/// <paramref name="data"/>: clears project and per-document entries, then
/// re-saves items grouped by document (null DocumentId = project-scope).
/// </summary>
private static async Task PersistProjectData(Project project, DiagnosticState state, AnalysisData data)
{
    // TODO: Cancellation is not allowed here to prevent data inconsistency. But there is still a possibility of data inconsistency due to
    // things like exception. For now, I am letting it go and let v2 engine take care of it properly. If v2 doesn't come online soon enough
    // more refactoring is required on project state.
    // clear all existing data
    state.Remove(project.Id);
    foreach (var document in project.Documents)
    {
        state.Remove(document.Id);
    }
    // quick bail out
    if (data.Items.Length == 0)
    {
        return;
    }
    // save new data
    var group = data.Items.GroupBy(d => d.DocumentId);
    foreach (var kv in group)
    {
        if (kv.Key == null)
        {
            // save project scope diagnostics
            await state.PersistAsync(project, new AnalysisData(data.TextVersion, data.DataVersion, kv.ToImmutableArrayOrEmpty()), CancellationToken.None).ConfigureAwait(false);
            continue;
        }
        // save document scope diagnostics
        var document = project.GetDocument(kv.Key);
        if (document == null)
        {
            // Diagnostic refers to a document no longer in the project; drop it.
            continue;
        }
        await state.PersistAsync(document, new AnalysisData(data.TextVersion, data.DataVersion, kv.ToImmutableArrayOrEmpty()), CancellationToken.None).ConfigureAwait(false);
    }
}
/// <summary>
/// Drops all cached analysis state for a removed document and raises a
/// diagnostics-removed event for every state type of every analyzer.
/// </summary>
public override void RemoveDocument(DocumentId documentId)
{
    using (Logger.LogBlock(FunctionId.Diagnostics_RemoveDocument, GetRemoveLogMessage, documentId, CancellationToken.None))
    {
        _memberRangeMap.Remove(documentId);

        // Same argument applies to every state set, so build it once.
        var solutionArgs = new SolutionArgument(null, documentId.ProjectId, documentId);
        foreach (var stateSet in _stateManager.GetStateSets(documentId.ProjectId))
        {
            stateSet.Remove(documentId);
            for (var i = 0; i < s_stateTypeCount; i++)
            {
                RaiseDiagnosticsRemoved((StateType)i, documentId, stateSet, solutionArgs);
            }
        }
    }
}

/// <summary>
/// Drops all cached analysis state for a removed project, raises project-scope
/// diagnostics-removed events, and discards the project's state sets.
/// </summary>
public override void RemoveProject(ProjectId projectId)
{
    using (Logger.LogBlock(FunctionId.Diagnostics_RemoveProject, GetRemoveLogMessage, projectId, CancellationToken.None))
    {
        var solutionArgs = new SolutionArgument(null, projectId, null);
        foreach (var stateSet in _stateManager.GetStateSets(projectId))
        {
            stateSet.Remove(projectId);
            RaiseDiagnosticsRemoved(StateType.Project, projectId, stateSet, solutionArgs);
        }
    }

    _stateManager.RemoveStateSet(projectId);
}
/// <summary>
/// Appends already-computed diagnostics intersecting <paramref name="range"/> to
/// <paramref name="diagnostics"/> without blocking for fresh analysis.
/// Returns false when up-to-date data was not available.
/// </summary>
public override async Task<bool> TryAppendDiagnosticsForSpanAsync(Document document, TextSpan range, List<DiagnosticData> diagnostics, bool includeSuppressedDiagnostics = false, CancellationToken cancellationToken = default(CancellationToken))
{
    var syntaxRoot = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    var getter = new LatestDiagnosticsForSpanGetter(
        this, document, syntaxRoot, range,
        blockForData: false,
        diagnostics: diagnostics,
        includeSuppressedDiagnostics: includeSuppressedDiagnostics,
        cancellationToken: cancellationToken);
    return await getter.TryGetAsync().ConfigureAwait(false);
}

/// <summary>
/// Returns diagnostics intersecting <paramref name="range"/>, blocking until the data is
/// computed; the getter is therefore expected to always succeed.
/// </summary>
public override async Task<IEnumerable<DiagnosticData>> GetDiagnosticsForSpanAsync(Document document, TextSpan range, bool includeSuppressedDiagnostics = false, CancellationToken cancellationToken = default(CancellationToken))
{
    var syntaxRoot = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    var getter = new LatestDiagnosticsForSpanGetter(
        this, document, syntaxRoot, range,
        blockForData: true,
        includeSuppressedDiagnostics: includeSuppressedDiagnostics,
        cancellationToken: cancellationToken);

    var succeeded = await getter.TryGetAsync().ConfigureAwait(false);
    Contract.Requires(succeeded);
    return getter.Diagnostics;
}
/// <summary>
/// Returns true when <paramref name="analyzer"/> should run even though the document may be
/// closed: always for open documents and the compiler analyzer, otherwise only when the
/// analyzer has at least one non-hidden diagnostic under <paramref name="options"/>.
/// </summary>
private bool ShouldRunAnalyzerForClosedFile(CompilationOptions options, bool openedDocument, DiagnosticAnalyzer analyzer)
{
    // we have opened document, doesn't matter
    if (openedDocument)
    {
        return true;
    }

    // PERF: Don't query descriptors for compiler analyzer, always execute it.
    // (The original code also folded this test into the openedDocument check above,
    // which made this branch unreachable; the duplicate has been removed.)
    if (analyzer.IsCompilerAnalyzer())
    {
        return true;
    }

    // Skip analyzers whose every diagnostic is hidden under the effective options.
    return Owner.GetDiagnosticDescriptors(analyzer).Any(d => GetEffectiveSeverity(d, options) != ReportDiagnostic.Hidden);
}
/// <summary>
/// Computes the effective report level of a descriptor: with no compilation options the
/// default severity is mapped directly, otherwise the options decide (rulesets etc.).
/// </summary>
private static ReportDiagnostic GetEffectiveSeverity(DiagnosticDescriptor descriptor, CompilationOptions options)
{
    if (options == null)
    {
        return MapSeverityToReport(descriptor.DefaultSeverity);
    }

    return descriptor.GetEffectiveSeverity(options);
}

/// <summary>
/// Maps a <see cref="DiagnosticSeverity"/> onto the corresponding <see cref="ReportDiagnostic"/>.
/// Throws for values outside the known enum range.
/// </summary>
private static ReportDiagnostic MapSeverityToReport(DiagnosticSeverity severity)
{
    if (severity == DiagnosticSeverity.Hidden)
    {
        return ReportDiagnostic.Hidden;
    }

    if (severity == DiagnosticSeverity.Info)
    {
        return ReportDiagnostic.Info;
    }

    if (severity == DiagnosticSeverity.Warning)
    {
        return ReportDiagnostic.Warn;
    }

    if (severity == DiagnosticSeverity.Error)
    {
        return ReportDiagnostic.Error;
    }

    throw ExceptionUtilities.Unreachable;
}
/// <summary>
/// Instance overload: delegates to the static check using the owner's descriptor lookup.
/// </summary>
private bool ShouldRunAnalyzerForStateType(DiagnosticAnalyzer analyzer, StateType stateTypeId, ImmutableHashSet<string> diagnosticIds)
{
    return ShouldRunAnalyzerForStateType(analyzer, stateTypeId, diagnosticIds, Owner.GetDiagnosticDescriptors);
}

/// <summary>
/// Returns true when <paramref name="analyzer"/> should run for the given state type.
/// The compiler analyzer always runs; other analyzers are filtered by requested
/// diagnostic ids (when provided) and by which analysis kinds they support.
/// </summary>
private static bool ShouldRunAnalyzerForStateType(DiagnosticAnalyzer analyzer, StateType stateTypeId,
    ImmutableHashSet<string> diagnosticIds = null, Func<DiagnosticAnalyzer, ImmutableArray<DiagnosticDescriptor>> getDescriptors = null)
{
    // PERF: Don't query descriptors for compiler analyzer, always execute it for all state types.
    if (analyzer.IsCompilerAnalyzer())
    {
        return true;
    }

    // Apply the id filter only when a descriptor lookup is available. getDescriptors
    // defaults to null, so the original unconditional call could NRE when a filter
    // was passed without a lookup; guard against that and treat it as "no filter".
    if (diagnosticIds != null && getDescriptors != null &&
        getDescriptors(analyzer).All(d => !diagnosticIds.Contains(d.Id)))
    {
        return false;
    }

    switch (stateTypeId)
    {
        case StateType.Syntax:
            return analyzer.SupportsSyntaxDiagnosticAnalysis();
        case StateType.Document:
            return analyzer.SupportsSemanticDiagnosticAnalysis();
        case StateType.Project:
            return analyzer.SupportsProjectDiagnosticAnalysis();
        default:
            throw ExceptionUtilities.Unreachable;
    }
}
/// <summary>
/// Emits analyzer crash-count and type-count telemetry for this analyzer session
/// (keyed by the correlation id), then resets the aggregator so the next session
/// starts from zero.
/// </summary>
public override void LogAnalyzerCountSummary()
{
DiagnosticAnalyzerLogger.LogAnalyzerCrashCountSummary(_correlationId, DiagnosticLogAggregator);
DiagnosticAnalyzerLogger.LogAnalyzerTypeCountSummary(_correlationId, DiagnosticLogAggregator);
// reset the log aggregator
ResetDiagnosticLogAggregator();
}
/// <summary>
/// True when previously persisted syntax diagnostics can be reused: data exists and both
/// the text version and the syntax-tree version still match.
/// </summary>
private static bool CheckSyntaxVersions(Document document, AnalysisData existingData, VersionArgument versions)
{
    return existingData != null
        && document.CanReusePersistedTextVersion(versions.TextVersion, existingData.TextVersion)
        && document.CanReusePersistedSyntaxTreeVersion(versions.DataVersion, existingData.DataVersion);
}

/// <summary>
/// True when previously persisted document semantic diagnostics can be reused: data exists,
/// the text version matches, and the project's dependent semantic version still matches.
/// </summary>
private static bool CheckSemanticVersions(Document document, AnalysisData existingData, VersionArgument versions)
{
    return existingData != null
        && document.CanReusePersistedTextVersion(versions.TextVersion, existingData.TextVersion)
        && document.Project.CanReusePersistedDependentSemanticVersion(versions.ProjectVersion, versions.DataVersion, existingData.DataVersion);
}

/// <summary>
/// True when previously persisted project diagnostics can be reused: data exists and both
/// the recorded text version and the dependent semantic version still match.
/// </summary>
private static bool CheckSemanticVersions(Project project, AnalysisData existingData, VersionArgument versions)
{
    return existingData != null
        && VersionStamp.CanReusePersistedVersion(versions.TextVersion, existingData.TextVersion)
        && project.CanReusePersistedDependentSemanticVersion(versions.ProjectVersion, versions.DataVersion, existingData.DataVersion);
}
/// <summary>
/// Raises a created event for cache-served items (no prior items to compare against).
/// </summary>
private void RaiseDiagnosticsCreatedFromCacheIfNeeded(StateType type, Document document, StateSet stateSet, ImmutableArray<DiagnosticData> items)
    => RaiseDocumentDiagnosticsUpdatedIfNeeded(type, document, stateSet, ImmutableArray<DiagnosticData>.Empty, items);

/// <summary>
/// Raises a document-scope created event unless both the old and the new item sets are empty
/// (nothing changed, nothing to report).
/// </summary>
private void RaiseDocumentDiagnosticsUpdatedIfNeeded(
    StateType type, Document document, StateSet stateSet, ImmutableArray<DiagnosticData> existingItems, ImmutableArray<DiagnosticData> newItems)
{
    if (existingItems.Length > 0 || newItems.Length > 0)
    {
        RaiseDiagnosticsCreated(type, document.Id, stateSet, new SolutionArgument(document), newItems);
    }
}

/// <summary>
/// Raises project-scope removed events for entries that disappeared, then created events
/// for the new items - unless both the old and the new item sets are empty.
/// </summary>
private void RaiseProjectDiagnosticsUpdatedIfNeeded(
    Project project, StateSet stateSet, ImmutableArray<DiagnosticData> existingItems, ImmutableArray<DiagnosticData> newItems)
{
    if (existingItems.Length > 0 || newItems.Length > 0)
    {
        RaiseProjectDiagnosticsRemovedIfNeeded(project, stateSet, existingItems, newItems);
        RaiseProjectDiagnosticsUpdated(project, stateSet, newItems);
    }
}
/// <summary>
/// Raises removed events for every document (or the project itself, for the null key) that
/// had project-state diagnostics in <paramref name="existingItems"/> but has none in
/// <paramref name="newItems"/>.
/// </summary>
private void RaiseProjectDiagnosticsRemovedIfNeeded(
    Project project, StateSet stateSet, ImmutableArray<DiagnosticData> existingItems, ImmutableArray<DiagnosticData> newItems)
{
    if (existingItems.Length == 0)
    {
        return;
    }

    // Document ids (null = project scope) present before but absent now.
    var removedItems = existingItems.GroupBy(d => d.DocumentId).Select(g => g.Key).Except(newItems.GroupBy(d => d.DocumentId).Select(g => g.Key));
    foreach (var documentId in removedItems)
    {
        if (documentId == null)
        {
            // project-scope diagnostics went away
            RaiseDiagnosticsRemoved(StateType.Project, project.Id, stateSet, new SolutionArgument(project));
            continue;
        }

        // documentId is known non-null here, so the argument is always document-based.
        // (The original conditional's null branch was unreachable dead code - and would
        // have dereferenced documentId anyway.)
        var document = project.GetDocument(documentId);
        RaiseDiagnosticsRemoved(StateType.Project, documentId, stateSet, new SolutionArgument(document));
    }
}
/// <summary>
/// Raises created events for project-state diagnostics, bucketed by document; the null
/// bucket is reported against the project itself.
/// </summary>
private void RaiseProjectDiagnosticsUpdated(Project project, StateSet stateSet, ImmutableArray<DiagnosticData> diagnostics)
{
    foreach (var kv in diagnostics.GroupBy(d => d.DocumentId))
    {
        var items = kv.ToImmutableArrayOrEmpty();
        if (kv.Key == null)
        {
            // project-scope bucket
            RaiseDiagnosticsCreated(StateType.Project, project.Id, stateSet, new SolutionArgument(project), items);
        }
        else
        {
            RaiseDiagnosticsCreated(StateType.Project, kv.Key, stateSet, new SolutionArgument(project.GetDocument(kv.Key)), items);
        }
    }
}

/// <summary>
/// Returns the diagnostics stored under <paramref name="documentId"/>, or an empty array
/// when the lookup has no such bucket.
/// </summary>
private static ImmutableArray<DiagnosticData> GetDiagnosticData(ILookup<DocumentId, DiagnosticData> lookup, DocumentId documentId)
{
    if (!lookup.Contains(documentId))
    {
        return ImmutableArray<DiagnosticData>.Empty;
    }

    return lookup[documentId].ToImmutableArrayOrEmpty();
}
/// <summary>
/// Notifies the owner that diagnostics were produced for the given analyzer/state-type/key.
/// No-op when there is no owner to notify.
/// </summary>
private void RaiseDiagnosticsCreated(
    StateType type, object key, StateSet stateSet, SolutionArgument solution, ImmutableArray<DiagnosticData> diagnostics)
{
    if (Owner == null)
    {
        return;
    }

    // get right arg id for the given analyzer
    var argumentKey = CreateArgumentKey(type, key, stateSet);
    var args = DiagnosticsUpdatedArgs.DiagnosticsCreated(
        argumentKey, Workspace, solution.Solution, solution.ProjectId, solution.DocumentId, diagnostics);
    Owner.RaiseDiagnosticsUpdated(this, args);
}

/// <summary>
/// Builds the update-event key; host analyzers carry their error source name so events
/// from different hosts stay distinguishable.
/// </summary>
private static ArgumentKey CreateArgumentKey(StateType type, object key, StateSet stateSet)
{
    if (stateSet.ErrorSourceName != null)
    {
        return new HostAnalyzerKey(stateSet.Analyzer, type, key, stateSet.ErrorSourceName);
    }

    return new ArgumentKey(stateSet.Analyzer, type, key);
}

/// <summary>
/// Notifies the owner that diagnostics for the given analyzer/state-type/key were removed.
/// No-op when there is no owner to notify.
/// </summary>
private void RaiseDiagnosticsRemoved(
    StateType type, object key, StateSet stateSet, SolutionArgument solution)
{
    if (Owner == null)
    {
        return;
    }

    // get right arg id for the given analyzer
    var argumentKey = CreateArgumentKey(type, key, stateSet);
    var args = DiagnosticsUpdatedArgs.DiagnosticsRemoved(
        argumentKey, Workspace, solution.Solution, solution.ProjectId, solution.DocumentId);
    Owner.RaiseDiagnosticsUpdated(this, args);
}
/// <summary>
/// Produces the updated diagnostic list for a document after one member (e.g. a method body)
/// was re-analyzed. Diagnostics before the member keep their positions, the member's old
/// diagnostics are replaced by <paramref name="memberDiagnostics"/>, diagnostics after the
/// member are shifted by the member's length delta, and diagnostics that started inside the
/// old member span but were not re-reported are dropped.
/// </summary>
private ImmutableArray<DiagnosticData> UpdateDocumentDiagnostics(
AnalysisData existingData, ImmutableArray<TextSpan> range, ImmutableArray<DiagnosticData> memberDiagnostics,
SyntaxTree tree, SyntaxNode member, int memberId)
{
// get old span
var oldSpan = range[memberId];
// get old diagnostics
var diagnostics = existingData.Items;
// check quick exit cases
if (diagnostics.Length == 0 && memberDiagnostics.Length == 0)
{
return diagnostics;
}
// simple case
if (diagnostics.Length == 0 && memberDiagnostics.Length > 0)
{
return memberDiagnostics;
}
// regular case
var result = new List<DiagnosticData>();
// update member location
Contract.Requires(member.FullSpan.Start == oldSpan.Start);
// how much the member grew or shrank; applied to diagnostics located after it
var delta = member.FullSpan.End - oldSpan.End;
var replaced = false;
foreach (var diagnostic in diagnostics)
{
// diagnostics entirely before the edited member keep their positions unchanged
if (diagnostic.TextSpan.Start < oldSpan.Start)
{
result.Add(diagnostic);
continue;
}
// at the first diagnostic at/after the member start, splice in the fresh member
// diagnostics so the result stays ordered by position
if (!replaced)
{
result.AddRange(memberDiagnostics);
replaced = true;
}
// diagnostics after the old member span are kept, shifted by delta; diagnostics
// that started inside the old span fall through both branches and are dropped
if (oldSpan.End <= diagnostic.TextSpan.Start)
{
result.Add(UpdatePosition(diagnostic, tree, delta));
continue;
}
}
// if it haven't replaced, replace it now
// (every existing diagnostic was before the member, so the new ones go at the end)
if (!replaced)
{
result.AddRange(memberDiagnostics);
// NOTE(review): dead assignment - 'replaced' is never read after this point
replaced = true;
}
return result.ToImmutableArray();
}
/// <summary>
/// Returns a copy of <paramref name="diagnostic"/> with its span shifted by
/// <paramref name="delta"/> characters, clamped to the bounds of <paramref name="tree"/>,
/// and with both the original and the #line-mapped line/column info recomputed from the tree.
/// All other diagnostic metadata is carried over unchanged.
/// </summary>
private DiagnosticData UpdatePosition(DiagnosticData diagnostic, SyntaxTree tree, int delta)
{
// clamp the shifted start into [0, tree.Length]
var start = Math.Min(Math.Max(diagnostic.TextSpan.Start + delta, 0), tree.Length);
// a span that would start at/past the end of the tree collapses to zero length
var newSpan = new TextSpan(start, start >= tree.Length ? 0 : diagnostic.TextSpan.Length);
var mappedLineInfo = tree.GetMappedLineSpan(newSpan);
var originalLineInfo = tree.GetLineSpan(newSpan);
// rebuild the DiagnosticData with identical metadata but the relocated span/location
return new DiagnosticData(
diagnostic.Id,
diagnostic.Category,
diagnostic.Message,
diagnostic.ENUMessageForBingSearch,
diagnostic.Severity,
diagnostic.DefaultSeverity,
diagnostic.IsEnabledByDefault,
diagnostic.WarningLevel,
diagnostic.CustomTags,
diagnostic.Properties,
diagnostic.Workspace,
diagnostic.ProjectId,
new DiagnosticDataLocation(diagnostic.DocumentId, newSpan,
originalFilePath: originalLineInfo.Path,
originalStartLine: originalLineInfo.StartLinePosition.Line,
originalStartColumn: originalLineInfo.StartLinePosition.Character,
originalEndLine: originalLineInfo.EndLinePosition.Line,
originalEndColumn: originalLineInfo.EndLinePosition.Character,
mappedFilePath: mappedLineInfo.GetMappedFilePathIfExist(),
mappedStartLine: mappedLineInfo.StartLinePosition.Line,
mappedStartColumn: mappedLineInfo.StartLinePosition.Character,
mappedEndLine: mappedLineInfo.EndLinePosition.Line,
mappedEndColumn: mappedLineInfo.EndLinePosition.Character),
description: diagnostic.Description,
helpLink: diagnostic.HelpLink,
isSuppressed: diagnostic.IsSuppressed);
}
/// <summary>
/// Converts raw compiler diagnostics for a document into <see cref="DiagnosticData"/>,
/// keeping only those located in <paramref name="tree"/> (and inside <paramref name="span"/>
/// when one is given). Returns null when <paramref name="diagnostics"/> is null.
/// </summary>
private static IEnumerable<DiagnosticData> GetDiagnosticData(Document document, SyntaxTree tree, TextSpan? span, IEnumerable<Diagnostic> diagnostics)
{
    if (diagnostics == null)
    {
        return null;
    }

    return diagnostics
        .Where(dx => ShouldIncludeDiagnostic(dx, tree, span))
        .Select(dx => DiagnosticData.Create(document, dx));
}

/// <summary>
/// A diagnostic is included when it has a real source location in <paramref name="tree"/>
/// and, if a filter span is supplied, its span lies inside that filter.
/// </summary>
private static bool ShouldIncludeDiagnostic(Diagnostic diagnostic, SyntaxTree tree, TextSpan? span)
{
    if (diagnostic == null)
    {
        return false;
    }

    var location = diagnostic.Location;
    if (location == null || location == Location.None || location.SourceTree != tree)
    {
        return false;
    }

    // no filter span means "whole tree"
    return span == null || span.Value.Contains(location.SourceSpan);
}

/// <summary>
/// Converts raw project diagnostics into <see cref="DiagnosticData"/>. Diagnostics without
/// a source location become project-scoped; located ones are attributed to their document,
/// and dropped when that document is not part of the project.
/// </summary>
private static IEnumerable<DiagnosticData> GetDiagnosticData(Project project, IEnumerable<Diagnostic> diagnostics)
{
    if (diagnostics == null)
    {
        yield break;
    }

    foreach (var diagnostic in diagnostics)
    {
        var location = diagnostic.Location;
        if (location == null || location == Location.None)
        {
            // no source location -> report at project scope
            yield return DiagnosticData.Create(project, diagnostic);
        }
        else
        {
            var document = project.GetDocument(location.SourceTree);
            if (document != null)
            {
                yield return DiagnosticData.Create(document, diagnostic);
            }
        }
    }
}
/// <summary>
/// Runs <paramref name="analyzer"/>'s syntax analysis through the driver and converts the
/// results to <see cref="DiagnosticData"/> scoped to the driver's document/span.
/// Non-cancellation failures are reported and surfaced as unreachable.
/// </summary>
private static async Task<IEnumerable<DiagnosticData>> GetSyntaxDiagnosticsAsync(DiagnosticAnalyzerDriver userDiagnosticDriver, DiagnosticAnalyzer analyzer)
{
using (Logger.LogBlock(FunctionId.Diagnostics_SyntaxDiagnostic, GetSyntaxLogMessage, userDiagnosticDriver.Document, userDiagnosticDriver.Span, analyzer, userDiagnosticDriver.CancellationToken))
{
try
{
Contract.ThrowIfNull(analyzer);
var tree = await userDiagnosticDriver.Document.GetSyntaxTreeAsync(userDiagnosticDriver.CancellationToken).ConfigureAwait(false);
var diagnostics = await userDiagnosticDriver.GetSyntaxDiagnosticsAsync(analyzer).ConfigureAwait(false);
return GetDiagnosticData(userDiagnosticDriver.Document, tree, userDiagnosticDriver.Span, diagnostics);
}
catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
{
// ReportUnlessCanceled returns true only for non-cancellation failures, so this
// path never executes normally; cancellation propagates past the filter.
throw ExceptionUtilities.Unreachable;
}
}
}
/// <summary>
/// Runs <paramref name="analyzer"/>'s semantic analysis through the driver and converts the
/// results to <see cref="DiagnosticData"/> scoped to the driver's document/span.
/// Mirrors <see cref="GetSyntaxDiagnosticsAsync"/>.
/// </summary>
private static async Task<IEnumerable<DiagnosticData>> GetSemanticDiagnosticsAsync(DiagnosticAnalyzerDriver userDiagnosticDriver, DiagnosticAnalyzer analyzer)
{
using (Logger.LogBlock(FunctionId.Diagnostics_SemanticDiagnostic, GetSemanticLogMessage, userDiagnosticDriver.Document, userDiagnosticDriver.Span, analyzer, userDiagnosticDriver.CancellationToken))
{
try
{
Contract.ThrowIfNull(analyzer);
var tree = await userDiagnosticDriver.Document.GetSyntaxTreeAsync(userDiagnosticDriver.CancellationToken).ConfigureAwait(false);
var diagnostics = await userDiagnosticDriver.GetSemanticDiagnosticsAsync(analyzer).ConfigureAwait(false);
return GetDiagnosticData(userDiagnosticDriver.Document, tree, userDiagnosticDriver.Span, diagnostics);
}
catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
}
/// <summary>
/// Runs <paramref name="analyzer"/>'s project-level analysis through the driver and converts
/// the results to <see cref="DiagnosticData"/> attributed to the project or its documents.
/// </summary>
private static async Task<IEnumerable<DiagnosticData>> GetProjectDiagnosticsAsync(DiagnosticAnalyzerDriver userDiagnosticDriver, DiagnosticAnalyzer analyzer)
{
using (Logger.LogBlock(FunctionId.Diagnostics_ProjectDiagnostic, GetProjectLogMessage, userDiagnosticDriver.Project, analyzer, userDiagnosticDriver.CancellationToken))
{
try
{
Contract.ThrowIfNull(analyzer);
var diagnostics = await userDiagnosticDriver.GetProjectDiagnosticsAsync(analyzer).ConfigureAwait(false);
return GetDiagnosticData(userDiagnosticDriver.Project, diagnostics);
}
catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
}
/// <summary>
/// Clears cached diagnostic state for <paramref name="document"/> across every state set and
/// state type; the project state type can be excluded via <paramref name="includeProjectState"/>.
/// </summary>
private void ClearDocumentStates(
    Document document, IEnumerable<StateSet> states,
    bool raiseEvent, bool includeProjectState,
    CancellationToken cancellationToken)
{
    // Compiler + User diagnostics
    foreach (var state in states)
    {
        for (var stateType = 0; stateType < s_stateTypeCount; stateType++)
        {
            if (!includeProjectState && stateType == (int)StateType.Project)
            {
                // don't re-set project state type
                continue;
            }

            cancellationToken.ThrowIfCancellationRequested();
            ClearDocumentState(document, state, (StateType)stateType, raiseEvent);
        }
    }
}

/// <summary>
/// Removes saved state for one (document, analyzer, state-type) tuple and optionally raises
/// the corresponding diagnostics-removed event.
/// </summary>
private void ClearDocumentState(Document document, StateSet stateSet, StateType type, bool raiseEvent)
{
    // remove saved info
    // (the original also declared an unused 'documentId' local; removed)
    stateSet.GetState(type).Remove(document.Id);

    if (raiseEvent)
    {
        // raise diagnostic updated event
        RaiseDiagnosticsRemoved(type, document.Id, stateSet, new SolutionArgument(document));
    }
}
/// <summary>
/// Clears all cached state for a project: every state type of every document, then the
/// project-level state of each state set, raising removed events throughout.
/// NOTE(review): despite the "Async" suffix this method is synchronous and returns void;
/// renaming would break callers, so the name is left as-is.
/// </summary>
private void ClearProjectStatesAsync(Project project, IEnumerable<StateSet> states, CancellationToken cancellationToken)
{
foreach (var document in project.Documents)
{
ClearDocumentStates(document, states, raiseEvent: true, includeProjectState: true, cancellationToken: cancellationToken);
}
foreach (var stateSet in states)
{
cancellationToken.ThrowIfCancellationRequested();
ClearProjectState(project, stateSet);
}
}
/// <summary>
/// Removes the project-scope cached diagnostics for one state set and raises the
/// corresponding diagnostics-removed event.
/// </summary>
private void ClearProjectState(Project project, StateSet stateSet)
{
var state = stateSet.GetState(StateType.Project);
// remove saved cache
state.Remove(project.Id);
// raise diagnostic updated event
var solutionArgs = new SolutionArgument(project);
RaiseDiagnosticsRemoved(StateType.Project, project.Id, stateSet, solutionArgs);
}
/// <summary>
/// Clears persisted diagnostics of the given state type for a document, but only when
/// there actually are items to clear (avoids raising empty removed events).
/// </summary>
private async Task ClearExistingDiagnostics(Document document, StateSet stateSet, StateType type, CancellationToken cancellationToken)
{
    var existingData = await stateSet.GetState(type)
        .TryGetExistingDataAsync(document, cancellationToken).ConfigureAwait(false);
    if (existingData?.Items.Length > 0)
    {
        ClearDocumentState(document, stateSet, type, raiseEvent: true);
    }
}

/// <summary>
/// Clears persisted project-scope diagnostics for one state set, but only when there
/// actually are items to clear.
/// </summary>
private async Task ClearExistingDiagnostics(Project project, StateSet stateSet, CancellationToken cancellationToken)
{
    var existingData = await stateSet.GetState(StateType.Project)
        .TryGetExistingDataAsync(project, cancellationToken).ConfigureAwait(false);
    if (existingData?.Items.Length > 0)
    {
        ClearProjectState(project, stateSet);
    }
}
/// <summary>Log text for a syntax analysis run: "syntax: path, span, analyzer".</summary>
private static string GetSyntaxLogMessage(Document document, TextSpan? span, DiagnosticAnalyzer analyzer)
{
    return $"syntax: {document.FilePath ?? document.Name}, {FormatSpan(span)}, {analyzer.ToString()}";
}

/// <summary>Log text for a semantic analysis run: "semantic: path, span, analyzer".</summary>
private static string GetSemanticLogMessage(Document document, TextSpan? span, DiagnosticAnalyzer analyzer)
{
    return $"semantic: {document.FilePath ?? document.Name}, {FormatSpan(span)}, {analyzer.ToString()}";
}

/// <summary>Log text for a project analysis run: "project: path, analyzer".</summary>
private static string GetProjectLogMessage(Project project, DiagnosticAnalyzer analyzer)
{
    return $"project: {project.FilePath ?? project.Name}, {analyzer.ToString()}";
}

/// <summary>Log text for a document reset.</summary>
private static string GetResetLogMessage(Document document)
{
    return $"document reset: {document.FilePath ?? document.Name}";
}

/// <summary>Log text for a document open.</summary>
private static string GetOpenLogMessage(Document document)
{
    return $"document open: {document.FilePath ?? document.Name}";
}

/// <summary>Log text for a document removal.</summary>
private static string GetRemoveLogMessage(DocumentId id)
{
    return $"document remove: {id.ToString()}";
}

/// <summary>Log text for a project removal.</summary>
private static string GetRemoveLogMessage(ProjectId id)
{
    return $"project remove: {id.ToString()}";
}

/// <summary>Renders an optional analysis span; "Full" when no span was restricted.</summary>
private static string FormatSpan(TextSpan? span)
{
    return span.HasValue ? span.Value.ToString() : "Full";
}
/// <summary>
/// This analyzer needs no work on a new solution snapshot; returns an already-completed task.
/// </summary>
public override Task NewSolutionSnapshotAsync(Solution newSolution, CancellationToken cancellationToken)
    => SpecializedTasks.EmptyTask;
}
}
| |
/*
* Copyright (c) 2013-2015, SteamDB. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using Dapper;
using SteamKit2;
namespace SteamDatabaseBackend
{
/// <summary>
/// Downloads and processes Steam depot manifests: keeps the `Depots` table (name, build id,
/// manifest id) up to date and historizes file-level changes between manifests into
/// `DepotsFiles` / `DepotsHistory`. Work is driven by SteamKit2 job callbacks
/// (depot key -> CDN auth token -> manifest download).
/// </summary>
class DepotProcessor
{
/// <summary>Per-depot download state threaded through the Steam job callbacks.</summary>
public class ManifestJob
{
public uint ChangeNumber;
public uint ParentAppID;
public uint DepotID;
public int BuildID;
public ulong ManifestID;
public string DepotName;
public string CDNToken;
public string Server;
public byte[] DepotKey;
// remaining CDN retry attempts (counted down on auth-token/download failures)
public int Tries;
}
private readonly CDNClient CDNClient;
private readonly List<string> CDNServers;
// depots currently being processed; the byte value is unused (dictionary used as a concurrent set)
private readonly ConcurrentDictionary<uint, byte> DepotLocks;
/// <summary>
/// Wires up the CDN client and registers the depot-key and CDN-auth-token callbacks.
/// </summary>
public DepotProcessor(SteamClient client, CallbackManager manager)
{
DepotLocks = new ConcurrentDictionary<uint, byte>();
CDNClient = new CDNClient(client);
FileDownloader.SetCDNClient(CDNClient);
CDNServers = new List<string>
{
"cdn.level3.cs.steampowered.com",
"cdn.akamai.cs.steampowered.com",
"cdn.highwinds.cs.steampowered.com"
};
manager.Register(new Callback<SteamApps.CDNAuthTokenCallback>(OnCDNAuthTokenCallback));
manager.Register(new Callback<SteamApps.DepotKeyCallback>(OnDepotKeyCallback));
}
/// <summary>
/// Parses an app's "depots" section from PICS data, updates the `Depots` table, and kicks
/// off manifest downloads (via depot-key jobs) for depots whose manifest changed.
/// </summary>
public void Process(uint appID, uint changeNumber, KeyValue depots)
{
var requests = new List<ManifestJob>();
// Get data in format we want first
foreach (var depot in depots.Children)
{
// Ignore these for now, parent app should be updated too anyway
if (depot["depotfromapp"].Value != null)
{
continue;
}
var request = new ManifestJob
{
ChangeNumber = changeNumber,
ParentAppID = appID,
DepotName = depot["name"].AsString()
};
// Ignore keys that aren't integers, for example "branches"
if (!uint.TryParse(depot.Name, out request.DepotID))
{
continue;
}
// TODO: instead of locking we could wait for current process to finish
if (DepotLocks.ContainsKey(request.DepotID))
{
continue;
}
// If there is no public manifest for this depot, it still could have some sort of open beta
if (depot["manifests"]["public"].Value == null || !ulong.TryParse(depot["manifests"]["public"].Value, out request.ManifestID))
{
var branch = depot["manifests"].Children.FirstOrDefault(x => x.Name != "local");
if (branch == null || !ulong.TryParse(branch.Value, out request.ManifestID))
{
// no usable manifest at all - still record the depot's name
using (var db = Database.GetConnection())
{
db.Execute("INSERT INTO `Depots` (`DepotID`, `Name`) VALUES (@DepotID, @DepotName) ON DUPLICATE KEY UPDATE `Name` = @DepotName", new { request.DepotID, request.DepotName });
}
continue;
}
request.BuildID = branch["build"].AsInteger();
}
else
{
request.BuildID = depots["branches"]["public"]["buildid"].AsInteger();
}
requests.Add(request);
}
if (!requests.Any())
{
return;
}
using (var db = Database.GetConnection())
{
// load the stored state for all requested depots in one query
var dbDepots = db.Query<Depot>("SELECT `DepotID`, `Name`, `BuildID`, `ManifestID`, `LastManifestID` FROM `Depots` WHERE `DepotID` IN @Depots", new { Depots = requests.Select(x => x.DepotID) })
.ToDictionary(x => x.DepotID, x => x);
foreach (var request in requests)
{
Depot dbDepot;
if (dbDepots.ContainsKey(request.DepotID))
{
dbDepot = dbDepots[request.DepotID];
if (dbDepot.BuildID > request.BuildID)
{
// buildid went back in time? this either means a rollback, or a shared depot that isn't synced properly
Log.WriteDebug("Depot Processor", "Skipping depot {0} due to old buildid: {1} > {2}", request.DepotID, dbDepot.BuildID, request.BuildID);
continue;
}
// nothing changed and this isn't a forced full run (FullRun >= 2)
if (dbDepot.LastManifestID == request.ManifestID && dbDepot.ManifestID == request.ManifestID && Settings.Current.FullRun < 2)
{
// Update depot name if changed
if (!request.DepotName.Equals(dbDepot.Name))
{
db.Execute("UPDATE `Depots` SET `Name` = @DepotName WHERE `DepotID` = @DepotID", new { request.DepotID, request.DepotName });
}
continue;
}
}
else
{
dbDepot = new Depot();
}
if (dbDepot.BuildID != request.BuildID || dbDepot.ManifestID != request.ManifestID || !request.DepotName.Equals(dbDepot.Name))
{
db.Execute(@"INSERT INTO `Depots` (`DepotID`, `Name`, `BuildID`, `ManifestID`) VALUES (@DepotID, @DepotName, @BuildID, @ManifestID)
ON DUPLICATE KEY UPDATE `LastUpdated` = CURRENT_TIMESTAMP(), `Name` = @DepotName, `BuildID` = @BuildID, `ManifestID` = @ManifestID",
new {
request.DepotID,
request.DepotName,
request.BuildID,
request.ManifestID
});
}
if (dbDepot.ManifestID != request.ManifestID)
{
MakeHistory(db, request, string.Empty, "manifest_change", dbDepot.ManifestID, request.ManifestID);
}
// only depots we own (or a forced full run) can actually be downloaded
if (LicenseList.OwnedApps.ContainsKey(request.DepotID) || Settings.Current.FullRun > 1)
{
DepotLocks.TryAdd(request.DepotID, 1);
JobManager.AddJob(() => Steam.Instance.Apps.GetDepotDecryptionKey(request.DepotID, request.ParentAppID), request);
}
#if DEBUG
else
{
Log.WriteDebug("Depot Processor", "Skipping depot {0} from app {1} because we don't own it", request.DepotID, request.ParentAppID);
}
#endif
}
}
}
/// <summary>
/// Depot decryption key arrived: store it on the request and ask for a CDN auth token.
/// Releases the depot lock on failure or when the originating job is unknown.
/// </summary>
private void OnDepotKeyCallback(SteamApps.DepotKeyCallback callback)
{
JobAction job;
if (!JobManager.TryRemoveJob(callback.JobID, out job))
{
RemoveLock(callback.DepotID);
return;
}
var request = job.ManifestJob;
if (callback.Result != EResult.OK)
{
// AccessDenied for unimportant depots is expected noise; only log other cases
if (callback.Result != EResult.AccessDenied || FileDownloader.IsImportantDepot(request.DepotID))
{
Log.WriteError("Depot Processor", "Failed to get depot key for depot {0} (parent {1}) - {2}", callback.DepotID, request.ParentAppID, callback.Result);
}
RemoveLock(request.DepotID);
return;
}
request.DepotKey = callback.DepotKey;
// one try per known CDN server
request.Tries = CDNServers.Count;
request.Server = GetContentServer();
JobManager.AddJob(() => Steam.Instance.Apps.GetCDNAuthToken(request.DepotID, request.Server), request);
}
/// <summary>
/// CDN auth token arrived: retry on another server if it failed, otherwise download the
/// manifest synchronously (see the mono threadpool TODO below).
/// </summary>
private void OnCDNAuthTokenCallback(SteamApps.CDNAuthTokenCallback callback)
{
JobAction job;
if (!JobManager.TryRemoveJob(callback.JobID, out job))
{
return;
}
var request = job.ManifestJob;
if (callback.Result != EResult.OK)
{
if (FileDownloader.IsImportantDepot(request.DepotID))
{
Log.WriteError("Depot Processor", "Failed to get CDN auth token for depot {0} (parent {1} - server {2}) - {3} (#{4})",
request.DepotID, request.ParentAppID, request.Server, callback.Result, request.Tries);
}
// rotate to the next server while tries remain
if (--request.Tries >= 0)
{
request.Server = GetContentServer(request.Tries);
JobManager.AddJob(() => Steam.Instance.Apps.GetCDNAuthToken(request.DepotID, request.Server), request);
return;
}
RemoveLock(request.DepotID);
return;
}
request.CDNToken = callback.Token;
// TODO: Using tasks makes every manifest download timeout
// TODO: which seems to be bug with mono's threadpool implementation
/*TaskManager.Run(() => DownloadManifest(request)).ContinueWith(task =>
{
RemoveLock(request.DepotID);
Log.WriteDebug("Depot Processor", "Processed depot {0} ({1} depot locks left)", request.DepotID, DepotLocks.Count);
});*/
try
{
DownloadManifest(request);
}
catch (Exception)
{
// best effort: make sure the depot lock is released even if the download blew up
RemoveLock(request.DepotID);
}
}
/// <summary>
/// Downloads the depot manifest from the CDN (with retries), then schedules file download
/// (for important depots) and database processing of the manifest contents.
/// </summary>
private void DownloadManifest(ManifestJob request)
{
Log.WriteInfo("Depot Processor", "DepotID: {0}", request.DepotID);
DepotManifest depotManifest = null;
string lastError = string.Empty;
// CDN is very random, just keep trying
for (var i = 0; i <= 5; i++)
{
try
{
depotManifest = CDNClient.DownloadManifest(request.DepotID, request.ManifestID, request.Server, request.CDNToken, request.DepotKey);
break;
}
catch (Exception e)
{
lastError = e.Message;
}
}
if (depotManifest == null)
{
Log.WriteError("Depot Processor", "Failed to download depot manifest for depot {0} ({1}: {2}) (#{3})", request.DepotID, request.Server, lastError, request.Tries);
// fall back to requesting a fresh auth token on another server
if (--request.Tries >= 0)
{
request.Server = GetContentServer(request.Tries);
JobManager.AddJob(() => Steam.Instance.Apps.GetCDNAuthToken(request.DepotID, request.Server), request);
return;
}
RemoveLock(request.DepotID); // TODO: Remove this once task in OnCDNAuthTokenCallback is used
if (FileDownloader.IsImportantDepot(request.DepotID))
{
IRC.Instance.SendOps("{0}[{1}]{2} Failed to download depot {3} manifest ({4}: {5})",
Colors.OLIVE, Steam.GetAppName(request.ParentAppID), Colors.NORMAL, request.DepotID, request.Server, lastError);
}
return;
}
if (FileDownloader.IsImportantDepot(request.DepotID))
{
TaskManager.Run(() => FileDownloader.DownloadFilesFromDepot(request, depotManifest));
}
// TODO: Task here instead of in OnCDNAuthTokenCallback due to mono's silly threadpool
TaskManager.Run(() =>
{
using(var db = Database.GetConnection())
{
ProcessDepotAfterDownload(db, request, depotManifest);
}
}).ContinueWith(task =>
{
RemoveLock(request.DepotID);
Log.WriteDebug("Depot Processor", "Processed depot {0} ({1} depot locks left)", request.DepotID, DepotLocks.Count);
});
}
/// <summary>
/// Diffs the downloaded manifest against the stored file list: historizes modifications,
/// then removals, then additions, and finally records the manifest as processed by
/// updating `LastManifestID`.
/// </summary>
private static void ProcessDepotAfterDownload(IDbConnection db, ManifestJob request, DepotManifest depotManifest)
{
var filesOld = db.Query<DepotFile>("SELECT `ID`, `File`, `Hash`, `Size`, `Flags` FROM `DepotsFiles` WHERE `DepotID` = @DepotID", new { request.DepotID }).ToDictionary(x => x.File, x => x);
var filesNew = new List<DepotFile>();
var filesAdded = new List<DepotFile>();
var shouldHistorize = filesOld.Any(); // Don't historize file additions if we didn't have any data before
foreach (var file in depotManifest.Files)
{
// normalize to forward slashes
var name = file.FileName.Replace('\\', '/');
// safe guard
if (name.Length > 255)
{
ErrorReporter.Notify(new OverflowException(string.Format("File \"{0}\" in depot {1} is too long", name, request.DepotID)));
continue;
}
var depotFile = new DepotFile
{
DepotID = request.DepotID,
File = name,
Size = file.TotalSize,
Flags = file.Flags
};
if (file.FileHash.Length > 0 && !file.Flags.HasFlag(EDepotFileFlag.Directory))
{
// hex-encode the SHA-1 file hash
depotFile.Hash = string.Concat(Array.ConvertAll(file.FileHash, x => x.ToString("X2")));
}
else
{
// directories and hashless entries get an all-zero placeholder hash
depotFile.Hash = "0000000000000000000000000000000000000000";
}
filesNew.Add(depotFile);
}
foreach (var file in filesNew)
{
if (filesOld.ContainsKey(file.File))
{
var oldFile = filesOld[file.File];
var updateFile = false;
if (oldFile.Size != file.Size || !file.Hash.Equals(oldFile.Hash))
{
MakeHistory(db, request, file.File, "modified", oldFile.Size, file.Size);
updateFile = true;
}
if (oldFile.Flags != file.Flags)
{
MakeHistory(db, request, file.File, "modified_flags", (ulong)oldFile.Flags, (ulong)file.Flags);
updateFile = true;
}
if (updateFile)
{
file.ID = oldFile.ID;
db.Execute("UPDATE `DepotsFiles` SET `Hash` = @Hash, `Size` = @Size, `Flags` = @Flags WHERE `DepotID` = @DepotID AND `ID` = @ID", file);
}
// whatever remains in filesOld after this loop has been removed from the depot
filesOld.Remove(file.File);
}
else
{
// We want to historize modifications first, and only then deletions and additions
filesAdded.Add(file);
}
}
if (filesOld.Any())
{
// files no longer present in the manifest: delete rows and historize the removals
db.Execute("DELETE FROM `DepotsFiles` WHERE `DepotID` = @DepotID AND `ID` IN @Files", new { request.DepotID, Files = filesOld.Select(x => x.Value.ID) });
db.Execute(GetHistoryQuery(), filesOld.Select(x => new DepotHistory
{
DepotID = request.DepotID,
ChangeID = request.ChangeNumber,
Action = "removed",
File = x.Value.File
}));
}
if (filesAdded.Any())
{
db.Execute("INSERT INTO `DepotsFiles` (`DepotID`, `File`, `Hash`, `Size`, `Flags`) VALUES (@DepotID, @File, @Hash, @Size, @Flags)", filesAdded);
if (shouldHistorize)
{
db.Execute(GetHistoryQuery(), filesAdded.Select(x => new DepotHistory
{
DepotID = request.DepotID,
ChangeID = request.ChangeNumber,
Action = "added",
File = x.File
}));
}
}
db.Execute("UPDATE `Depots` SET `LastManifestID` = @ManifestID WHERE `DepotID` = @DepotID", new { request.DepotID, request.ManifestID });
}
/// <summary>The parameterized INSERT used for all `DepotsHistory` rows.</summary>
public static string GetHistoryQuery()
{
return "INSERT INTO `DepotsHistory` (`ChangeID`, `DepotID`, `File`, `Action`, `OldValue`, `NewValue`) VALUES (@ChangeID, @DepotID, @File, @Action, @OldValue, @NewValue)";
}
/// <summary>Inserts one `DepotsHistory` row for the given request/file/action.</summary>
private static void MakeHistory(IDbConnection db, ManifestJob request, string file, string action, ulong oldValue = 0, ulong newValue = 0)
{
db.Execute(GetHistoryQuery(),
new DepotHistory
{
DepotID = request.DepotID,
ChangeID = request.ChangeNumber,
Action = action,
File = file,
OldValue = oldValue,
NewValue = newValue
}
);
}
/// <summary>Releases the processing lock for a depot.</summary>
private void RemoveLock(uint depotID)
{
byte microsoftWhyIsThereNoRemoveMethodWithoutSecondParam;
DepotLocks.TryRemove(depotID, out microsoftWhyIsThereNoRemoveMethodWithoutSecondParam);
}
/// <summary>Picks a random CDN server for the first attempt.</summary>
private string GetContentServer()
{
// NOTE(review): new Random() per call is time-seeded and can repeat when invoked in
// quick succession; consider a shared instance - confirm thread-safety needs first
var i = new Random().Next(CDNServers.Count);
return CDNServers[i];
}
/// <summary>Picks the CDN server for retry attempt <paramref name="i"/> (wraps around).</summary>
private string GetContentServer(int i)
{
i %= CDNServers.Count;
return CDNServers[i];
}
}
}
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.Algo.Candles.Compression.Algo
File: ConvertableCandleBuilderSource.cs
Created: 2015, 11, 11, 2:32 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Algo.Candles.Compression
{
using System;
using System.Collections.Generic;
using System.Linq;
using Ecng.Common;
using Ecng.ComponentModel;
using StockSharp.BusinessEntities;
using StockSharp.Localization;
/// <summary>
/// The base data source for <see cref="ICandleBuilder"/> which convert data from the <typeparamref name="TSourceValue" /> type to the <see cref="ICandleBuilderSourceValue"/>.
/// </summary>
/// <typeparam name="TSourceValue">The source data type (for example, <see cref="Trade"/>).</typeparam>
public abstract class ConvertableCandleBuilderSource<TSourceValue> : BaseCandleBuilderSource
{
    // Wires up the default converter/filter pair for the closed generic type.
    // Only Trade and MarketDepth are supported; any other TSourceValue makes
    // the type initializer throw, failing fast on first use of the type.
    static ConvertableCandleBuilderSource()
    {
        if (typeof(TSourceValue) == typeof(Trade))
        {
            // Ecng's To<> re-types the concrete delegate to the generic
            // signature (valid here because TSourceValue == Trade).
            DefaultConverter = ((Func<Trade, ICandleBuilderSourceValue>)(t => new TradeCandleBuilderSourceValue(t))).To<Func<TSourceValue, ICandleBuilderSourceValue>>();
            // Keep trades whose IsSystem is not explicitly false.
            DefaultFilter = ((Func<Trade, bool>)(t => t.IsSystem != false)).To<Func<TSourceValue, bool>>();
        }
        else if (typeof(TSourceValue) == typeof(MarketDepth))
        {
            DefaultConverter = ((Func<MarketDepth, ICandleBuilderSourceValue>)(d => new DepthCandleBuilderSourceValue(d))).To<Func<TSourceValue, ICandleBuilderSourceValue>>();
            // Market depths are never filtered out.
            DefaultFilter = v => true;
        }
        else
            throw new InvalidOperationException(LocalizedStrings.Str653Params.Put(typeof(TSourceValue)));
    }
    /// <summary>
    /// Initialize <see cref="ConvertableCandleBuilderSource{T}"/>.
    /// </summary>
    protected ConvertableCandleBuilderSource()
    {
    }
    /// <summary>
    /// The default function to convert data from the <typeparamref name="TSourceValue" /> type to the <see cref="ICandleBuilderSourceValue"/>.
    /// </summary>
    public static Func<TSourceValue, ICandleBuilderSourceValue> DefaultConverter { get; }
    // Instance override; starts as the type-wide default.
    private Func<TSourceValue, ICandleBuilderSourceValue> _converter = DefaultConverter;
    /// <summary>
    /// The function to convert data from the <typeparamref name="TSourceValue" /> type to the <see cref="ICandleBuilderSourceValue"/>.
    /// </summary>
    public Func<TSourceValue, ICandleBuilderSourceValue> Converter
    {
        get { return _converter; }
        set
        {
            // A null converter would make Convert() throw on enumeration;
            // reject it up front.
            if (value == null)
                throw new ArgumentNullException(nameof(value));
            _converter = value;
        }
    }
    /// <summary>
    /// The default function to filter data <typeparamref name="TSourceValue" />.
    /// </summary>
    public static Func<TSourceValue, bool> DefaultFilter { get; }
    // Instance override; starts as the type-wide default.
    private Func<TSourceValue, bool> _filter = DefaultFilter;
    /// <summary>
    /// The function to filter data <typeparamref name="TSourceValue" />.
    /// </summary>
    public Func<TSourceValue, bool> Filter
    {
        get { return _filter; }
        set
        {
            if (value == null)
                throw new ArgumentNullException(nameof(value));
            _filter = value;
        }
    }
    /// <summary>
    /// To convert new data using the <see cref="Converter"/>.
    /// </summary>
    /// <param name="values">New source data.</param>
    /// <returns>Data in format <see cref="ICandleBuilder"/>.</returns>
    protected IEnumerable<ICandleBuilderSourceValue> Convert(IEnumerable<TSourceValue> values)
    {
        // Deferred LINQ query: Filter/Converter are captured by reference, so
        // later changes to those properties affect pending enumerations.
        return values.Where(Filter).Select(Converter);
    }
    /// <summary>
    /// To convert and pass new data to the method <see cref="BaseCandleBuilderSource.RaiseProcessing"/>.
    /// </summary>
    /// <param name="series">Candles series.</param>
    /// <param name="values">New source data.</param>
    protected virtual void NewSourceValues(CandleSeries series, IEnumerable<TSourceValue> values)
    {
        RaiseProcessing(series, Convert(values));
    }
}
/// <summary>
/// The data source working directly with ready data collection.
/// </summary>
/// <typeparam name="TSourceValue">The source data type (for example, <see cref="Trade"/>).</typeparam>
public class RawConvertableCandleBuilderSource<TSourceValue> : ConvertableCandleBuilderSource<TSourceValue>
{
    private readonly Security _security;
    private readonly DateTimeOffset _from;
    private readonly DateTimeOffset _to;

    /// <summary>
    /// Initializes a new instance of the <see cref="RawConvertableCandleBuilderSource{T}"/>.
    /// </summary>
    /// <param name="security">The instrument whose data is passed to the source.</param>
    /// <param name="from">The first time value.</param>
    /// <param name="to">The last time value.</param>
    /// <param name="values">Ready data collection.</param>
    public RawConvertableCandleBuilderSource(Security security, DateTimeOffset from, DateTimeOffset to, IEnumerable<TSourceValue> values)
    {
        if (security == null)
            throw new ArgumentNullException(nameof(security));

        if (values == null)
            throw new ArgumentNullException(nameof(values));

        _security = security;
        _from = from;
        _to = to;

        Values = values;
    }

    /// <summary>
    /// Ready data collection.
    /// </summary>
    public IEnumerable<TSourceValue> Values { get; }

    /// <summary>
    /// The source priority by speed (0 - the best).
    /// </summary>
    public override int SpeedPriority => 0;

    /// <summary>
    /// To get time ranges for which this source of passed candles series has data.
    /// </summary>
    /// <param name="series">Candles series.</param>
    /// <returns>Time ranges.</returns>
    public override IEnumerable<Range<DateTimeOffset>> GetSupportedRanges(CandleSeries series)
    {
        if (series == null)
            throw new ArgumentNullException(nameof(series));

        // Data exists only for the instrument this source was built for.
        if (series.Security == _security)
            yield return new Range<DateTimeOffset>(_from, _to);
    }

    /// <summary>
    /// To send data request.
    /// </summary>
    /// <param name="series">The candles series for which data receiving should be started.</param>
    /// <param name="from">The initial date from which you need to get data.</param>
    /// <param name="to">The final date by which you need to get data.</param>
    public override void Start(CandleSeries series, DateTimeOffset from, DateTimeOffset to)
    {
        if (series == null)
            throw new ArgumentNullException(nameof(series));

        // Foreign instrument: nothing to emit, and no Stopped event either.
        if (series.Security != _security)
            return;

        // Push the whole pre-built collection at once, then signal completion.
        NewSourceValues(series, Values);
        RaiseStopped(series);
    }

    /// <summary>
    /// To stop data receiving starting through <see cref="Start"/>.
    /// </summary>
    /// <param name="series">Candles series.</param>
    public override void Stop(CandleSeries series)
    {
        RaiseStopped(series);
    }
}
}
| |
// Copyright (c) 2018 SIL International
// This software is licensed under the MIT License (http://opensource.org/licenses/MIT)
using System;
using System.Runtime.InteropServices;
namespace Icu
{
// P/Invoke surface for the ICU uloc_* locale functions, resolved lazily at
// runtime via GetMethod so the ICU library version can be chosen dynamically.
internal static partial class NativeMethods
{
    // Holds one delegate field per native uloc_* entry point. Each field is
    // resolved on first use by the corresponding public wrapper below.
    private class LocalesMethodsContainer
    {
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getLCIDDelegate([MarshalAs(UnmanagedType.LPStr)]string localeID);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getLocaleForLCIDDelegate(int lcid, IntPtr locale, int localeCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate IntPtr uloc_getISO3CountryDelegate(
            [MarshalAs(UnmanagedType.LPStr)]string locale);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate IntPtr uloc_getISO3LanguageDelegate(
            [MarshalAs(UnmanagedType.LPStr)]string locale);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
        internal delegate int uloc_countAvailableDelegate();
        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
        internal delegate IntPtr uloc_getAvailableDelegate(int n);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getLanguageDelegate(string localeID, IntPtr language,
            int languageCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getScriptDelegate(string localeID, IntPtr script,
            int scriptCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getCountryDelegate(string localeID, IntPtr country,
            int countryCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getVariantDelegate(string localeID, IntPtr variant,
            int variantCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getDisplayNameDelegate(string localeID, string inLocaleID,
            IntPtr result, int maxResultSize, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getDisplayLanguageDelegate(string localeID, string displayLocaleID,
            IntPtr result, int maxResultSize, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getDisplayScriptDelegate(string localeID, string displayLocaleID,
            IntPtr result, int maxResultSize, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getDisplayCountryDelegate(string localeID, string displayLocaleID,
            IntPtr result, int maxResultSize, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getDisplayVariantDelegate(string localeID, string displayLocaleID,
            IntPtr result, int maxResultSize, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getNameDelegate(string localeID, IntPtr name,
            int nameCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_getBaseNameDelegate(string localeID, IntPtr name,
            int nameCapacity, out ErrorCode err);
        [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
        internal delegate int uloc_canonicalizeDelegate(string localeID, IntPtr name,
            int nameCapacity, out ErrorCode err);
        internal uloc_countAvailableDelegate uloc_countAvailable;
        internal uloc_getLCIDDelegate uloc_getLCID;
        internal uloc_getLocaleForLCIDDelegate uloc_getLocaleForLCID;
        internal uloc_getISO3CountryDelegate uloc_getISO3Country;
        internal uloc_getISO3LanguageDelegate uloc_getISO3Language;
        internal uloc_getAvailableDelegate uloc_getAvailable;
        internal uloc_getLanguageDelegate uloc_getLanguage;
        internal uloc_getScriptDelegate uloc_getScript;
        internal uloc_getCountryDelegate uloc_getCountry;
        internal uloc_getVariantDelegate uloc_getVariant;
        internal uloc_getDisplayNameDelegate uloc_getDisplayName;
        internal uloc_getDisplayLanguageDelegate uloc_getDisplayLanguage;
        internal uloc_getDisplayScriptDelegate uloc_getDisplayScript;
        internal uloc_getDisplayCountryDelegate uloc_getDisplayCountry;
        internal uloc_getDisplayVariantDelegate uloc_getDisplayVariant;
        internal uloc_getNameDelegate uloc_getName;
        internal uloc_getBaseNameDelegate uloc_getBaseName;
        internal uloc_canonicalizeDelegate uloc_canonicalize;
    }
    private static LocalesMethodsContainer _LocalesMethods;
    // NOTE(review): lazy init without a lock — a race could construct two
    // containers, which looks harmless here since each delegate field is
    // resolved idempotently on first use.
    private static LocalesMethodsContainer LocalesMethods =>
        _LocalesMethods ??
        (_LocalesMethods = new LocalesMethodsContainer());
    /// <summary>Get the ICU LCID for a locale</summary>
    public static int uloc_getLCID([MarshalAs(UnmanagedType.LPStr)] string localeID)
    {
        if (LocalesMethods.uloc_getLCID == null)
            LocalesMethods.uloc_getLCID = GetMethod<LocalesMethodsContainer.uloc_getLCIDDelegate>(IcuCommonLibHandle, "uloc_getLCID");
        return LocalesMethods.uloc_getLCID(localeID);
    }
    /// <summary>Gets the ICU locale ID for the specified Win32 LCID value. </summary>
    public static int uloc_getLocaleForLCID(int lcid, IntPtr locale, int localeCapacity, out ErrorCode err)
    {
        // Pre-initialize the out parameter; the native call overwrites it.
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getLocaleForLCID == null)
            LocalesMethods.uloc_getLocaleForLCID = GetMethod<LocalesMethodsContainer.uloc_getLocaleForLCIDDelegate>(IcuCommonLibHandle, "uloc_getLocaleForLCID");
        return LocalesMethods.uloc_getLocaleForLCID(lcid, locale, localeCapacity, out err);
    }
    /// <summary>Return the ISO 3 char value, if it exists</summary>
    public static IntPtr uloc_getISO3Country(
        [MarshalAs(UnmanagedType.LPStr)] string locale)
    {
        if (LocalesMethods.uloc_getISO3Country == null)
            LocalesMethods.uloc_getISO3Country = GetMethod<LocalesMethodsContainer.uloc_getISO3CountryDelegate>(IcuCommonLibHandle, "uloc_getISO3Country");
        return LocalesMethods.uloc_getISO3Country(locale);
    }
    /// <summary>Return the ISO 3 char value, if it exists</summary>
    public static IntPtr uloc_getISO3Language(
        [MarshalAs(UnmanagedType.LPStr)] string locale)
    {
        if (LocalesMethods.uloc_getISO3Language == null)
            LocalesMethods.uloc_getISO3Language = GetMethod<LocalesMethodsContainer.uloc_getISO3LanguageDelegate>(IcuCommonLibHandle, "uloc_getISO3Language");
        return LocalesMethods.uloc_getISO3Language(locale);
    }
    /// <summary>
    /// Gets the size of the all available locale list.
    /// </summary>
    /// <returns>the size of the locale list </returns>
    public static int uloc_countAvailable()
    {
        if (LocalesMethods.uloc_countAvailable == null)
            LocalesMethods.uloc_countAvailable = GetMethod<LocalesMethodsContainer.uloc_countAvailableDelegate>(IcuCommonLibHandle, "uloc_countAvailable");
        return LocalesMethods.uloc_countAvailable();
    }
    /// <summary>
    /// Gets the specified locale from a list of all available locales.
    /// The return value is a pointer to an item of a locale name array. Both this array
    /// and the pointers it contains are owned by ICU and should not be deleted or written
    /// through by the caller. The locale name is terminated by a null pointer.
    /// </summary>
    /// <param name="n">n the specific locale name index of the available locale list</param>
    /// <returns>a specified locale name of all available locales</returns>
    public static IntPtr uloc_getAvailable(int n)
    {
        if (LocalesMethods.uloc_getAvailable == null)
            LocalesMethods.uloc_getAvailable = GetMethod<LocalesMethodsContainer.uloc_getAvailableDelegate>(IcuCommonLibHandle, "uloc_getAvailable");
        return LocalesMethods.uloc_getAvailable(n);
    }
    /// <summary>
    /// Gets the language code for the specified locale.
    /// </summary>
    /// <param name="localeID">the locale to get the language code with </param>
    /// <param name="language">the language code for localeID </param>
    /// <param name="languageCapacity">the size of the language buffer to store the language
    /// code with </param>
    /// <param name="err">error information if retrieving the language code failed</param>
    /// <returns>the actual buffer size needed for the language code. If it's greater
    /// than languageCapacity, the returned language code will be truncated</returns>
    public static int uloc_getLanguage(string localeID, IntPtr language,
        int languageCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getLanguage == null)
            LocalesMethods.uloc_getLanguage = GetMethod<LocalesMethodsContainer.uloc_getLanguageDelegate>(IcuCommonLibHandle, "uloc_getLanguage");
        return LocalesMethods.uloc_getLanguage(localeID, language, languageCapacity, out err);
    }
    /// <summary>
    /// Gets the script code for the specified locale.
    /// </summary>
    /// <param name="localeID">the locale to get the script code with </param>
    /// <param name="script">the script code for localeID </param>
    /// <param name="scriptCapacity">the size of the script buffer to store the script
    /// code with </param>
    /// <param name="err">error information if retrieving the script code failed</param>
    /// <returns>the actual buffer size needed for the script code. If it's greater
    /// than scriptCapacity, the returned script code will be truncated</returns>
    public static int uloc_getScript(string localeID, IntPtr script,
        int scriptCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getScript == null)
            LocalesMethods.uloc_getScript = GetMethod<LocalesMethodsContainer.uloc_getScriptDelegate>(IcuCommonLibHandle, "uloc_getScript");
        return LocalesMethods.uloc_getScript(localeID, script, scriptCapacity, out err);
    }
    /// <summary>
    /// Gets the country code for the specified locale.
    /// </summary>
    /// <param name="localeID">the locale to get the country code with </param>
    /// <param name="country">the country code for localeID </param>
    /// <param name="countryCapacity">the size of the country buffer to store the country
    /// code with </param>
    /// <param name="err">error information if retrieving the country code failed</param>
    /// <returns>the actual buffer size needed for the country code. If it's greater
    /// than countryCapacity, the returned country code will be truncated</returns>
    public static int uloc_getCountry(string localeID, IntPtr country,
        int countryCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getCountry == null)
            LocalesMethods.uloc_getCountry = GetMethod<LocalesMethodsContainer.uloc_getCountryDelegate>(IcuCommonLibHandle, "uloc_getCountry");
        return LocalesMethods.uloc_getCountry(localeID, country, countryCapacity, out err);
    }
    /// <summary>
    /// Gets the variant code for the specified locale.
    /// </summary>
    /// <param name="localeID">the locale to get the variant code with </param>
    /// <param name="variant">the variant code for localeID </param>
    /// <param name="variantCapacity">the size of the variant buffer to store the variant
    /// code with </param>
    /// <param name="err">error information if retrieving the variant code failed</param>
    /// <returns>the actual buffer size needed for the variant code. If it's greater
    /// than variantCapacity, the returned variant code will be truncated</returns>
    public static int uloc_getVariant(string localeID, IntPtr variant,
        int variantCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getVariant == null)
            LocalesMethods.uloc_getVariant = GetMethod<LocalesMethodsContainer.uloc_getVariantDelegate>(IcuCommonLibHandle, "uloc_getVariant");
        return LocalesMethods.uloc_getVariant(localeID, variant, variantCapacity, out err);
    }
    /// <summary>
    /// Gets the full name suitable for display for the specified locale.
    /// </summary>
    /// <param name="localeID">the locale to get the displayable name with</param>
    /// <param name="inLocaleID">Specifies the locale to be used to display the name. In
    /// other words, if the locale's language code is "en", passing Locale::getFrench()
    /// for inLocale would result in "Anglais", while passing Locale::getGerman() for
    /// inLocale would result in "Englisch". </param>
    /// <param name="result">the displayable name for localeID</param>
    /// <param name="maxResultSize">the size of the name buffer to store the displayable
    /// full name with</param>
    /// <param name="err">error information if retrieving the displayable name failed</param>
    /// <returns>the actual buffer size needed for the displayable name. If it's greater
    /// than variantCapacity, the returned displayable name will be truncated.</returns>
    public static int uloc_getDisplayName(string localeID, string inLocaleID,
        IntPtr result, int maxResultSize, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getDisplayName == null)
            LocalesMethods.uloc_getDisplayName = GetMethod<LocalesMethodsContainer.uloc_getDisplayNameDelegate>(IcuCommonLibHandle, "uloc_getDisplayName");
        return LocalesMethods.uloc_getDisplayName(localeID, inLocaleID, result, maxResultSize, out err);
    }
    /// <summary>Gets the language name suitable for display for the specified locale,
    /// rendered in the <paramref name="displayLocaleID"/> locale.</summary>
    public static int uloc_getDisplayLanguage(string localeID, string displayLocaleID,
        IntPtr result, int maxResultSize, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getDisplayLanguage == null)
            LocalesMethods.uloc_getDisplayLanguage = GetMethod<LocalesMethodsContainer.uloc_getDisplayLanguageDelegate>(IcuCommonLibHandle, "uloc_getDisplayLanguage");
        return LocalesMethods.uloc_getDisplayLanguage(localeID, displayLocaleID, result, maxResultSize, out err);
    }
    /// <summary>Gets the script name suitable for display for the specified locale,
    /// rendered in the <paramref name="displayLocaleID"/> locale.</summary>
    public static int uloc_getDisplayScript(string localeID, string displayLocaleID,
        IntPtr result, int maxResultSize, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getDisplayScript == null)
            LocalesMethods.uloc_getDisplayScript = GetMethod<LocalesMethodsContainer.uloc_getDisplayScriptDelegate>(IcuCommonLibHandle, "uloc_getDisplayScript");
        return LocalesMethods.uloc_getDisplayScript(localeID, displayLocaleID, result, maxResultSize, out err);
    }
    /// <summary>Gets the country name suitable for display for the specified locale,
    /// rendered in the <paramref name="displayLocaleID"/> locale.</summary>
    public static int uloc_getDisplayCountry(string localeID, string displayLocaleID,
        IntPtr result, int maxResultSize, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getDisplayCountry == null)
            LocalesMethods.uloc_getDisplayCountry = GetMethod<LocalesMethodsContainer.uloc_getDisplayCountryDelegate>(IcuCommonLibHandle, "uloc_getDisplayCountry");
        return LocalesMethods.uloc_getDisplayCountry(localeID, displayLocaleID, result, maxResultSize, out err);
    }
    /// <summary>Gets the variant name suitable for display for the specified locale,
    /// rendered in the <paramref name="displayLocaleID"/> locale.</summary>
    public static int uloc_getDisplayVariant(string localeID, string displayLocaleID,
        IntPtr result, int maxResultSize, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getDisplayVariant == null)
            LocalesMethods.uloc_getDisplayVariant = GetMethod<LocalesMethodsContainer.uloc_getDisplayVariantDelegate>(IcuCommonLibHandle, "uloc_getDisplayVariant");
        return LocalesMethods.uloc_getDisplayVariant(localeID, displayLocaleID, result, maxResultSize, out err);
    }
    /// <summary>Gets the full name for the specified locale, written into the
    /// <paramref name="name"/> buffer.</summary>
    public static int uloc_getName(string localeID, IntPtr name,
        int nameCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getName == null)
            LocalesMethods.uloc_getName = GetMethod<LocalesMethodsContainer.uloc_getNameDelegate>(IcuCommonLibHandle, "uloc_getName");
        return LocalesMethods.uloc_getName(localeID, name, nameCapacity, out err);
    }
    /// <summary>Gets the base name (locale ID without keywords) for the specified
    /// locale, written into the <paramref name="name"/> buffer.</summary>
    public static int uloc_getBaseName(string localeID, IntPtr name,
        int nameCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_getBaseName == null)
            LocalesMethods.uloc_getBaseName = GetMethod<LocalesMethodsContainer.uloc_getBaseNameDelegate>(IcuCommonLibHandle, "uloc_getBaseName");
        return LocalesMethods.uloc_getBaseName(localeID, name, nameCapacity, out err);
    }
    /// <summary>Gets the canonical form of the specified locale ID, written into the
    /// <paramref name="name"/> buffer.</summary>
    public static int uloc_canonicalize(string localeID, IntPtr name,
        int nameCapacity, out ErrorCode err)
    {
        err = ErrorCode.NoErrors;
        if (LocalesMethods.uloc_canonicalize == null)
            LocalesMethods.uloc_canonicalize = GetMethod<LocalesMethodsContainer.uloc_canonicalizeDelegate>(IcuCommonLibHandle, "uloc_canonicalize");
        var res = LocalesMethods.uloc_canonicalize(localeID, name, nameCapacity, out err);
        return res;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.IO;
using OpenMetaverse;
using OpenMetaverse.Imaging;
namespace groupmanager
{
// Group information dialog: requests a group's profile, member list and
// titles from the grid, resolves member names, and populates the UI as the
// asynchronous replies arrive.
public partial class frmGroupInfo : Form
{
    // Group passed in by the caller; its ID drives all follow-up requests.
    Group Group;
    GridClient Client;
    // Filled in asynchronously by the GroupProfile callback.
    Group Profile = new Group();
    Dictionary<UUID, GroupMember> Members = new Dictionary<UUID,GroupMember>();
    Dictionary<UUID, GroupTitle> Titles = new Dictionary<UUID,GroupTitle>();
    // Merged per-member view model used to populate both list views.
    Dictionary<UUID, GroupMemberData> MemberData = new Dictionary<UUID, GroupMemberData>();
    // Cache of agent UUID -> resolved display name.
    Dictionary<UUID, string> Names = new Dictionary<UUID, string>();
    // Delegate instances are kept in fields so the exact same references can
    // be unsubscribed later.
    GroupManager.GroupProfileCallback GroupProfileCallback;
    GroupManager.GroupMembersCallback GroupMembersCallback;
    GroupManager.GroupTitlesCallback GroupTitlesCallback;
    AvatarManager.AvatarNamesCallback AvatarNamesCallback;
    AssetManager.ImageReceivedCallback ImageReceivedCallback;
    public frmGroupInfo(Group group, GridClient client)
    {
        InitializeComponent();
        while (!IsHandleCreated)
        {
            // Force handle creation
            IntPtr temp = Handle;
        }
        GroupProfileCallback = new GroupManager.GroupProfileCallback(GroupProfileHandler);
        GroupMembersCallback = new GroupManager.GroupMembersCallback(GroupMembersHandler);
        GroupTitlesCallback = new GroupManager.GroupTitlesCallback(GroupTitlesHandler);
        AvatarNamesCallback = new AvatarManager.AvatarNamesCallback(AvatarNamesHandler);
        ImageReceivedCallback = new AssetManager.ImageReceivedCallback(Assets_OnImageReceived);
        Group = group;
        Client = client;
        // Register the callbacks for this form
        Client.Assets.OnImageReceived += ImageReceivedCallback;
        Client.Groups.OnGroupProfile += GroupProfileCallback;
        Client.Groups.OnGroupMembers += GroupMembersCallback;
        Client.Groups.OnGroupTitles += GroupTitlesCallback;
        Client.Avatars.OnAvatarNames += AvatarNamesCallback;
        // Request the group information
        Client.Groups.RequestGroupProfile(Group.ID);
        Client.Groups.RequestGroupMembers(Group.ID);
        Client.Groups.RequestGroupTitles(Group.ID);
    }
    // NOTE(review): unsubscribing in a finalizer is unreliable — it only runs
    // at GC time (if at all), so the client keeps firing these handlers after
    // the form closes. Consider FormClosing/Dispose instead — confirm.
    ~frmGroupInfo()
    {
        // Unregister the callbacks for this form
        Client.Assets.OnImageReceived -= ImageReceivedCallback;
        Client.Groups.OnGroupProfile -= GroupProfileCallback;
        Client.Groups.OnGroupMembers -= GroupMembersCallback;
        Client.Groups.OnGroupTitles -= GroupTitlesCallback;
        Client.Avatars.OnAvatarNames -= AvatarNamesCallback;
    }
    // Reply handler for RequestGroupProfile; also kicks off the insignia
    // texture download.
    // NOTE(review): when invoked on the UI thread (InvokeRequired == false)
    // UpdateProfile is never called — looks like a missing else branch; confirm.
    private void GroupProfileHandler(Group profile)
    {
        Profile = profile;
        if (Group.InsigniaID != UUID.Zero)
            Client.Assets.RequestImage(Group.InsigniaID, ImageType.Normal, 113000.0f, 0, 0);
        if (this.InvokeRequired)
            this.BeginInvoke(new MethodInvoker(UpdateProfile));
    }
    // Displays the downloaded group insignia.
    // NOTE(review): likely raised on a network thread; assigning
    // picInsignia.Image without Invoke may violate WinForms thread affinity — confirm.
    void Assets_OnImageReceived(ImageDownload image, AssetTexture assetTexture)
    {
        ManagedImage imgData;
        Image bitmap;
        if (image.Success)
        {
            OpenJPEG.DecodeToImage(image.AssetData, out imgData, out bitmap);
            picInsignia.Image = bitmap;
        }
    }
    // Copies the received profile into the UI controls and starts resolving
    // the founder's name. Must run on the UI thread.
    private void UpdateProfile()
    {
        lblGroupName.Text = Profile.Name;
        txtCharter.Text = Profile.Charter;
        chkShow.Checked = Profile.ShowInList;
        chkPublish.Checked = Profile.AllowPublish;
        chkOpenEnrollment.Checked = Profile.OpenEnrollment;
        chkFee.Checked = (Profile.MembershipFee != 0);
        numFee.Value = Profile.MembershipFee;
        chkMature.Checked = Profile.MaturePublish;
        Client.Avatars.RequestAvatarName(Profile.FounderID);
    }
    // Reply handler for avatar name lookups; merges results into the Names
    // cache, then refreshes the UI.
    private void AvatarNamesHandler(Dictionary<UUID, string> names)
    {
        lock (Names)
        {
            foreach (KeyValuePair<UUID, string> agent in names)
            {
                Names[agent.Key] = agent.Value;
            }
        }
        UpdateNames();
    }
    // Applies resolved names to the founder label and the member view models,
    // marshalling itself onto the UI thread if needed.
    private void UpdateNames()
    {
        if (this.InvokeRequired)
        {
            Invoke(new MethodInvoker(UpdateNames));
        }
        else
        {
            lock (Names)
            {
                if (Profile.FounderID != UUID.Zero && Names.ContainsKey(Profile.FounderID))
                {
                    lblFoundedBy.Text = "Founded by " + Names[Profile.FounderID];
                }
                lock (MemberData)
                {
                    foreach (KeyValuePair<UUID, string> name in Names)
                    {
                        // A name reply can arrive before the member entry;
                        // create a placeholder so the name is not dropped.
                        if (!MemberData.ContainsKey(name.Key))
                        {
                            MemberData[name.Key] = new GroupMemberData();
                        }
                        MemberData[name.Key].Name = name.Value;
                    }
                }
            }
            UpdateMemberList();
        }
    }
    // Rebuilds both member ListViews from MemberData. Runs on the UI thread
    // (only called from the else branch of UpdateNames).
    private void UpdateMemberList()
    {
        // General tab list
        lock (lstMembers)
        {
            lstMembers.Items.Clear();
            foreach (GroupMemberData entry in MemberData.Values)
            {
                // Columns: Name | Title | Last Online.
                ListViewItem lvi = new ListViewItem();
                lvi.Text = entry.Name;
                ListViewItem.ListViewSubItem lvsi = new ListViewItem.ListViewSubItem();
                lvsi.Text = entry.Title;
                lvi.SubItems.Add(lvsi);
                lvsi = new ListViewItem.ListViewSubItem();
                lvsi.Text = entry.LastOnline;
                lvi.SubItems.Add(lvsi);
                lstMembers.Items.Add(lvi);
            }
        }
        // Members tab list
        lock (lstMembers2)
        {
            lstMembers2.Items.Clear();
            foreach (GroupMemberData entry in MemberData.Values)
            {
                // Columns: Name | Contribution | Last Online.
                ListViewItem lvi = new ListViewItem();
                lvi.Text = entry.Name;
                ListViewItem.ListViewSubItem lvsi = new ListViewItem.ListViewSubItem();
                lvsi.Text = entry.Contribution.ToString();
                lvi.SubItems.Add(lvsi);
                lvsi = new ListViewItem.ListViewSubItem();
                lvsi.Text = entry.LastOnline;
                lvi.SubItems.Add(lvsi);
                lstMembers2.Items.Add(lvi);
            }
        }
    }
    // Reply handler for RequestGroupMembers.
    // NOTE(review): Members is replaced without taking the lock used in
    // UpdateMembers — confirm whether that race matters here.
    private void GroupMembersHandler(Dictionary<UUID, GroupMember> members)
    {
        Members = members;
        UpdateMembers();
    }
    // Converts raw GroupMember records into GroupMemberData view models and
    // batches a name lookup for all member IDs.
    private void UpdateMembers()
    {
        if (this.InvokeRequired)
        {
            Invoke(new MethodInvoker(UpdateMembers));
        }
        else
        {
            List<UUID> requestids = new List<UUID>();
            lock (Members)
            {
                lock (MemberData)
                {
                    foreach (GroupMember member in Members.Values)
                    {
                        GroupMemberData memberData = new GroupMemberData();
                        memberData.ID = member.ID;
                        memberData.IsOwner = member.IsOwner;
                        memberData.LastOnline = member.OnlineStatus;
                        memberData.Powers = (ulong)member.Powers;
                        memberData.Title = member.Title;
                        memberData.Contribution = member.Contribution;
                        MemberData[member.ID] = memberData;
                        // Add this ID to the name request batch
                        requestids.Add(member.ID);
                    }
                }
            }
            Client.Avatars.RequestAvatarNames(requestids);
        }
    }
    // Reply handler for RequestGroupTitles.
    private void GroupTitlesHandler(Dictionary<UUID, GroupTitle> titles)
    {
        Titles = titles;
        UpdateTitles();
    }
    // Currently only dumps the received titles to the console; there is no
    // titles UI yet.
    private void UpdateTitles()
    {
        if (this.InvokeRequired)
        {
            Invoke(new MethodInvoker(UpdateTitles));
        }
        else
        {
            lock (Titles)
            {
                foreach (KeyValuePair<UUID, GroupTitle> kvp in Titles)
                {
                    Console.Write("Title: " + kvp.Value.Title + " = " + kvp.Key.ToString());
                    if (kvp.Value.Selected)
                        Console.WriteLine(" (Selected)");
                    else
                        Console.WriteLine();
                }
            }
        }
    }
}
// Flattened, UI-friendly snapshot of a group member, merged from the group
// members reply (UpdateMembers) and the avatar name lookup (UpdateNames).
public class GroupMemberData
{
    public UUID ID;          // Agent UUID of the member.
    public string Name;      // Resolved avatar name; filled in asynchronously.
    public string Title;     // Group title shown on the General tab.
    public string LastOnline; // Last-online text as reported by the grid.
    public ulong Powers;     // Group power bits, cast from the Powers enum.
    public bool IsOwner;     // True if the member owns the group.
    public int Contribution; // Land contribution shown on the Members tab.
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Tests.Cache
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Transactions;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Configuration;
using Apache.Ignite.Core.Transactions;
using NUnit.Framework;
/// <summary>
/// Transactional cache tests.
/// </summary>
public abstract class CacheAbstractTransactionalTest : CacheAbstractTest
{
/// <summary>
/// Simple cache lock test (while <see cref="TestLock"/> is ignored).
/// </summary>
[Test]
public void TestLockSimple()
{
    var cache = Cache();
    const int key = 7;
    // Shared routine: checks enter/exit state transitions for one lock
    // instance and that the lock becomes unusable after disposal. The
    // assertion order matters — each IsLocalLocked check reflects the state
    // produced by the statement just before it.
    Action<ICacheLock> checkLock = lck =>
    {
        using (lck)
        {
            Assert.Throws<InvalidOperationException>(lck.Exit); // can't exit if not entered
            lck.Enter();
            // While held, both IsLocalLocked variants report the key as locked
            // (second argument presumably "by current thread" — per Ignite API).
            Assert.IsTrue(cache.IsLocalLocked(key, true));
            Assert.IsTrue(cache.IsLocalLocked(key, false));
            lck.Exit();
            Assert.IsFalse(cache.IsLocalLocked(key, true));
            Assert.IsFalse(cache.IsLocalLocked(key, false));
            // TryEnter should succeed immediately on an uncontended lock.
            Assert.IsTrue(lck.TryEnter());
            Assert.IsTrue(cache.IsLocalLocked(key, true));
            Assert.IsTrue(cache.IsLocalLocked(key, false));
            lck.Exit();
        }
        Assert.Throws<ObjectDisposedException>(lck.Enter); // Can't enter disposed lock
    };
    // Exercise both the single-key lock and the multi-key variant.
    checkLock(cache.Lock(key));
    checkLock(cache.LockAll(new[] { key, 1, 2, 3 }));
}
/// <summary>
/// Tests cache locks.
/// </summary>
[Test]
[Ignore("IGNITE-835")]
public void TestLock()
{
    var cache = Cache();

    const int lockedKey = 7;

    // Exercise the single-key lock first, then the multi-key variant that
    // includes the same key.
    CheckLock(cache, lockedKey, () => cache.Lock(lockedKey));
    CheckLock(cache, lockedKey, () => cache.LockAll(new[] { lockedKey, 2, 3, 4, 5 }));
}
/// <summary>
/// Internal lock test routine.
/// </summary>
/// <param name="cache">Cache.</param>
/// <param name="key">Key.</param>
/// <param name="getLock">Function to get the lock.</param>
private static void CheckLock(ICache<int, int> cache, int key, Func<ICacheLock> getLock)
{
    var sharedLock = getLock();
    using (sharedLock)
    {
        Assert.Throws<InvalidOperationException>(() => sharedLock.Exit()); // can't exit if not entered
        sharedLock.Enter();
        try
        {
            Assert.IsTrue(cache.IsLocalLocked(key, true));
            Assert.IsTrue(cache.IsLocalLocked(key, false));
            // Another thread must not be able to take the lock while we hold it.
            EnsureCannotLock(getLock, sharedLock);
            // Re-enter on the same thread: the lock is expected to be reentrant.
            sharedLock.Enter();
            try
            {
                Assert.IsTrue(cache.IsLocalLocked(key, true));
                Assert.IsTrue(cache.IsLocalLocked(key, false));
                EnsureCannotLock(getLock, sharedLock);
            }
            finally
            {
                sharedLock.Exit();
            }
            // One Exit of two nested Enters: still held by this thread.
            Assert.IsTrue(cache.IsLocalLocked(key, true));
            Assert.IsTrue(cache.IsLocalLocked(key, false));
            EnsureCannotLock(getLock, sharedLock);
            Assert.Throws<SynchronizationLockException>(() => sharedLock.Dispose()); // can't dispose while locked
        }
        finally
        {
            sharedLock.Exit();
        }
        // Fully released: no local locks should remain on the key.
        Assert.IsFalse(cache.IsLocalLocked(key, true));
        Assert.IsFalse(cache.IsLocalLocked(key, false));
        // A different thread can now acquire both the shared instance and a
        // freshly created lock (run on a Task to get off this thread).
        var innerTask = new Task(() =>
        {
            Assert.IsTrue(sharedLock.TryEnter());
            sharedLock.Exit();
            using (var otherLock = getLock())
            {
                Assert.IsTrue(otherLock.TryEnter());
                otherLock.Exit();
            }
        });
        innerTask.Start();
        innerTask.Wait();
    }
    Assert.IsFalse(cache.IsLocalLocked(key, true));
    Assert.IsFalse(cache.IsLocalLocked(key, false));
    // After the shared lock is disposed, other threads can still create and
    // use new lock instances for the same key.
    var outerTask = new Task(() =>
    {
        using (var otherLock = getLock())
        {
            Assert.IsTrue(otherLock.TryEnter());
            otherLock.Exit();
        }
    });
    outerTask.Start();
    outerTask.Wait();
    Assert.Throws<ObjectDisposedException>(() => sharedLock.Enter()); // Can't enter disposed lock
}
/// <summary>
/// Ensure that lock cannot be obtained by other threads.
/// Runs the attempts on a separate task because cache locks are
/// reentrant for the owning thread.
/// </summary>
/// <param name="getLock">Get lock function.</param>
/// <param name="sharedLock">Shared lock.</param>
private static void EnsureCannotLock(Func<ICacheLock> getLock, ICacheLock sharedLock)
{
    var task = new Task(() =>
    {
        // Neither immediate nor timed TryEnter may succeed from a foreign thread.
        Assert.IsFalse(sharedLock.TryEnter());
        Assert.IsFalse(sharedLock.TryEnter(TimeSpan.FromMilliseconds(100)));
        using (var otherLock = getLock())
        {
            Assert.IsFalse(otherLock.TryEnter());
            Assert.IsFalse(otherLock.TryEnter(TimeSpan.FromMilliseconds(100)));
        }
    });
    task.Start();
    task.Wait();
}
/// <summary>
/// Tests that commit (sync or async) applies cache changes.
/// </summary>
/// <param name="async">Whether to commit via <c>CommitAsync</c>.</param>
[Test]
public void TestTxCommit([Values(true, false)] bool async)
{
    var cache = Cache();
    Assert.IsNull(Transactions.Tx); // no ambient tx before start
    using (var tx = Transactions.TxStart())
    {
        cache.Put(1, 1);
        cache.Put(2, 2);
        if (async)
        {
            var task = tx.CommitAsync();
            task.Wait();
            Assert.IsTrue(task.IsCompleted);
        }
        else
            tx.Commit();
    }
    // Committed values are visible and the ambient tx is cleared.
    Assert.AreEqual(1, cache.Get(1));
    Assert.AreEqual(2, cache.Get(2));
    Assert.IsNull(Transactions.Tx);
}
/// <summary>
/// Tests that an explicit rollback reverts cache changes.
/// </summary>
[Test]
public void TestTxRollback()
{
    var cache = Cache();
    // Seed initial values outside any transaction.
    cache.Put(1, 1);
    cache.Put(2, 2);
    Assert.IsNull(Transactions.Tx);
    using (var tx = Transactions.TxStart())
    {
        cache.Put(1, 10);
        cache.Put(2, 20);
        tx.Rollback();
    }
    // Rolled-back writes must not be visible.
    Assert.AreEqual(1, cache.Get(1));
    Assert.AreEqual(2, cache.Get(2));
    Assert.IsNull(Transactions.Tx);
}
/// <summary>
/// Tests that Dispose without Commit reverts changes (implicit rollback).
/// </summary>
[Test]
public void TestTxClose()
{
    var cache = Cache();
    cache.Put(1, 1);
    cache.Put(2, 2);
    Assert.IsNull(Transactions.Tx);
    // No Commit call: leaving the using block disposes the tx.
    using (Transactions.TxStart())
    {
        cache.Put(1, 10);
        cache.Put(2, 20);
    }
    Assert.AreEqual(1, cache.Get(1));
    Assert.AreEqual(2, cache.Get(2));
    Assert.IsNull(Transactions.Tx);
}
/// <summary>
/// Tests all concurrency and isolation mode combinations, with and without timeout.
/// </summary>
/// <param name="withTimeout">Whether to start the tx with an explicit timeout/size.</param>
[Test]
public void TestTxAllModes([Values(true, false)] bool withTimeout)
{
    var cache = Cache();
    int cntr = 0; // distinct value per combination so each commit is observable
    foreach (TransactionConcurrency concurrency in Enum.GetValues(typeof(TransactionConcurrency)))
    {
        foreach (TransactionIsolation isolation in Enum.GetValues(typeof(TransactionIsolation)))
        {
            Console.WriteLine("Test tx [concurrency=" + concurrency + ", isolation=" + isolation + "]");
            Assert.IsNull(Transactions.Tx);
            using (var tx = withTimeout
                ? Transactions.TxStart(concurrency, isolation, TimeSpan.FromMilliseconds(1100), 10)
                : Transactions.TxStart(concurrency, isolation))
            {
                // Started tx must echo the requested modes.
                Assert.AreEqual(concurrency, tx.Concurrency);
                Assert.AreEqual(isolation, tx.Isolation);
                if (withTimeout)
                    Assert.AreEqual(1100, tx.Timeout.TotalMilliseconds);
                cache.Put(1, cntr);
                tx.Commit();
            }
            Assert.IsNull(Transactions.Tx);
            Assert.AreEqual(cntr, cache.Get(1));
            cntr++;
        }
    }
}
/// <summary>
/// Tests that transaction properties (concurrency, isolation, timeout, state,
/// start time, node id) are applied at start and preserved across commit/rollback.
/// </summary>
[Test]
public void TestTxAttributes()
{
    ITransaction tx = Transactions.TxStart(TransactionConcurrency.Optimistic,
        TransactionIsolation.RepeatableRead, TimeSpan.FromMilliseconds(2500), 100);
    Assert.IsFalse(tx.IsRollbackOnly);
    Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
    Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
    Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
    Assert.AreEqual(TransactionState.Active, tx.State);
    Assert.IsTrue(tx.StartTime.Ticks > 0);
    Assert.AreEqual(tx.NodeId, GetIgnite(0).GetCluster().GetLocalNode().Id);
    DateTime startTime1 = tx.StartTime;
    tx.Commit();
    // Attributes survive commit; state flips to Committed.
    Assert.IsFalse(tx.IsRollbackOnly);
    Assert.AreEqual(TransactionState.Committed, tx.State);
    Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
    Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
    Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
    Assert.AreEqual(startTime1, tx.StartTime);
    Thread.Sleep(100); // ensure the next tx gets a strictly later start time
    tx = Transactions.TxStart(TransactionConcurrency.Pessimistic, TransactionIsolation.ReadCommitted,
        TimeSpan.FromMilliseconds(3500), 200);
    Assert.IsFalse(tx.IsRollbackOnly);
    Assert.AreEqual(TransactionConcurrency.Pessimistic, tx.Concurrency);
    Assert.AreEqual(TransactionIsolation.ReadCommitted, tx.Isolation);
    Assert.AreEqual(3500, tx.Timeout.TotalMilliseconds);
    Assert.AreEqual(TransactionState.Active, tx.State);
    Assert.IsTrue(tx.StartTime.Ticks > 0);
    Assert.IsTrue(tx.StartTime > startTime1);
    DateTime startTime2 = tx.StartTime;
    tx.Rollback();
    // Attributes survive rollback as well.
    Assert.AreEqual(TransactionState.RolledBack, tx.State);
    Assert.AreEqual(TransactionConcurrency.Pessimistic, tx.Concurrency);
    Assert.AreEqual(TransactionIsolation.ReadCommitted, tx.Isolation);
    Assert.AreEqual(3500, tx.Timeout.TotalMilliseconds);
    Assert.AreEqual(startTime2, tx.StartTime);
    Thread.Sleep(100);
    tx = Transactions.TxStart(TransactionConcurrency.Optimistic, TransactionIsolation.RepeatableRead,
        TimeSpan.FromMilliseconds(2500), 100);
    Assert.IsFalse(tx.IsRollbackOnly);
    Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
    Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
    Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
    Assert.AreEqual(TransactionState.Active, tx.State);
    Assert.IsTrue(tx.StartTime > startTime2);
    DateTime startTime3 = tx.StartTime;
    tx.Commit();
    Assert.IsFalse(tx.IsRollbackOnly);
    Assert.AreEqual(TransactionState.Committed, tx.State);
    Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
    Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
    Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
    Assert.AreEqual(startTime3, tx.StartTime);
    // Check defaults.
    tx = Transactions.TxStart();
    Assert.AreEqual(Transactions.DefaultTransactionConcurrency, tx.Concurrency);
    Assert.AreEqual(Transactions.DefaultTransactionIsolation, tx.Isolation);
    Assert.AreEqual(Transactions.DefaultTimeout, tx.Timeout);
    tx.Commit();
}
/// <summary>
/// Tests <see cref="ITransaction.IsRollbackOnly"/> flag: once set, commit fails
/// and disposal rolls the transaction back.
/// </summary>
[Test]
public void TestTxRollbackOnly()
{
    var cache = Cache();
    cache.Put(1, 1);
    cache.Put(2, 2);
    var tx = Transactions.TxStart();
    cache.Put(1, 10);
    cache.Put(2, 20);
    Assert.IsFalse(tx.IsRollbackOnly);
    tx.SetRollbackonly(); // NOTE: lowercase 'only' is the actual Ignite.NET API spelling
    Assert.IsTrue(tx.IsRollbackOnly);
    Assert.AreEqual(TransactionState.MarkedRollback, tx.State);
    // Commit is rejected with the Java-side error message.
    var ex = Assert.Throws<TransactionRollbackException>(() => tx.Commit());
    Assert.IsTrue(ex.Message.StartsWith("Invalid transaction state for prepare [state=MARKED_ROLLBACK"));
    tx.Dispose();
    Assert.AreEqual(TransactionState.RolledBack, tx.State);
    Assert.IsTrue(tx.IsRollbackOnly);
    // Original values restored; ambient tx cleared.
    Assert.AreEqual(1, cache.Get(1));
    Assert.AreEqual(2, cache.Get(2));
    Assert.IsNull(Transactions.Tx);
}
/// <summary>
/// Tests transaction metrics: one implicit rollback (dispose without commit)
/// and one commit, verified against reset metrics and a time window.
/// </summary>
[Test]
public void TestTxMetrics()
{
    var cache = Cache();
    // 1-second slack guards against clock granularity between client and cluster.
    var startTime = DateTime.UtcNow.AddSeconds(-1);
    Transactions.ResetMetrics();
    var metrics = Transactions.GetMetrics();
    Assert.AreEqual(0, metrics.TxCommits);
    Assert.AreEqual(0, metrics.TxRollbacks);
    using (Transactions.TxStart())
    {
        cache.Put(1, 1);
    } // disposed without commit -> counted as rollback
    using (var tx = Transactions.TxStart())
    {
        cache.Put(1, 1);
        tx.Commit();
    }
    metrics = Transactions.GetMetrics();
    Assert.AreEqual(1, metrics.TxCommits);
    Assert.AreEqual(1, metrics.TxRollbacks);
    // Both timestamps fall within [startTime, now].
    Assert.LessOrEqual(startTime, metrics.CommitTime);
    Assert.LessOrEqual(startTime, metrics.RollbackTime);
    Assert.GreaterOrEqual(DateTime.UtcNow, metrics.CommitTime);
    Assert.GreaterOrEqual(DateTime.UtcNow, metrics.RollbackTime);
}
/// <summary>
/// Tests transaction state transitions, metadata round-trip, and the errors
/// produced by operations on completed transactions.
/// </summary>
[Test]
public void TestTxStateAndExceptions()
{
    var tx = Transactions.TxStart();
    Assert.AreEqual(TransactionState.Active, tx.State);
    Assert.AreEqual(Thread.CurrentThread.ManagedThreadId, tx.ThreadId);
    // Metadata attach/read/remove round-trip.
    tx.AddMeta("myMeta", 42);
    Assert.AreEqual(42, tx.Meta<int>("myMeta"));
    Assert.AreEqual(42, tx.RemoveMeta<int>("myMeta"));
    tx.RollbackAsync().Wait();
    Assert.AreEqual(TransactionState.RolledBack, tx.State);
    Assert.Throws<InvalidOperationException>(() => tx.Commit()); // can't commit a rolled-back tx
    tx = Transactions.TxStart();
    Assert.AreEqual(TransactionState.Active, tx.State);
    tx.CommitAsync().Wait();
    Assert.AreEqual(TransactionState.Committed, tx.State);
    var task = tx.RollbackAsync(); // Illegal, but should not fail here; will fail in task
    Assert.Throws<AggregateException>(() => task.Wait());
}
/// <summary>
/// Tests the transaction deadlock detection: two pessimistic transactions lock
/// the same key set in opposite orders and one must fail with
/// <see cref="TransactionDeadlockException"/>.
/// </summary>
[Test]
public void TestTxDeadlockDetection()
{
    if (LocalCache())
    {
        // Deadlock detection requires a distributed cache; skip for local mode.
        return;
    }
    var cache = Cache();
    var keys0 = Enumerable.Range(1, 100).ToArray();
    cache.PutAll(keys0.ToDictionary(x => x, x => x));
    // Barrier makes both transactions hold their partial lock sets simultaneously.
    var barrier = new Barrier(2);
    Action<int[]> increment = keys =>
    {
        using (var tx = Transactions.TxStart(TransactionConcurrency.Pessimistic,
            TransactionIsolation.RepeatableRead, TimeSpan.FromSeconds(0.5), 0))
        {
            foreach (var key in keys)
                cache[key]++;
            barrier.SignalAndWait(500);
            tx.Commit();
        }
    };
    // Increment keys within tx in different order to cause a deadlock.
    var aex = Assert.Throws<AggregateException>(() =>
        Task.WaitAll(new[]
            {
                Task.Factory.StartNew(() => increment(keys0)),
                Task.Factory.StartNew(() => increment(keys0.Reverse().ToArray()))
            },
            TimeSpan.FromSeconds(40)));
    Assert.AreEqual(2, aex.InnerExceptions.Count);
    var deadlockEx = aex.InnerExceptions.OfType<TransactionDeadlockException>().First();
    Assert.IsTrue(deadlockEx.Message.Trim().StartsWith("Deadlock detected:"), deadlockEx.Message);
}
/// <summary>
/// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>:
/// completed scopes commit, abandoned scopes roll back.
/// </summary>
[Test]
public void TestTransactionScopeSingleCache()
{
    var cache = Cache();
    cache[1] = 1;
    cache[2] = 2;
    // Commit.
    using (var ts = new TransactionScope())
    {
        cache[1] = 10;
        cache[2] = 20;
        // Cache ops inside the scope must have started an Ignite tx.
        Assert.IsNotNull(cache.Ignite.GetTransactions().Tx);
        ts.Complete();
    }
    Assert.AreEqual(10, cache[1]);
    Assert.AreEqual(20, cache[2]);
    // Rollback: scope disposed without Complete().
    using (new TransactionScope())
    {
        cache[1] = 100;
        cache[2] = 200;
    }
    Assert.AreEqual(10, cache[1]);
    Assert.AreEqual(20, cache[2]);
}
/// <summary>
/// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>
/// with multiple participating caches, using sync or async puts.
/// </summary>
/// <param name="async">Whether to use PutAsync instead of Put.</param>
[Test]
public void TestTransactionScopeMultiCache([Values(true, false)] bool async)
{
    var cache1 = Cache();
    // Second transactional cache with a derived name to join the same scope.
    var cache2 = GetIgnite(0).GetOrCreateCache<int, int>(new CacheConfiguration(cache1.Name + "_")
    {
        AtomicityMode = CacheAtomicityMode.Transactional
    });
    cache1[1] = 1;
    cache2[1] = 2;
    // Commit.
    using (var ts = new TransactionScope())
    {
        if (async)
        {
            // Async ops are awaited implicitly when the scope completes.
            cache1.PutAsync(1, 10);
            cache2.PutAsync(1, 20);
        }
        else
        {
            cache1.Put(1, 10);
            cache2.Put(1, 20);
        }
        ts.Complete();
    }
    Assert.AreEqual(10, cache1[1]);
    Assert.AreEqual(20, cache2[1]);
    // Rollback.
    using (new TransactionScope())
    {
        if (async)
        {
            cache1.PutAsync(1, 100);
            cache2.PutAsync(1, 200);
        }
        else
        {
            cache1.Put(1, 100);
            cache2.Put(1, 200);
        }
    }
    Assert.AreEqual(10, cache1[1]);
    Assert.AreEqual(20, cache2[1]);
}
/// <summary>
/// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>
/// when Ignite tx is started manually: the manual tx wins and the scope is ignored.
/// </summary>
[Test]
public void TestTransactionScopeWithManualIgniteTx()
{
    var cache = Cache();
    var transactions = cache.Ignite.GetTransactions();
    cache[1] = 1;
    // When Ignite tx is started manually, it won't be enlisted in TransactionScope.
    using (var tx = transactions.TxStart())
    {
        using (new TransactionScope())
        {
            cache[1] = 2;
        } // Revert transaction scope.
        tx.Commit(); // Commit manual tx.
    }
    // The manual commit applies the write despite the abandoned scope.
    Assert.AreEqual(2, cache[1]);
}
/// <summary>
/// Test Ignite transaction with <see cref="TransactionScopeOption.Suppress"/> option:
/// cache operations execute non-transactionally inside a suppressed scope.
/// </summary>
[Test]
public void TestSuppressedTransactionScope()
{
    var cache = Cache();
    cache[1] = 1;
    using (new TransactionScope(TransactionScopeOption.Suppress))
    {
        cache[1] = 2;
    }
    // Even though transaction is not completed, the value is updated, because tx is suppressed.
    Assert.AreEqual(2, cache[1]);
}
/// <summary>
/// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>
/// with nested scopes (Required joins the outer tx, RequiresNew starts its own).
/// </summary>
[Test]
public void TestNestedTransactionScope()
{
    var cache = Cache();
    cache[1] = 1;
    foreach (var option in new[] {TransactionScopeOption.Required, TransactionScopeOption.RequiresNew})
    {
        // Commit: both scopes complete, final value comes from the outer scope.
        using (var ts1 = new TransactionScope())
        {
            using (var ts2 = new TransactionScope(option))
            {
                cache[1] = 2;
                ts2.Complete();
            }
            cache[1] = 3;
            ts1.Complete();
        }
        Assert.AreEqual(3, cache[1]);
        // Rollback: neither scope completes.
        using (new TransactionScope())
        {
            using (new TransactionScope(option))
                cache[1] = 4;
            cache[1] = 5;
        }
        // In case with Required option there is a single tx
        // that gets aborted, second put executes outside the tx.
        Assert.AreEqual(option == TransactionScopeOption.Required ? 5 : 3, cache[1], option.ToString());
    }
}
/// <summary>
/// Test that ambient <see cref="TransactionScope"/> isolation levels map to the
/// expected Ignite transaction isolation (unsupported levels fall back to ReadCommitted).
/// </summary>
[Test]
public void TestTransactionScopeOptions()
{
    var cache = Cache();
    var transactions = cache.Ignite.GetTransactions();
    // Item1: System.Transactions level; Item2: expected Ignite isolation.
    var modes = new[]
    {
        Tuple.Create(IsolationLevel.Serializable, TransactionIsolation.Serializable),
        Tuple.Create(IsolationLevel.RepeatableRead, TransactionIsolation.RepeatableRead),
        Tuple.Create(IsolationLevel.ReadCommitted, TransactionIsolation.ReadCommitted),
        Tuple.Create(IsolationLevel.ReadUncommitted, TransactionIsolation.ReadCommitted),
        Tuple.Create(IsolationLevel.Snapshot, TransactionIsolation.ReadCommitted),
        Tuple.Create(IsolationLevel.Chaos, TransactionIsolation.ReadCommitted),
    };
    foreach (var mode in modes)
    {
        using (new TransactionScope(TransactionScopeOption.Required, new TransactionOptions
        {
            IsolationLevel = mode.Item1
        }))
        {
            cache[1] = 1; // triggers tx start with the scope's options
            var tx = transactions.Tx;
            Assert.AreEqual(mode.Item2, tx.Isolation);
            Assert.AreEqual(transactions.DefaultTransactionConcurrency, tx.Concurrency);
        }
    }
}
/// <summary>
/// Tests all transactional cache operations (sync and async variants) with
/// <see cref="TransactionScope"/>, repeated to catch intermittent enlistment issues.
/// </summary>
[Test]
public void TestTransactionScopeAllOperations()
{
    for (var i = 0; i < 10; i++)
    {
        CheckTxOp((cache, key) => cache.Put(key, -5));
        CheckTxOp((cache, key) => cache.PutAsync(key, -5));
        CheckTxOp((cache, key) => cache.PutAll(new Dictionary<int, int> {{key, -7}}));
        CheckTxOp((cache, key) => cache.PutAllAsync(new Dictionary<int, int> {{key, -7}}));
        // PutIfAbsent needs the key absent first to be a mutating op.
        CheckTxOp((cache, key) =>
        {
            cache.Remove(key);
            cache.PutIfAbsent(key, -10);
        });
        CheckTxOp((cache, key) =>
        {
            cache.Remove(key);
            cache.PutIfAbsentAsync(key, -10);
        });
        CheckTxOp((cache, key) => cache.GetAndPut(key, -9));
        CheckTxOp((cache, key) => cache.GetAndPutAsync(key, -9));
        CheckTxOp((cache, key) =>
        {
            cache.Remove(key);
            cache.GetAndPutIfAbsent(key, -10);
        });
        CheckTxOp((cache, key) =>
        {
            cache.Remove(key);
            cache.GetAndPutIfAbsentAsync(key, -10);
        });
        CheckTxOp((cache, key) => cache.GetAndRemove(key));
        CheckTxOp((cache, key) => cache.GetAndRemoveAsync(key));
        CheckTxOp((cache, key) => cache.GetAndReplace(key, -11));
        CheckTxOp((cache, key) => cache.GetAndReplaceAsync(key, -11));
        CheckTxOp((cache, key) => cache.Invoke(key, new AddProcessor(), 1));
        CheckTxOp((cache, key) => cache.InvokeAsync(key, new AddProcessor(), 1));
        CheckTxOp((cache, key) => cache.InvokeAll(new[] {key}, new AddProcessor(), 1));
        CheckTxOp((cache, key) => cache.InvokeAllAsync(new[] {key}, new AddProcessor(), 1));
        CheckTxOp((cache, key) => cache.Remove(key));
        CheckTxOp((cache, key) => cache.RemoveAsync(key));
        CheckTxOp((cache, key) => cache.RemoveAll(new[] {key}));
        CheckTxOp((cache, key) => cache.RemoveAllAsync(new[] {key}));
        CheckTxOp((cache, key) => cache.Replace(key, 100));
        CheckTxOp((cache, key) => cache.ReplaceAsync(key, 100));
        CheckTxOp((cache, key) => cache.Replace(key, cache[key], 100));
        CheckTxOp((cache, key) => cache.ReplaceAsync(key, cache[key], 100));
    }
}
/// <summary>
/// Checks that a cache operation behaves transactionally under every isolation level:
/// it is reverted by an abandoned scope and applied by a completed one.
/// </summary>
/// <param name="act">The cache mutation to verify; must change the value at the given key.</param>
private void CheckTxOp(Action<ICache<int, int>, int> act)
{
    var isolationLevels = new[]
    {
        IsolationLevel.Serializable, IsolationLevel.RepeatableRead, IsolationLevel.ReadCommitted,
        IsolationLevel.ReadUncommitted, IsolationLevel.Snapshot, IsolationLevel.Chaos
    };
    foreach (var isolationLevel in isolationLevels)
    {
        var txOpts = new TransactionOptions {IsolationLevel = isolationLevel};
        const TransactionScopeOption scope = TransactionScopeOption.Required;
        var cache = Cache();
        // Reset known values before each scenario.
        cache[1] = 1;
        cache[2] = 2;
        // Rollback.
        using (new TransactionScope(scope, txOpts))
        {
            act(cache, 1);
            Assert.IsNotNull(cache.Ignite.GetTransactions().Tx, "Transaction has not started.");
        }
        Assert.AreEqual(1, cache[1]);
        Assert.AreEqual(2, cache[2]);
        using (new TransactionScope(scope, txOpts))
        {
            act(cache, 1);
            act(cache, 2);
        }
        Assert.AreEqual(1, cache[1]);
        Assert.AreEqual(2, cache[2]);
        // Commit.
        using (var ts = new TransactionScope(scope, txOpts))
        {
            act(cache, 1);
            ts.Complete();
        }
        // Op may remove the key or change its value; either proves it was applied.
        Assert.IsTrue(!cache.ContainsKey(1) || cache[1] != 1);
        Assert.AreEqual(2, cache[2]);
        using (var ts = new TransactionScope(scope, txOpts))
        {
            act(cache, 1);
            act(cache, 2);
            ts.Complete();
        }
        Assert.IsTrue(!cache.ContainsKey(1) || cache[1] != 1);
        Assert.IsTrue(!cache.ContainsKey(2) || cache[2] != 2);
    }
}
/// <summary>
/// Entry processor that adds the argument to the entry value and echoes the argument back.
/// Serializable so it can be shipped to remote nodes.
/// </summary>
[Serializable]
private class AddProcessor : ICacheEntryProcessor<int, int, int, int>
{
    /// <summary>Adds <paramref name="arg"/> to the entry and returns <paramref name="arg"/>.</summary>
    public int Process(IMutableCacheEntry<int, int> entry, int arg)
    {
        var updated = entry.Value + arg;
        entry.Value = updated;
        return arg;
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace BackendlessAPI.Utils
{
/**
* The <code>StreamTokenizer</code> class takes an input stream and
* parses it into "tokens", allowing the tokens to be
* Read one at a time. The parsing process is controlled by a table
* and a number of flags that can be set to various states. The
* stream tokenizer can recognize identifiers, numbers, quoted
* strings, and various comment styles.
* <p>
* Each byte Read from the input stream is regarded as a character
* in the range <code>'\u0000'</code> through <code>'\u00FF'</code>.
* The character value is used to look up five possible attributes of
* the character: <i>white space</i>, <i>alphabetic</i>,
* <i>numeric</i>, <i>string quote</i>, and <i>comment character</i>.
* Each character can have zero or more of these attributes.
* <p>
* In addition, an instance has four flags. These flags indicate:
* <ul>
* <li>Whether line terminators are to be returned as tokens or treated
* as white space that merely separates tokens.
* <li>Whether C-style comments are to be recognized and skipped.
* <li>Whether C++-style comments are to be recognized and skipped.
* <li>Whether the characters of identifiers are converted to lowercase.
* </ul>
* <p>
* A typical application first constructs an instance of this class,
* sets up the syntax tables, and then repeatedly loops calling the
* <code>nextToken</code> method in each iteration of the loop until
* it returns the value <code>TT_EOF</code>.
*
*/
public class StreamTokenizer : IEnumerable<int>
{
    /* The character source. Only one source kind is supported in this port. */
    private TextReader reader = null;
    // Word-accumulation buffer; grown (doubled) on demand by the tokenizer.
    private char[] buf = new char[20];
    /*
     * The next character to be considered by the nextToken method. May also
     * be NEED_CHAR to indicate that a new character should be Read, or SKIP_LF
     * to indicate that a new character should be Read and, if it is a '\n'
     * character, it should be discarded and a second new character should be
     * Read.
     */
    private int peekc = NEED_CHAR;
    // Sentinel values for peekc; chosen outside the valid character range.
    private const int NEED_CHAR = Int32.MaxValue;
    private const int SKIP_LF = Int32.MaxValue - 1;
    private bool pushedBack;   // true when the last token was pushed back and should be re-returned
    private bool forceLower;   // lowercase word tokens when LowerCaseMode is set
    // Behavior flags (see corresponding properties/methods).
    private bool eolIsSignificantP = false;
    private bool slashSlashCommentsP = false;
    private bool slashStarCommentsP = false;
    // Per-character attribute table for chars 0..255; values are CT_* bit flags.
    private byte[] characterType = new byte[256];
    private const byte CT_WHITESPACE = 1;
    private const byte CT_DIGIT = 2;
    private const byte CT_ALPHA = 4;
    private const byte CT_QUOTE = 8;
    private const byte CT_COMMENT = 16;
    /** The line number of the last token Read (1-based). */
    public int LineNumber{ get; private set; }
    /**
     * After a call to the <code>nextToken</code> method, this field
     * contains the type of the token just Read. For a single character
     * token, its value is the single character, converted to an integer.
     * For a quoted string token, its value is the quote character.
     * Otherwise, its value is one of: TT_WORD, TT_NUMBER, TT_EOL (only when
     * <code>eolIsSignificant(true)</code> was called), or TT_EOF.
     * The initial value of this field is TT_NOTHING (-4).
     */
    public int ttype = TT_NOTHING;
    /** A constant indicating that the end of the stream has been Read. */
    public const int TT_EOF = -1;
    /** A constant indicating that the end of the line has been Read. */
    public const int TT_EOL = '\n';
    /** A constant indicating that a number token has been Read. */
    public const int TT_NUMBER = -2;
    /** A constant indicating that a word token has been Read. */
    public const int TT_WORD = -3;
    /* A constant indicating that no token has been Read, used for
     * initializing ttype. FIXME This could be made public and
     * made available as the part of the API in a future release.
     */
    private const int TT_NOTHING = -4;
    /**
     * If the current token is a word token (ttype == TT_WORD), this contains
     * the characters of the word; if it is a quoted string token (ttype is a
     * quote character), it contains the body of the string. Null otherwise.
     */
    public string StringValue{ get; private set; }
    /**
     * If the current token is a number (ttype == TT_NUMBER), this contains
     * its value. The initial value of this field is 0.0.
     */
    public double NumberValue{ get; private set; }
/** Private constructor that initializes everything except the streams.
 * NOTE: call order matters — WhitespaceChars/CommentChar/QuoteChar overwrite
 * attributes set earlier, while WordChars/ParseNumbers OR theirs in.
 */
private StreamTokenizer()
{
    WordChars( 'a', 'z' );
    WordChars( 'A', 'Z' );
    WordChars( 128 + 32, 255 );     // high-bit characters are word constituents
    WhitespaceChars( 0, ' ' );      // all control chars plus space
    CommentChar( '/' );
    QuoteChar( '"' );
    QuoteChar( '\'' );
    ParseNumbers();
    LineNumber = 1;
}
/**
 * Create a tokenizer that parses the given character stream.
 *
 * @param r a Reader object providing the input stream; must not be null.
 * @exception ArgumentNullException if <code>r</code> is null.
 */
public StreamTokenizer( TextReader r ) : this()
{
    if( r == null )
    {
        // Include the parameter name so callers can tell which argument was null
        // (was a bare ArgumentNullException with no context).
        throw new ArgumentNullException( "r" );
    }
    reader = r;
}
/**
 * Resets this tokenizer's syntax table so that every character is
 * "ordinary" (no attribute bits set). See <code>OrdinaryChar</code>.
 */
public void ResetSyntax()
{
    for( int i = 0; i < characterType.Length; i++ )
    {
        characterType[ i ] = 0;
    }
}
/**
 * Marks every character in the inclusive range [low, hi] as a word
 * constituent (adds the alpha attribute without clearing others).
 * Out-of-table bounds are clamped to [0, 255].
 *
 * @param low the low end of the range.
 * @param hi the high end of the range.
 */
public void WordChars( int low, int hi )
{
    int start = Math.Max( low, 0 );
    int end = Math.Min( hi, characterType.Length - 1 );
    for( int i = start; i <= end; i++ )
    {
        characterType[ i ] |= CT_ALPHA;
    }
}
/**
 * Marks every character in the inclusive range [low, hi] as white space.
 * White space serves only to separate tokens. Unlike <code>WordChars</code>,
 * this REPLACES any previously set attributes for the range.
 * Out-of-table bounds are clamped to [0, 255].
 *
 * @param low the low end of the range.
 * @param hi the high end of the range.
 */
public void WhitespaceChars( int low, int hi )
{
    int start = Math.Max( low, 0 );
    int end = Math.Min( hi, characterType.Length - 1 );
    for( int i = start; i <= end; i++ )
    {
        characterType[ i ] = CT_WHITESPACE;
    }
}
/**
 * Makes every character in the inclusive range [low, hi] "ordinary"
 * (clears all attribute bits). See <code>OrdinaryChar</code> for what
 * "ordinary" means. Out-of-table bounds are clamped to [0, 255].
 *
 * @param low the low end of the range.
 * @param hi the high end of the range.
 */
public void OrdinaryChars( int low, int hi )
{
    int start = Math.Max( low, 0 );
    int end = Math.Min( hi, characterType.Length - 1 );
    for( int i = start; i <= end; i++ )
    {
        characterType[ i ] = 0;
    }
}
/**
 * Makes the given character "ordinary": removes any special significance
 * it has as comment, word, quote, whitespace or number character, so the
 * parser returns it as a single-character token with <code>ttype</code>
 * set to the character value.
 *
 * <p>Making a line terminator ordinary may interfere with line counting.
 *
 * @param ch the character; silently ignored if outside the table.
 */
public void OrdinaryChar( int ch )
{
    if( ch < 0 || ch >= characterType.Length )
        return;
    characterType[ ch ] = 0;
}
/**
 * Marks the given character as the start of a single-line comment; all
 * characters from it to the end of the line are skipped by the tokenizer.
 * Replaces any other attributes the character had.
 *
 * @param ch the character; silently ignored if outside the table.
 */
public void CommentChar( int ch )
{
    if( ch < 0 || ch >= characterType.Length )
        return;
    characterType[ ch ] = CT_COMMENT;
}
/**
 * Marks the given character as a string delimiter. When the tokenizer
 * meets it, <code>ttype</code> is set to the quote character and
 * <code>StringValue</code> receives the string body: all characters after
 * the opening quote up to (but not including) the next occurrence of the
 * same quote, a line terminator, or end of file. Escape sequences such as
 * <code>"\n"</code> and <code>"\t"</code> are decoded while parsing.
 * Replaces any other attributes the character had.
 *
 * @param ch the character; silently ignored if outside the table.
 */
public void QuoteChar( int ch )
{
    if( ch < 0 || ch >= characterType.Length )
        return;
    characterType[ ch ] = CT_QUOTE;
}
/**
 * Enables number parsing: gives each of the twelve characters
 * <code>0 1 2 3 4 5 6 7 8 9 . -</code> the "numeric" attribute
 * (without clearing other attributes). When a token looks like a
 * double-precision number, <code>ttype</code> is set to
 * <code>TT_NUMBER</code> and the value is stored in
 * <code>NumberValue</code>.
 */
public void ParseNumbers()
{
    foreach( char digit in "0123456789.-" )
    {
        characterType[ digit ] |= CT_DIGIT;
    }
}
/**
 * Controls whether ends of line are reported as tokens. When true,
 * <code>NextToken</code> returns <code>TT_EOL</code> for each line
 * terminator ('\r', '\n', or the pair "\r\n" as a single token).
 * When false, line terminators count as plain white space.
 *
 * @param flag true to report end-of-line tokens; false to treat
 *             line terminators as token separators only.
 */
public void EolIsSignificant( bool flag )
{
    this.eolIsSignificantP = flag;
}
/**
 * Enables or disables recognition of C-style comments. When true, all
 * text between <code>/*</code> and <code>*/</code> is discarded by the
 * tokenizer; when false, such sequences get no special treatment.
 */
public bool SlashStarComments
{
    get
    {
        return this.slashStarCommentsP;
    }
    set
    {
        this.slashStarCommentsP = value;
    }
}
/**
 * Enables or disables recognition of C++-style comments. When true, two
 * consecutive '/' characters start a comment that runs to the end of the
 * line; when false, they get no special treatment.
 */
public bool SlashSlashComments
{
    get
    {
        return this.slashSlashCommentsP;
    }
    set
    {
        this.slashSlashCommentsP = value;
    }
}
/**
 * When set to true, word tokens (ttype == TT_WORD) have their
 * <code>StringValue</code> lowercased before being returned; when false,
 * the value is left untouched. Write-only.
 */
public bool LowerCaseMode
{
    set
    {
        this.forceLower = value;
    }
}
/** Reads the next character from the underlying reader, or -1 at end of
 * stream. Throws InvalidOperationException if no reader was supplied. */
private int Read()
{
    if( reader == null )
    {
        throw new InvalidOperationException();
    }
    return reader.Read();
}
/**
 * Parses the next token from the input stream of this tokenizer.
 * The type of the next token is returned in the <code>ttype</code>
 * field. Additional information about the token may be in the
 * <code>nval</code> field or the <code>sval</code> field of this
 * tokenizer.
 * <p>
 * Typical clients of this
 * class first set up the syntax tables and then sit in a loop
 * calling nextToken to parse successive tokens until TT_EOF
 * is returned.
 *
 * @return the value of the <code>ttype</code> field.
 */
public int NextToken()
{
    // A pushed-back token is re-delivered verbatim without consuming input.
    if( pushedBack )
    {
        pushedBack = false;
        return ttype;
    }
    byte[] ct = characterType;
    StringValue = null;
    int c = peekc;
    if( c < 0 )
        c = NEED_CHAR;
    if( c == SKIP_LF )
    {
        // A '\r' was consumed last call; swallow a following '\n' so that
        // "\r\n" counts as a single line terminator.
        c = Read();
        if( c < 0 )
            return ttype = TT_EOF;
        if( c == '\n' )
            c = NEED_CHAR;
    }
    if( c == NEED_CHAR )
    {
        c = Read();
        if( c < 0 )
            return ttype = TT_EOF;
    }
    ttype = c; /* Just to be safe */
    /* Set peekc so that the next invocation of nextToken will Read
     * another character unless peekc is reset in this invocation
     */
    peekc = NEED_CHAR;
    // Characters outside the 256-entry syntax table are treated as alphabetic.
    int ctype = c < 256 ? ct[ c ] : CT_ALPHA;
    // Skip whitespace, counting lines; optionally report EOL as a token.
    while( (ctype & CT_WHITESPACE) != 0 )
    {
        if( c == '\r' )
        {
            LineNumber++;
            if( eolIsSignificantP )
            {
                peekc = SKIP_LF;
                return ttype = TT_EOL;
            }
            c = Read();
            if( c == '\n' )
                c = Read();
        }
        else
        {
            if( c == '\n' )
            {
                LineNumber++;
                if( eolIsSignificantP )
                {
                    return ttype = TT_EOL;
                }
            }
            c = Read();
        }
        if( c < 0 )
            return ttype = TT_EOF;
        ctype = c < 256 ? ct[ c ] : CT_ALPHA;
    }
    // Numeric token: optional leading '-', digits, at most one '.'.
    if( (ctype & CT_DIGIT) != 0 )
    {
        bool neg = false;
        if( c == '-' )
        {
            c = Read();
            // A lone '-' not followed by a digit or '.' is an ordinary token.
            if( c != '.' && (c < '0' || c > '9') )
            {
                peekc = c;
                return ttype = '-';
            }
            neg = true;
        }
        double v = 0;
        int decexp = 0;
        int seendot = 0;
        while( true )
        {
            if( c == '.' && seendot == 0 )
                seendot = 1;
            else if( '0' <= c && c <= '9' )
            {
                v = v * 10 + (c - '0');
                decexp += seendot;
            }
            else
                break;
            c = Read();
        }
        peekc = c;
        if( decexp != 0 )
        {
            double denom = 10;
            decexp--;
            while( decexp > 0 )
            {
                denom *= 10;
                decexp--;
            }
            /* Do one division of a likely-to-be-more-accurate number */
            v = v / denom;
        }
        NumberValue = neg ? -v : v;
        return ttype = TT_NUMBER;
    }
    // Word token: run of alphabetic/digit characters.
    if( (ctype & CT_ALPHA) != 0 )
    {
        int i = 0;
        do
        {
            // Grow the scratch buffer geometrically as needed.
            if( i >= buf.Length )
            {
                char[] tempBuf = new char[buf.Length * 2];
                Array.Copy( buf, tempBuf, buf.Length );
                buf = tempBuf;
            }
            buf[ i++ ] = (char) c;
            c = Read();
            ctype = c < 0 ? CT_WHITESPACE : c < 256 ? ct[ c ] : CT_ALPHA;
        } while( (ctype & (CT_ALPHA | CT_DIGIT)) != 0 );
        peekc = c;
        // Build the word from exactly the first i characters of the buffer.
        // (The previous ArraySegment-based code used .Array on the
        // NET_35/NET_40 branch, which returns the ENTIRE backing array and
        // therefore produced strings padded with stale buffer contents;
        // new string(buf, 0, i) is correct on every framework and avoids
        // the extra copy the ToArray() branch made.)
        StringValue = new string( buf, 0, i );
        if( forceLower )
            StringValue = StringValue.ToLower();
        return ttype = TT_WORD;
    }
    // Quoted-string token: terminated by the opening quote char, EOL, or EOF.
    if( (ctype & CT_QUOTE) != 0 )
    {
        ttype = c;
        int i = 0;
        /* Invariants (because \Octal needs a lookahead):
         * (i) c contains char value
         * (ii) d contains the lookahead
         */
        int d = Read();
        while( d >= 0 && d != ttype && d != '\n' && d != '\r' )
        {
            if( d == '\\' )
            {
                c = Read();
                int first = c; /* To allow \377, but not \477 */
                if( c >= '0' && c <= '7' )
                {
                    c = c - '0';
                    int c2 = Read();
                    if( '0' <= c2 && c2 <= '7' )
                    {
                        c = (c << 3) + (c2 - '0');
                        c2 = Read();
                        if( '0' <= c2 && c2 <= '7' && first <= '3' )
                        {
                            c = (c << 3) + (c2 - '0');
                            d = Read();
                        }
                        else
                            d = c2;
                    }
                    else
                        d = c2;
                }
                else
                {
                    // Standard single-character escapes; anything else is
                    // passed through unchanged.
                    switch( c )
                    {
                        case 'a':
                            c = 0x7;
                            break;
                        case 'b':
                            c = '\b';
                            break;
                        case 'f':
                            c = 0xC;
                            break;
                        case 'n':
                            c = '\n';
                            break;
                        case 'r':
                            c = '\r';
                            break;
                        case 't':
                            c = '\t';
                            break;
                        case 'v':
                            c = 0xB;
                            break;
                    }
                    d = Read();
                }
            }
            else
            {
                c = d;
                d = Read();
            }
            if( i >= buf.Length )
            {
                char[] tempBuf = new char[buf.Length * 2];
                Array.Copy( buf, tempBuf, buf.Length );
                buf = tempBuf;
            }
            buf[ i++ ] = (char) c;
        }
        /* If we broke out of the loop because we found a matching quote
         * character then arrange to Read a new character next time
         * around; otherwise, save the character.
         */
        peekc = (d == ttype) ? NEED_CHAR : d;
        // Same fix as the word-token path: take exactly i characters.
        StringValue = new string( buf, 0, i );
        return ttype;
    }
    // Possible comment start, honoring the SlashSlash/SlashStar flags.
    if( c == '/' && (slashSlashCommentsP || slashStarCommentsP) )
    {
        c = Read();
        if( c == '*' && slashStarCommentsP )
        {
            // Consume a C-style block comment, then recurse for the next token.
            int prevc = 0;
            while( (c = Read()) != '/' || prevc != '*' )
            {
                if( c == '\r' )
                {
                    LineNumber++;
                    c = Read();
                    if( c == '\n' )
                    {
                        c = Read();
                    }
                }
                else
                {
                    if( c == '\n' )
                    {
                        LineNumber++;
                        c = Read();
                    }
                }
                if( c < 0 )
                    return ttype = TT_EOF;
                prevc = c;
            }
            return NextToken();
        }
        else if( c == '/' && slashSlashCommentsP )
        {
            // Consume a C++-style line comment.
            while( (c = Read()) != '\n' && c != '\r' && c >= 0 ) ;
            peekc = c;
            return NextToken();
        }
        else
        {
            /* Now see if it is still a single line comment */
            if( (ct[ '/' ] & CT_COMMENT) != 0 )
            {
                while( (c = Read()) != '\n' && c != '\r' && c >= 0 ) ;
                peekc = c;
                return NextToken();
            }
            else
            {
                peekc = c;
                return ttype = '/';
            }
        }
    }
    // Single-character comment-start from the syntax table: skip to EOL.
    if( (ctype & CT_COMMENT) != 0 )
    {
        while( (c = Read()) != '\n' && c != '\r' && c >= 0 ) ;
        peekc = c;
        return NextToken();
    }
    // Ordinary character: its own value is the token type.
    return ttype = c;
}
/**
 * Causes the next call to <code>NextToken</code> to return the current
 * value of the <code>ttype</code> field again, leaving the
 * <code>nval</code> and <code>sval</code> fields untouched.
 * Does nothing if no token has been read yet.
 */
public void PushBack()
{
    if( ttype == TT_NOTHING )
    {
        return; /* No-op if NextToken() has never been called */
    }
    pushedBack = true;
}
/**
 * Returns the string representation of the current stream token together
 * with the line number it occurs on.
 *
 * <p>The precise string returned is unspecified, although the following
 * example can be considered typical:
 *
 * <blockquote><pre>Token['a'], line 10</pre></blockquote>
 *
 * @return a string representation of the token
 */
public override string ToString()
{
    string text;
    if( ttype == TT_EOF )
    {
        text = "EOF";
    }
    else if( ttype == TT_EOL )
    {
        text = "EOL";
    }
    else if( ttype == TT_WORD )
    {
        text = StringValue;
    }
    else if( ttype == TT_NUMBER )
    {
        text = "n=" + NumberValue;
    }
    else if( ttype == TT_NOTHING )
    {
        text = "NOTHING";
    }
    else if( ttype < 256 && (characterType[ ttype ] & CT_QUOTE) != 0 )
    {
        // ttype holds the opening quote character of a quoted string;
        // report the string's contents.
        text = StringValue;
    }
    else
    {
        // An ordinary character token: render it wrapped in single quotes.
        // (ttype cannot be negative here; those values are the named
        // constants handled above.)
        text = "'" + (char) ttype + "'";
    }
    return "Token[" + text + "], line " + LineNumber;
}
/** Resets the syntax tables and enumerates token types until TT_EOF. */
public IEnumerator<int> GetEnumerator()
{
    ResetSyntax();
    for( int token = NextToken(); token != TT_EOF; token = NextToken() )
    {
        yield return token;
    }
}
// Non-generic enumeration support: delegates to the typed enumerator above.
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
    return GetEnumerator();
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.Azure.AcceptanceTestsAzureSpecials
{
using Fixtures.Azure;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// SkipUrlEncodingOperations operations.
/// </summary>
internal partial class SkipUrlEncodingOperations : IServiceOperations<AutoRestAzureSpecialParametersTestClient>, ISkipUrlEncodingOperations
{
/// <summary>
/// Initializes a new instance of the SkipUrlEncodingOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal SkipUrlEncodingOperations(AutoRestAzureSpecialParametersTestClient client)
{
    Client = client ?? throw new System.ArgumentNullException("client");
}
/// <summary>
/// Gets a reference to the AutoRestAzureSpecialParametersTestClient
/// that issues the HTTP requests for this operation group.
/// </summary>
public AutoRestAzureSpecialParametersTestClient Client { get; private set; }
/// <summary>
/// Get method with unencoded path parameter with value 'path1/path2/path3'
/// </summary>
/// <param name='unencodedPathParam'>
/// Unencoded path parameter with value 'path1/path2/path3'
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetMethodPathValidWithHttpMessagesAsync(string unencodedPathParam, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (unencodedPathParam == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "unencodedPathParam");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("unencodedPathParam", unencodedPathParam);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetMethodPathValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/method/path/valid/{unencodedPathParam}").ToString();
    // The parameter is substituted verbatim (no Uri escaping): this
    // operation deliberately skips URL encoding, so a value such as
    // 'path1/path2/path3' remains a multi-segment path.
    _url = _url.Replace("{unencodedPathParam}", unencodedPathParam);
    List<string> _queryParameters = new List<string>();
    // NOTE(review): nothing is ever added to _queryParameters in this
    // operation; the guard below is generated boilerplate kept for
    // uniformity across operations.
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers replace any default set above with the same key.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Any non-200 status is surfaced as an ErrorException carrying the
        // request/response pair; the error body is deserialized on a
        // best-effort basis (JSON failures are ignored).
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get method with unencoded path parameter with value 'path1/path2/path3'
/// </summary>
/// <param name='unencodedPathParam'>
/// Unencoded path parameter with value 'path1/path2/path3'
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetPathPathValidWithHttpMessagesAsync(string unencodedPathParam, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (unencodedPathParam == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "unencodedPathParam");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("unencodedPathParam", unencodedPathParam);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetPathPathValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/path/path/valid/{unencodedPathParam}").ToString();
    // Substituted verbatim — this operation deliberately skips URL encoding.
    _url = _url.Replace("{unencodedPathParam}", unencodedPathParam);
    List<string> _queryParameters = new List<string>();
    // NOTE(review): _queryParameters is never populated here; the guard is
    // generated boilerplate.
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers replace any default with the same key.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw ErrorException with request/response attached; the
        // error body is deserialized best-effort.
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get method with unencoded path parameter with value 'path1/path2/path3'
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetSwaggerPathValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // The path parameter is a swagger-defined constant, so there is no
    // caller-supplied value (and no null validation).
    string unencodedPathParam = "path1/path2/path3";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("unencodedPathParam", unencodedPathParam);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetSwaggerPathValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/swagger/path/valid/{unencodedPathParam}").ToString();
    // Substituted verbatim — this operation deliberately skips URL encoding.
    _url = _url.Replace("{unencodedPathParam}", unencodedPathParam);
    List<string> _queryParameters = new List<string>();
    // NOTE(review): _queryParameters is never populated here; the guard is
    // generated boilerplate.
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers replace any default with the same key.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw ErrorException with request/response attached; the
        // error body is deserialized best-effort.
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get method with unencoded query parameter with value
/// 'value1&amp;q2=value2&amp;q3=value3'
/// </summary>
/// <param name='q1'>
/// Unencoded query parameter with value 'value1&amp;q2=value2&amp;q3=value3'
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetMethodQueryValidWithHttpMessagesAsync(string q1, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    if (q1 == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "q1");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("q1", q1);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetMethodQueryValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/method/query/valid").ToString();
    List<string> _queryParameters = new List<string>();
    if (q1 != null)
    {
        // q1 is appended without URL encoding, so a value like
        // 'value1&q2=value2&q3=value3' is sent as three query parameters.
        _queryParameters.Add(string.Format("q1={0}", q1));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers replace any default with the same key.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw ErrorException with request/response attached; the
        // error body is deserialized best-effort.
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get method with unencoded query parameter with value null
/// </summary>
/// <param name='q1'>
/// Unencoded query parameter with value null
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetMethodQueryNullWithHttpMessagesAsync(string q1 = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // q1 is optional here: a null value simply omits the query parameter.
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("q1", q1);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetMethodQueryNull", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/method/query/null").ToString();
    List<string> _queryParameters = new List<string>();
    if (q1 != null)
    {
        // Appended without URL encoding (skip-url-encoding operation).
        _queryParameters.Add(string.Format("q1={0}", q1));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers replace any default with the same key.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw ErrorException with request/response attached; the
        // error body is deserialized best-effort.
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get method with unencoded query parameter with value
/// 'value1&q2=value2&q3=value3'
/// </summary>
/// <param name='q1'>
/// Unencoded query parameter with value 'value1&q2=value2&q3=value3'
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetPathQueryValidWithHttpMessagesAsync(string q1, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (q1 == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "q1");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("q1", q1);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetPathQueryValid", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/path/query/valid").ToString();
List<string> _queryParameters = new List<string>();
if (q1 != null)
{
// NOTE: q1 is appended verbatim (no Uri.EscapeDataString) — this operation
// deliberately exercises the skipUrlEncoding behavior named in the URL path.
_queryParameters.Add(string.Format("q1={0}", q1));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
// Custom headers override any defaults set above with the same key.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Non-200: try to deserialize an Error body, attach request/response wrappers,
// dispose the transport objects (they are not returned to the caller) and throw.
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// Success path: ownership of _httpRequest/_httpResponse transfers to the
// returned AzureOperationResponse, so they are intentionally not disposed here.
var _result = new AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get method with unencoded query parameter with value
/// 'value1&q2=value2&q3=value3'
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> GetSwaggerQueryValidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Unlike GetPathQueryValid, the query value here is a swagger-defined constant.
string q1 = "value1&q2=value2&q3=value3";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("q1", q1);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetSwaggerQueryValid", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/skipUrlEncoding/swagger/query/valid").ToString();
List<string> _queryParameters = new List<string>();
if (q1 != null)
{
// q1 is appended verbatim (no URL encoding) — skipUrlEncoding scenario.
_queryParameters.Add(string.Format("q1={0}", q1));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
// Custom headers override any defaults set above with the same key.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Non-200: try to deserialize an Error body, attach request/response wrappers,
// dispose the transport objects (they are not returned to the caller) and throw.
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// Success path: ownership of _httpRequest/_httpResponse transfers to the
// returned AzureOperationResponse, so they are intentionally not disposed here.
var _result = new AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Shouldly;
using Xunit;
namespace AutoMapper.UnitTests
{
// Verifies that a single resolver class implementing IValueResolver<,,> for two
// different source types dispatches to the correct Resolve overload for each map.
public class When_implementing_multiple_IValueResolver_interfaces : AutoMapperSpecBase
{
public class Source1 { }
public class Source2 { }
public class Destination
{
public string Value { get; set; }
}
// One resolver type, two IValueResolver implementations distinguished by source type.
public class MyTestResolver : IValueResolver<Source1, Destination, string>, IValueResolver<Source2, Destination, string>
{
public string Resolve(Source1 source, Destination destination, string destMember, ResolutionContext context)
{
return "source1";
}
public string Resolve(Source2 source, Destination destination, string destMember, ResolutionContext context)
{
return "source2";
}
}
protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source1, Destination>().ForMember(dest => dest.Value, opt => opt.MapFrom<MyTestResolver>());
cfg.CreateMap<Source2, Destination>().ForMember(dest => dest.Value, opt => opt.MapFrom<MyTestResolver>());
});
[Fact]
public void Should_map_ok()
{
Mapper.Map<Destination>(new Source1()).Value.ShouldBe("source1");
Mapper.Map<Destination>(new Source2()).Value.ShouldBe("source2");
}
}
// Verifies that a resolver registered through an interface that merely derives from
// IMemberValueResolver<,,,> (rather than implementing it directly) still resolves.
public class When_using_IMemberResolver_derived_interface : AutoMapperSpecBase
{
Destination _destination;
class Source
{
public string SValue { get; set; }
}
class Destination
{
public string Value { get; set; }
}
// Intermediate interface — the point of the test.
interface IResolver : IMemberValueResolver<Source, Destination, string, string>
{
}
class Resolver : IResolver
{
public string Resolve(Source source, Destination destination, string sourceMember, string destMember, ResolutionContext context)
{
return "Resolved";
}
}
protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom(new Resolver(), s=>s.SValue));
});
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source());
}
[Fact]
public void Should_map_ok()
{
_destination.Value.ShouldBe("Resolved");
}
}
// Verifies ForMember (by member name string) on an open-generic map:
// IPager<T> -> ModelPager<T>, mapping the pager's own sequence into Items.
public class OpenGenericMapForMember : AutoMapperSpecBase
{
ModelPager<int> _destination;
int[] _items = Enumerable.Range(1, 10).ToArray();
public interface IPager<out TItem> : IEnumerable<TItem>
{
int CurrentPage { get; set; }
int PageCount { get; set; }
int PageSize { get; set; }
int TotalItems { get; set; }
}
public class ModelPager<TItem>
{
public int CurrentPage { get; set; }
public IEnumerable<TItem> Items { get; set; }
public int PageCount { get; set; }
public int PageSize { get; set; }
public int TotalItems { get; set; }
}
public class Pager<TItem> : IPager<TItem>
{
private readonly IEnumerable<TItem> _items;
public Pager(IEnumerable<TItem> items) :this(items, 0, 0, 0)
{
}
public Pager(IEnumerable<TItem> items,
int currentPage,
int pageSize,
int totalItems)
{
_items = items ?? Enumerable.Empty<TItem>();
CurrentPage = currentPage;
PageSize = pageSize;
TotalItems = totalItems;
}
public int CurrentPage { get; set; }
public int PageCount { get; set; }
public int PageSize { get; set; }
public int TotalItems { get; set; }
IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); }
public IEnumerator<TItem> GetEnumerator() { return _items.GetEnumerator(); }
}
protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
{
// Open-generic map: the member is named by string since T is unknown here.
cfg.CreateMap(typeof(IPager<>), typeof(ModelPager<>)).ForMember("Items", e => e.MapFrom(o => (IEnumerable)o));
});
protected override void Because_of()
{
_destination = Mapper.Map<ModelPager<int>>(new Pager<int>(_items));
}
[Fact]
public void Should_map_ok()
{
_destination.Items.SequenceEqual(_items).ShouldBeTrue();
}
}
// Verifies that a registered int -> int? type converter is applied during member
// mapping: default(int) becomes null instead of 0.
public class IntToNullableIntConverter : AutoMapperSpecBase
{
Destination _destination;
public class IntToNullableConverter : ITypeConverter<int, int?>
{
public int? Convert(int source, int? destination, ResolutionContext context)
{
// Treat the default int value as "no value".
if(source == default(int))
{
return null;
}
return source;
}
}
public class Source
{
public int Id { get; set; }
}
public class Destination
{
public int? Id { get; set; }
}
protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
{
cfg.CreateMap<int, int?>().ConvertUsing<IntToNullableConverter>();
cfg.CreateMap<Source, Destination>();
});
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source());
}
[Fact]
public void Should_use_the_converter()
{
_destination.Id.ShouldBeNull();
}
}
// Verifies that a NullReferenceException thrown inside a MapFrom expression chain
// targeting a value-type member yields the member's default (0) rather than failing.
public class When_throwing_NRE_from_MapFrom_value_types : AutoMapperSpecBase
{
ViewModel _viewModel;
public class Model
{
public List<SubModel> SubModels { get; set; }
}
public class SubModel
{
public List<SubSubModel> SubSubModels { get; set; }
}
public class SubSubModel
{
public int Id { get; set; }
}
public class ViewModel
{
public int SubModelId { get; set; }
}
protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
{
cfg.CreateMap<Model, ViewModel>()
.ForMember(x => x.SubModelId,
// FirstOrDefault() returns null for the empty list, so the chain would NRE.
opts => opts.MapFrom(src => src.SubModels.FirstOrDefault().SubSubModels.FirstOrDefault().Id));
});
protected override void Because_of()
{
var model = new Model
{
SubModels = new List<SubModel>()
};
_viewModel = Mapper.Map<ViewModel>(model);
}
[Fact]
public void Should_map_ok()
{
_viewModel.SubModelId.ShouldBe(0);
}
}
// Verifies that a NullReferenceException thrown inside a MapFrom expression
// targeting a reference-type member results in a null destination value.
public class When_throwing_NRE_from_MapFrom : AutoMapperSpecBase
{
class Source
{
}
class Destination
{
public string Value { get; set; }
}
protected override MapperConfiguration Configuration => new MapperConfiguration(cfg =>
{
// x is always null, so x.ToString() throws NRE during mapping.
string x = null;
cfg.CreateMap<Source, Destination>().ForMember(d=>d.Value, o=>o.MapFrom(s=>x.ToString()));
});
[Fact]
public void We_should_catch_it()
{
Mapper.Map<Destination>(new Source()).Value.ShouldBeNull();
}
}
// Verifies that MapFrom with a constant value works even when the source and
// destination members have incompatible types (int vs Guid).
public class When_using_value_with_mismatched_properties : AutoMapperSpecBase
{
Destination _destination;
static Guid _guid = Guid.NewGuid();
class Source
{
public int Value { get; set; }
}
class Destination
{
public Guid Value { get; set; }
}
protected override MapperConfiguration Configuration
{
get
{
return new MapperConfiguration(c =>
{
c.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom(src => _guid));
});
}
}
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source());
}
[Fact]
public void Should_map_ok()
{
_destination.Value.ShouldBe(_guid);
}
}
// Verifies that a class-based IValueResolver works when source and destination
// members have incompatible types (int vs Guid).
public class When_custom_resolving_mismatched_properties : AutoMapperSpecBase
{
Destination _destination;
static Guid _guid = Guid.NewGuid();
class Source
{
public int Value { get; set; }
}
class Destination
{
public Guid Value { get; set; }
}
protected override MapperConfiguration Configuration
{
get
{
return new MapperConfiguration(c =>
{
c.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom<Resolver>());
});
}
}
class Resolver : IValueResolver<Source, Destination, Guid>
{
public Guid Resolve(Source model, Destination d, Guid dest, ResolutionContext context)
{
return _guid;
}
}
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source());
}
[Fact]
public void Should_map_ok()
{
_destination.Value.ShouldBe(_guid);
}
}
// Verifies that an exception thrown from a MapFrom delegate surfaces as an
// AutoMapperMappingException wrapping the original exception.
public class When_resolve_throws : NonValidatingSpecBase
{
Exception _ex = new Exception();
class Source
{
}
class Destination
{
public int Value { get; set; }
}
protected override MapperConfiguration Configuration
{
get
{
return new MapperConfiguration(c =>
{
c.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom((s, d) => { Throw(); return 0; }));
});
}
}
private void Throw()
{
throw _ex;
}
[Fact]
public void Should_propagate_exception()
{
// The thrown instance must be preserved as InnerException.
new Action(()=>Mapper.Map<Destination>(new Source())).ShouldThrowException<AutoMapperMappingException>(e=>e.InnerException.ShouldBe(_ex));
}
}
// Verifies that a MapFrom value of a different type (InnerSource) is itself mapped
// through the configured InnerSource -> InnerDestination map.
public class When_mapping_different_types_with_explicit_value : AutoMapperSpecBase
{
Destination _destination;
class InnerSource
{
public int IntValue { get; set; }
}
class InnerDestination
{
public int IntValue { get; set; }
}
class Source
{
}
class Destination
{
public InnerDestination Value { get; set; }
}
protected override MapperConfiguration Configuration
{
get
{
return new MapperConfiguration(c =>
{
c.CreateMap<InnerSource, InnerDestination>();
c.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom(src => new InnerSource { IntValue = 15 }));
});
}
}
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source());
}
[Fact]
public void Should_work()
{
_destination.Value.IntValue.ShouldBe(15);
}
}
// Verifies that a member resolved from a source property of a different type
// (InnerSource) is mapped through the configured InnerSource -> InnerDestination map.
public class When_mapping_different_types_with_ResolveUsing : AutoMapperSpecBase
{
Destination _destination;
class InnerSource
{
public int IntValue { get; set; }
}
class InnerDestination
{
public int IntValue { get; set; }
}
class Source
{
public InnerSource ObjectValue { get; set; }
}
class Destination
{
public InnerDestination Value { get; set; }
}
protected override MapperConfiguration Configuration
{
get
{
return new MapperConfiguration(c =>
{
c.CreateMap<InnerSource, InnerDestination>();
c.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom(s => s.ObjectValue));
});
}
}
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source { ObjectValue = new InnerSource { IntValue = 15 } });
}
[Fact]
public void Should_work()
{
_destination.Value.IntValue.ShouldBe(15);
}
}
// Verifies that a constant object value mapped into a string member is converted
// via its ToString() implementation.
public class When_mapping_from_object_to_string_with_use_value : AutoMapperSpecBase
{
Destination _destination;
class Source
{
}
class Destination
{
public string Value { get; set; }
}
protected override MapperConfiguration Configuration
{
get
{
return new MapperConfiguration(c => c.CreateMap<Source, Destination>().ForMember(d => d.Value, o => o.MapFrom(src => new object())));
}
}
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source { });
}
[Fact]
public void Should_use_to_string()
{
// object.ToString() returns the type's full name.
_destination.Value.ShouldBe("System.Object");
}
}
// Verifies that an object-typed source member mapped into a string member is
// converted via its ToString() implementation.
public class When_mapping_from_object_to_string : AutoMapperSpecBase
{
Destination _destination;
class Source
{
public object ObjectValue { get; set; }
}
class Destination
{
public string Value { get; set; }
}
protected override MapperConfiguration Configuration { get; }
= new MapperConfiguration(c => c.CreateMap<Source, Destination>().ForMember(d=>d.Value, o=>o.MapFrom(s=>s.ObjectValue)));
protected override void Because_of()
{
_destination = Mapper.Map<Destination>(new Source { ObjectValue = new object() });
}
[Fact]
public void Should_use_to_string()
{
// object.ToString() returns the type's full name.
_destination.Value.ShouldBe("System.Object");
}
}
// Exercises the three ways to customize a member: generic resolver type,
// resolver instance, and MapFrom expression — alongside untouched convention mapping.
public class When_mapping_to_a_dto_member_with_custom_mapping : AutoMapperSpecBase
{
private ModelDto _result;
public class ModelObject
{
public int Value { get; set; }
// Deliberately not name-matched with ModelDto.Value2 — resolved by CustomResolver2.
public int Value2fff { get; set; }
public int Value3 { get; set; }
public int Value4 { get; set; }
public int Value5 { get; set; }
}
public class ModelDto
{
public int Value { get; set; }
public int Value2 { get; set; }
public int Value3 { get; set; }
public int Value4 { get; set; }
public int Value5 { get; set; }
}
public class CustomResolver : IValueResolver<ModelObject, ModelDto, int>
{
public int Resolve(ModelObject source, ModelDto d, int dest, ResolutionContext context)
{
return source.Value + 1;
}
}
public class CustomResolver2 : IValueResolver<ModelObject, ModelDto, int>
{
public int Resolve(ModelObject source, ModelDto d, int dest, ResolutionContext context)
{
return source.Value2fff + 2;
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<ModelObject, ModelDto>()
.ForMember(dto => dto.Value, opt => opt.MapFrom<CustomResolver>())
.ForMember(dto => dto.Value2, opt => opt.MapFrom(new CustomResolver2()))
.ForMember(dto => dto.Value5, opt => opt.MapFrom(src => src.Value5 + 5));
});
protected override void Because_of()
{
var model = new ModelObject {Value = 42, Value2fff = 42, Value3 = 42, Value4 = 42, Value5 = 42};
_result = Mapper.Map<ModelObject, ModelDto>(model);
}
[Fact]
public void Should_ignore_the_mapping_for_normal_members()
{
_result.Value3.ShouldBe(42);
}
[Fact]
public void Should_use_the_custom_generic_mapping_for_custom_dto_members()
{
_result.Value.ShouldBe(43);
}
[Fact]
public void Should_use_the_instance_based_mapping_for_custom_dto_members()
{
_result.Value2.ShouldBe(44);
}
[Fact]
public void Should_use_the_func_based_mapping_for_custom_dto_members()
{
_result.Value5.ShouldBe(47);
}
}
// Verifies MapFrom<TResolver, TSourceMember>(selector): the resolver receives the
// selected child object (Sub) rather than the root model.
public class When_using_a_custom_resolver_for_a_child_model_property_instead_of_the_model : AutoMapperSpecBase
{
private ModelDto _result;
public class ModelObject
{
public ModelSubObject Sub { get; set; }
}
public class ModelSubObject
{
public int SomeValue { get; set; }
}
public class ModelDto
{
public int SomeValue { get; set; }
}
public class CustomResolver : IMemberValueResolver<object, object, ModelSubObject, int>
{
public int Resolve(object s, object d, ModelSubObject source, int ignored, ResolutionContext context)
{
return source.SomeValue + 1;
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<ModelObject, ModelDto>()
.ForMember(dto => dto.SomeValue, opt => opt.MapFrom<CustomResolver, ModelSubObject>(m => m.Sub));
});
[Fact]
public void Should_use_the_specified_model_member_to_resolve_from()
{
var model = new ModelObject
{
Sub = new ModelSubObject
{
SomeValue = 46
}
};
_result = Mapper.Map<ModelObject, ModelDto>(model);
_result.SomeValue.ShouldBe(47);
}
}
// Verifies that a resolver bound to a different source member (SomeOtherValue)
// overrides the convention match between same-named members (SomeValue).
public class When_reseting_a_mapping_to_use_a_resolver_to_a_different_member : AutoMapperSpecBase
{
private Dest _result;
public class Source
{
public int SomeValue { get; set; }
public int SomeOtherValue { get; set; }
}
public class Dest
{
public int SomeValue { get; set; }
}
public class CustomResolver : IMemberValueResolver<object, object, int, int>
{
public int Resolve(object s, object d, int source, int dest, ResolutionContext context)
{
return source + 5;
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source, Dest>()
.ForMember(dto => dto.SomeValue,
opt => opt.MapFrom<CustomResolver, int>(m => m.SomeOtherValue));
});
protected override void Because_of()
{
var model = new Source
{
SomeValue = 36,
SomeOtherValue = 53
};
_result = Mapper.Map<Source, Dest>(model);
}
[Fact]
public void Should_override_the_existing_match_to_the_new_custom_resolved_member()
{
// 53 (SomeOtherValue) + 5, not 36 (the convention match).
_result.SomeValue.ShouldBe(58);
}
}
// Verifies that re-specifying the already-matched member via MapFrom keeps the
// same value (no-op override of a convention match).
public class When_reseting_a_mapping_from_a_property_to_a_method : AutoMapperSpecBase
{
private Dest _result;
public class Source
{
public int Type { get; set; }
}
public class Dest
{
public int Type { get; set; }
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source, Dest>()
.ForMember(dto => dto.Type, opt => opt.MapFrom(m => m.Type));
});
protected override void Because_of()
{
var model = new Source
{
Type = 5
};
_result = Mapper.Map<Source, Dest>(model);
}
[Fact]
public void Should_override_the_existing_match_to_the_new_custom_resolved_member()
{
_result.Type.ShouldBe(5);
}
}
// Verifies that a resolver instance built with a non-default constructor is used
// as-is (the parameterless constructor is not invoked by AutoMapper).
public class When_specifying_a_custom_constructor_and_member_resolver : AutoMapperSpecBase
{
private Source _source;
private Destination _dest;
public class Source
{
public int Value { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public class CustomResolver : IMemberValueResolver<object, object, int, int>
{
private readonly int _toAdd;
public CustomResolver(int toAdd)
{
_toAdd = toAdd;
}
public CustomResolver()
{
_toAdd = 10;
}
public int Resolve(object s, object d, int source, int dest, ResolutionContext context)
{
return source + _toAdd;
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source, Destination>()
.ForMember(s => s.Value,
opt => opt.MapFrom(new CustomResolver(15), src => src.Value));
});
protected override void Because_of()
{
_source = new Source
{
Value = 10
};
_dest = Mapper.Map<Source, Destination>(_source);
}
[Fact]
public void Should_use_the_custom_constructor()
{
// 10 (source) + 15 (constructor arg), not + 10 (default ctor).
_dest.Value.ShouldBe(25);
}
}
// Mirror of When_specifying_a_custom_constructor_and_member_resolver with the
// resolver instance and member selector supplied in the same MapFrom overload.
public class When_specifying_a_member_resolver_and_custom_constructor : AutoMapperSpecBase
{
private Source _source;
private Destination _dest;
public class Source
{
public int Value { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public class CustomResolver : IMemberValueResolver<object, object, int, int>
{
private readonly int _toAdd;
public CustomResolver(int toAdd)
{
_toAdd = toAdd;
}
public CustomResolver()
{
_toAdd = 10;
}
public int Resolve(object s, object d, int source, int dest, ResolutionContext context)
{
return source + _toAdd;
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source, Destination>()
.ForMember(s => s.Value,
opt => opt.MapFrom(new CustomResolver(15), s => s.Value)
);
});
protected override void Because_of()
{
_source = new Source
{
Value = 10
};
_dest = Mapper.Map<Source, Destination>(_source);
}
[Fact]
public void Should_use_the_custom_constructor()
{
// 10 (source) + 15 (constructor arg), not + 10 (default ctor).
_dest.Value.ShouldBe(25);
}
}
// Verifies that a ConvertUsing projection replaces the whole mapping and that it
// takes precedence over any ForMember configuration on the same map.
public class When_specifying_a_custom_translator
{
private Source _source;
private Destination _dest;
public class Source
{
public int Value { get; set; }
public int AnotherValue { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public When_specifying_a_custom_translator()
{
_source = new Source
{
Value = 10,
AnotherValue = 1000
};
}
[Fact]
public void Should_use_the_custom_translator()
{
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map<Source, Destination>(_source);
_dest.Value.ShouldBe(20);
}
[Fact]
public void Should_ignore_other_mapping_rules()
{
// ForMember is superseded by ConvertUsing, so AnotherValue (1000) is never used.
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ForMember(dest => dest.Value, opt => opt.MapFrom(src => src.AnotherValue))
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map<Source, Destination>(_source);
_dest.Value.ShouldBe(20);
}
}
// Projection-flavored duplicate of When_specifying_a_custom_translator:
// ConvertUsing with an expression replaces the mapping and wins over ForMember.
public class When_specifying_a_custom_translator_using_projection
{
private Source _source;
private Destination _dest;
public class Source
{
public int Value { get; set; }
public int AnotherValue { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public When_specifying_a_custom_translator_using_projection()
{
_source = new Source
{
Value = 10,
AnotherValue = 1000
};
}
[Fact]
public void Should_use_the_custom_translator()
{
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map<Source, Destination>(_source);
_dest.Value.ShouldBe(20);
}
[Fact]
public void Should_ignore_other_mapping_rules()
{
// ForMember is superseded by ConvertUsing, so AnotherValue (1000) is never used.
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ForMember(dest => dest.Value, opt => opt.MapFrom(src => src.AnotherValue))
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map<Source, Destination>(_source);
_dest.Value.ShouldBe(20);
}
}
// Verifies ConvertUsing when an existing destination instance is passed to Map:
// the converter's return value is used (the pre-populated destination is replaced).
public class When_specifying_a_custom_translator_and_passing_in_the_destination_object
{
private Source _source;
private Destination _dest;
public class Source
{
public int Value { get; set; }
public int AnotherValue { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public When_specifying_a_custom_translator_and_passing_in_the_destination_object()
{
_source = new Source
{
Value = 10,
AnotherValue = 1000
};
_dest = new Destination
{
Value = 2
};
}
[Fact]
public void Should_resolve_to_the_destination_object_from_the_custom_translator()
{
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map(_source, _dest);
_dest.Value.ShouldBe(20);
}
[Fact]
public void Should_ignore_other_mapping_rules()
{
// ForMember is superseded by ConvertUsing, so AnotherValue (1000) is never used.
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ForMember(dest => dest.Value, opt => opt.MapFrom(src => src.AnotherValue))
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map(_source, _dest);
_dest.Value.ShouldBe(20);
}
}
// Verifies the generic ConvertUsing<TConverter>() overload with an
// ITypeConverter implementation, and that ConvertUsing still wins over ForMember.
public class When_specifying_a_custom_translator_using_generics
{
private Source _source;
private Destination _dest;
public class Source
{
public int Value { get; set; }
public int AnotherValue { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public When_specifying_a_custom_translator_using_generics()
{
_source = new Source
{
Value = 10,
AnotherValue = 1000
};
}
public class Converter : ITypeConverter<Source, Destination>
{
public Destination Convert(Source source, Destination destination, ResolutionContext context)
{
return new Destination { Value = source.Value + 10 };
}
}
[Fact]
public void Should_use_the_custom_translator()
{
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ConvertUsing<Converter>());
_dest = config.CreateMapper().Map<Source, Destination>(_source);
_dest.Value.ShouldBe(20);
}
[Fact]
public void Should_ignore_other_mapping_rules()
{
// ForMember is superseded by ConvertUsing, so AnotherValue (1000) is never used.
var config = new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
.ForMember(dest => dest.Value, opt => opt.MapFrom(src => src.AnotherValue))
.ConvertUsing(s => new Destination { Value = s.Value + 10 }));
_dest = config.CreateMapper().Map<Source, Destination>(_source);
_dest.Value.ShouldBe(20);
}
}
// Verifies that ConstructServicesUsing is consulted when instantiating a type
// converter registered via the generic ConvertUsing<T>() overload.
public class When_specifying_a_custom_constructor_function_for_custom_converters : AutoMapperSpecBase
{
private Destination _result;
public class Source
{
public int Value { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public class CustomConverter : ITypeConverter<Source, Destination>
{
private readonly int _value;
public CustomConverter()
: this(5)
{
}
public CustomConverter(int value)
{
_value = value;
}
public Destination Convert(Source source, Destination destination, ResolutionContext context)
{
return new Destination { Value = source.Value + _value };
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
// Service locator supplies the converter with 10, not the default 5.
cfg.ConstructServicesUsing(t => new CustomConverter(10));
cfg.CreateMap<Source, Destination>()
.ConvertUsing<CustomConverter>();
});
protected override void Because_of()
{
_result = Mapper.Map<Source, Destination>(new Source { Value = 5 });
}
[Fact]
public void Should_use_the_custom_constructor_function()
{
// 5 (source) + 10 (service-located converter), not + 5 (default ctor).
_result.Value.ShouldBe(15);
}
}
// Verifies that a ConvertUsing type conversion with name-mismatched members
// (Value1 -> Value2) still passes AssertConfigurationIsValid: a whole-type
// converter suppresses member-level validation for that map.
public class When_specifying_a_custom_translator_with_mismatched_properties : AutoMapperSpecBase
{
public class Source
{
public int Value1 { get; set; }
public int AnotherValue { get; set; }
}
public class Destination
{
public int Value2 { get; set; }
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
cfg.CreateMap<Source, Destination>()
.ConvertUsing(s => new Destination {Value2 = s.Value1 + 10});
});
[Fact]
public void Should_pass_all_configuration_checks()
{
// Idiomatic replacement for the manual try/catch + ShouldBeNull pattern:
// Should.NotThrow fails with the captured exception's details if validation throws.
Should.NotThrow(() => Configuration.AssertConfigurationIsValid());
}
}
// Verifies that ConstructServicesUsing is consulted when instantiating a
// member value resolver registered by type.
public class When_configuring_a_global_constructor_function_for_resolvers : AutoMapperSpecBase
{
private Destination _result;
public class Source
{
public int Value { get; set; }
}
public class Destination
{
public int Value { get; set; }
}
public class CustomValueResolver : IMemberValueResolver<object, object, int, int>
{
private readonly int _toAdd;
public CustomValueResolver() { _toAdd = 11; }
public CustomValueResolver(int toAdd)
{
_toAdd = toAdd;
}
public int Resolve(object s, object d, int source, int dest, ResolutionContext context)
{
return source + _toAdd;
}
}
protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
{
// Service locator supplies the resolver with 5, not the default 11.
cfg.ConstructServicesUsing(type => new CustomValueResolver(5));
cfg.CreateMap<Source, Destination>()
.ForMember(d => d.Value, opt => opt.MapFrom<CustomValueResolver, int>(src => src.Value));
});
protected override void Because_of()
{
_result = Mapper.Map<Source, Destination>(new Source { Value = 5 });
}
[Fact]
public void Should_use_the_specified_constructor()
{
// 5 (source) + 5 (service-located resolver), not + 11 (default ctor).
_result.Value.ShouldBe(10);
}
}
public class When_custom_resolver_requests_property_to_be_ignored : AutoMapperSpecBase
{
    private Destination _destination = new Destination() { Value = 55 };
    public class Source
    {
        public int Value { get; set; }
    }
    public class Destination
    {
        public int Value { get; set; }
    }
    // Echoes the existing destination value back, effectively leaving the
    // member untouched when mapping onto an existing instance.
    public class CustomValueResolver : IMemberValueResolver<object, object, int, int>
    {
        public int Resolve(object s, object d, int source, int dest, ResolutionContext context) => dest;
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        cfg.CreateMap<Source, Destination>()
            .ForMember(d => d.Value, opt => opt.MapFrom<CustomValueResolver, int>(src => src.Value));
    });
    protected override void Because_of() => _destination = Mapper.Map(new Source { Value = 5 }, _destination);
    [Fact]
    public void Should_not_overwrite_destination_value()
    {
        _destination.Value.ShouldBe(55);
    }
}
public class When_using_inheritance_with_value_resoluvers : AutoMapperSpecBase
{
    public class SourceDto
    {
        public int Id { get; set; }
        public string NumberValue { get; set; }
    }
    public class SourceChildDto : SourceDto
    {
        public string ChildField { get; set; }
    }
    public class DestinationDto
    {
        public int Ident { get; set; }
        public int Number { get; set; }
    }
    public class DestinationChildDto : DestinationDto
    {
        public string ChildField { get; set; }
    }
    // Parses the string pulled from NumberValue into the int destination member.
    public class CustomResolver : IMemberValueResolver<SourceDto, object, string, int>
    {
        public int Resolve(SourceDto src, object dest, string source, int member, ResolutionContext context)
        {
            return int.Parse(source);
        }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        cfg.CreateMap<SourceDto, DestinationDto>()
            .ForMember(dest => dest.Ident, opt => opt.MapFrom(x => x.Id))
            .ForMember(dest => dest.Number, opt => opt.MapFrom<CustomResolver, string>(src => src.NumberValue));
        // The child map must inherit the resolver-backed member configuration
        // from the base map via IncludeBase.
        cfg.CreateMap<SourceChildDto, DestinationChildDto>()
            .IncludeBase<SourceDto, DestinationDto>();
    });
    [Fact]
    public void Should_inherit_value_resolver()
    {
        var source = new SourceChildDto
        {
            Id = 1,
            NumberValue = "13",
            ChildField = "alpha"
        };
        var destination = Mapper.Map<DestinationChildDto>(source);
        destination.Number.ShouldBe(13);
    }
}
public class When_specifying_member_and_member_resolver_using_string_property_names : AutoMapperSpecBase
{
    private Destination _mapped;
    public class Source
    {
        public int SourceValue { get; set; }
    }
    public class Destination
    {
        public int DestinationValue { get; set; }
    }
    public class CustomValueResolver : IMemberValueResolver<object, object, int, object>
    {
        public CustomValueResolver()
        {
        }
        public object Resolve(object s, object d, int source, object dest, ResolutionContext context)
        {
            return source + 5;
        }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        cfg.ConstructServicesUsing(type => new CustomValueResolver());
        // Both the destination member and the source member are identified by
        // name strings rather than by lambda expressions.
        cfg.CreateMap<Source, Destination>()
            .ForMember("DestinationValue", opt => opt.MapFrom<CustomValueResolver, int>("SourceValue"));
    });
    protected override void Because_of() => _mapped = Mapper.Map<Source, Destination>(new Source { SourceValue = 5 });
    [Fact]
    public void Should_translate_the_property()
    {
        _mapped.DestinationValue.ShouldBe(10);
    }
}
public class When_specifying_a_custom_member_mapping_to_a_nested_object
{
    public class Source
    {
        public int Value { get; set; }
    }
    public class Destination
    {
        public SubDest Dest { get; set; }
    }
    public class SubDest
    {
        public int Value { get; set; }
    }
    [Fact]
    public void Should_fail_with_an_exception_during_configuration()
    {
        // ForMember only accepts top-level destination members; the nested path
        // dest.Dest.Value must be rejected while the configuration is built.
        typeof(ArgumentException).ShouldBeThrownBy(
            () => new MapperConfiguration(cfg => cfg.CreateMap<Source, Destination>()
                .ForMember(dest => dest.Dest.Value, opt => opt.MapFrom(src => src.Value))));
    }
}
public class When_specifying_a_custom_member_mapping_with_a_cast : NonValidatingSpecBase
{
    private Source _source;
    private Destination _mapped;
    public class Source
    {
        public string MyName { get; set; }
    }
    public class Destination : ISomeInterface
    {
        public string Name { get; set; }
    }
    public interface ISomeInterface
    {
        string Name { get; set; }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        // The destination member is addressed through an interface cast; the
        // configuration must still resolve it to Destination.Name.
        cfg.CreateMap<Source, Destination>()
            .ForMember(dest => ((ISomeInterface) dest).Name, opt => opt.MapFrom(src => src.MyName));
    });
    protected override void Because_of()
    {
        _source = new Source { MyName = "jon" };
        _mapped = Mapper.Map<Source, Destination>(_source);
    }
    [Fact]
    public void Should_perform_the_translation()
    {
        _mapped.Name.ShouldBe("jon");
    }
}
public class When_destination_property_does_not_have_a_setter : AutoMapperSpecBase
{
    private Source _source;
    private Destination _mapped;
    public class Source
    {
        public string Name { get; set; }
        public string Value { get; set; }
        public string Foo { get; set; }
    }
    // Destination mixes setter accessibilities: private (Name), protected (Foo),
    // none at all (Today) and public (Value).
    public class Destination
    {
        private DateTime _today;
        public string Name { get; private set; }
        public string Foo { get; protected set; }
        public DateTime Today => _today;
        public string Value { get; set; }
        public Destination()
        {
            _today = DateTime.Today;
            Name = "name";
        }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        cfg.CreateMap<Source, Destination>();
    });
    protected override void Because_of()
    {
        _source = new Source { Name = "jon", Value = "value", Foo = "bar" };
        _mapped = Mapper.Map<Source, Destination>(_source);
    }
    [Fact]
    public void Should_copy_to_properties_that_have_setters()
    {
        _mapped.Value.ShouldBe("value");
    }
    [Fact]
    public void Should_not_attempt_to_translate_to_properties_that_do_not_have_a_setter()
    {
        _mapped.Today.ShouldBe(DateTime.Today);
    }
    [Fact]
    public void Should_translate_to_properties_that_have_a_private_setters()
    {
        _mapped.Name.ShouldBe("jon");
    }
    [Fact]
    public void Should_translate_to_properties_that_have_a_protected_setters()
    {
        _mapped.Foo.ShouldBe("bar");
    }
}
public class When_destination_property_does_not_have_a_getter : AutoMapperSpecBase
{
    private Source _source;
    private Destination _mapped;
    private SourceWithList _listSource;
    private DestinationWithList _listDest;
    public class Source
    {
        public string Value { get; set; }
    }
    // Write-only property: the mapped value is observable only through GetValue().
    public class Destination
    {
        private string _value;
        public string Value
        {
            set { _value = value; }
        }
        public string GetValue() => _value;
    }
    public class SourceWithList
    {
        public IList SomeList { get; set; }
    }
    public class DestinationWithList
    {
        private IList _someList;
        public IList SomeList
        {
            set { _someList = value; }
        }
        public IList GetSomeList() => _someList;
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        cfg.CreateMap<Source, Destination>();
        cfg.CreateMap<SourceWithList, DestinationWithList>();
    });
    protected override void Because_of()
    {
        _source = new Source { Value = "jon" };
        _listSource = new SourceWithList { SomeList = new[] { 1, 2 } };
        _mapped = Mapper.Map<Source, Destination>(_source);
        _listDest = Mapper.Map<SourceWithList, DestinationWithList>(_listSource);
    }
    [Fact]
    public void Should_translate_to_properties_that_doesnt_have_a_getter()
    {
        _mapped.GetValue().ShouldBe("jon");
    }
    [Fact]
    public void Should_translate_to_enumerable_properties_that_doesnt_have_a_getter()
    {
        new[] { 1, 2 }.ShouldBe(_listDest.GetSomeList());
    }
}
public class When_destination_type_requires_a_constructor : AutoMapperSpecBase
{
    private Destination _destination;
    public class Source
    {
        public int Value { get; set; }
    }
    public class Destination
    {
        public Destination(int otherValue)
        {
            OtherValue = otherValue;
        }
        public int Value { get; set; }
        public int OtherValue { get; set; }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        // ConstructUsing supplies the required ctor argument; OtherValue is then
        // ignored so member mapping does not clobber the ctor-assigned value.
        cfg.CreateMap<Source, Destination>()
            .ConstructUsing(src => new Destination(src.Value + 4))
            .ForMember(dest => dest.OtherValue, opt => opt.Ignore());
    });
    protected override void Because_of() => _destination = Mapper.Map<Source, Destination>(new Source { Value = 5 });
    [Fact]
    public void Should_use_supplied_constructor_to_map()
    {
        _destination.OtherValue.ShouldBe(9);
    }
    [Fact]
    public void Should_map_other_members()
    {
        _destination.Value.ShouldBe(5);
    }
}
public class When_mapping_from_a_constant_value : AutoMapperSpecBase
{
    private Dest _mapped;
    public class Source
    {
    }
    public class Dest
    {
        public int Value { get; set; }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        // MapFrom accepts a constant-valued projection; no source member exists.
        cfg.CreateMap<Source, Dest>()
            .ForMember(dest => dest.Value, opt => opt.MapFrom(src => 5));
    });
    protected override void Because_of() => _mapped = Mapper.Map<Source, Dest>(new Source());
    [Fact]
    public void Should_map_from_that_constant_value()
    {
        _mapped.Value.ShouldBe(5);
    }
}
public class When_building_custom_configuration_mapping_to_itself
{
    public class Source
    {
    }
    public class Dest
    {
        public int Value { get; set; }
    }
    [Fact]
    public void Should_map_from_that_constant_value()
    {
        // ForMember(dest => dest, ...) names the destination object itself, not a
        // member of it, and must be rejected while the configuration is built.
        Exception caught = null;
        try
        {
            new MapperConfiguration(cfg => cfg.CreateMap<Source, Dest>()
                .ForMember(dest => dest, opt => opt.MapFrom(src => 5)));
        }
        catch (Exception e)
        {
            caught = e;
        }
        caught.ShouldNotBeNull();
    }
}
public class When_mapping_from_one_type_to_another : AutoMapperSpecBase
{
    private Dest _mapped;
    public class Source
    {
        public string Value { get; set; }
    }
    public class Dest
    {
        // AutoMapper tries to map source to this constructor's parameter,
        // but does not take its member configuration into account
        public Dest(int value)
        {
            Value = value;
        }
        public Dest()
        {
        }
        public int Value { get; set; }
    }
    protected override MapperConfiguration Configuration { get; } = new MapperConfiguration(cfg =>
    {
        // Constructor mapping is switched off so the MapFrom projection below is
        // the only way Value gets populated.
        cfg.DisableConstructorMapping();
        cfg.CreateMap<Source, Dest>()
            .ForMember(dest => dest.Value, opt => opt.MapFrom(s => ParseValue(s.Value)));
    });
    protected override void Because_of() => _mapped = Mapper.Map<Source, Dest>(new Source { Value = "a1" });
    [Fact]
    public void Should_use_member_configuration()
    {
        _mapped.Value.ShouldBe(1);
    }
    // Strips the leading marker character and parses the remainder ("a1" -> 1).
    private static int ParseValue(string value) => int.Parse(value.Substring(1));
}
}
| |
using System;
using System.IO;
using System.Threading;
using Platform.IO;
using Platform.Text;
using Platform.Models;
using Platform.VirtualFileSystem.Providers;
namespace Platform.VirtualFileSystem
{
    /// <summary>
    /// Resumable single-file transfer service. Copies <c>m_Source</c> to
    /// <c>m_Destination</c> via a "temp identity" file keyed on the source's
    /// URI/length/creation-time, so a partially completed transfer can be
    /// resumed (by hashing the already-written prefix) instead of restarted.
    /// </summary>
    public class OldStandardFileTransferService
        : AbstractRunnableService, IFileTransferService
    {
        private IFile m_Source;                        // file being read
        private IFile m_Destination;                   // final destination file
        private FileTransferServiceType m_ServiceType; // transfer options (destination, buffer size, attributes)
        // The node this service operates on (the source file).
        public virtual INode OperatingNode
        {
            get
            {
                return m_Source;
            }
        }
        // The node being written to (the destination file).
        public virtual INode TargetNode
        {
            get
            {
                return m_Destination;
            }
        }
        public OldStandardFileTransferService(IFile source, IFile destination)
            : this(source, new FileTransferServiceType(destination, true))
        {
        }
        public OldStandardFileTransferService(IFile source, FileTransferServiceType serviceType)
        {
            m_ServiceType = serviceType;
            m_Source = source;
            m_Destination = m_ServiceType.Destination;
            m_Progress = new TransferProgress(this);
        }
        // Byte-count meter exposed to observers; backed by TransferProgress below.
        public override Platform.Models.IMeter Progress
        {
            get
            {
                return m_Progress;
            }
        }
        private TransferProgress m_Progress;
        #region Types
        /// <summary>
        /// Meter that reports transfer progress in bytes, delegating current and
        /// maximum values back to the owning service (which in turn reads the
        /// active StreamPump when one exists).
        /// </summary>
        private class TransferProgress
            : AbstractMeter
        {
            private OldStandardFileTransferService m_Service;
            public TransferProgress(OldStandardFileTransferService service)
            {
                m_Service = service;
            }
            public override object Owner
            {
                get
                {
                    return m_Service;
                }
            }
            public override object MaximumValue
            {
                get
                {
                    return m_Service.GetBytesToTransfer();
                }
            }
            public override object MinimumValue
            {
                get
                {
                    return 0;
                }
            }
            public override object CurrentValue
            {
                get
                {
                    return m_Service.GetBytesTransferred();
                }
            }
            public override string Units
            {
                get
                {
                    return "bytes";
                }
            }
            // Human-readable progress line; falls back to the raw state name for
            // states other than Finished/Transferring.
            public override string ToString()
            {
                switch (m_Service.m_TransferState)
                {
                    case TransferState.Finished:
                        return String.Format("Finished {0}/{1} bytes ({2:0}%)", CurrentValue, MaximumValue, Convert.ToDouble(CurrentValue) / Convert.ToDouble(MaximumValue) * 100.0);
                    case TransferState.Transferring:
                        return String.Format("Transferring {0}/{1} bytes ({2:0.##}%)", CurrentValue, MaximumValue, Convert.ToDouble(CurrentValue) / Convert.ToDouble(MaximumValue) * 100.0);
                    default:
                        return Enum.GetName(typeof(TransferState), m_Service.m_TransferState);
                }
            }
            public virtual void RaiseValueChanged(object oldValue, object newValue)
            {
                OnValueChanged(oldValue, newValue);
            }
            public virtual void RaiseMajorChange()
            {
                OnMajorChange();
            }
            // Guards against subscribing to the pump's events more than once.
            private bool m_PumpRegistered = false;
            // Called on every state change; lazily hooks this meter up to the
            // pump's progress events the first time transfer actually starts.
            public virtual void RaiseStateChanged()
            {
                if (m_Service.m_TransferState == TransferState.Transferring)
                {
                    // NOTE(review): locks on the meter instance itself; a private
                    // lock object would be safer — confirm no external code locks this.
                    lock (this)
                    {
                        if (!m_PumpRegistered)
                        {
                            m_Service.m_Pump.Progress.ValueChanged += PumpProgress_ValueChanged;
                            m_Service.m_Pump.Progress.MajorChange += new EventHandler(PumpProgress_MajorChange);
                            m_PumpRegistered = true;
                        }
                    }
                }
            }
            private void PumpProgress_ValueChanged(object sender, MeterEventArgs eventArgs)
            {
                // Pump values are relative to the resume point; shift by the
                // service's offset so observers see absolute byte positions.
                OnValueChanged((long)eventArgs.OldValue + m_Service.m_Offset, (long)eventArgs.NewValue + m_Service.m_Offset);
            }
            private void PumpProgress_MajorChange(object sender, EventArgs e)
            {
                OnMajorChange();
            }
        }
        #endregion
        // Maps the fine-grained transfer state onto the generic task-state model.
        protected static TaskState ToTaskState(TransferState transferState)
        {
            switch (transferState)
            {
                case TransferState.NotStarted:
                    return Platform.Models.TaskState.NotStarted;
                case TransferState.Preparing:
                case TransferState.Comparing:
                case TransferState.Transferring:
                case TransferState.Copying:
                case TransferState.Tidying:
                    return Platform.Models.TaskState.Running;
                case TransferState.Finished:
                    return Platform.Models.TaskState.Finished;
                case TransferState.Stopped:
                    return Platform.Models.TaskState.Stopped;
                default:
                    return Platform.Models.TaskState.Unknown;
            }
        }
        // Lifecycle of a transfer, in rough chronological order.
        protected enum TransferState
        {
            NotStarted,
            Preparing,
            Comparing,
            Transferring,
            Copying,
            Tidying,
            Finished,
            Stopped
        }
        private TransferState m_TransferState = TransferState.NotStarted;
        // Central state transition: updates the task state, wakes any waiters on
        // this instance's monitor and notifies the progress meter.
        private void SetTransferState(TransferState value)
        {
            // NOTE(review): lock (this) — external code locking the service
            // instance would contend with this; PulseAll(this) also implies
            // waiters use the instance monitor, so changing it is not trivial.
            lock (this)
            {
                object oldValue;
                oldValue = m_TransferState;
                m_TransferState = value;
                SetTaskState(ToTaskState(m_TransferState));
                Monitor.PulseAll(this);
                m_Progress.RaiseStateChanged();
            }
        }
        private StreamPump m_Pump;       // active copier while Transferring, else null
        private Thread m_TaskThread;     // thread executing Run()
        private long m_BytesTransferred = 0L;
        // Bytes written so far; prefers the live pump's counter when one exists.
        private long GetBytesTransferred()
        {
            if (m_Pump != null)
            {
                return Convert.ToInt64(m_Pump.Progress.CurrentValue) + m_Offset;
            }
            else
            {
                return m_BytesTransferred + m_Offset;
            }
        }
        // Total bytes expected for this transfer.
        private long GetBytesToTransfer()
        {
            if (m_Pump != null)
            {
                return Convert.ToInt64(m_Pump.Progress.MaximumValue) + m_Offset;
            }
            else
            {
                return m_Source.Length;
            }
        }
        protected override Thread GetTaskThread()
        {
            return m_TaskThread;
        }
        // Resume offset: number of bytes already present in the temp file that
        // were verified (by hash) to match the source prefix.
        private long m_Offset = 0;
        /// <summary>
        /// Performs the transfer. Outline:
        /// 1. Resolve a temp identity file keyed on the source's URI, length and
        ///    creation time, so a previous partial transfer can be found.
        /// 2. If the temp file is already a complete, content-identical copy,
        ///    just move/rename it into place and finish.
        /// 3. Otherwise, if the source stream is seekable, hash the source
        ///    prefix against the temp file; on a match, resume appending from
        ///    m_Offset, else truncate and start over.
        /// 4. Pump the remaining bytes, then move the temp file to a uniquely
        ///    named sibling, copy the configured attributes, and rename it to
        ///    the final destination name.
        /// </summary>
        public override void Run()
        {
            long x;
            string id;
            IFile destTemp;
            ITempIdentityFileService destTempService;
            IFile dest;
            Stream srcStream = null, destStream = null;
            IFileHashingService hasher;
            string sourceHash, destTempHash;
            try
            {
                lock (this.SyncLock)
                {
                    m_TaskThread = Thread.CurrentThread;
                    // Run() is single-shot; a second invocation is a programming error.
                    if (m_TransferState != TransferState.NotStarted)
                    {
                        throw new InvalidOperationException();
                    }
                    SetTransferState(TransferState.Preparing);
                    ProcessTaskStateRequest();
                }
                // Identity key for the temp file: same source URI + length +
                // creation time means a previous partial copy is reusable.
                id = m_Source.Address.Uri + m_Source.Length.ToString()
                    + (m_Source.Attributes.CreationTime ?? DateTime.MinValue).ToBinary().ToString();
                destTempService = (ITempIdentityFileService)m_Destination.GetService(new TempIdentityFileServiceType(id));
                destTemp = destTempService.GetTempFile();
                // Retry loop: 'continue' restarts the whole copy when the moved
                // file fails verification; 'break' proceeds to the tidy-up phase.
                for (;;)
                {
                    try
                    {
                        x = destTemp.Length;
                    }
                    catch (FileNotFoundException)
                    {
                        // No previous partial transfer; start from zero.
                        x = 0;
                    }
                    // Unique staging name next to the destination for the final atomic-ish rename.
                    dest = m_Destination.ParentDirectory.ResolveFile("$TMP_" + m_Destination.Address.Name + "_" + Guid.NewGuid().ToString("N"));
                    try
                    {
                        if (x == m_Source.Length)
                        {
                            // Temp file is already full length; if its contents match,
                            // skip the transfer entirely and just move it into place.
                            try
                            {
                                if (m_Source.IdenticalTo(destTemp, FileComparingFlags.CompareContents))
                                {
                                    SetTransferState(TransferState.Copying);
                                    ProcessTaskStateRequest();
                                    m_Progress.RaiseValueChanged(m_Progress.CurrentValue, 0);
                                    destTemp.MoveTo(dest, true);
                                    // Re-verify after the move; on mismatch restart the copy.
                                    if (!m_Source.IdenticalTo(dest, FileComparingFlags.CompareContents))
                                    {
                                        continue;
                                    }
                                    dest.RenameTo(m_Destination.Address.NameAndQuery, true);
                                    m_BytesTransferred = m_Destination.Length;
                                    m_Progress.RaiseValueChanged(m_Progress.CurrentValue, m_BytesTransferred);
                                    SetTransferState(TransferState.Finished);
                                    ProcessTaskStateRequest();
                                    return;
                                }
                            }
                            catch (IOException)
                            {
                                // Comparison failed (e.g. file in use); fall through to a normal transfer.
                            }
                        }
                        srcStream = m_Source.GetContent().GetInputStream(FileShare.Read);
                        if (!srcStream.CanSeek)
                        {
                            // Can't verify/skip a prefix on a non-seekable source; overwrite from scratch.
                            destStream = destTemp.GetContent().GetOutputStream(FileMode.Create, FileShare.Read);
                        }
                        else
                        {
                            destStream = destTemp.GetContent().GetOutputStream(FileMode.Append, FileShare.Read);
                            SetTransferState(TransferState.Comparing);
                            ProcessTaskStateRequest();
                            // Hash the source prefix of the same length as the partial
                            // temp file to decide whether resuming is valid.
                            hasher = (IFileHashingService)m_Source.GetService(new FileHashingServiceType("md5"));
                            sourceHash = hasher.ComputeHash(0, destStream.Length).TextValue;
                            hasher = (IFileHashingService)destTemp.GetService(new FileHashingServiceType("md5"));
                            destTempHash = hasher.ComputeHash().TextValue;
                            if (sourceHash != destTempHash)
                            {
                                // Prefix mismatch: discard the partial data and restart.
                                destStream.Close();
                                destStream = destTemp.GetContent().GetOutputStream(FileMode.Create, FileShare.Read);
                            }
                            else
                            {
                                // Prefix verified: resume by skipping the already-written bytes.
                                m_Offset = destStream.Length;
                                if (m_Offset > 0)
                                {
                                    srcStream = new PartialStream(srcStream, m_Offset);
                                }
                            }
                        }
                        m_Progress.RaiseValueChanged(0, m_Offset);
                        ProcessTaskStateRequest();
                        m_Pump = new StreamPump(srcStream, destStream, true, false, m_ServiceType.BufferSize);
                        m_Pump.TaskStateChanged += new TaskEventHandler(Pump_TaskStateChanged);
                        SetTransferState(TransferState.Transferring);
                        ProcessTaskStateRequest();
                        m_Pump.Run();
                        if (m_Pump.TaskState == TaskState.Stopped)
                        {
                            throw new StopRequestedException();
                        }
                        SetTransferState(TransferState.Copying);
                        ProcessTaskStateRequest();
                    }
                    finally
                    {
                        // Best-effort close of both streams; failures here must not
                        // mask an exception from the transfer itself.
                        if (srcStream != null)
                        {
                            Routines.IgnoreExceptions(delegate
                            {
                                srcStream.Close();
                            });
                        }
                        if (destStream != null)
                        {
                            Routines.IgnoreExceptions(delegate
                            {
                                destStream.Close();
                            });
                        }
                    }
                    break;
                }
                SetTransferState(TransferState.Tidying);
                destTemp.MoveTo(dest, true);
                // Acquire an update context for the attributes so that all
                // attribute updates are committed in a single operation.
                using (dest.Attributes.AquireUpdateContext())
                {
                    foreach (string s in this.m_ServiceType.AttributesToTransfer)
                    {
                        dest.Attributes[s] = m_Source.Attributes[s];
                    }
                }
                dest.RenameTo(m_Destination.Address.Name, true);
                SetTransferState(TransferState.Finished);
            }
            finally
            {
                // NOTE(review): this marks the transfer Finished even when an
                // exception escaped the try block (only Stopped is preserved) —
                // confirm callers distinguish failure via the exception alone.
                if (m_TransferState != TransferState.Stopped)
                {
                    SetTransferState(TransferState.Finished);
                }
            }
        }
        // Mirrors the pump's Running/Paused states onto this service's task state.
        private void Pump_TaskStateChanged(object sender, TaskEventArgs eventArgs)
        {
            lock (this)
            {
                if (eventArgs.TaskState == TaskState.Running
                    || eventArgs.TaskState == TaskState.Paused)
                {
                    SetTaskState(eventArgs.TaskState);
                }
            }
        }
        // Stop/Pause/Resume delegate to the pump while it is running; otherwise
        // fall back to the base service implementation.
        public override void Stop()
        {
            lock (this)
            {
                if (m_Pump != null && m_Pump.TaskState == TaskState.Running)
                {
                    m_Pump.Stop();
                }
                else
                {
                    base.Stop();
                }
            }
        }
        public override void Pause()
        {
            lock (this)
            {
                if (m_Pump != null && m_Pump.TaskState == TaskState.Running)
                {
                    m_Pump.Pause();
                }
                else
                {
                    base.Pause();
                }
            }
        }
        public override void Resume()
        {
            lock (this)
            {
                if (m_Pump != null && m_Pump.TaskState == TaskState.Running)
                {
                    m_Pump.Resume();
                }
                else
                {
                    base.Resume();
                }
            }
        }
    }
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace WebsitePanel.Portal.ProviderControls {
public partial class HyperV_Settings {
/// <summary>
/// ValidationSummary control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.ValidationSummary ValidationSummary;
/// <summary>
/// locHyperVServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locHyperVServer;
/// <summary>
/// radioServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RadioButtonList radioServer;
/// <summary>
/// ServerNameRow control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlTableRow ServerNameRow;
/// <summary>
/// locServerName control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locServerName;
/// <summary>
/// txtServerName control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtServerName;
/// <summary>
/// btnConnect control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Button btnConnect;
/// <summary>
/// ServerNameValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator ServerNameValidator;
/// <summary>
/// ServerErrorRow control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlTableRow ServerErrorRow;
/// <summary>
/// locErrorReadingNetworksList control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Label locErrorReadingNetworksList;
/// <summary>
/// locGeneralSettings control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locGeneralSettings;
/// <summary>
/// locVpsRootFolder control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locVpsRootFolder;
/// <summary>
/// txtVpsRootFolder control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtVpsRootFolder;
/// <summary>
/// RootFolderValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator RootFolderValidator;
/// <summary>
/// locFolderVariables control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locFolderVariables;
/// <summary>
/// locOSTemplatesPath control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locOSTemplatesPath;
/// <summary>
/// txtOSTemplatesPath control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtOSTemplatesPath;
/// <summary>
/// TemplatesPathValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator TemplatesPathValidator;
/// <summary>
/// locExportedVpsPath control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locExportedVpsPath;
/// <summary>
/// txtExportedVpsPath control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtExportedVpsPath;
/// <summary>
/// ExportedVpsPathValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator ExportedVpsPathValidator;
/// <summary>
/// locProcessorSettings control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locProcessorSettings;
/// <summary>
/// locCpuReserve control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locCpuReserve;
/// <summary>
/// txtCpuReserve control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtCpuReserve;
/// <summary>
/// CpuReserveValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator CpuReserveValidator;
/// <summary>
/// locCpuLimit control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locCpuLimit;
/// <summary>
/// txtCpuLimit control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtCpuLimit;
/// <summary>
/// CpuLimitValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator CpuLimitValidator;
/// <summary>
/// locCpuWeight control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locCpuWeight;
/// <summary>
/// txtCpuWeight control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtCpuWeight;
/// <summary>
/// CpuWeightValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator CpuWeightValidator;
/// <summary>
/// locMediaLibrary control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locMediaLibrary;
/// <summary>
/// locDvdIsoPath control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locDvdIsoPath;
/// <summary>
/// txtDvdLibraryPath control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtDvdLibraryPath;
/// <summary>
/// DvdLibraryPathValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator DvdLibraryPathValidator;
/// <summary>
/// locVhd control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locVhd;
/// <summary>
/// locDiskType control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locDiskType;
/// <summary>
/// radioVirtualDiskType control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RadioButtonList radioVirtualDiskType;
/// <summary>
/// locExternalNetwork control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locExternalNetwork;
/// <summary>
/// locExternalNetworkName control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locExternalNetworkName;
/// <summary>
/// ddlExternalNetworks control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.DropDownList ddlExternalNetworks;
/// <summary>
/// locPreferredNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPreferredNameServer;
/// <summary>
/// externalPreferredNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl externalPreferredNameServer;
/// <summary>
/// locAlternateNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locAlternateNameServer;
/// <summary>
/// externalAlternateNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl externalAlternateNameServer;
/// <summary>
/// chkAssignIPAutomatically control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.CheckBox chkAssignIPAutomatically;
/// <summary>
/// ManageUpdatePanel control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.UpdatePanel ManageUpdatePanel;
/// <summary>
/// locManagementNetwork control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locManagementNetwork;
/// <summary>
/// locManagementNetworkName control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locManagementNetworkName;
/// <summary>
/// ddlManagementNetworks control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.DropDownList ddlManagementNetworks;
/// <summary>
/// ManageNicConfigRow control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlTableRow ManageNicConfigRow;
/// <summary>
/// locManageNicConfig control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locManageNicConfig;
/// <summary>
/// ddlManageNicConfig control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.DropDownList ddlManageNicConfig;
/// <summary>
/// ManagePreferredNameServerRow control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlTableRow ManagePreferredNameServerRow;
/// <summary>
/// locManagePreferredNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locManagePreferredNameServer;
/// <summary>
/// managePreferredNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl managePreferredNameServer;
/// <summary>
/// ManageAlternateNameServerRow control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlTableRow ManageAlternateNameServerRow;
/// <summary>
/// locManageAlternateNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locManageAlternateNameServer;
/// <summary>
/// manageAlternateNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl manageAlternateNameServer;
/// <summary>
/// PrivUpdatePanel control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.UpdatePanel PrivUpdatePanel;
/// <summary>
/// locPrivateNetwork control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPrivateNetwork;
/// <summary>
/// locIPFormat control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locIPFormat;
/// <summary>
/// ddlPrivateNetworkFormat control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.DropDownList ddlPrivateNetworkFormat;
/// <summary>
/// PrivCustomFormatRow control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.HtmlControls.HtmlTableRow PrivCustomFormatRow;
/// <summary>
/// locPrivCustomFormat control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPrivCustomFormat;
/// <summary>
/// privateIPAddress control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl privateIPAddress;
/// <summary>
/// privateSubnetMask control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox privateSubnetMask;
/// <summary>
/// privateSubnetMaskValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator privateSubnetMaskValidator;
/// <summary>
/// locPrivDefaultGateway control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPrivDefaultGateway;
/// <summary>
/// privateDefaultGateway control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl privateDefaultGateway;
/// <summary>
/// locPrivPreferredNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPrivPreferredNameServer;
/// <summary>
/// privatePreferredNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl privatePreferredNameServer;
/// <summary>
/// locPrivAlternateNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPrivAlternateNameServer;
/// <summary>
/// privateAlternateNameServer control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::WebsitePanel.Portal.UserControls.EditIPAddressControl privateAlternateNameServer;
/// <summary>
/// locHostname control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locHostname;
/// <summary>
/// locHostnamePattern control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locHostnamePattern;
/// <summary>
/// txtHostnamePattern control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtHostnamePattern;
/// <summary>
/// HostnamePatternValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator HostnamePatternValidator;
/// <summary>
/// locPatternText control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locPatternText;
/// <summary>
/// locStartAction control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locStartAction;
/// <summary>
/// locStartOptionsText control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locStartOptionsText;
/// <summary>
/// radioStartAction control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RadioButtonList radioStartAction;
/// <summary>
/// locStartupDelayText control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locStartupDelayText;
/// <summary>
/// locStartupDelay control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locStartupDelay;
/// <summary>
/// txtStartupDelay control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.TextBox txtStartupDelay;
/// <summary>
/// locSeconds control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locSeconds;
/// <summary>
/// StartupDelayValidator control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RequiredFieldValidator StartupDelayValidator;
/// <summary>
/// locStopAction control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locStopAction;
/// <summary>
/// locStopActionText control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.Localize locStopActionText;
/// <summary>
/// radioStopAction control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.RadioButtonList radioStopAction;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Tests.Services
{
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Apache.Ignite.Core.Cluster;
using Apache.Ignite.Core.Services;
/// <summary>
/// Services async wrapper to simplify testing.
/// </summary>
/// <summary>
/// Services async wrapper to simplify testing.
/// Each synchronous IServices member delegates to its async counterpart so
/// that tests written against the sync API also exercise the async paths.
/// AggregateException is unwrapped uniformly so callers observe the same
/// exception type the real synchronous API would throw.
/// </summary>
public class ServicesAsyncWrapper : IServices
{
    /** Wrapped async services. */
    private readonly IServices _services;

    /// <summary>
    /// Initializes a new instance of the <see cref="ServicesAsyncWrapper"/> class.
    /// </summary>
    /// <param name="services">Services to wrap.</param>
    public ServicesAsyncWrapper(IServices services)
    {
        _services = services;
    }

    /** <inheritDoc /> */
    public IClusterGroup ClusterGroup
    {
        get { return _services.ClusterGroup; }
    }

    /** <inheritDoc /> */
    public void DeployClusterSingleton(string name, IService service)
    {
        WaitUnwrapAggregate(_services.DeployClusterSingletonAsync(name, service));
    }

    /** <inheritDoc /> */
    public Task DeployClusterSingletonAsync(string name, IService service)
    {
        return _services.DeployClusterSingletonAsync(name, service);
    }

    /** <inheritDoc /> */
    public void DeployNodeSingleton(string name, IService service)
    {
        WaitUnwrapAggregate(_services.DeployNodeSingletonAsync(name, service));
    }

    /** <inheritDoc /> */
    public Task DeployNodeSingletonAsync(string name, IService service)
    {
        return _services.DeployNodeSingletonAsync(name, service);
    }

    /** <inheritDoc /> */
    public void DeployKeyAffinitySingleton<TK>(string name, IService service, string cacheName, TK affinityKey)
    {
        WaitUnwrapAggregate(_services.DeployKeyAffinitySingletonAsync(name, service, cacheName, affinityKey));
    }

    /** <inheritDoc /> */
    public Task DeployKeyAffinitySingletonAsync<TK>(string name, IService service, string cacheName, TK affinityKey)
    {
        return _services.DeployKeyAffinitySingletonAsync(name, service, cacheName, affinityKey);
    }

    /** <inheritDoc /> */
    public void DeployMultiple(string name, IService service, int totalCount, int maxPerNodeCount)
    {
        WaitUnwrapAggregate(_services.DeployMultipleAsync(name, service, totalCount, maxPerNodeCount));
    }

    /** <inheritDoc /> */
    public Task DeployMultipleAsync(string name, IService service, int totalCount, int maxPerNodeCount)
    {
        return _services.DeployMultipleAsync(name, service, totalCount, maxPerNodeCount);
    }

    /** <inheritDoc /> */
    public void Deploy(ServiceConfiguration configuration)
    {
        WaitUnwrapAggregate(_services.DeployAsync(configuration));
    }

    /** <inheritDoc /> */
    public Task DeployAsync(ServiceConfiguration configuration)
    {
        return _services.DeployAsync(configuration);
    }

    /** <inheritDoc /> */
    public void DeployAll(IEnumerable<ServiceConfiguration> configurations)
    {
        WaitUnwrapAggregate(_services.DeployAllAsync(configurations));
    }

    /** <inheritDoc /> */
    public Task DeployAllAsync(IEnumerable<ServiceConfiguration> configurations)
    {
        return _services.DeployAllAsync(configurations);
    }

    /** <inheritDoc /> */
    public void Cancel(string name)
    {
        WaitUnwrapAggregate(_services.CancelAsync(name));
    }

    /** <inheritDoc /> */
    public Task CancelAsync(string name)
    {
        return _services.CancelAsync(name);
    }

    /** <inheritDoc /> */
    public void CancelAll()
    {
        WaitUnwrapAggregate(_services.CancelAllAsync());
    }

    /** <inheritDoc /> */
    public Task CancelAllAsync()
    {
        return _services.CancelAllAsync();
    }

    /** <inheritDoc /> */
    public ICollection<IServiceDescriptor> GetServiceDescriptors()
    {
        return _services.GetServiceDescriptors();
    }

    /** <inheritDoc /> */
    public T GetService<T>(string name)
    {
        return _services.GetService<T>(name);
    }

    /** <inheritDoc /> */
    public ICollection<T> GetServices<T>(string name)
    {
        return _services.GetServices<T>(name);
    }

    /** <inheritDoc /> */
    public T GetServiceProxy<T>(string name) where T : class
    {
        return _services.GetServiceProxy<T>(name);
    }

    /** <inheritDoc /> */
    public T GetServiceProxy<T>(string name, bool sticky) where T : class
    {
        return _services.GetServiceProxy<T>(name, sticky);
    }

    /** <inheritDoc /> */
    public dynamic GetDynamicServiceProxy(string name)
    {
        return _services.GetDynamicServiceProxy(name);
    }

    /** <inheritDoc /> */
    public dynamic GetDynamicServiceProxy(string name, bool sticky)
    {
        return _services.GetDynamicServiceProxy(name, sticky);
    }

    /** <inheritDoc /> */
    public IServices WithKeepBinary()
    {
        return new ServicesAsyncWrapper(_services.WithKeepBinary());
    }

    /** <inheritDoc /> */
    public IServices WithServerKeepBinary()
    {
        return new ServicesAsyncWrapper(_services.WithServerKeepBinary());
    }

    /// <summary>
    /// Waits for task completion, unwrapping <see cref="AggregateException"/>
    /// so that the wrapper surfaces the same exception type as the
    /// synchronous Ignite API. Previously only some sync wrappers unwrapped,
    /// making exception behavior inconsistent across methods.
    /// </summary>
    /// <param name="task">Task to wait on.</param>
    private static void WaitUnwrapAggregate(Task task)
    {
        try
        {
            task.Wait();
        }
        catch (AggregateException ex)
        {
            throw ex.InnerException ?? ex;
        }
    }
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="SqlChars.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">junfang</owner>
// <owner current="true" primary="false">[....]</owner>
// <owner current="true" primary="false">[....]</owner>
//------------------------------------------------------------------------------
//**************************************************************************
// @File: SqlChars.cs
// @Owner: junfang
//
// Created by: JunFang
//
// Description: Class SqlChars is used to represent a char/varchar/nchar/nvarchar
// data from SQL Server. It contains a char array buffer, which can
// be refilled. For example, in data access, user could use one instance
// of SqlChars to bind to a binary column, and we will just keep copying
// the data into the same instance, and avoid allocation per row.
//
// Notes:
//
// History:
//
// @Version: Yukon
// 120214 JXF 09/23/02 SqlBytes/SqlChars class indexer
// 112296 AZA 07/06/02 Seal SqlAccess classes.
// 107151 AZA 04/18/02 Track byte array buffer as well as SqlBytes in
// sqlaccess.
// 107216 JXF 04/17/02 Bug 514927
// 106854 JXF 04/15/02 Fix http suites due to SqlChars
// 106448 JXF 04/12/02 Bugs on sqlchars
// 105715 JXF 04/05/02 Handle NULL properly in SqlBytes.SetLength
// 91128 JXF 10/17/01 Make SqlBytes not unsafe
//
// 04/20/01 JunFang Created.
//
// @EndHeader@
//**************************************************************************
namespace System.Data.SqlTypes {
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Diagnostics;
using System.Data.Common;
using System.Data.Sql;
using System.Data.SqlClient;
using System.Data.SqlTypes;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
using System.Runtime.Serialization;
using System.Security.Permissions;
[Serializable,XmlSchemaProvider("GetXsdType")]
public sealed class SqlChars : System.Data.SqlTypes.INullable, IXmlSerializable, ISerializable {
// --------------------------------------------------------------
// Data members
// --------------------------------------------------------------
// SqlChars has five possible states
// 1) SqlChars is Null
// - m_stream must be null, m_lCuLen must be x_lNull
// 2) SqlChars contains a valid buffer,
// - m_rgchBuf must not be null, and m_stream must be null
// 3) SqlChars contains a valid pointer
// - m_rgchBuf could be null or not,
// if not null, content is garbage, should never look into it.
// - m_stream must be null.
// 4) SqlChars contains a SqlStreamChars
// - m_stream must not be null
// - m_rgchBuf could be null or not. if not null, content is garbage, should never look into it.
// - m_lCurLen must be x_lNull.
// 5) SqlChars contains a Lazy Materialized Blob (ie, StorageState.Delayed)
//
internal char[] m_rgchBuf; // Data buffer
private long m_lCurLen; // Current data length
internal SqlStreamChars m_stream;
private SqlBytesCharsState m_state;
private char[] m_rgchWorkBuf; // A 1-char work buffer.
// The max data length that we support at this time.
private const long x_lMaxLen = (long)System.Int32.MaxValue;
private const long x_lNull = -1L;
// --------------------------------------------------------------
// Constructor(s)
// --------------------------------------------------------------
// Public default constructor used for XML serialization
// Starts in the Null state: no buffer, no stream, length marker x_lNull.
public SqlChars() {
    SetNull();
}
// Create a SqlChars with an in-memory buffer
// Create a SqlChars over the caller-supplied in-memory buffer.
// A null buffer produces a Null instance; otherwise the whole buffer
// length becomes the current data length (Buffer state).
public SqlChars(char[] buffer) {
    m_stream = null;
    m_rgchWorkBuf = null;
    m_rgchBuf = buffer;

    bool fNoBuffer = (null == m_rgchBuf);
    m_state = fNoBuffer ? SqlBytesCharsState.Null : SqlBytesCharsState.Buffer;
    m_lCurLen = fNoBuffer ? x_lNull : (long)m_rgchBuf.Length;

    AssertValid();
}
// Create a SqlChars from a SqlString
// A Null SqlString maps to the Null state; otherwise the string's
// characters are copied into a fresh buffer via the char[] constructor.
public SqlChars(SqlString value) : this (value.IsNull ? (char[])null : value.Value.ToCharArray()) {
}
// Create a SqlChars from a SqlStreamChars
// Wrap an existing SqlStreamChars; a null stream produces a Null instance.
// In Stream state the buffer length marker stays at x_lNull.
internal SqlChars(SqlStreamChars s) {
    m_rgchWorkBuf = null;
    m_rgchBuf = null;
    m_lCurLen = x_lNull;
    m_stream = s;

    if (s == null)
        m_state = SqlBytesCharsState.Null;
    else
        m_state = SqlBytesCharsState.Stream;

    AssertValid();
}
// Constructor required for serialization. Deserializes as a Buffer. If the bits have been tampered with
// then this will throw a SerializationException or a InvalidCastException.
private SqlChars(SerializationInfo info, StreamingContext context)
{
    m_stream = null;      // deserialized instances never carry a stream
    m_rgchWorkBuf = null; // scratch buffer is allocated lazily by the indexer
    if (info.GetBoolean("IsNull"))
    {
        m_state = SqlBytesCharsState.Null;
        m_rgchBuf = null;
    }
    else
    {
        // Non-null payload: restore the char data in Buffer mode; the
        // current length is the full deserialized array length.
        m_state = SqlBytesCharsState.Buffer;
        m_rgchBuf = (char[]) info.GetValue("data", typeof(char[]));
        m_lCurLen = m_rgchBuf.Length;
    }
    AssertValid();
}
// --------------------------------------------------------------
// Public properties
// --------------------------------------------------------------
// INullable
public bool IsNull {
    get {
        // INullable: true only in the Null state (no usable buffer or stream).
        return m_state == SqlBytesCharsState.Null;
    }
}
// Property: the in-memory buffer of SqlChars
// Return Buffer even if SqlChars is Null.
public char[] Buffer {
    get {
        // A stream-backed instance is materialized into the char buffer
        // first (this also transitions the state to Buffer).
        if (FStream()) {
            CopyStreamToBuffer();
        }
        return m_rgchBuf;
    }
}
// Property: the actual length of the data
public long Length {
    get {
        // Null has no length; a stream-backed instance reports the
        // stream's length; otherwise the tracked current length.
        if (m_state == SqlBytesCharsState.Null)
            throw new SqlNullValueException();

        if (m_state == SqlBytesCharsState.Stream)
            return m_stream.Length;

        return m_lCurLen;
    }
}
// Property: the max length of the data
// Return MaxLength even if SqlChars is Null.
// When the buffer is also null, return -1.
// If containing a Stream, return -1.
public long MaxLength {
    get {
        // Stream-backed instances report -1; otherwise the capacity is
        // the allocated buffer length, or -1 when no buffer exists.
        if (m_state == SqlBytesCharsState.Stream)
            return -1L;

        return (null == m_rgchBuf) ? -1L : (long)m_rgchBuf.Length;
    }
}
// Property: get a copy of the data in a new char[] array.
public char[] Value {
    get {
        char[] buffer;

        switch (m_state) {
            case SqlBytesCharsState.Null:
                throw new SqlNullValueException();

            case SqlBytesCharsState.Stream:
                // Stream contents must fit into a single array.
                if (m_stream.Length > x_lMaxLen)
                    throw new SqlTypeException(Res.GetString(Res.SqlMisc_BufferInsufficientMessage));
                buffer = new char[m_stream.Length];
                // Rewind so the copy starts at the beginning of the stream.
                if (m_stream.Position != 0)
                    m_stream.Seek(0, SeekOrigin.Begin);
                m_stream.Read(buffer, 0, checked((int)m_stream.Length));
                break;

            default:
                // Buffer mode: copy only the current data length, not the
                // full buffer capacity.
                buffer = new char[m_lCurLen];
                Array.Copy(m_rgchBuf, buffer, (int)m_lCurLen);
                break;
        }

        return buffer;
    }
}
// class indexer
public char this[long offset] {
    get {
        if (offset < 0 || offset >= this.Length)
            throw new ArgumentOutOfRangeException("offset");
        // Lazily allocate the shared 1-char scratch buffer, then go
        // through the bounds-checked Read path.
        if (m_rgchWorkBuf == null)
            m_rgchWorkBuf = new char[1];
        Read(offset, m_rgchWorkBuf, 0, 1);
        return m_rgchWorkBuf[0];
    }
    set {
        if (m_rgchWorkBuf == null)
            m_rgchWorkBuf = new char[1];
        m_rgchWorkBuf[0] = value;
        // Write performs offset/state validation for the setter.
        Write(offset, m_rgchWorkBuf, 0, 1);
    }
}
internal SqlStreamChars Stream {
    get {
        // When not already stream-backed, expose the buffer through a
        // stream adapter over this instance.
        return FStream() ? m_stream : new StreamOnSqlChars(this);
    }
    set {
        // Switching to stream mode invalidates the buffer length marker;
        // a null stream puts the instance into the Null state.
        m_lCurLen = x_lNull;
        m_stream = value;
        m_state = (value == null) ? SqlBytesCharsState.Null : SqlBytesCharsState.Stream;
        AssertValid();
    }
}
public StorageState Storage {
    get {
        // Map the internal state onto the public StorageState enum;
        // the Null state has no storage and therefore throws.
        if (m_state == SqlBytesCharsState.Null)
            throw new SqlNullValueException();

        if (m_state == SqlBytesCharsState.Stream)
            return StorageState.Stream;

        return (m_state == SqlBytesCharsState.Buffer)
            ? StorageState.Buffer
            : StorageState.UnmanagedBuffer;
    }
}
// --------------------------------------------------------------
// Public methods
// --------------------------------------------------------------
public void SetNull() {
    // Enter the Null state and drop the stream. The char buffer itself is
    // deliberately kept so a later SetLength can reuse it (see SetLength).
    m_lCurLen = x_lNull;
    m_stream = null;
    m_state = SqlBytesCharsState.Null;
    AssertValid();
}
// Set the current length of the data
// If the SqlChars is Null, setLength will make it non-Null.
public void SetLength(long value) {
    if (value < 0)
        throw new ArgumentOutOfRangeException("value");

    if (FStream()) {
        // Stream mode: delegate length management to the stream itself.
        m_stream.SetLength(value);
    }
    else {
        // If there is a buffer, even the value of SqlChars is Null,
        // still allow setting length to zero, which will make it not Null.
        // If the buffer is null, raise exception
        //
        if (null == m_rgchBuf)
            throw new SqlTypeException(Res.GetString(Res.SqlMisc_NoBufferMessage));

        if (value > (long)m_rgchBuf.Length)
            throw new ArgumentOutOfRangeException("value");
        else if (IsNull)
            // At this point we know that value is small enough
            // Go back in buffer mode
            m_state = SqlBytesCharsState.Buffer;

        m_lCurLen = value;
    }
    AssertValid();
}
// Read data of specified length from specified offset into a buffer
// Returns the number of chars actually read; this may be less than
// 'count' when the request extends past the current data length.
public long Read(long offset, char[] buffer, int offsetInBuffer, int count) {
    if (IsNull)
        throw new SqlNullValueException();

    // Validate the arguments
    if (buffer == null)
        throw new ArgumentNullException("buffer");
    if (offset > this.Length || offset < 0)
        throw new ArgumentOutOfRangeException("offset");
    if (offsetInBuffer > buffer.Length || offsetInBuffer < 0)
        throw new ArgumentOutOfRangeException("offsetInBuffer");
    if (count < 0 || count > buffer.Length - offsetInBuffer)
        throw new ArgumentOutOfRangeException("count");

    // Adjust count based on data length
    if (count > this.Length - offset)
        count = (int)(this.Length - offset);

    if (count != 0) {
        switch (m_state) {
            case SqlBytesCharsState.Stream:
                // Seek only when necessary; sequential reads keep the position.
                if (m_stream.Position != offset)
                    m_stream.Seek(offset, SeekOrigin.Begin);
                m_stream.Read(buffer, offsetInBuffer, count);
                break;
            default:
                Array.Copy(m_rgchBuf, offset, buffer, offsetInBuffer, count);
                break;
        }
    }
    return count;
}
// Write data of specified length into the SqlChars from specified offset
// Stream mode writes pass straight through; buffer mode validates every
// argument first, so that no exception can be thrown after instance state
// has been mutated.
public void Write(long offset, char[] buffer, int offsetInBuffer, int count) {
    if (FStream()) {
        if (m_stream.Position != offset)
            m_stream.Seek(offset, SeekOrigin.Begin);
        m_stream.Write(buffer, offsetInBuffer, count);
    }
    else {
        // Validate the arguments
        if (buffer == null)
            throw new ArgumentNullException("buffer");
        if (m_rgchBuf == null)
            throw new SqlTypeException(Res.GetString(Res.SqlMisc_NoBufferMessage));
        if (offset < 0)
            throw new ArgumentOutOfRangeException("offset");
        if (offset > m_rgchBuf.Length)
            throw new SqlTypeException(Res.GetString(Res.SqlMisc_BufferInsufficientMessage));
        if (offsetInBuffer < 0 || offsetInBuffer > buffer.Length)
            throw new ArgumentOutOfRangeException("offsetInBuffer");
        if (count < 0 || count > buffer.Length - offsetInBuffer)
            throw new ArgumentOutOfRangeException("count");
        if (count > m_rgchBuf.Length - offset)
            throw new SqlTypeException(Res.GetString(Res.SqlMisc_BufferInsufficientMessage));

        if (IsNull) {
            // If NULL and there is buffer inside, we only allow writing from
            // offset zero.
            //
            if (offset != 0)
                throw new SqlTypeException(Res.GetString(Res.SqlMisc_WriteNonZeroOffsetOnNullMessage));

            // treat as if our current length is zero.
            // Note this has to be done after all inputs are validated, so that
            // we won't throw exception after this point.
            //
            m_lCurLen = 0;
            m_state = SqlBytesCharsState.Buffer;
        }
        else if (offset > m_lCurLen) {
            // Don't allow writing from an offset that is larger than current length.
            // It would leave uninitialized data in the buffer.
            //
            throw new SqlTypeException(Res.GetString(Res.SqlMisc_WriteOffsetLargerThanLenMessage));
        }

        if (count != 0) {
            Array.Copy(buffer, offsetInBuffer, m_rgchBuf, offset, count);

            // If the last position that has been written is after
            // the current data length, reset the length
            if (m_lCurLen < offset + count)
                m_lCurLen = offset + count;
        }
    }
    AssertValid();
}
// Convert this instance to a SqlString; a Null SqlChars maps to
// SqlString.Null, otherwise a copy of the character data becomes the value.
public SqlString ToSqlString() {
    if (IsNull)
        return SqlString.Null;

    string s = new String(Value);
    return s; // implicit string -> SqlString conversion, same as the ternary form
}
// --------------------------------------------------------------
// Conversion operators
// --------------------------------------------------------------
// Alternative method: ToSqlString()
public static explicit operator SqlString(SqlChars value) {
    // Delegates to the instance conversion method.
    return value.ToSqlString();
}
// Alternative method: constructor SqlChars(SqlString)
public static explicit operator SqlChars(SqlString value) {
    // Delegates to the SqlChars(SqlString) constructor.
    return new SqlChars(value);
}
// --------------------------------------------------------------
// Private utility functions
// --------------------------------------------------------------
// Debug-only invariant checks; calls are compiled away outside DEBUG builds.
[System.Diagnostics.Conditional("DEBUG")]
private void AssertValid() {
    Debug.Assert(m_state >= SqlBytesCharsState.Null && m_state <= SqlBytesCharsState.Stream);

    // Non-null instances must have a consistent length/buffer/stream
    // combination; the Null state carries no further invariants here.
    if (!IsNull) {
        Debug.Assert((m_lCurLen >= 0 && m_lCurLen <= x_lMaxLen) || FStream());
        Debug.Assert(FStream() || (m_rgchBuf != null && m_lCurLen <= m_rgchBuf.Length));
        Debug.Assert(!FStream() || (m_lCurLen == x_lNull));
    }

    // The indexer's scratch buffer, when allocated, is always exactly one char.
    Debug.Assert(m_rgchWorkBuf == null || m_rgchWorkBuf.Length == 1);
}
// whether the SqlChars contains a Stream
internal bool FStream() {
    // True only in the Stream-backed state.
    return m_state == SqlBytesCharsState.Stream;
}
// Copy the data from the Stream to the array buffer and switch this
// instance into the Buffer state. If the SqlChars doesn't hold a buffer,
// or the buffer is not big enough, a new char array is allocated.
// Throws SqlTypeException when the stream is too long to fit in a buffer.
private void CopyStreamToBuffer() {
    Debug.Assert(FStream());
    long lStreamLen = m_stream.Length;
    if (lStreamLen >= x_lMaxLen)
        throw new SqlTypeException(Res.GetString(Res.SqlMisc_BufferInsufficientMessage));
    if (m_rgchBuf == null || m_rgchBuf.Length < lStreamLen)
        m_rgchBuf = new char[lStreamLen];
    if (m_stream.Position != 0)
        m_stream.Seek(0, SeekOrigin.Begin);
    // Read may legally return fewer elements than requested, so loop until
    // the whole stream has been drained (or it reports end-of-data early).
    // The original single Read call silently dropped data on partial reads.
    int cchTotal = 0;
    while (cchTotal < (int)lStreamLen) {
        int cchRead = m_stream.Read(m_rgchBuf, cchTotal, (int)lStreamLen - cchTotal);
        if (cchRead <= 0)
            break;
        cchTotal += cchRead;
    }
    m_stream = null;
    m_lCurLen = lStreamLen;
    m_state = SqlBytesCharsState.Buffer;
    AssertValid();
}
// Adopts the given char array as the backing buffer.
// A null array puts the instance into the Null state; otherwise the full
// array length becomes the current data length. Any attached stream is dropped.
private void SetBuffer(char[] buffer) {
    m_rgchBuf = buffer;
    if (buffer == null) {
        m_lCurLen = x_lNull;
        m_state = SqlBytesCharsState.Null;
    }
    else {
        m_lCurLen = (long)buffer.Length;
        m_state = SqlBytesCharsState.Buffer;
    }
    m_stream = null;
    AssertValid();
}
// --------------------------------------------------------------
// XML Serialization
// --------------------------------------------------------------
// IXmlSerializable: no inline schema is provided (GetXsdType supplies the type name).
XmlSchema IXmlSerializable.GetSchema() {
    return null;
}
// IXmlSerializable: reads either an xsi:nil element (-> Null state) or the
// element's text content into a fresh char buffer.
void IXmlSerializable.ReadXml(XmlReader r) {
    char[] value = null;
    string isNull = r.GetAttribute("nil", XmlSchema.InstanceNamespace);
    if (isNull != null && XmlConvert.ToBoolean(isNull)) {
        // VSTFDevDiv# 479603 - SqlTypes read null value infinitely and never read the next value.
        // Fix: consume the element so the reader advances past it.
        r.ReadElementString();
        SetNull();
    }
    else {
        value = r.ReadElementString().ToCharArray();
        SetBuffer(value);
    }
}
// IXmlSerializable: writes xsi:nil="true" for a null instance, otherwise the
// first Length chars of the buffer as element text.
void IXmlSerializable.WriteXml(XmlWriter writer) {
    if (IsNull) {
        writer.WriteAttributeString("xsi", "nil", XmlSchema.InstanceNamespace, "true");
    }
    else {
        char[] value = this.Buffer;
        writer.WriteString(new String(value, 0, (int)(this.Length)));
    }
}
// Returns the XSD type for this CLR type: xs:string.
public static XmlQualifiedName GetXsdType(XmlSchemaSet schemaSet) {
    return new XmlQualifiedName("string", XmlSchema.Namespace);
}
// --------------------------------------------------------------
// Serialization using ISerializable
// --------------------------------------------------------------
// State information is not saved. The current state is converted to Buffer and only the underlying
// array is serialized, except for Null, in which case this state is kept.
// ISerializable: only the underlying array (or the Null flag) is persisted.
// A stream-backed instance is first converted to Buffer state, then falls
// through to the Buffer case via goto.
[SecurityPermissionAttribute(SecurityAction.LinkDemand,SerializationFormatter=true)]
void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) {
    switch (m_state)
    {
        case SqlBytesCharsState.Null:
            info.AddValue("IsNull", true);
            break;
        case SqlBytesCharsState.Buffer:
            info.AddValue("IsNull", false);
            info.AddValue("data", m_rgchBuf);
            break;
        case SqlBytesCharsState.Stream:
            // Materialize the stream so the Buffer case can serialize it.
            CopyStreamToBuffer();
            goto case SqlBytesCharsState.Buffer;
        default:
            // Unreachable for a valid instance; serialize as Null in release builds.
            Debug.Assert(false);
            goto case SqlBytesCharsState.Null;
    }
}
// --------------------------------------------------------------
// Static fields, properties
// --------------------------------------------------------------
// Get a Null instance.
// Since SqlChars is mutable, have to be property and create a new one each time.
// Gets a Null instance. Since SqlChars is mutable this must be a property
// that creates a fresh instance on every access.
public static SqlChars Null {
    get {
        return new SqlChars((char[])null);
    }
}
} // class SqlChars
// StreamOnSqlChars is a stream built on top of SqlChars, and
// provides the Stream interface. The purpose is to help users
// to read/write SqlChars object.
internal sealed class StreamOnSqlChars : SqlStreamChars
{
    // --------------------------------------------------------------
    // Data members
    // --------------------------------------------------------------
    private SqlChars m_sqlchars;    // the SqlChars object; null once the stream has been closed
    private long m_lPosition;       // current read/write position within the SqlChars data

    // --------------------------------------------------------------
    // Constructor(s)
    // --------------------------------------------------------------
    internal StreamOnSqlChars(SqlChars s) {
        m_sqlchars = s;
        m_lPosition = 0;
    }

    // --------------------------------------------------------------
    // Public properties
    // --------------------------------------------------------------
    // Null when there is no SqlChars or the SqlChars itself is null.
    public override bool IsNull {
        get {
            return m_sqlchars == null || m_sqlchars.IsNull;
        }
    }

    // Always can read/write/seek, unless sb is null,
    // which means the stream has been closed.
    public override bool CanRead {
        get {
            return m_sqlchars != null && !m_sqlchars.IsNull;
        }
    }

    public override bool CanSeek {
        get {
            return m_sqlchars != null;
        }
    }

    public override bool CanWrite {
        get {
            // Writable when non-null data exists, or when a buffer is present
            // that a write could populate.
            return m_sqlchars != null && (!m_sqlchars.IsNull || m_sqlchars.m_rgchBuf != null);
        }
    }

    public override long Length {
        get {
            CheckIfStreamClosed("get_Length");
            return m_sqlchars.Length;
        }
    }

    public override long Position {
        get {
            CheckIfStreamClosed("get_Position");
            return m_lPosition;
        }
        set {
            CheckIfStreamClosed("set_Position");
            if (value < 0 || value > m_sqlchars.Length)
                throw new ArgumentOutOfRangeException("value");
            else
                m_lPosition = value;
        }
    }

    // --------------------------------------------------------------
    // Public methods
    // --------------------------------------------------------------
    // Moves the position; the resulting position must stay within [0, Length].
    public override long Seek(long offset, SeekOrigin origin) {
        CheckIfStreamClosed("Seek");
        long lPosition = 0;
        switch(origin) {
            case SeekOrigin.Begin:
                if (offset < 0 || offset > m_sqlchars.Length)
                    throw ADP.ArgumentOutOfRange("offset");
                m_lPosition = offset;
                break;
            case SeekOrigin.Current:
                lPosition = m_lPosition + offset;
                if (lPosition < 0 || lPosition > m_sqlchars.Length)
                    throw ADP.ArgumentOutOfRange("offset");
                m_lPosition = lPosition;
                break;
            case SeekOrigin.End:
                lPosition = m_sqlchars.Length + offset;
                if (lPosition < 0 || lPosition > m_sqlchars.Length)
                    throw ADP.ArgumentOutOfRange("offset");
                m_lPosition = lPosition;
                break;
            default:
                // (Fixed: original had a stray empty statement here.)
                throw ADP.ArgumentOutOfRange("offset");
        }
        return m_lPosition;
    }

    // The Read/Write/ReadChar/WriteChar simply delegate to SqlChars
    // after validating arguments, then advance the position.
    public override int Read(char[] buffer, int offset, int count) {
        CheckIfStreamClosed("Read");
        if (buffer==null)
            throw new ArgumentNullException("buffer");
        if (offset < 0 || offset > buffer.Length)
            throw new ArgumentOutOfRangeException("offset");
        if (count < 0 || count > buffer.Length - offset)
            throw new ArgumentOutOfRangeException("count");
        int icharsRead = (int)m_sqlchars.Read(m_lPosition, buffer, offset, count);
        m_lPosition += icharsRead;
        return icharsRead;
    }

    public override void Write(char[] buffer, int offset, int count) {
        CheckIfStreamClosed("Write");
        if (buffer==null)
            throw new ArgumentNullException("buffer");
        if (offset < 0 || offset > buffer.Length)
            throw new ArgumentOutOfRangeException("offset");
        if (count < 0 || count > buffer.Length - offset)
            throw new ArgumentOutOfRangeException("count");
        m_sqlchars.Write(m_lPosition, buffer, offset, count);
        m_lPosition += count;
    }

    public override int ReadChar() {
        CheckIfStreamClosed("ReadChar");
        // If at the end of stream, return -1, rather than call SqlChars.ReadChar,
        // which would throw an exception. This matches Stream behavior.
        //
        if (m_lPosition >= m_sqlchars.Length)
            return -1;
        int ret = m_sqlchars[m_lPosition];
        m_lPosition ++;
        return ret;
    }

    public override void WriteChar(char value) {
        CheckIfStreamClosed("WriteChar");
        m_sqlchars[m_lPosition] = value;
        m_lPosition ++;
    }

    // Truncates/extends the underlying SqlChars; clamps the position to the new length.
    public override void SetLength(long value) {
        CheckIfStreamClosed("SetLength");
        m_sqlchars.SetLength(value);
        if (m_lPosition > value)
            m_lPosition = value;
    }

    // Flush is a no-op unless the underlying SqlChars is itself stream-backed.
    public override void Flush() {
        if (m_sqlchars.FStream())
            m_sqlchars.m_stream.Flush();
    }

    protected override void Dispose(bool disposing) {
        // When m_sqlchars is null, it means the stream has been closed, and
        // any operation in the future should fail.
        // This is the only case where m_sqlchars is null.
        m_sqlchars = null;
    }

    // --------------------------------------------------------------
    // Private utility functions
    // --------------------------------------------------------------
    private bool FClosed() {
        return m_sqlchars == null;
    }

    private void CheckIfStreamClosed(string methodname) {
        if (FClosed())
            throw ADP.StreamClosed(methodname);
    }
} // class StreamOnSqlChars
} // namespace System.Data.SqlTypes
| |
/*
* SymInfoEnumerator.cs - Implementation of the
* "System.Diagnostics.SymbolStore.SymInfoEnumerator" class.
*
* Copyright (C) 2003 Southern Storm Software, Pty Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
namespace System.Diagnostics.SymbolStore
{
#if CONFIG_EXTENDED_DIAGNOSTICS
using System.Collections;
// Utility class for enumerating over all of the data blocks
// that are associated with a particular token.
internal class SymInfoEnumerator : IEnumerator
{
    // Internal state.
    private SymReader reader;   // source of the raw symbol data
    private int token;          // token of the current data block
    private int start;          // first index-table entry for the requested token
    private int num;            // number of index entries to enumerate
    private int index;          // current enumerator position in [0, num); -1 before MoveNext
    private int type;           // type code of the current data block
    private int offset;         // offset of the current block's payload within reader.data
    private int length;         // payload length of the current data block
    private int readPosn;       // read cursor within the payload, used by GetNextInt

    // Constructors.
    // Enumerate the blocks associated with a named pseudo-token.
    public SymInfoEnumerator(SymReader reader, String name)
        : this(reader, Utils.CreatePseudoToken(name)) {}

    // Enumerate the blocks associated with a particular token.
    // The index table consists of 8-byte entries (int32 token, int32 offset),
    // sorted by token (compared as unsigned), so a binary search applies.
    public SymInfoEnumerator(SymReader reader, int token)
    {
        // Save the reader and token information for later.
        this.reader = reader;
        this.token = token;
        this.index = -1;
        if(reader == null || reader.data == null)
        {
            // There is no symbol information to be processed.
            this.start = 0;
            this.num = 0;
            return;
        }
        // Locate the token information in the symbol data.
        int left = 0;
        int right = reader.numIndexEntries - 1;
        int middle, temp;
        while(left <= right)
        {
            middle = left + (right - left) / 2;
            temp = Utils.ReadInt32
                (reader.data, reader.indexOffset + middle * 8);
            // Tokens are ordered by their unsigned value.
            if(((uint)temp) < ((uint)token))
            {
                left = middle + 1;
            }
            else if(((uint)temp) > ((uint)token))
            {
                right = middle - 1;
            }
            else
            {
                // We've found an entry: search forwards and
                // backwards to find the extent of the token
                // (multiple consecutive entries may share it).
                left = middle;
                while(left > 0)
                {
                    temp = Utils.ReadInt32
                        (reader.data,
                         reader.indexOffset + (left - 1) * 8);
                    if(temp == token)
                    {
                        --left;
                    }
                    else
                    {
                        break;
                    }
                }
                right = middle;
                while(right < (reader.numIndexEntries - 1))
                {
                    temp = Utils.ReadInt32
                        (reader.data,
                         reader.indexOffset + (right + 1) * 8);
                    if(temp == token)
                    {
                        ++right;
                    }
                    else
                    {
                        break;
                    }
                }
                this.start = left;
                this.num = right - left + 1;
                return;
            }
        }
        // We were unable to find the token data.
        this.start = 0;
        this.num = 0;
    }

    public SymInfoEnumerator(SymReader reader)
    {
        // This version enumerates over all of the data blocks.
        this.reader = reader;
        this.token = 0;
        this.start = 0;
        if(reader != null)
        {
            this.num = reader.numIndexEntries;
        }
        else
        {
            this.num = 0;
        }
        this.index = -1;
    }

    // Implement the IEnumerator interface.
    // Advances to the next data block, decoding its token, offset, type and
    // length. Returns false at the end or when the data is malformed.
    public bool MoveNext()
    {
        if(++index < num)
        {
            // Read the information about this data block
            // from its 8-byte index entry (token, offset).
            token = Utils.ReadInt32
                (reader.data,
                 reader.indexOffset + (start + index) * 8);
            offset = Utils.ReadInt32
                (reader.data,
                 reader.indexOffset + (start + index) * 8 + 4);
            if(offset < 0 || offset >= reader.data.Length)
            {
                // Offset is outside the data: treat as end of enumeration.
                return false;
            }
            // Get the type and length information, each stored as a
            // metadata-encoded integer preceding the payload.
            int size;
            type = Utils.ReadMetaInt
                (reader.data, offset, out size);
            offset += size;
            length = Utils.ReadMetaInt
                (reader.data, offset, out size);
            offset += size;
            if(length < 0 || (reader.data.Length - offset) < length)
            {
                // Payload would run past the end of the data: bail out.
                return false;
            }
            // Ready to process this data item.
            readPosn = offset;
            return true;
        }
        else
        {
            return false;
        }
    }

    public void Reset()
    {
        index = -1;
    }

    // Current yields the token of the current block (boxed as Object).
    public Object Current
    {
        get
        {
            return token;
        }
    }

    // Get the number of data blocks that will be enumerated.
    public int Count
    {
        get
        {
            return num;
        }
    }

    // Get additional information about the data block.
    public int Token
    {
        get
        {
            return token;
        }
    }

    public int Type
    {
        get
        {
            return type;
        }
    }

    public int Offset
    {
        get
        {
            return offset;
        }
    }

    public int Length
    {
        get
        {
            return length;
        }
    }

    // Returns a copy of the current block's payload bytes.
    public byte[] Data
    {
        get
        {
            byte[] data = new byte [length];
            Array.Copy(reader.data, offset, data, 0, length);
            return data;
        }
    }

    // Get the next metadata-encoded integer from the data section,
    // or -1 once the payload has been exhausted.
    public int GetNextInt()
    {
        int value, size;
        if(readPosn < (offset + length))
        {
            value = Utils.ReadMetaInt(reader.data, readPosn, out size);
            readPosn += size;
            return value;
        }
        else
        {
            return -1;
        }
    }
}; // class SymInfoEnumerator
#endif // CONFIG_EXTENDED_DIAGNOSTICS
}; // namespace System.Diagnostics.SymbolStore
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using java.lang;
using java.util;
using stab.query;
using stab.reflection;
using cnatural.syntaxtree;
namespace cnatural.compiler {
// Pair of branch targets used while generating conditional expressions:
// control transfers to ifTrue when the condition holds and to ifFalse otherwise.
class TargetLabels {
    LabelMarker ifTrue;
    LabelMarker ifFalse;
    TargetLabels(LabelMarker ifTrue, LabelMarker ifFalse) {
        this.ifTrue = ifTrue;
        this.ifFalse = ifFalse;
    }
}
class ExpressionGenerator : ExpressionHandler<TargetLabels, Void> {
private BytecodeGenerator bytecodeGenerator;
private CompilerContext context;
private ExpressionTreeGenerator expressionTreeGenerator;
// Wires this expression visitor to the enclosing bytecode generator and
// compiler context; super(true) configures the base ExpressionHandler.
ExpressionGenerator(BytecodeGenerator bytecodeGenerator, CompilerContext context)
    : super(true) {
    this.bytecodeGenerator = bytecodeGenerator;
    this.context = context;
    this.expressionTreeGenerator = new ExpressionTreeGenerator(context);
}
// Entry point for emitting an expression. Records a line-number entry when the
// source line advances, short-circuits compile-time constants (emitting the
// constant directly instead of visiting the tree), and otherwise dispatches
// to the specific handle* method via the base class.
public override Void handleExpression(ExpressionNode expression, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    if (expression.Line > context.MethodGenerationContext.PreviousLineNumber) {
        var label = generator.defineLabel();
        generator.markLabel(label);
        // NOTE(review): Line + 1 presumably converts a 0-based AST line to the
        // 1-based line numbers of the class file — confirm against the parser.
        generator.lineNumber(expression.Line + 1, label);
        context.MethodGenerationContext.PreviousLineNumber = expression.Line;
    }
    var info = expression.getUserData(typeof(ExpressionInfo));
    if (info == null) {
        // No expression info means the expression is the literal null.
        generator.emit(Opcode.Aconst_Null);
        return null;
    }
    if (!info.IsConstant) {
        // Non-constant: let the base class dispatch to the specific handler.
        return super.handleExpression(expression, labels, nested);
    }
    // Constant folding: emit the constant value directly.
    var value = info.Value;
    if (info.Type.IsBoolean) {
        if (((Boolean)value).booleanValue()) {
            generator.emit(Opcode.Iconst_1);
        } else {
            generator.emit(Opcode.Iconst_0);
        }
        emitBoxing(expression);
        // When branch targets were supplied, also emit the conditional jump
        // (inverted when the expression carries a pending negation).
        if (labels != null) {
            generator.emit((info.Negate) ? Opcode.Ifeq : Opcode.Ifne, labels.ifFalse);
        }
    } else {
        switch (info.Type.NumericTypeKind) {
        case Char:
        case Byte:
        case Short:
        case Int:
            BytecodeHelper.emitIntConstant(generator, value);
            break;
        case Long:
            BytecodeHelper.emitLongConstant(generator, value);
            break;
        case Float:
            BytecodeHelper.emitFloatConstant(generator, value);
            break;
        case Double:
            BytecodeHelper.emitDoubleConstant(generator, value);
            break;
        default:
            // Non-numeric constants (e.g. strings) go through the constant pool.
            generator.emit(Opcode.Ldc, value);
            break;
        }
        emitBoxing(expression);
    }
    return null;
}
// Emits instantiation of a compiler-generated anonymous type: New, an optional
// Dup (only when the result is consumed by an enclosing expression), the member
// initializer values as constructor arguments, then Invokespecial <init>.
protected override Void handleAnonymousObjectCreation(AnonymousObjectCreationExpressionNode anonymousObject,
        TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var type = anonymousObject.getUserData(typeof(ExpressionInfo)).Type;
    // Make sure the anonymous type's class body has been generated.
    CompilerHelper.initializeAnonymousType(context, type);
    var constructor = type.Methods.where(p => p.Name.equals("<init>")).first();
    generator.emit(Opcode.New, type);
    if (nested) {
        // Keep a reference on the stack for the surrounding expression.
        generator.emit(Opcode.Dup);
    }
    foreach (var decl in anonymousObject.MemberDeclarators) {
        handleExpression(decl.Value, null, true);
    }
    generator.emit(Opcode.Invokespecial, constructor);
    return null;
}
// Emits creation of an array: pushes each explicit dimension expression, then
// delegates element/array construction to bytecodeGenerator.emitArray.
protected override Void handleArrayCreation(ArrayCreationExpressionNode arrayCreation, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    foreach (var e in arrayCreation.DimensionExpressions) {
        emitNestedExpression(e, context.TypeSystem.IntType);
    }
    var type = arrayCreation.getUserData(typeof(ExpressionInfo)).Type;
    var initializer = arrayCreation.Initializer;
    int dimensions = arrayCreation.DimensionExpressions.size();
    if (dimensions <= 1) {
        if (dimensions == 0) {
            // No explicit dimensions: the length comes from the initializer.
            // NOTE(review): initializer is dereferenced here but null-checked
            // below — presumably the semantic analyzer guarantees an
            // initializer when no dimensions are given; verify.
            BytecodeHelper.emitIntConstant(generator, initializer.Values.size());
        }
    }
    bytecodeGenerator.emitArray(dimensions, type, (initializer == null) ? null : initializer.Values.iterator());
    return null;
}
// Emits a bare array initializer ({ a, b, ... }): pushes the element count,
// then lets emitArray build and fill the array from the values.
protected override Void handleArrayInitializer(ArrayInitializerExpressionNode arrayInitializer, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var type = arrayInitializer.getUserData(typeof(ExpressionInfo)).Type;
    BytecodeHelper.emitIntConstant(generator, arrayInitializer.Values.size());
    bytecodeGenerator.emitArray(0, type, arrayInitializer.Values.iterator());
    return null;
}
// Emits an assignment: the dedicated AssignExpressionGenerator handles the
// left-hand side stores; afterwards the value is boxed if needed and, when
// branch labels are supplied, tested for the enclosing conditional.
protected override Void handleAssign(AssignExpressionNode assign, TargetLabels labels, bool nested) {
    new AssignExpressionGenerator(context, this).handleExpression(assign.Left, assign, nested);
    emitBoxing(assign);
    emitTest(assign.getUserData(typeof(ExpressionInfo)), labels);
    return null;
}
// Emits code for a binary expression. Arithmetic and bitwise operators pick a
// type-specific JVM opcode; string '+' builds a StringBuilder append chain;
// comparisons and logical operators either branch to the supplied TargetLabels
// or materialize a 0/1 int when no labels are given; 'instanceof', 'as' and
// '??' get dedicated sequences. Returns null (Void).
protected override Void handleBinary(BinaryExpressionNode binary, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var info = binary.getUserData(typeof(ExpressionInfo));
    var left = binary.LeftOperand;
    var right = binary.RightOperand;
    var li = left.getUserData(typeof(ExpressionInfo));
    var ri = right.getUserData(typeof(ExpressionInfo));
    // Zero operands let comparisons use the single-operand if<cond> opcodes.
    var leftIsZero = li != null && CompilerHelper.isZero(li);
    var rightIsZero = ri != null && CompilerHelper.isZero(ri);
    switch (binary.Operator) {
    case Add:
        var stringType = context.TypeSystem.StringType;
        if (info.Type == stringType) {
            // String concatenation: reuse one StringBuilder across a whole
            // chain of '+' by tracking IsBuildingString on the context.
            var isBuildingString = context.MethodGenerationContext.IsBuildingString;
            if (!isBuildingString) {
                generator.emit(Opcode.New, context.StringBuilderType);
                generator.emit(Opcode.Dup);
                generator.emit(Opcode.Invokespecial, context.StringBuilderConstructor);
            }
            var isString = li != null && li.Type == stringType;
            var isStringAdd = left.ExpressionKind == ExpressionKind.Binary && isString;
            context.MethodGenerationContext.IsBuildingString = isStringAdd;
            handleExpression(left, null, true);
            // A nested string-add appends its own pieces — unless it was a
            // constant, which bypasses handleBinary and must be appended here.
            if (!isStringAdd || (isString && li.IsConstant)) {
                BytecodeGenerator.emitStringBuilderAppend(context, li);
            }
            isString = ri != null && ri.Type == stringType;
            isStringAdd = right.ExpressionKind == ExpressionKind.Binary && isString;
            context.MethodGenerationContext.IsBuildingString = isStringAdd;
            handleExpression(right, null, true);
            // FIX: the guard previously tested li.IsConstant; the right
            // operand's constness (ri) decides whether its append was skipped.
            if (!isStringAdd || (isString && ri.IsConstant)) {
                BytecodeGenerator.emitStringBuilderAppend(context, ri);
            }
            if (!isBuildingString) {
                generator.emit(Opcode.Invokevirtual, context.ObjectToStringMethod);
            }
            context.MethodGenerationContext.IsBuildingString = isBuildingString;
        } else {
            emitOperands(left, false, right, false);
            switch (info.Type.NumericTypeKind) {
            case Byte:
            case Char:
            case Short:
            case Int:
                generator.emit(Opcode.Iadd);
                break;
            case Long:
                generator.emit(Opcode.Ladd);
                break;
            case Float:
                generator.emit(Opcode.Fadd);
                break;
            case Double:
                generator.emit(Opcode.Dadd);
                break;
            default:
                // Delegate '+' combines two delegates via Delegate.combine.
                if (BytecodeHelper.isDelegateType(info.Type)) {
                    var delegateType = context.TypeSystem.getType("stab/lang/Delegate");
                    var argTypes = new ArrayList<TypeInfo> { delegateType, delegateType };
                    generator.emit(Opcode.Invokestatic, delegateType.getMethod("combine", argTypes));
                    generator.emit(Opcode.Checkcast, info.Type);
                } else {
                    throw new Exception("Internal error");
                }
                break;
            }
            emitBoxing(binary);
        }
        break;
    case Subtract:
        emitOperands(left, false, right, false);
        switch (info.getType().NumericTypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int:
            generator.emit(Opcode.Isub);
            break;
        case Long:
            generator.emit(Opcode.Lsub);
            break;
        case Float:
            generator.emit(Opcode.Fsub);
            break;
        case Double:
            generator.emit(Opcode.Dsub);
            break;
        default:
            // Delegate '-' removes an invocation via Delegate.remove.
            if (BytecodeHelper.isDelegateType(info.Type)) {
                var delegateType = context.getTypeSystem().getType("stab/lang/Delegate");
                var argTypes = new ArrayList<TypeInfo> { delegateType, delegateType };
                generator.emit(Opcode.Invokestatic, delegateType.getMethod("remove", argTypes));
                generator.emit(Opcode.Checkcast, info.Type);
            } else {
                throw new RuntimeException("Internal error");
            }
            break;
        }
        emitBoxing(binary);
        break;
    case Multiply:
        emitOperands(left, false, right, false);
        Opcode opcode;
        switch (info.Type.NumericTypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int:
            opcode = Opcode.Imul;
            break;
        case Long:
            opcode = Opcode.Lmul;
            break;
        case Float:
            opcode = Opcode.Fmul;
            break;
        case Double:
            opcode = Opcode.Dmul;
            break;
        default:
            throw new Exception("Internal error");
        }
        generator.emit(opcode);
        emitBoxing(binary);
        break;
    case Divide:
        emitOperands(left, false, right, false);
        switch (info.Type.NumericTypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int:
            opcode = Opcode.Idiv;
            break;
        case Long:
            opcode = Opcode.Ldiv;
            break;
        case Float:
            opcode = Opcode.Fdiv;
            break;
        case Double:
            opcode = Opcode.Ddiv;
            break;
        default:
            throw new Exception("Internal error");
        }
        generator.emit(opcode);
        emitBoxing(binary);
        break;
    case Modulo:
        emitOperands(left, false, right, false);
        switch (info.Type.NumericTypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int:
            opcode = Opcode.Irem;
            break;
        case Long:
            opcode = Opcode.Lrem;
            break;
        case Float:
            opcode = Opcode.Frem;
            break;
        case Double:
            opcode = Opcode.Drem;
            break;
        default:
            throw new RuntimeException("Internal error");
        }
        generator.emit(opcode);
        emitBoxing(binary);
        break;
    case And:
        // Bitwise/boolean '&' — both operands always evaluated.
        emitOperands(left, false, right, false);
        if (info.Type.IsBoolean) {
            opcode = Opcode.Iand;
        } else {
            switch (info.Type.NumericTypeKind) {
            case Byte:
            case Char:
            case Short:
            case Int:
                opcode = Opcode.Iand;
                break;
            case Long:
                opcode = Opcode.Land;
                break;
            default:
                throw new Exception("Internal error");
            }
        }
        generator.emit(opcode);
        emitBoxing(binary);
        break;
    case Or:
        emitOperands(left, false, right, false);
        if (info.Type.IsBoolean) {
            opcode = Opcode.Ior;
        } else {
            switch (info.Type.NumericTypeKind) {
            case Byte:
            case Char:
            case Short:
            case Int:
                opcode = Opcode.Ior;
                break;
            case Long:
                opcode = Opcode.Lor;
                break;
            default:
                throw new Exception("Internal error");
            }
        }
        generator.emit(opcode);
        emitBoxing(binary);
        break;
    case Xor:
        emitOperands(left, false, right, false);
        if (info.Type.IsBoolean) {
            opcode = Opcode.Ixor;
        } else {
            switch (info.Type.NumericTypeKind) {
            case Byte:
            case Char:
            case Short:
            case Int:
                opcode = Opcode.Ixor;
                break;
            case Long:
                opcode = Opcode.Lxor;
                break;
            default:
                throw new Exception("Internal error");
            }
        }
        generator.emit(opcode);
        emitBoxing(binary);
        break;
    case LessThan:
        // With labels: branch directly (opcodes are inverted — jump on the
        // FALSE outcome to labels.ifFalse). Without labels: materialize 0/1.
        if (labels != null) {
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifle, Opcode.Ifge, Opcode.If_icmpge, labels.ifFalse);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifgt, Opcode.Iflt, Opcode.If_icmplt, labels.ifFalse);
            }
        } else {
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifle, Opcode.Ifge, Opcode.If_icmpge, elseLabel);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifgt, Opcode.Iflt, Opcode.If_icmplt, elseLabel);
            }
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case LessThanOrEqual:
        if (labels != null) {
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Iflt, Opcode.Ifgt, Opcode.If_icmpgt, labels.ifFalse);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifge, Opcode.Ifle, Opcode.If_icmple, labels.ifFalse);
            }
        } else {
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Iflt, Opcode.Ifgt, Opcode.If_icmpgt, elseLabel);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifge, Opcode.Ifle, Opcode.If_icmple, elseLabel);
            }
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case GreaterThan:
        if (labels != null) {
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifge, Opcode.Ifle, Opcode.If_icmple, labels.ifFalse);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Iflt, Opcode.Ifgt, Opcode.If_icmpgt, labels.ifFalse);
            }
        } else {
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifge, Opcode.Ifle, Opcode.If_icmple, elseLabel);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Iflt, Opcode.Ifgt, Opcode.If_icmpgt, elseLabel);
            }
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case GreaterThanOrEqual:
        if (labels != null) {
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifgt, Opcode.Iflt, Opcode.If_icmplt, labels.ifFalse);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifle, Opcode.Ifge, Opcode.If_icmpge, labels.ifFalse);
            }
        } else {
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            if (info.Negate) {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifgt, Opcode.Iflt, Opcode.If_icmplt, elseLabel);
            } else {
                emitComparison(binary, leftIsZero, rightIsZero, Opcode.Ifle, Opcode.Ifge, Opcode.If_icmpge, elseLabel);
            }
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case Equal:
        if (labels != null) {
            emitEquality(binary, leftIsZero, rightIsZero, labels.ifFalse, info.Negate);
        } else {
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            emitEquality(binary, leftIsZero, rightIsZero, elseLabel, info.Negate);
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case NotEqual:
        // Same as Equal with the negation flag flipped.
        if (labels != null) {
            emitEquality(binary, leftIsZero, rightIsZero, labels.ifFalse, !info.Negate);
        } else {
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            emitEquality(binary, leftIsZero, rightIsZero, elseLabel, !info.Negate);
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case LogicalAnd:
        // Short-circuit '&&': the right operand is only evaluated when the
        // left one did not already decide the outcome.
        if (labels != null) {
            if (info.Negate) {
                var then2Label = generator.defineLabel();
                handleExpression(left, new TargetLabels(then2Label, labels.ifFalse), true);
                generator.markLabel(then2Label);
                handleExpression(right, labels, true);
            } else {
                var then2Label = generator.defineLabel();
                handleExpression(left, new TargetLabels(then2Label, labels.ifTrue), true);
                ri.Negate = false;
                handleExpression(right, labels, true);
                generator.markLabel(then2Label);
            }
        } else {
            var thenLabel = generator.defineLabel();
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            handleExpression(left, new TargetLabels(thenLabel, elseLabel), true);
            handleExpression(right, new TargetLabels(thenLabel, elseLabel), true);
            generator.markLabel(thenLabel);
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    case LogicalOr: {
        // Short-circuit '||'.
        if (labels != null) {
            if (info.Negate) {
                var then2Label = generator.defineLabel();
                li.Negate = false;
                handleExpression(left, new TargetLabels(then2Label, labels.ifTrue), true);
                generator.markLabel(then2Label);
                handleExpression(right, labels, true);
            } else {
                var then2Label = generator.defineLabel();
                li.Negate = false;
                handleExpression(left, new TargetLabels(then2Label, labels.ifFalse), true);
                generator.markLabel(then2Label);
                ri.Negate = false;
                handleExpression(right, labels, true);
            }
        } else {
            var thenLabel = generator.defineLabel();
            var then2Label = generator.defineLabel();
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            li.Negate = false;
            handleExpression(left, new TargetLabels(then2Label, thenLabel), true);
            generator.markLabel(then2Label);
            handleExpression(right, new TargetLabels(thenLabel, elseLabel), true);
            generator.markLabel(thenLabel);
            generator.emit(Opcode.Iconst_1);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.markLabel(endLabel);
        }
        emitBoxing(binary);
        break;
    }
    case LeftShift:
        handleExpression(left, null, true);
        handleExpression(right, null, true);
        if (info.Type == context.TypeSystem.IntType) {
            generator.emit(Opcode.Ishl);
        } else if (info.Type == context.TypeSystem.LongType) {
            generator.emit(Opcode.Lshl);
        } else {
            throw new Exception("Internal error");
        }
        emitBoxing(binary);
        break;
    case RightShift:
        handleExpression(left, null, true);
        handleExpression(right, null, true);
        if (info.Type == context.TypeSystem.IntType) {
            generator.emit(Opcode.Ishr);
        } else if (info.Type == context.TypeSystem.LongType) {
            generator.emit(Opcode.Lshr);
        } else {
            throw new Exception("Internal error");
        }
        emitBoxing(binary);
        break;
    case UnsignedRightShift:
        handleExpression(left, null, true);
        handleExpression(right, null, true);
        if (info.Type == context.TypeSystem.IntType) {
            generator.emit(Opcode.Iushr);
        } else if (info.Type == context.TypeSystem.LongType) {
            generator.emit(Opcode.Lushr);
        } else {
            throw new Exception("Internal error");
        }
        emitBoxing(binary);
        break;
    case Instanceof:
        handleExpression(left, null, true);
        generator.emit(Opcode.Instanceof, ri.Type);
        emitBoxing(binary);
        if (labels != null) {
            generator.emit(Opcode.Ifeq, labels.ifFalse);
        }
        break;
    case As: {
        // 'as': yields null when the operand is not an instance of the type.
        handleExpression(left, null, true);
        generator.emit(Opcode.Dup);
        generator.emit(Opcode.Instanceof, ri.Type);
        var elseLabel = generator.defineLabel();
        var endLabel = generator.defineLabel();
        generator.emit(Opcode.Ifne, elseLabel);
        generator.emit(Opcode.Pop);
        generator.emit(Opcode.Aconst_Null);
        generator.emit(Opcode.Goto, endLabel);
        generator.markLabel(elseLabel);
        generator.emit(Opcode.Checkcast, ri.Type);
        generator.markLabel(endLabel);
        emitBoxing(binary);
        break;
    }
    case NullCoalescing: {
        // '??': keep the left value when non-null, otherwise evaluate the right.
        handleExpression(left, null, true);
        generator.emit(Opcode.Dup);
        var endLabel = generator.defineLabel();
        generator.emit(Opcode.Ifnonnull, endLabel);
        generator.emit(Opcode.Pop);
        handleExpression(right, null, true);
        generator.markLabel(endLabel);
        emitBoxing(binary);
        break;
    }
    default:
        throw new Exception("Internal error");
    }
    return null;
}
// Emits a cast expression: evaluates the operand, then emits whichever
// conversion applies — a numeric widening/narrowing, a checkcast, or an
// unboxing call for primitive targets.
protected override Void handleCast(CastExpressionNode cast, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    handleExpression(cast.Expression, null, nested);
    var einfo = cast.Expression.getUserData(typeof(ExpressionInfo));
    var info = cast.getUserData(typeof(ExpressionInfo));
    var type = info.Type;
    if (einfo != null && einfo.Type != type) {
        if (einfo.Type.IsNumeric) {
            // Primitive-to-primitive numeric conversion (i2l, d2i, ...).
            BytecodeHelper.emitNumericConversion(generator, einfo.Type, type);
        } else if (info.OriginalType != null) {
            generator.emit(Opcode.Checkcast, info.OriginalType);
        } else if (info.Type.OriginalTypeDefinition != einfo.Type.OriginalTypeDefinition) {
            // Unbounded generic parameters erase to Object: no cast needed.
            if (!info.Type.IsGenericParameter || info.Type.GenericParameterBounds.any()) {
                if (type.IsPrimitive) {
                    if (type.TypeKind == TypeKind.Boolean) {
                        // Unbox via the wrapper's xxxValue() method.
                        var method = context.TypeSystem.getUnboxingMethod(type);
                        generator.emit(Opcode.Checkcast, method.DeclaringType);
                        generator.emit(Opcode.Invokevirtual, method);
                    } else {
                        if (einfo.Type.IsPrimitive) {
                            BytecodeHelper.emitNumericConversion(generator, einfo.Type, type);
                        } else {
                            var method = context.TypeSystem.getUnboxingMethod(type);
                            generator.emit(Opcode.Checkcast, method.DeclaringType);
                            generator.emit(Opcode.Invokevirtual, method);
                        }
                    }
                } else {
                    generator.emit(Opcode.Checkcast, type);
                }
            }
        }
    }
    emitBoxing(cast);
    emitTest(info, labels);
    return null;
}
// Emits `cond ? a : b`: the condition branches directly to then/else labels,
// each arm is converted to the conditional's common result type, and control
// joins at endLabel.
protected override Void handleConditional(ConditionalExpressionNode conditional, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var thenLabel = generator.defineLabel();
    var elseLabel = generator.defineLabel();
    var endLabel = generator.defineLabel();
    var info = conditional.getUserData(typeof(ExpressionInfo));
    // The condition is compiled for control flow (branch targets), not for a stack value.
    handleExpression(conditional.Condition, new TargetLabels(thenLabel, elseLabel), true);
    generator.markLabel(thenLabel);
    handleExpression(conditional.IfTrue, null, true);
    // Both arms are converted to the conditional's own type so the join point is consistent.
    BytecodeGenerator.emitConversion(context, info.Type, conditional.IfTrue);
    generator.emit(Opcode.Goto, endLabel);
    generator.markLabel(elseLabel);
    handleExpression(conditional.IfFalse, null, true);
    BytecodeGenerator.emitConversion(context, info.Type, conditional.IfFalse);
    generator.markLabel(endLabel);
    emitBoxing(conditional);
    emitTest(info, labels);
    return null;
}
// Emits element read access: an array load (`a[i]`) when the target is an
// array, otherwise an indexer get-accessor invocation.
protected override Void handleElementAccess(ElementAccessExpressionNode elementAccess, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var target = elementAccess.TargetObject;
    var tinfo = target.getUserData(typeof(ExpressionInfo));
    var ttype = tinfo.Type;
    if (!ttype.IsArray) {
        // Indexer: push the receiver (unless static), then the index arguments, then call the getter.
        var method = elementAccess.getUserData(typeof(ExpressionInfo)).Member.GetAccessor;
        if (!method.IsStatic) {
            handleExpression(target, null, true);
        }
        var arguments = elementAccess.Indexes;
        emitArguments(arguments, method.Parameters, method.Parameters.count(), method.IsVarargs);
        CompilerHelper.emitIndexerAccess(context, method);
        // Generic erasure may require casting the raw return type to the instantiated one.
        BytecodeGenerator.emitGenericCast(context, method.ReturnType, method.OriginalMethodDefinition.ReturnType);
    } else {
        // Array load: push array ref, push index widened/narrowed to int, then the
        // element-type-specific *aload opcode.
        handleExpression(target, null, true);
        var index = elementAccess.Indexes[0];
        handleExpression(index, null, true);
        BytecodeHelper.emitNumericConversion(generator, index.getUserData(typeof(ExpressionInfo)).Type, context.TypeSystem.IntType);
        generator.emit(BytecodeHelper.getAloadOpcode(ttype.ElementType));
    }
    emitBoxing(elementAccess);
    emitTest(elementAccess.getUserData(typeof(ExpressionInfo)), labels);
    return null;
}
// Emits a method invocation: receiver (if any), arguments (with extension
// methods rewritten so the receiver becomes the first argument), the call
// itself, and stack cleanup when the result is unused.
protected override Void handleInvocation(InvocationExpressionNode invocation, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var info = invocation.getUserData(typeof(ExpressionInfo));
    var method = info.Method;
    // Calls to conditionally-compiled or ignored methods are dropped entirely.
    if (method.IsExcludedFromCompilation || CompilerHelper.shouldIgnoreCalls(context, method)) {
        return null;
    }
    if (!method.IsStatic) {
        if (BytecodeHelper.isDelegateType(method.DeclaringType) && method.Name.equals("invoke")) {
            // Delegate invocation: the delegate object itself is the receiver.
            handleExpression(invocation.TargetObject, null, true);
        } else if (invocation.TargetObject.ExpressionKind == ExpressionKind.MemberAccess || invocation.TargetObject.ExpressionKind == ExpressionKind.NullSafeMemberAccess) {
            // `x.m(...)`: push x, casting to the declaring type if erasure demands it.
            var targetTarget = ((MemberAccessExpressionNode)invocation.TargetObject).TargetObject;
            handleExpression(targetTarget, null, true);
            BytecodeGenerator.emitGenericCast(context, method.DeclaringType, targetTarget.getUserData(typeof(ExpressionInfo)).Type);
        } else { // SimpleName
            // Unqualified call on the current instance.
            BytecodeGenerator.emitThisAccess(context, generator);
        }
    }
    var arguments = invocation.Arguments;
    if (info.IsExtension) {
        // Extension method: prepend the receiver to the argument list.
        var target = (MemberAccessExpressionNode)invocation.TargetObject;
        arguments = new ArrayList<ExpressionNode> { target.TargetObject };
        arguments.addAll(invocation.Arguments);
    }
    emitArguments(arguments, method.Parameters, method.Parameters.count(), method.IsVarargs);
    CompilerHelper.emitMethodAccess(context, method, invocation.TargetObject.getUserData(typeof(ExpressionInfo)).IsSuperCall);
    if (nested) {
        // Result is consumed: cast generic return value from its erased type if needed.
        var gm = method.OriginalMethodDefinition;
        if (method != gm) {
            BytecodeGenerator.emitGenericCast(context, method.ReturnType, gm.ReturnType);
        }
    } else {
        // Result is discarded: pop it (pop2 for category-2 long/double, nothing for void).
        switch (method.ReturnType.TypeKind) {
        case Long:
        case Double:
            generator.emit(Opcode.Pop2);
            break;
        case Void:
            break;
        default:
            generator.emit(Opcode.Pop);
            break;
        }
    }
    emitBoxing(invocation);
    emitTest(info, labels);
    return null;
}
// Emits a lambda expression. The lambda body was pre-bound to a method on a
// synthesized scope class; if the body turns out not to capture anything, the
// method is converted to a static method on the enclosing type and the empty
// scope class is discarded. Finally the lambda value is materialized either
// as a delegate or as an anonymous interface implementation.
protected override Void handleLambda(LambdaExpressionNode lambda, TargetLabels labels, bool nested) {
    var targetType = lambda.getUserData(typeof(ExpressionInfo)).Type;
    var typeBuilder = context.MethodGenerationContext.LambdaScope;
    // Expression-tree-typed lambdas are compiled to a tree structure, not bytecode.
    if (targetType.FullName.equals("stab/tree/ExpressionTree")) {
        expressionTreeGenerator.generateExpressionTree(lambda);
        return null;
    }
    var methodBuilder = lambda.getUserData(typeof(MethodBuilder));
    context.MethodGenerationContext.enterLambda(methodBuilder);
    var gen = methodBuilder.CodeGenerator;
    // Track whether THIS lambda's body touches the capture scope; save/restore the outer flag.
    var oldIsLambdaScopeUsed = context.MethodGenerationContext.IsLambdaScopeUsed;
    context.MethodGenerationContext.IsLambdaScopeUsed = false;
    gen.beginScope();
    bytecodeGenerator.handleStatement(lambda.Body, null);
    gen.endScope();
    var isLambdaScopeUsed = context.MethodGenerationContext.IsLambdaScopeUsed;
    if (!isLambdaScopeUsed) {
        // Non-capturing lambda: hoist the generated method out of the scope class onto
        // the type that contains the root method, as a static method.
        gen.removeThis();
        methodBuilder.setStatic(true);
        typeBuilder.undefineMethod(methodBuilder);
        var currentType = (TypeBuilder)context.MethodGenerationContext.RootMethod.DeclaringType;
        currentType.defineMethod(methodBuilder);
        // If the scope class only held this one method it is now useless: drop it.
        if (typeBuilder.Methods.count() == 1) {
            context.TypeBuilders.remove(typeBuilder);
            currentType.undefineNestedType(typeBuilder);
        }
    }
    context.MethodGenerationContext.leaveLambda();
    context.MethodGenerationContext.IsLambdaScopeUsed = oldIsLambdaScopeUsed;
    if (isLambdaScopeUsed) {
        BytecodeGenerator.getLambdaScope(context, context.MethodGenerationContext.CurrentMethod);
    }
    // Build the receiver argument for the delegate/interface wrapper: the scope
    // instance when captured state is used, otherwise null (static method).
    ExpressionNode arg = null;
    if (isLambdaScopeUsed) {
        arg = new SimpleNameExpressionNode();
        var ainfo = new ExpressionInfo(context.MethodGenerationContext.LambdaScope);
        if (context.MethodGenerationContext.IsInLambda) {
            // Nested lambda: the enclosing lambda's `this` IS the scope instance.
            ainfo.Member = new LocalMemberInfo("this", typeBuilder, context.MethodGenerationContext.CurrentMethod, false);
        } else {
            ainfo.Member = new LocalMemberInfo("lambda$scope", typeBuilder, context.MethodGenerationContext.CurrentMethod, false);
        }
        arg.addUserData(ainfo);
    }
    if (BytecodeHelper.isDelegateType(targetType)) {
        emitDelegateCreation(targetType, methodBuilder, arg, nested);
    } else {
        emitInterfaceCreation(targetType, methodBuilder, arg, nested);
    }
    return null;
}
// Emits `x.member` for a read: a method group becomes a delegate, a field
// becomes a field read, and a property becomes a get-accessor call.
protected override Void handleMemberAccess(MemberAccessExpressionNode memberAccess, TargetLabels labels, bool nested) {
    var info = memberAccess.getUserData(typeof(ExpressionInfo));
    if (info.Method != null) {
        // Method group used as a value: wrap it in a delegate.
        emitDelegateCreation(info.Type, info.Method, memberAccess, nested);
        return null;
    }
    var member = info.Member;
    if (member == null) {
        // No member resolved (e.g. a package/type prefix): just evaluate the target.
        handleExpression(memberAccess.TargetObject, null, true);
        return null;
    }
    switch (member.MemberKind) {
    case Field: {
        var field = member.Field;
        if (!field.IsStatic) {
            handleExpression(memberAccess.TargetObject, null, true);
        }
        CompilerHelper.emitFieldAccess(context, field);
        // Cast from the erased field type of the original (raw) definition if needed.
        BytecodeGenerator.emitGenericCast(context, field.Type, field.DeclaringType.OriginalTypeDefinition.getField(field.Name).Type);
        emitBoxing(memberAccess);
        emitTest(info, labels);
        return null;
    }
    case Property: {
        var method = member.GetAccessor;
        if (!method.IsStatic) {
            handleExpression(memberAccess.TargetObject, null, true);
        }
        CompilerHelper.emitPropertyAccess(context, method);
        BytecodeGenerator.emitGenericCast(context, method.ReturnType, method.OriginalMethodDefinition.ReturnType);
        emitBoxing(memberAccess);
        emitTest(info, labels);
        return null;
    }
    default:
        throw new IllegalStateException("Internal error: unhandled member kind: " + member.MemberKind);
    }
}
// Emits `new T(...)`. Handles three shapes: delegate construction, a
// synthesized static accessor when the constructor is not visible from the
// current type, and the normal new/dup/invokespecial sequence. Afterwards it
// applies any object- or collection-initializer.
protected override Void handleObjectCreation(ObjectCreationExpressionNode objectCreation, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var info = objectCreation.getUserData(typeof(ExpressionInfo));
    var method = info.Method;
    if (BytecodeHelper.isDelegateType(info.Type)) {
        // `new SomeDelegate(target)`: build the delegate from its single argument.
        emitDelegateCreation(info.Type, method, objectCreation.Arguments[0], nested);
        return null;
    }
    if (CompilerHelper.requireAccessor(context, method.DeclaringType, method.IsPublic, method.IsProtected, method.IsPrivate)) {
        // The constructor is not directly accessible (e.g. private ctor of an outer
        // class): synthesize (or reuse) a static `accessor$N` factory method on the
        // nearest enclosing type that can legally call it.
        var accessor = context.PrivateAccessors[method];
        if (accessor == null) {
            // Walk outward to the first enclosing type assignable to the ctor's declaring type.
            var outerClass = (TypeBuilder)context.CurrentType;
            while ((outerClass = (TypeBuilder)outerClass.DeclaringType) != null) {
                if (method.DeclaringType.isAssignableFrom(outerClass)) {
                    break;
                }
            }
            int n = outerClass.Methods.count(p => p.Name.startsWith("accessor$"));
            var methodBuilder = outerClass.defineMethod("accessor$" + n);
            methodBuilder.setReturnType(method.DeclaringType);
            methodBuilder.setStatic(true);
            methodBuilder.setSynthetic(true);
            methodBuilder.setVarargs(method.IsVarargs);
            // Mirror the constructor's parameters and declared exceptions.
            foreach (var p in method.Parameters) {
                var pb = methodBuilder.addParameter(p.Type);
                pb.setName(p.Name);
            }
            foreach (var t in method.Exceptions) {
                methodBuilder.addException(t);
            }
            // Factory body: new T; dup; load each parameter; invokespecial <init>; areturn.
            var gen = methodBuilder.CodeGenerator;
            gen.beginScope();
            gen.emit(Opcode.New, method.DeclaringType);
            gen.emit(Opcode.Dup);
            foreach (var p in method.Parameters) {
                gen.emit(BytecodeHelper.getLoadOpcode(p.Type), gen.getLocal(p.Name));
            }
            gen.emit(Opcode.Invokespecial, method);
            gen.emit(Opcode.Areturn);
            gen.endScope();
            accessor = methodBuilder;
        }
        method = accessor;
    }
    if (method != info.Method) {
        // Construction goes through the synthesized static accessor.
        emitArguments(objectCreation.Arguments, method.Parameters, method.Parameters.count(), method.IsVarargs);
        generator.emit(Opcode.Invokestatic, method);
        if (!nested) {
            generator.emit(Opcode.Pop);
        }
    } else {
        // Direct construction: dup only when the created value is consumed.
        generator.emit(Opcode.New, method.DeclaringType);
        if (nested) {
            generator.emit(Opcode.Dup);
        }
        emitArguments(objectCreation.Arguments, method.Parameters, method.Parameters.count(), method.IsVarargs);
        generator.emit(Opcode.Invokespecial, method);
    }
    emitBoxing(objectCreation);
    var init = objectCreation.Initializer;
    if (init != null) {
        if (init.ExpressionKind == ExpressionKind.ObjectInitializer) {
            // `new T { Member = value, ... }`: dup the instance for each member store.
            var initializer = (ObjectInitializerExpressionNode)init;
            foreach (var mi in initializer.MemberInitializers) {
                MemberInfo memb = mi.getUserData(typeof(MemberInfo));
                if (!memb.IsStatic) {
                    generator.emit(Opcode.Dup);
                }
                handleExpression(mi.Value, null, true);
                emitBoxing(mi.Value);
                if (memb.MemberKind == MemberKind.Property) {
                    CompilerHelper.emitPropertyOrIndexerModification(context, memb.SetAccessor);
                    // Discard a non-void setter result so the stack stays balanced.
                    if (memb.SetAccessor.ReturnType != context.TypeSystem.VoidType) {
                        generator.emit((memb.SetAccessor.ReturnType.IsCategory2) ? Opcode.Pop2 : Opcode.Pop);
                    }
                } else {
                    CompilerHelper.emitFieldModification(context, memb.Field);
                }
            }
        } else {
            // `new T { v1, v2, ... }`: call the resolved add method once per element.
            var initializer = (CollectionInitializerExpressionNode)init;
            var addMethod = initializer.getUserData(typeof(MethodInfo));
            foreach (var args in initializer.Values) {
                generator.emit(Opcode.Dup);
                foreach (var e in args) {
                    handleExpression(e, null, true);
                }
                CompilerHelper.emitMethodAccess(context, addMethod, false);
                if (addMethod.ReturnType != context.TypeSystem.VoidType) {
                    generator.emit((addMethod.ReturnType.IsCategory2) ? Opcode.Pop2 : Opcode.Pop);
                }
            }
        }
    }
    emitTest(info, labels);
    return null;
}
// Emits an unqualified name read: a local (possibly captured in a lambda
// scope), an implicit-this field, a method group (delegate), or a property
// get on the current instance.
protected override Void handleSimpleName(SimpleNameExpressionNode simpleName, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var info = simpleName.getUserData(typeof(ExpressionInfo));
    var member = info.Member;
    switch (member.MemberKind) {
    case Local: {
        var local = (LocalMemberInfo)member;
        if (local.IsUsedFromLambda) {
            // Captured local: it lives as a field on the lambda scope object.
            BytecodeGenerator.emitLoadLambdaScope(context, generator, local.Method);
            generator.emit(Opcode.Getfield, BytecodeGenerator.getLambdaScopeField(context, local));
        } else {
            // Ordinary local: type-specific load from its slot.
            generator.emit(BytecodeHelper.getLoadOpcode(local.Type), generator.getLocal(local.Name));
        }
        emitBoxing(simpleName);
        emitTest(info, labels);
        return null;
    }
    case Field: {
        var field = member.Field;
        if (!field.IsStatic) {
            BytecodeGenerator.emitThisAccess(context, generator);
        }
        CompilerHelper.emitFieldAccess(context, field);
        // Cast from the erased field type of the raw definition if needed.
        BytecodeGenerator.emitGenericCast(context, field.Type, field.DeclaringType.OriginalTypeDefinition.getField(field.Name).Type);
        emitBoxing(simpleName);
        emitTest(info, labels);
        return null;
    }
    case Method: {
        if (info.Method != null) {
            // Bare method name used as a value: delegate over `this`.
            var e = new ThisAccessExpressionNode();
            e.addUserData(new ExpressionInfo(context.CurrentType));
            emitDelegateCreation(info.Type, info.Method, e, nested);
        } else {
            BytecodeGenerator.emitThisAccess(context, generator);
            emitBoxing(simpleName);
        }
        return null;
    }
    case Property: {
        var method = member.GetAccessor;
        if (!method.IsStatic) {
            BytecodeGenerator.emitThisAccess(context, generator);
        }
        CompilerHelper.emitPropertyAccess(context, method);
        BytecodeGenerator.emitGenericCast(context, method.ReturnType, method.OriginalMethodDefinition.ReturnType);
        emitBoxing(simpleName);
        emitTest(info, labels);
        return null;
    }
    default:
        throw new Exception("Internal error: unhandled name kind: " + member.MemberKind);
    }
}
// sizeof(expr) evaluates the operand (an array) and pushes its length.
protected override Void handleSizeof(SizeofExpressionNode sizeofExpression, TargetLabels labels, bool nested) {
    handleExpression(sizeofExpression.Expression, null, true);
    var gen = context.MethodGenerationContext.Generator;
    gen.emit(Opcode.Arraylength);
    return null;
}
// `super` loads the current instance; the super-ness only affects how the
// subsequent call is dispatched, not the receiver that is pushed here.
protected override Void handleSuperAccess(SuperAccessExpressionNode superAccess, TargetLabels labels, bool nested) {
    BytecodeGenerator.emitThisAccess(context, context.MethodGenerationContext.Generator);
    return null;
}
// `this` simply pushes the current instance reference.
protected override Void handleThisAccess(ThisAccessExpressionNode thisAccess, TargetLabels labels, bool nested) {
    BytecodeGenerator.emitThisAccess(context, context.MethodGenerationContext.Generator);
    return null;
}
// `typeof(T)` pushes the class object for the operand type.
protected override Void handleTypeof(TypeofExpressionNode typeofExpression, TargetLabels labels, bool nested) {
    var operandType = typeofExpression.getUserData(typeof(TypeInfo));
    BytecodeHelper.emitTypeof(context.MethodGenerationContext.Generator, context.TypeSystem, operandType);
    return null;
}
// Emits unary operators: ~ (xor with -1), - (type-specific neg), ! (either a
// materialized 0/1 value or a direct branch), + (no-op after type check), and
// the four increment/decrement forms which are delegated to the assignment
// generator.
protected override Void handleUnary(UnaryExpressionNode unary, TargetLabels labels, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var info = unary.getUserData(typeof(ExpressionInfo));
    var type = info.Type;
    switch (unary.Operator) {
    case Complement: {
        // ~x compiles to x ^ -1 (ixor/lxor with the all-ones constant).
        handleExpression(unary.Operand, null, true);
        Opcode opcode;
        switch (type.NumericTypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int:
            generator.emit(Opcode.Iconst_M1);
            opcode = Opcode.Ixor;
            break;
        case Long:
            generator.emit(Opcode.Ldc, new Long(-1));
            opcode = Opcode.Lxor;
            break;
        default:
            throw new Exception("Internal error");
        }
        generator.emit(opcode);
        emitBoxing(unary);
        break;
    }
    case Minus: {
        // Arithmetic negation via the width-specific neg opcode.
        handleExpression(unary.Operand, null, true);
        Opcode opcode;
        switch (type.NumericTypeKind) {
        case Byte:
        case Char:
        case Int:
        case Short:
            opcode = Opcode.Ineg;
            break;
        case Long:
            opcode = Opcode.Lneg;
            break;
        case Double:
            opcode = Opcode.Dneg;
            break;
        case Float:
            opcode = Opcode.Fneg;
            break;
        default:
            throw new Exception("Internal error");
        }
        generator.emit(opcode);
        emitBoxing(unary);
        break;
    }
    case Not: {
        handleExpression(unary.Operand, null, true);
        if (labels == null) {
            // Value context: materialize the inverted boolean as 0/1.
            var elseLabel = generator.defineLabel();
            var endLabel = generator.defineLabel();
            generator.emit(Opcode.Ifeq, elseLabel);
            generator.emit(Opcode.Iconst_0);
            generator.emit(Opcode.Goto, endLabel);
            generator.markLabel(elseLabel);
            generator.emit(Opcode.Iconst_1);
            generator.markLabel(endLabel);
        } else {
            // Branch context: jump straight to the false target, honoring any
            // negation already folded into the expression info.
            if (info.Negate) {
                generator.emit(Opcode.Ifne, labels.ifFalse);
            } else {
                generator.emit(Opcode.Ifeq, labels.ifFalse);
            }
        }
        emitBoxing(unary);
        break;
    }
    case Plus: {
        // Unary plus is a no-op; the switch only validates the operand is numeric.
        handleExpression(unary.Operand, null, true);
        switch (type.NumericTypeKind) {
        case Byte:
        case Int:
        case Short:
        case Long:
        case Double:
        case Float:
            break;
        default:
            throw new Exception("Internal error");
        }
        emitBoxing(unary);
        break;
    }
    case PostIncrement:
    case PostDecrement:
    case PreDecrement:
    case PreIncrement: {
        // ++/-- mutate their operand, so the assignment generator handles them.
        new AssignExpressionGenerator(context, this).handleExpression(unary.Operand, unary, nested);
        emitBoxing(unary);
        break;
    }
    default:
        throw new RuntimeException("Internal error: unhandled unary operator " + unary.Operator);
    }
    return null;
}
// When the expression is used as a condition (labels supplied), branch to the
// false target: ifeq for a normal test, ifne when the result is pre-negated.
private void emitTest(ExpressionInfo info, TargetLabels labels) {
    if (labels == null) {
        return;
    }
    var opcode = (info.Negate) ? Opcode.Ifeq : Opcode.Ifne;
    context.MethodGenerationContext.Generator.emit(opcode, labels.ifFalse);
}
// Boxes the value on top of the stack when the expression's info says the
// surrounding context needs a reference type. Thin wrapper over the shared helper.
private void emitBoxing(ExpressionNode expression) {
    BytecodeGenerator.emitBoxing(context, expression);
}
// Evaluates an expression in value position and converts the result to the
// given target type (the two calls must stay in this order: the conversion
// operates on the value the first call leaves on the stack).
void emitNestedExpression(ExpressionNode expression, TypeInfo targetType) {
    handleExpression(expression, null, true);
    BytecodeGenerator.emitConversion(context, targetType, expression);
}
// Pushes call arguments. Fixed parameters are emitted one-to-one with
// conversion; for a varargs method the trailing arguments are packed into an
// array, except when a single trailing argument is already a compatible array
// (passed through) or null (passed as a null array).
void emitArguments(List<ExpressionNode> arguments, Iterable<ParameterInfo> parameters, int nparams, bool varargs) {
    int fixedLength = (varargs) ? nparams - 1 : nparams;
    var generator = context.MethodGenerationContext.Generator;
    var it1 = parameters.iterator();
    var it2 = arguments.iterator();
    int i;
    for (i = 0; i < fixedLength; i++) {
        var p = it1.next();
        var e = it2.next();
        emitNestedExpression(e, p.Type);
    }
    if (varargs) {
        int nvarargs = arguments.size() - fixedLength;
        if (nvarargs == 1) {
            var paramType = it1.next().Type;
            var e = arguments[i];
            var ei = e.getUserData(typeof(ExpressionInfo));
            if (ei == null) {
                // Null literal: pass a null array reference.
                generator.emit(Opcode.Aconst_Null);
            } else if (ei.Type.IsArray && paramType.isAssignableFrom(ei.Type)) {
                // Already an array of the right type: pass it directly.
                handleExpression(e, null, true);
                BytecodeGenerator.emitConversion(context, paramType, e);
            } else {
                // Single scalar: wrap it in a one-element array.
                BytecodeHelper.emitIntConstant(generator, 1);
                bytecodeGenerator.emitArray(1, paramType, it2);
            }
        } else {
            // Zero or several trailing arguments: build an array of that size.
            BytecodeHelper.emitIntConstant(generator, nvarargs);
            bytecodeGenerator.emitArray(1, it1.next().Type, it2);
        }
    }
}
// Wraps a method in a delegate instance. If the argument is itself already a
// delegate value, it is passed through unchanged; otherwise a synthesized
// delegate class is instantiated (with the receiver as the sole constructor
// argument when the target method is not static).
void emitDelegateCreation(TypeInfo delegateType, MethodInfo method, ExpressionNode arg, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    if (arg != null) {
        var argType = arg.getUserData(typeof(ExpressionInfo)).Type;
        if (argType != null && BytecodeHelper.isDelegateType(argType)) {
            // The value is already a delegate: no wrapper needed.
            handleExpression(arg, null, nested);
            return;
        }
    }
    var typeInfo = CompilerHelper.createDelegateType(context, delegateType, method);
    generator.emit(Opcode.New, typeInfo);
    if (nested) {
        generator.emit(Opcode.Dup);
    }
    if (!method.IsStatic) {
        handleExpression(arg, null, true);
    }
    // Invoke the synthesized delegate class's constructor.
    generator.emit(Opcode.Invokespecial, typeInfo.Methods.where(p => p.Name.equals("<init>")).first());
}
// Materializes a lambda/method reference whose target type is a
// single-method interface: synthesizes a nested `Interface<n>` class that
// implements the interface by forwarding to the given method (through a
// `target` field when the method is not static), then emits the
// new/dup/invokespecial sequence that instantiates it.
private void emitInterfaceCreation(TypeInfo interfaceType, MethodInfo method, ExpressionNode arg, bool nested) {
    var generator = context.MethodGenerationContext.Generator;
    var currentType = (TypeBuilder)context.CurrentType;
    // Pick a fresh Interface<n> name among the already-synthesized nested types.
    var prefix = currentType.getFullName() + "$Interface";
    int n = currentType.NestedTypes.count(p => p.FullName.startsWith(prefix));
    var typeBuilder = currentType.defineNestedType("Interface" + n);
    context.TypeBuilders.add(typeBuilder);
    typeBuilder.setSynthetic(true);
    var objectType = context.TypeSystem.getType("java/lang/Object");
    typeBuilder.setBaseType(objectType);
    typeBuilder.addInterface(interfaceType);
    // A non-static target needs its receiver stored in a private field.
    FieldBuilder targetField = null;
    if (!method.IsStatic) {
        targetField = typeBuilder.defineField("target", method.DeclaringType);
        targetField.setPrivate(true);
    }
    var interfaceMethod = interfaceType.Methods.single();
    // Interface method
    var methodBuilder = typeBuilder.defineMethod(interfaceMethod.Name);
    methodBuilder.setPublic(true);
    methodBuilder.setFinal(true);
    methodBuilder.setVarargs(method.IsVarargs);
    methodBuilder.setReturnType(method.ReturnType);
    foreach (var p in method.Parameters) {
        var pb = methodBuilder.addParameter(p.Type);
        pb.setName(p.Name);
    }
    foreach (var t in method.Exceptions) {
        methodBuilder.addException(t);
    }
    // Body: [load target], load each parameter, invoke the real method, return.
    var gen = methodBuilder.CodeGenerator;
    gen.beginScope();
    if (!method.IsStatic) {
        gen.emit(Opcode.Aload, gen.getLocal("this"));
        gen.emit(Opcode.Getfield, targetField);
    }
    foreach (var p in method.Parameters) {
        gen.emit(BytecodeHelper.getLoadOpcode(p.getType()), gen.getLocal(p.getName()));
    }
    gen.emit((method.IsStatic) ? Opcode.Invokestatic : Opcode.Invokevirtual, method);
    gen.emit(BytecodeHelper.getReturnOpcode(method.ReturnType));
    gen.endScope();
    // Bridge
    // Generic interfaces need an erased bridge method delegating to the typed one.
    if (interfaceMethod.DeclaringType.GenericArguments.any()) {
        BytecodeGenerator.emitBridgeMethod(context, typeBuilder, methodBuilder, interfaceMethod);
    }
    // Constructor T(scope)
    methodBuilder = typeBuilder.defineMethod("<init>");
    methodBuilder.setReturnType(context.TypeSystem.VoidType);
    if (!method.IsStatic) {
        var parameterBuilder = methodBuilder.addParameter(method.DeclaringType);
        parameterBuilder.setName("target");
    }
    var baseConstructor = objectType.getMethod("<init>", Query.empty<TypeInfo>());
    gen = methodBuilder.CodeGenerator;
    gen.beginScope();
    gen.emit(Opcode.Aload, gen.getLocal("this"));
    gen.emit(Opcode.Invokespecial, baseConstructor);
    if (!method.IsStatic) {
        // Store the receiver for later forwarding.
        gen.emit(Opcode.Aload, gen.getLocal("this"));
        gen.emit(Opcode.Aload, gen.getLocal("target"));
        gen.emit(Opcode.Putfield, targetField);
    }
    gen.emit(Opcode.Return);
    gen.endScope();
    //
    // Interface object creation
    //
    generator.emit(Opcode.New, typeBuilder);
    if (nested) {
        generator.emit(Opcode.Dup);
    }
    if (!method.IsStatic) {
        handleExpression(arg, null, true);
    }
    generator.emit(Opcode.Invokespecial, methodBuilder);
}
// Pushes both operands of a binary numeric operation, inserting the widening
// conversion (i2l, i2f, l2d, ...) needed to bring them to the common type.
// The *IsZero flags mean "this operand is a zero constant the caller will
// supply (or fold away) itself" — such operands are neither evaluated nor
// converted here. Non-numeric operand pairs are just pushed unchanged.
private void emitOperands(ExpressionNode left, bool leftIsZero, ExpressionNode right, bool rightIsZero) {
    var generator = context.MethodGenerationContext.Generator;
    var li = left.getUserData(typeof(ExpressionInfo));
    var ri = right.getUserData(typeof(ExpressionInfo));
    // Outer switch: the left operand's kind decides which conversions apply.
    switch (li.Type.TypeKind) {
    case Byte:
    case Char:
    case Short:
    case Int: {
        // int-family left operand: widen it toward the right operand's type.
        switch (ri.Type.TypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
            }
            break;
        }
        case Long: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
                generator.emit(Opcode.I2l);
            }
            break;
        }
        case Float: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
                generator.emit(Opcode.I2f);
            }
            break;
        }
        case Double: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
                generator.emit(Opcode.I2d);
            }
            break;
        }
        }
        if (!rightIsZero) {
            handleExpression(right, null, true);
        }
        break;
    }
    case Long: {
        // long left operand: the int-family right is widened to long; a float/double
        // right instead forces the left to be widened (l2f/l2d).
        switch (ri.Type.TypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
                generator.emit(Opcode.I2l);
            }
            break;
        }
        case Long: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
            }
            break;
        }
        case Float: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
                generator.emit(Opcode.L2f);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
            }
            break;
        }
        case Double: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
                generator.emit(Opcode.L2d);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
            }
            break;
        }
        }
        break;
    }
    case Float: {
        // float left operand: int/long rights are widened to float; a double right
        // widens the left instead (f2d).
        switch (ri.Type.TypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
                generator.emit(Opcode.I2f);
            }
            break;
        }
        case Long: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
                generator.emit(Opcode.L2f);
            }
            break;
        }
        case Float: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
            }
            break;
        }
        case Double: {
            if (!leftIsZero) {
                handleExpression(left, null, true);
                generator.emit(Opcode.F2d);
            }
            if (!rightIsZero) {
                handleExpression(right, null, true);
            }
            break;
        }
        }
        break;
    }
    case Double: {
        // double left operand is already the widest: only the right gets converted.
        if (!leftIsZero) {
            handleExpression(left, null, true);
        }
        switch (ri.Type.TypeKind) {
        case Byte:
        case Char:
        case Short:
        case Int: {
            if (!rightIsZero) {
                handleExpression(right, null, true);
                generator.emit(Opcode.I2d);
            }
            break;
        }
        case Long: {
            if (!rightIsZero) {
                handleExpression(right, null, true);
                generator.emit(Opcode.L2d);
            }
            break;
        }
        case Float: {
            if (!rightIsZero) {
                handleExpression(right, null, true);
                generator.emit(Opcode.F2d);
            }
            break;
        }
        case Double: {
            if (!rightIsZero) {
                handleExpression(right, null, true);
            }
            break;
        }
        }
        break;
    }
    default:
        // Non-numeric (reference/boolean) operands: push both as-is.
        handleExpression(left, null, true);
        handleExpression(right, null, true);
        break;
    }
}
// Emits an ordering comparison that branches to elseLabel when the comparison
// FAILS. int-family comparisons use the direct if/if_icmp opcodes (with the
// zero-comparison shortcuts); long/float/double first push both operands,
// apply lcmp/fcmp*/dcmp*, then branch on the resulting -1/0/1.
private void emitComparison(BinaryExpressionNode binary, bool leftIsZero, bool rightIsZero,
        Opcode ifLeftZero, Opcode ifRightZero, Opcode intOperation, LabelMarker elseLabel) {
    var generator = context.MethodGenerationContext.Generator;
    switch (binary.getUserData(typeof(TypeInfo)).TypeKind) {
    case Byte:
    case Char:
    case Short:
    case Int:
        emitOperands(binary.LeftOperand, leftIsZero, binary.RightOperand, rightIsZero);
        if (rightIsZero) {
            // x <op> 0: single-operand branch.
            generator.emit(ifRightZero, elseLabel);
        } else if (leftIsZero) {
            // 0 <op> x: single-operand branch with the mirrored opcode.
            generator.emit(ifLeftZero, elseLabel);
        } else {
            generator.emit(intOperation, elseLabel);
        }
        break;
    case Long:
        // A zero constant operand is materialized explicitly (lconst_0) because
        // emitOperands skips operands flagged as zero.
        if (leftIsZero) {
            generator.emit(Opcode.Lconst_0);
        }
        emitOperands(binary.LeftOperand, leftIsZero, binary.RightOperand, rightIsZero);
        if (rightIsZero) {
            generator.emit(Opcode.Lconst_0);
        }
        generator.emit(Opcode.Lcmp);
        generator.emit(ifRightZero, elseLabel);
        break;
    case Float:
        if (leftIsZero) {
            generator.emit(Opcode.Fconst_0);
        }
        emitOperands(binary.LeftOperand, leftIsZero, binary.RightOperand, rightIsZero);
        if (rightIsZero) {
            generator.emit(Opcode.Fconst_0);
        }
        // Pick fcmpg vs fcmpl so NaN always makes the comparison fail.
        generator.emit((ifRightZero == Opcode.Ifgt || ifRightZero == Opcode.Ifge) ? Opcode.Fcmpg : Opcode.Fcmpl);
        generator.emit(ifRightZero, elseLabel);
        break;
    case Double:
        if (leftIsZero) {
            generator.emit(Opcode.Dconst_0);
        }
        emitOperands(binary.LeftOperand, leftIsZero, binary.RightOperand, rightIsZero);
        if (rightIsZero) {
            generator.emit(Opcode.Dconst_0);
        }
        generator.emit((ifRightZero == Opcode.Ifgt || ifRightZero == Opcode.Ifge) ? Opcode.Dcmpg : Opcode.Dcmpl);
        generator.emit(ifRightZero, elseLabel);
        break;
    default:
        throw new Exception("Internal error");
    }
}
// Emits == / != (negate selects which) branching to `label` when the test
// holds. Primitives use the comparison opcodes; references use if_acmp*, with
// null literals (missing ExpressionInfo) short-circuited to ifnull/ifnonnull.
private void emitEquality(BinaryExpressionNode binary, bool leftIsZero, bool rightIsZero, LabelMarker label, bool negate) {
    var generator = context.MethodGenerationContext.Generator;
    TypeInfo type = binary.getUserData(typeof(TypeInfo));
    switch (type.TypeKind) {
    case Boolean:
    case Byte:
    case Char:
    case Short:
    case Int:
        emitOperands(binary.getLeftOperand(), leftIsZero, binary.getRightOperand(), rightIsZero);
        if (leftIsZero || rightIsZero) {
            // Comparison against the constant 0: single-operand branch.
            generator.emit((negate) ? Opcode.Ifne : Opcode.Ifeq, label);
        } else {
            generator.emit((negate) ? Opcode.If_icmpne : Opcode.If_icmpeq, label);
        }
        break;
    case Long:
        emitOperands(binary.getLeftOperand(), false, binary.getRightOperand(), false);
        generator.emit(Opcode.Lcmp);
        generator.emit((negate) ? Opcode.Ifne : Opcode.Ifeq, label);
        break;
    case Float:
        emitOperands(binary.getLeftOperand(), false, binary.getRightOperand(), false);
        generator.emit(Opcode.Fcmpg);
        generator.emit((negate) ? Opcode.Ifne : Opcode.Ifeq, label);
        break;
    case Double:
        emitOperands(binary.getLeftOperand(), false, binary.getRightOperand(), false);
        generator.emit(Opcode.Dcmpg);
        generator.emit((negate) ? Opcode.Ifne : Opcode.Ifeq, label);
        break;
    default:
        // Reference equality. A null ExpressionInfo marks a null literal operand.
        var li = binary.getLeftOperand().getUserData(typeof(ExpressionInfo));
        var ri = binary.getRightOperand().getUserData(typeof(ExpressionInfo));
        if (li == null) {
            handleExpression(binary.getRightOperand(), null, true);
            generator.emit((negate) ? Opcode.Ifnonnull : Opcode.Ifnull, label);
        } else if (ri == null) {
            handleExpression(binary.getLeftOperand(), null, true);
            generator.emit((negate) ? Opcode.Ifnonnull : Opcode.Ifnull, label);
        } else {
            handleExpression(binary.getLeftOperand(), null, true);
            handleExpression(binary.getRightOperand(), null, true);
            generator.emit((negate) ? Opcode.If_acmpne : Opcode.If_acmpeq, label);
        }
        break;
    }
}
// Delegates array-literal construction to the main bytecode generator.
void emitArray(int dimensions, TypeInfo type, Iterator<ExpressionNode> values) {
    bytecodeGenerator.emitArray(dimensions, type, values);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing.Imaging;
using System.Drawing.Internal;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using Gdip = System.Drawing.SafeNativeMethods.Gdip;
namespace System.Drawing
{
public abstract partial class Image
{
#if FINALIZATION_WATCH
private string allocationSite = Graphics.GetAllocationStack();
#endif
/// <summary>
/// Loads an <see cref='Image'/> from a stream via GDI+, optionally honoring an
/// embedded ICM color profile and optionally validating the decoded image data.
/// </summary>
public static Image FromStream(Stream stream, bool useEmbeddedColorManagement, bool validateImageData)
{
    if (stream == null)
        throw new ArgumentNullException(nameof(stream));

    IntPtr image;
    int status = useEmbeddedColorManagement
        ? Gdip.GdipLoadImageFromStreamICM(new GPStream(stream), out image)
        : Gdip.GdipLoadImageFromStream(new GPStream(stream), out image);
    Gdip.CheckStatus(status);

    if (validateImageData)
        ValidateImage(image);

    Image result = CreateImageObject(image);
    EnsureSave(result, null, stream);
    return result;
}
// Used for serialization: loads the native image from the stream, validates it,
// and adopts the handle as this instance's nativeImage.
private IntPtr InitializeFromStream(Stream stream)
{
    IntPtr image = IntPtr.Zero;
    Gdip.CheckStatus(Gdip.GdipLoadImageFromStream(new GPStream(stream), out image));
    ValidateImage(image);
    nativeImage = image;
    // The type value itself is unused; the call is kept for its status check
    // (CheckStatus throws on failure). Discard replaces a previously unused local.
    Gdip.CheckStatus(Gdip.GdipGetImageType(new HandleRef(this, nativeImage), out _));
    EnsureSave(this, null, stream);
    return image;
}
// Wraps an already-created native GDI+ image handle in a managed Image.
internal Image(IntPtr nativeImage) => SetNativeImage(nativeImage);
/// <summary>
/// Creates an exact copy of this <see cref='Image'/>.
/// </summary>
public object Clone()
{
    Gdip.CheckStatus(Gdip.GdipCloneImage(new HandleRef(this, nativeImage), out IntPtr cloneImage));
    ValidateImage(cloneImage);
    return CreateImageObject(cloneImage);
}
// Releases the native GDI+ image handle. Safe to call multiple times: the
// handle is nulled in the finally block even if disposal throws.
protected virtual void Dispose(bool disposing)
{
#if FINALIZATION_WATCH
    if (!disposing && nativeImage != IntPtr.Zero)
        Debug.WriteLine("**********************\nDisposed through finalization:\n" + allocationSite);
#endif
    // Already disposed (or never initialized): nothing to release.
    if (nativeImage == IntPtr.Zero)
        return;
    try
    {
#if DEBUG
        int status = !Gdip.Initialized ? Gdip.Ok :
#endif
        Gdip.GdipDisposeImage(new HandleRef(this, nativeImage));
#if DEBUG
        Debug.Assert(status == Gdip.Ok, "GDI+ returned an error status: " + status.ToString(CultureInfo.InvariantCulture));
#endif
    }
    catch (Exception ex)
    {
        // Security/critical exceptions must propagate; anything else during
        // dispose is logged (debug builds) and swallowed.
        if (ClientUtils.IsSecurityOrCriticalException(ex))
        {
            throw;
        }
        Debug.Fail("Exception thrown during Dispose: " + ex.ToString());
    }
    finally
    {
        nativeImage = IntPtr.Zero;
    }
}
/// <summary>
/// Saves this <see cref='Image'/> to the specified file in the specified format.
/// </summary>
public void Save(string filename, ImageFormat format)
{
    if (format == null)
        throw new ArgumentNullException(nameof(format));

    // Formats without a native encoder (e.g. Icon) fall back to the PNG encoder.
    ImageCodecInfo codec = format.FindEncoder() ?? ImageFormat.Png.FindEncoder();
    Save(filename, codec, null);
}
/// <summary>
/// Saves this <see cref='Image'/> to the specified file in the specified format and with the specified encoder parameters.
/// </summary>
public void Save(string filename, ImageCodecInfo encoder, EncoderParameters encoderParams)
{
    if (filename == null)
        throw new ArgumentNullException(nameof(filename));
    if (encoder == null)
        throw new ArgumentNullException(nameof(encoder));
    IntPtr encoderParamsMemory = IntPtr.Zero;
    if (encoderParams != null)
    {
        // Custom encoder parameters invalidate the cached raw bytes, so the
        // raw-copy fast path below is disabled.
        _rawData = null;
        encoderParamsMemory = encoderParams.ConvertToMemory();
    }
    try
    {
        Guid g = encoder.Clsid;
        bool saved = false;
        if (_rawData != null)
        {
            // Fast path: if saving in the image's original format, write the
            // cached raw bytes directly instead of re-encoding through GDI+.
            ImageCodecInfo rawEncoder = RawFormat.FindEncoder();
            if (rawEncoder != null && rawEncoder.Clsid == g)
            {
                using (FileStream fs = File.OpenWrite(filename))
                {
                    fs.Write(_rawData, 0, _rawData.Length);
                    saved = true;
                }
            }
        }
        if (!saved)
        {
            Gdip.CheckStatus(Gdip.GdipSaveImageToFile(
                new HandleRef(this, nativeImage),
                filename,
                ref g,
                new HandleRef(encoderParams, encoderParamsMemory)));
        }
    }
    finally
    {
        // The marshaled encoder-parameter block is unmanaged memory; always free it.
        if (encoderParamsMemory != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(encoderParamsMemory);
        }
    }
}
// Serialization helper: pick a lossless encoder for this image's native format.
private void Save(MemoryStream stream)
{
    // Jpeg loses data, so we don't want to use it to serialize.
    ImageFormat dest = RawFormat;
    if (dest.Guid == ImageFormat.Jpeg.Guid)
    {
        dest = ImageFormat.Png;
    }

    // If we don't find an encoder (for things like Icon), we just switch back to PNG.
    Save(stream, dest.FindEncoder() ?? ImageFormat.Png.FindEncoder(), null);
}
/// <summary>
/// Saves this <see cref='Image'/> to the specified stream in the specified format.
/// </summary>
public void Save(Stream stream, ImageFormat format)
{
    if (format == null)
        throw new ArgumentNullException(nameof(format));

    // CONSISTENCY FIX: mirror the Save(string, ImageFormat) overload — when the
    // requested format has no native encoder (e.g. Icon), fall back to PNG instead
    // of passing a null codec to the overload below (which throws).
    ImageCodecInfo codec = format.FindEncoder() ?? ImageFormat.Png.FindEncoder();
    Save(stream, codec, null);
}
/// <summary>
/// Saves this <see cref='Image'/> to the specified stream in the specified format.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="stream"/> or <paramref name="encoder"/> is null.</exception>
public void Save(Stream stream, ImageCodecInfo encoder, EncoderParameters encoderParams)
{
    if (stream == null)
        throw new ArgumentNullException(nameof(stream));
    if (encoder == null)
        throw new ArgumentNullException(nameof(encoder));

    IntPtr encoderParamsMemory = IntPtr.Zero;
    if (encoderParams != null)
    {
        // Custom encoder parameters invalidate the cached raw bytes; GDI+ must re-encode.
        _rawData = null;
        encoderParamsMemory = encoderParams.ConvertToMemory();
    }
    try
    {
        Guid g = encoder.Clsid;
        bool saved = false;
        if (_rawData != null)
        {
            // Fast path: if the target format equals the image's native format we can
            // write the cached raw bytes verbatim instead of re-encoding via GDI+.
            ImageCodecInfo rawEncoder = RawFormat.FindEncoder();
            if (rawEncoder != null && rawEncoder.Clsid == g)
            {
                stream.Write(_rawData, 0, _rawData.Length);
                saved = true;
            }
        }
        if (!saved)
        {
            // GPStream adapts the managed Stream to the COM IStream GDI+ expects.
            Gdip.CheckStatus(Gdip.GdipSaveImageToStream(
                new HandleRef(this, nativeImage),
                new GPStream(stream),
                ref g,
                new HandleRef(encoderParams, encoderParamsMemory)));
        }
    }
    finally
    {
        // Always release the unmanaged parameter block allocated by ConvertToMemory().
        if (encoderParamsMemory != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(encoderParamsMemory);
        }
    }
}
/// <summary>
/// Adds an <see cref='EncoderParameters'/> to this <see cref='Image'/>.
/// </summary>
public void SaveAdd(EncoderParameters encoderParams)
{
    IntPtr nativeParams = IntPtr.Zero;
    if (encoderParams != null)
    {
        nativeParams = encoderParams.ConvertToMemory();
    }

    // Appending frames invalidates any cached raw image bytes.
    _rawData = null;
    try
    {
        Gdip.CheckStatus(Gdip.GdipSaveAdd(new HandleRef(this, nativeImage), new HandleRef(encoderParams, nativeParams)));
    }
    finally
    {
        if (nativeParams != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(nativeParams);
        }
    }
}
/// <summary>
/// Adds an <see cref='EncoderParameters'/> to the specified <see cref='Image'/>.
/// </summary>
public void SaveAdd(Image image, EncoderParameters encoderParams)
{
    if (image == null)
        throw new ArgumentNullException(nameof(image));

    IntPtr nativeParams = IntPtr.Zero;
    if (encoderParams != null)
    {
        nativeParams = encoderParams.ConvertToMemory();
    }

    // Appending frames invalidates any cached raw image bytes.
    _rawData = null;
    try
    {
        Gdip.CheckStatus(Gdip.GdipSaveAddImage(
            new HandleRef(this, nativeImage),
            new HandleRef(image, image.nativeImage),
            new HandleRef(encoderParams, nativeParams)));
    }
    finally
    {
        if (nativeParams != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(nativeParams);
        }
    }
}
/// <summary>
/// Gets a bounding rectangle in the specified units for this <see cref='Image'/>.
/// </summary>
public RectangleF GetBounds(ref GraphicsUnit pageUnit)
{
    RectangleF bounds;
    Gdip.CheckStatus(Gdip.GdipGetImageBounds(new HandleRef(this, nativeImage), out bounds, out pageUnit));
    return bounds;
}
/// <summary>
/// Gets or sets the color palette used for this <see cref='Image'/>.
/// </summary>
[Browsable(false)]
public ColorPalette Palette
{
    get
    {
        // Ask GDI+ how large the native palette blob is before allocating.
        Gdip.CheckStatus(Gdip.GdipGetImagePaletteSize(new HandleRef(this, nativeImage), out int size));
        // "size" is total byte size:
        // sizeof(ColorPalette) + (pal->Count-1)*sizeof(ARGB)
        ColorPalette palette = new ColorPalette(size);
        // Memory layout is:
        // UINT Flags
        // UINT Count
        // ARGB Entries[size]
        IntPtr memory = Marshal.AllocHGlobal(size);
        try
        {
            // Copy the native palette into our scratch buffer, then unmarshal it.
            Gdip.CheckStatus(Gdip.GdipGetImagePalette(new HandleRef(this, nativeImage), memory, size));
            palette.ConvertFromMemory(memory);
        }
        finally
        {
            Marshal.FreeHGlobal(memory);
        }
        return palette;
    }
    set
    {
        // Marshal the managed palette into the native layout GDI+ expects.
        IntPtr memory = value.ConvertToMemory();
        try
        {
            Gdip.CheckStatus(Gdip.GdipSetImagePalette(new HandleRef(this, nativeImage), memory));
        }
        finally
        {
            if (memory != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(memory);
            }
        }
    }
}
// Thumbnail support

/// <summary>
/// Returns the thumbnail for this <see cref='Image'/>.
/// </summary>
public Image GetThumbnailImage(int thumbWidth, int thumbHeight, GetThumbnailImageAbort callback, IntPtr callbackData)
{
    Gdip.CheckStatus(Gdip.GdipGetImageThumbnail(
        new HandleRef(this, nativeImage),
        thumbWidth,
        thumbHeight,
        out IntPtr thumbImage,
        callback,
        callbackData));

    return CreateImageObject(thumbImage);
}
/// <summary>
/// Gets an array of the property IDs stored in this <see cref='Image'/>.
/// </summary>
[Browsable(false)]
public int[] PropertyIdList
{
    get
    {
        Gdip.CheckStatus(Gdip.GdipGetPropertyCount(new HandleRef(this, nativeImage), out int count));

        int[] propid = new int[count];
        // With zero properties, skip the native call and hand back the empty array.
        if (count != 0)
        {
            Gdip.CheckStatus(Gdip.GdipGetPropertyIdList(new HandleRef(this, nativeImage), count, propid));
        }

        return propid;
    }
}
/// <summary>
/// Gets the specified property item from this <see cref='Image'/>.
/// </summary>
public PropertyItem GetPropertyItem(int propid)
{
    Gdip.CheckStatus(Gdip.GdipGetPropertyItemSize(new HandleRef(this, nativeImage), propid, out int size));
    if (size == 0)
    {
        return null;
    }

    IntPtr propdata = Marshal.AllocHGlobal(size);
    if (propdata == IntPtr.Zero)
    {
        throw Gdip.StatusException(Gdip.OutOfMemory);
    }

    try
    {
        Gdip.CheckStatus(Gdip.GdipGetPropertyItem(new HandleRef(this, nativeImage), propid, size, propdata));
        return PropertyItemInternal.ConvertFromMemory(propdata, 1)[0];
    }
    finally
    {
        Marshal.FreeHGlobal(propdata);
    }
}
/// <summary>
/// Sets the specified property item to the specified value.
/// </summary>
public void SetPropertyItem(PropertyItem propitem)
{
    // ConvertFromPropertyItem owns native memory, so dispose it deterministically.
    using (PropertyItemInternal propItemInternal = PropertyItemInternal.ConvertFromPropertyItem(propitem))
    {
        Gdip.CheckStatus(Gdip.GdipSetPropertyItem(new HandleRef(this, nativeImage), propItemInternal));
    }
}
/// <summary>
/// Gets an array of <see cref='PropertyItem'/> objects that describe this <see cref='Image'/>.
/// </summary>
[Browsable(false)]
public PropertyItem[] PropertyItems
{
    get
    {
        Gdip.CheckStatus(Gdip.GdipGetPropertyCount(new HandleRef(this, nativeImage), out int count));
        Gdip.CheckStatus(Gdip.GdipGetPropertySize(new HandleRef(this, nativeImage), out int size, ref count));

        if (size == 0 || count == 0)
        {
            return Array.Empty<PropertyItem>();
        }

        IntPtr propdata = Marshal.AllocHGlobal(size);
        try
        {
            Gdip.CheckStatus(Gdip.GdipGetAllPropertyItems(new HandleRef(this, nativeImage), size, count, propdata));
            return PropertyItemInternal.ConvertFromMemory(propdata, count);
        }
        finally
        {
            Marshal.FreeHGlobal(propdata);
        }
    }
}
// Throws if GDI+ reports that the native handle is not a valid image,
// disposing the handle before propagating the failure.
internal static void ValidateImage(IntPtr image)
{
    try
    {
        Gdip.CheckStatus(Gdip.GdipImageForceValidation(image));
    }
    catch
    {
        // Don't leak the native handle when validation fails.
        Gdip.GdipDisposeImage(image);
        throw;
    }
}
}
}
| |
/*
*************************************************************************
** Custom classes used by C#
*************************************************************************
*/
using System;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using i64 = System.Int64;
using u32 = System.UInt32;
using time_t = System.Int64;
namespace System.Data.SQLite
{
using sqlite3_value = Sqlite3.Mem;
public partial class Sqlite3
{
// C-style atoi over a UTF-8 byte buffer: decode, then parse the numeric prefix.
static int atoi(byte[] inStr)
{
    string decoded = Encoding.UTF8.GetString(inStr, 0, inStr.Length);
    return atoi(decoded);
}
// C-style atoi: parse the leading run of digit/'-' characters; 0 on failure.
static int atoi(string inStr)
{
    // Find the end of the numeric prefix.
    int end = 0;
    while (end < inStr.Length && (sqlite3Isdigit(inStr[end]) || inStr[end] == '-'))
    {
        end++;
    }

    int parsed;
    return Int32.TryParse(inStr.Substring(0, end), out parsed) ? parsed : 0;
}
// C-style fprintf: format via sqlite3_mprintf and emit to the given writer.
static void fprintf(TextWriter tw, string zFormat, params object[] ap)
{
    string formatted = sqlite3_mprintf(zFormat, ap);
    tw.Write(formatted);
}
// C-style printf: format via sqlite3_mprintf and emit to standard output.
static void printf(string zFormat, params object[] ap)
{
    string formatted = sqlite3_mprintf(zFormat, ap);
    Console.Out.Write(formatted);
}
// Byte-buffer memcmp over the first Limit bytes; buffers shorter than Limit
// compare by length (how this port emulates C memcmp on managed arrays).
static int memcmp(byte[] bA, byte[] bB, int Limit)
{
    if (bA.Length < Limit)
        return (bA.Length < bB.Length) ? -1 : +1;
    if (bB.Length < Limit)
        return +1;

    for (int i = 0; i < Limit; i++)
    {
        int diff = bA[i] - bB[i];
        if (diff != 0)
            return diff < 0 ? -1 : 1;
    }
    return 0;
}
// Compare a string (raw char values) against a byte buffer, C-memcmp style.
static int memcmp(string A, byte[] bB, int Limit)
{
    if (A.Length < Limit)
        return (A.Length < bB.Length) ? -1 : +1;
    if (bB.Length < Limit)
        return +1;

    for (int i = 0; i < Limit; i++)
    {
        char ca = A[i];
        if (ca != bB[i])
            return (ca < bB[i]) ? -1 : 1;
    }
    return 0;
}
// memcmp of a[Offset..] against b[0..], over Limit bytes.
static int memcmp(byte[] a, int Offset, byte[] b, int Limit)
{
    if (a.Length < Offset + Limit)
        return (a.Length - Offset < b.Length) ? -1 : +1;
    if (b.Length < Limit)
        return +1;

    for (int i = 0; i < Limit; i++)
    {
        byte x = a[Offset + i];
        byte y = b[i];
        if (x != y)
            return x < y ? -1 : 1;
    }
    return 0;
}
// memcmp of a[Aoffset..] against b[Boffset..], over Limit bytes.
static int memcmp(byte[] a, int Aoffset, byte[] b, int Boffset, int Limit)
{
    if (a.Length < Aoffset + Limit)
        return (a.Length - Aoffset < b.Length - Boffset) ? -1 : +1;
    if (b.Length < Boffset + Limit)
        return +1;

    for (int i = 0; i < Limit; i++)
    {
        byte x = a[Aoffset + i];
        byte y = b[Boffset + i];
        if (x != y)
            return x < y ? -1 : 1;
    }
    return 0;
}
// memcmp of a[Offset..] against the char values of string b, over Limit bytes.
static int memcmp(byte[] a, int Offset, string b, int Limit)
{
    if (a.Length < Offset + Limit)
        return (a.Length - Offset < b.Length) ? -1 : +1;
    if (b.Length < Limit)
        return +1;

    for (int i = 0; i < Limit; i++)
    {
        int x = a[Offset + i];
        int y = b[i];
        if (x != y)
            return x < y ? -1 : 1;
    }
    return 0;
}
// Ordinal string memcmp over the first Limit characters; shorter strings
// compare by length, like the buffer overloads above.
static int memcmp(string A, string B, int Limit)
{
    if (A.Length < Limit)
        return (A.Length < B.Length) ? -1 : +1;
    if (B.Length <Imit: false ? 0 : Limit)
        return +1;

    int rc = String.Compare(A, 0, B, 0, Limit, StringComparison.Ordinal);
    return rc == 0 ? 0 : (rc < 0 ? -1 : +1);
}
// ----------------------------
// ** Builtin Functions
// ----------------------------
static Regex oRegex = null;
/*
** The regexp() function. Two arguments, both strings.
** Collating sequences are not used.
*/
static void regexpFunc(
sqlite3_context context,
int argc,
sqlite3_value[] argv
)
{
    string zTest;  /* The input string A */
    string zRegex; /* The regex string B */

    Debug.Assert(argc == 2);
    UNUSED_PARAMETER(argc);
    zRegex = sqlite3_value_text(argv[0]);
    zTest = sqlite3_value_text(argv[1]);
    if (zTest == null || String.IsNullOrEmpty(zRegex))
    {
        sqlite3_result_int(context, 0);
        return;
    }
    // BUG FIX: recompile the cached Regex only when the pattern CHANGED.
    // The original used '==', which recompiled on every call with the SAME
    // pattern and silently reused a stale compiled pattern whenever a
    // DIFFERENT pattern was supplied. Regex.ToString() returns the pattern
    // the instance was constructed with.
    if (oRegex == null || oRegex.ToString() != zRegex)
    {
        oRegex = new Regex(zRegex, RegexOptions.IgnoreCase);
    }
    sqlite3_result_int(context, oRegex.IsMatch(zTest) ? 1 : 0);
}
// ----------------------------
// ** Conversion routines
// ----------------------------
static Object lock_va_list = new Object();
static string vaFORMAT;
static int vaNEXT;
// Begin reading a C-style varargs list: reset the cursor and remember the format.
static void va_start(object[] ap, string zFormat)
{
    vaNEXT = 0;
    vaFORMAT = zFormat;
}
// Fetch the next vararg as a boolean.
static Boolean va_arg(object[] ap, Boolean sysType)
{
    object arg = ap[vaNEXT++];
    return Convert.ToBoolean(arg);
}
// Fetch the next vararg as a byte buffer (may be null).
static Byte[] va_arg(object[] ap, Byte[] sysType)
{
    object arg = ap[vaNEXT++];
    return (Byte[])arg;
}
// Fetch the next vararg as an array of byte buffers; null passes through.
static Byte[][] va_arg(object[] ap, Byte[][] sysType)
{
    object arg = ap[vaNEXT++];
    return arg == null ? null : (Byte[][])arg;
}
// Fetch the next vararg as a char. Boxed integral zeros come back as the
// character '0' (NOTE(review): literally '0', not '\0' — looks deliberate in
// this port; confirm against the printf formatting callers).
static Char va_arg(object[] ap, Char sysType)
{
    object arg = ap[vaNEXT];
    if (arg is Int32 && (int)arg == 0)
    {
        vaNEXT++;
        return '0';
    }
    if (arg is Int64)
    {
        vaNEXT++;
        i64 v = (i64)arg;
        return v == 0 ? '0' : (char)v;
    }
    vaNEXT++;
    return (char)arg;
}
// Fetch the next vararg as a double.
static Double va_arg(object[] ap, Double sysType)
{
    object arg = ap[vaNEXT++];
    return Convert.ToDouble(arg);
}
// Fetch the next vararg as a logging delegate.
static dxLog va_arg(object[] ap, dxLog sysType)
{
    object arg = ap[vaNEXT++];
    return (dxLog)arg;
}
// Fetch the next vararg as a 64-bit integer; non-Int64 values fall back to
// their hash code (used by the port when formatting object identities).
static Int64 va_arg(object[] ap, Int64 sysType)
{
    object arg = ap[vaNEXT++];
    if (arg is Int64)
        return Convert.ToInt64(arg);
    return (Int64)arg.GetHashCode();
}
// Fetch the next vararg as a 32-bit integer, reinterpreting large unsigned
// values back into Int32 range (two's-complement wrap).
static Int32 va_arg(object[] ap, Int32 sysType)
{
    object arg = ap[vaNEXT++];
    if (Convert.ToInt64(arg) > 0 && Convert.ToUInt32(arg) > Int32.MaxValue)
        return (Int32)(Convert.ToUInt32(arg) - System.UInt32.MaxValue - 1);
    return Convert.ToInt32(arg);
}
// Fetch the next vararg as an int array; null passes through.
static Int32[] va_arg(object[] ap, Int32[] sysType)
{
    object arg = ap[vaNEXT++];
    return arg == null ? null : (Int32[])arg;
}
// Fetch the next vararg as a MemPage reference.
static MemPage va_arg(object[] ap, MemPage sysType)
{
    object arg = ap[vaNEXT++];
    return (MemPage)arg;
}
// Fetch the next vararg as an untyped object.
static Object va_arg(object[] ap, Object sysType)
{
    return ap[vaNEXT++];
}
// Fetch the next vararg as a database connection handle.
static sqlite3 va_arg(object[] ap, sqlite3 sysType)
{
    object arg = ap[vaNEXT++];
    return (sqlite3)arg;
}
// Fetch the next vararg as a memory-allocator method table.
static sqlite3_mem_methods va_arg(object[] ap, sqlite3_mem_methods sysType)
{
    object arg = ap[vaNEXT++];
    return (sqlite3_mem_methods)arg;
}
// Fetch the next vararg as a mutex method table.
static sqlite3_mutex_methods va_arg(object[] ap, sqlite3_mutex_methods sysType)
{
    object arg = ap[vaNEXT++];
    return (sqlite3_mutex_methods)arg;
}
// Fetch the next vararg as a source-list reference.
static SrcList va_arg(object[] ap, SrcList sysType)
{
    object arg = ap[vaNEXT++];
    return (SrcList)arg;
}
// Fetch the next vararg as a string, normalizing the shapes the port passes:
// UTF-8 byte buffers, StringBuilder, char, boxed Int32 (NULL char* sentinel),
// or an actual string. Returns "NULL" when the argument list is exhausted.
static String va_arg(object[] ap, String sysType)
{
    // BUG FIX: the original guard (ap.Length < vaNEXT - 1) did not catch
    // vaNEXT == ap.Length and indexed past the end of the argument array.
    if (vaNEXT >= ap.Length || ap[vaNEXT] == null)
    {
        vaNEXT++;
        return "NULL";
    }
    if (ap[vaNEXT] is Byte[])
    {
        Byte[] bytes = (Byte[])ap[vaNEXT++];
        string text = Encoding.UTF8.GetString(bytes, 0, bytes.Length);
        // A lone NUL terminator stands for the empty C string.
        return text == "\0" ? "" : text;
    }
    if (ap[vaNEXT] is Int32)
    {
        // A boxed integer here is the port's NULL char* sentinel.
        vaNEXT++;
        return null;
    }
    if (ap[vaNEXT] is StringBuilder)
        return ap[vaNEXT++].ToString();
    if (ap[vaNEXT] is Char)
        return ((Char)ap[vaNEXT++]).ToString();
    return (String)ap[vaNEXT++];
}
// Fetch the next vararg as a parser token.
static Token va_arg(object[] ap, Token sysType)
{
    object arg = ap[vaNEXT++];
    return (Token)arg;
}
// Fetch the next vararg as an unsigned 32-bit value; reference types are
// represented by their hash code.
static UInt32 va_arg(object[] ap, UInt32 sysType)
{
    object arg = ap[vaNEXT++];
    if (arg.GetType().IsClass)
        return (UInt32)arg.GetHashCode();
    return Convert.ToUInt32(arg);
}
// Fetch the next vararg as an unsigned 64-bit value; reference types are
// represented by their hash code.
static UInt64 va_arg(object[] ap, UInt64 sysType)
{
    object arg = ap[vaNEXT++];
    if (arg.GetType().IsClass)
        return (UInt64)arg.GetHashCode();
    return Convert.ToUInt64(arg);
}
// Fetch the next vararg as a void-returning callback.
static void_function va_arg(object[] ap, void_function sysType)
{
    object arg = ap[vaNEXT++];
    return (void_function)arg;
}
// End varargs processing for a string list: drop the list and reset the
// shared cursor/format state.
static void va_end(ref string[] ap)
{
    vaFORMAT = "";
    vaNEXT = -1;
    ap = null;
}
// End varargs processing for an object list: drop the list and reset the
// shared cursor/format state.
static void va_end(ref object[] ap)
{
    vaFORMAT = "";
    vaNEXT = -1;
    ap = null;
}
// Managed stand-in for C localtime(): convert a Unix timestamp (seconds since
// 1970-01-01 UTC) into broken-down local time.
// NOTE(review): tm_mon receives RefTime.Month (1-12) and tm_year the full year,
// while the tm class documents the C ranges (0-11 and years-since-1900).
// Callers in this port appear to rely on the 1-based values — confirm before
// changing either side.
public static tm localtime(time_t baseTime)
{
    System.DateTime RefTime = new System.DateTime(1970, 1, 1, 0, 0, 0, 0);
    RefTime = RefTime.AddSeconds(Convert.ToDouble(baseTime)).ToLocalTime();
    tm tm = new tm();
    tm.tm_sec = RefTime.Second;
    tm.tm_min = RefTime.Minute;
    tm.tm_hour = RefTime.Hour;
    tm.tm_mday = RefTime.Day;
    tm.tm_mon = RefTime.Month;
    tm.tm_year = RefTime.Year;
    tm.tm_wday = (int)RefTime.DayOfWeek;
    tm.tm_yday = RefTime.DayOfYear;
    tm.tm_isdst = RefTime.IsDaylightSavingTime() ? 1 : 0;
    return tm;
}
// Convert a DateTime to whole seconds since the Unix epoch. No time-zone
// adjustment is applied; the caller supplies a date of the appropriate kind.
public static long ToUnixtime(System.DateTime date)
{
    System.TimeSpan sinceEpoch = date - new System.DateTime(1970, 1, 1, 0, 0, 0, 0);
    return Convert.ToInt64(sinceEpoch.TotalSeconds);
}
// Convert seconds since the Unix epoch back into a DateTime.
public static System.DateTime ToCSharpTime(long unixTime)
{
    System.DateTime epoch = new System.DateTime(1970, 1, 1, 0, 0, 0, 0);
    return epoch.AddSeconds(Convert.ToDouble(unixTime));
}
// Managed counterpart of the C "struct tm" (broken-down calendar time).
// NOTE(review): localtime() above fills tm_mon with 1-12 and tm_year with the
// full year, not the C ranges documented below — verify callers before relying
// on either convention.
public class tm
{
    public int tm_sec;   /* seconds after the minute - [0,59] */
    public int tm_min;   /* minutes after the hour - [0,59] */
    public int tm_hour;  /* hours since midnight - [0,23] */
    public int tm_mday;  /* day of the month - [1,31] */
    public int tm_mon;   /* months since January - [0,11] */
    public int tm_year;  /* years since 1900 */
    public int tm_wday;  /* days since Sunday - [0,6] */
    public int tm_yday;  /* days since January 1 - [0,365] */
    public int tm_isdst; /* daylight savings time flag */
};
// Managed mirror of the Win32 FILETIME structure (low/high 32-bit halves).
public struct FILETIME
{
    public u32 dwLowDateTime;
    public u32 dwHighDateTime;
}
// Stub for the Win32 bytes-per-sector query: always reports a fixed 4096.
public static int GetbytesPerSector(StringBuilder diskPath)
{
    const int DefaultSectorSize = 4096;
    return DefaultSectorSize;
}
// Exchange the contents of two variables.
static void SWAP<T>(ref T A, ref T B)
{
    T held = B;
    B = A;
    A = held;
}
// Step function for the x_count() test aggregate: counts rows and sums
// integer/real arguments, with deliberate error triggers on 40/41 used by the
// test suite.
static void x_CountStep(
sqlite3_context context,
int argc,
sqlite3_value[] argv
)
{
    SumCtx p;
    int type;
    Debug.Assert(argc <= 1);
    // Lazily attach the per-aggregate SumCtx to the aggregate context Mem.
    Mem pMem = sqlite3_aggregate_context(context, 1);//sizeof(*p));
    if (pMem._SumCtx == null)
        pMem._SumCtx = new SumCtx();
    p = pMem._SumCtx;
    if (p.Context == null)
        p.Context = pMem;
    if (argc == 0 || SQLITE_NULL == sqlite3_value_type(argv[0]))
    {
        // Zero-arg / NULL case: count the row and bump the sum by one.
        p.cnt++;
        p.iSum += 1;
    }
    else
    {
        type = sqlite3_value_numeric_type(argv[0]);
        if (p != null && type != SQLITE_NULL)
        {
            p.cnt++;
            if (type == SQLITE_INTEGER)
            {
                i64 v = sqlite3_value_int64(argv[0]);
                // Test hook: values 40 and 41 force an error result.
                if (v == 40 || v == 41)
                {
                    sqlite3_result_error(context, "value of " + v + " handed to x_count", -1);
                    return;
                }
                else
                {
                    p.iSum += v;
                    // Sign-bit overflow detection borrowed from sum().
                    // NOTE(review): v was already added to p.iSum above, and this
                    // branch adds it again via iNewSum — the integer sum appears to
                    // accumulate 2*v on this path. Compare with the C original
                    // before changing.
                    if (!(p.approx | p.overflow != 0))
                    {
                        i64 iNewSum = p.iSum + v;
                        int s1 = (int)(p.iSum >> (sizeof(i64) * 8 - 1));
                        int s2 = (int)(v >> (sizeof(i64) * 8 - 1));
                        int s3 = (int)(iNewSum >> (sizeof(i64) * 8 - 1));
                        p.overflow = ((s1 & s2 & ~s3) | (~s1 & ~s2 & s3)) != 0 ? 1 : 0;
                        p.iSum = iNewSum;
                    }
                }
            }
            else
            {
                // Non-integer numerics accumulate into the floating sum.
                p.rSum += sqlite3_value_double(argv[0]);
                p.approx = true;
            }
        }
    }
}
// Finalizer for the x_count() test aggregate: report overflow, the floating
// sum, a deliberate error when the total is exactly 42, or the integer sum.
static void x_CountFinalize(sqlite3_context context)
{
    Mem pMem = sqlite3_aggregate_context(context, 0);
    SumCtx sum = pMem._SumCtx;
    if (sum == null || sum.cnt <= 0)
        return;

    if (sum.overflow != 0)
    {
        sqlite3_result_error(context, "integer overflow", -1);
    }
    else if (sum.approx)
    {
        sqlite3_result_double(context, sum.rSum);
    }
    else if (sum.iSum == 42)
    {
        // Test hook: a total of 42 forces an error result.
        sqlite3_result_error(context, "x_count totals to 42", -1);
    }
    else
    {
        sqlite3_result_int64(context, sum.iSum);
    }
}
#if SQLITE_MUTEX_W32
//---------------------WIN32 Definitions
// Managed replacement for Win32 GetCurrentThreadId().
static int GetCurrentThreadId()
{
    return Thread.CurrentThread.ManagedThreadId;
}
// NOTE(review): 'location' is passed BY VALUE, so only the local copy is
// incremented — the caller's variable is never updated, unlike the Win32 API
// this shadows (which takes a pointer). Confirm whether callers use only the
// return value; fixing it would require a 'ref' parameter (interface change).
static long InterlockedIncrement( long location )
{
    Interlocked.Increment( ref location );
    return location;
}
// Managed EnterCriticalSection: block until the monitor for 'mtx' is acquired.
static void EnterCriticalSection( Object mtx )
{
    //long mid = mtx.GetHashCode();
    //int tid = Thread.CurrentThread.ManagedThreadId;
    //long ticks = cnt++;
    //Debug.WriteLine(String.Format( "{2}: +EnterCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, ticks) );
    Monitor.Enter( mtx );
}
// NOTE(review): this ACQUIRES the monitor at initialization time (paired with
// the Monitor.Exit in DeleteCriticalSection) rather than merely creating the
// lock — unusual for an InitializeCriticalSection shim; confirm intent before
// changing.
static void InitializeCriticalSection( Object mtx )
{
    //Debug.WriteLine(String.Format( "{2}: +InitializeCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, System.DateTime.Now.Ticks ));
    Monitor.Enter( mtx );
}
// Managed DeleteCriticalSection: releases the monitor taken by
// InitializeCriticalSection above (see the note there).
static void DeleteCriticalSection( Object mtx )
{
    //Debug.WriteLine(String.Format( "{2}: +DeleteCriticalSection; Mutex {0} Thread {1}", mtx.GetHashCode(), Thread.CurrentThread.ManagedThreadId, System.DateTime.Now.Ticks) );
    Monitor.Exit( mtx );
}
// Managed LeaveCriticalSection: release the monitor for 'mtx'.
static void LeaveCriticalSection( Object mtx )
{
    Monitor.Exit( mtx );
}
#endif
// Miscellaneous Windows Constants
//#define ERROR_FILE_NOT_FOUND 2L
//#define ERROR_HANDLE_DISK_FULL 39L
//#define ERROR_NOT_SUPPORTED 50L
//#define ERROR_DISK_FULL 112L
const long ERROR_FILE_NOT_FOUND = 2L;
const long ERROR_HANDLE_DISK_FULL = 39L;
const long ERROR_NOT_SUPPORTED = 50L;
const long ERROR_DISK_FULL = 112L;
// Case-folding table matching SQLite's sqlite3UpperToLower[]: for SQLITE_ASCII
// builds, maps 'A'-'Z' to lower case and every other byte to itself. Indices
// beyond the table length fold to themselves (identity), which also covers
// the non-ASCII build where the table is empty.
private class SQLite3UpperToLower
{
    static int[] sqlite3UpperToLower = new int[] {
#if SQLITE_ASCII
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 97, 98, 99,100,101,102,103,
104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,
122, 91, 92, 93, 94, 95, 96, 97, 98, 99,100,101,102,103,104,105,106,107,
108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,
126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,
144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,
162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,
180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,
198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,
216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,
234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,
252,253,254,255
#endif
    };

    // Fold a signed index; out-of-range values map to themselves.
    public int this[int index]
    {
        get
        {
            if (index < sqlite3UpperToLower.Length)
                return sqlite3UpperToLower[index];
            else
                return index;
        }
    }

    // Fold an unsigned index; out-of-range values map to themselves.
    public int this[u32 index]
    {
        get
        {
            if (index < sqlite3UpperToLower.Length)
                return sqlite3UpperToLower[index];
            else
                return (int)index;
        }
    }
}
static SQLite3UpperToLower sqlite3UpperToLower = new SQLite3UpperToLower();
static SQLite3UpperToLower UpperToLower = sqlite3UpperToLower;
}
}
| |
/*******************************************************************************
* Copyright (c) 2013, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
using System;
using System.Diagnostics;
using SharpBox2D.Common;
namespace SharpBox2D.Collision.Shapes
{
/**
 * A chain shape is a free form sequence of line segments. The chain has two-sided collision, so you
 * can use inside and outside collision. Therefore, you may use any winding order. Connectivity
 * information is used to create smooth collisions. WARNING: The chain will not collide properly if
 * there are self-intersections.
 *
 * @author Daniel
 */
public class ChainShape : Shape
{
    // Copied vertex array (owned by this shape) and its logical length.
    public Vec2[] m_vertices;
    public int m_count;
    // Optional neighbor vertices used for smooth collision at the chain's ends.
    public Vec2 m_prevVertex = new Vec2(), m_nextVertex = new Vec2();
    public bool m_hasPrevVertex = false, m_hasNextVertex = false;

    // Scratch edge reused by computeDistanceToOut/raycast; shared mutable state,
    // so those calls are not reentrant on a single ChainShape instance.
    private EdgeShape pool0 = new EdgeShape();

    public ChainShape() : base(ShapeType.CHAIN)
    {
        m_vertices = null;
        m_radius = Settings.polygonRadius;
        m_count = 0;
    }

    // A chain of N vertices exposes N-1 edge children.
    public override int getChildCount()
    {
        return m_count - 1;
    }

    /**
     * Get a child edge.
     */
    public void getChildEdge(EdgeShape edge, int index)
    {
        Debug.Assert(0 <= index && index < m_count - 1);
        edge.m_radius = m_radius;

        // The edge's two endpoints.
        Vec2 v0 = m_vertices[index + 0];
        Vec2 v1 = m_vertices[index + 1];
        edge.m_vertex1.x = v0.x;
        edge.m_vertex1.y = v0.y;
        edge.m_vertex2.x = v1.x;
        edge.m_vertex2.y = v1.y;

        // Preceding ghost vertex: previous chain vertex, or the external
        // prev-vertex when this is the first edge.
        if (index > 0)
        {
            Vec2 v = m_vertices[index - 1];
            edge.m_vertex0.x = v.x;
            edge.m_vertex0.y = v.y;
            edge.m_hasVertex0 = true;
        }
        else
        {
            edge.m_vertex0.x = m_prevVertex.x;
            edge.m_vertex0.y = m_prevVertex.y;
            edge.m_hasVertex0 = m_hasPrevVertex;
        }

        // Following ghost vertex: next chain vertex, or the external
        // next-vertex when this is the last edge.
        if (index < m_count - 2)
        {
            Vec2 v = m_vertices[index + 2];
            edge.m_vertex3.x = v.x;
            edge.m_vertex3.y = v.y;
            edge.m_hasVertex3 = true;
        }
        else
        {
            edge.m_vertex3.x = m_nextVertex.x;
            edge.m_vertex3.y = m_nextVertex.y;
            edge.m_hasVertex3 = m_hasNextVertex;
        }
    }

    // Distance query delegated to the pooled edge for the given child.
    public override float computeDistanceToOut(Transform xf, Vec2 p, int childIndex, Vec2 normalOut)
    {
        EdgeShape edge = pool0;
        getChildEdge(edge, childIndex);
        return edge.computeDistanceToOut(xf, p, 0, normalOut);
    }

    // Chains have no interior, so no point is ever "inside".
    public override bool testPoint(Transform xf, Vec2 p)
    {
        return false;
    }

    // Raycast against one child edge; the pooled edge is loaded with the
    // child's endpoints (index m_count wraps to 0 for loop chains).
    public override bool raycast(RayCastOutput output, RayCastInput input, Transform xf, int childIndex)
    {
        Debug.Assert(childIndex < m_count);

        EdgeShape edgeShape = pool0;

        int i1 = childIndex;
        int i2 = childIndex + 1;
        if (i2 == m_count)
        {
            i2 = 0;
        }
        Vec2 v = m_vertices[i1];
        edgeShape.m_vertex1.x = v.x;
        edgeShape.m_vertex1.y = v.y;
        Vec2 v1 = m_vertices[i2];
        edgeShape.m_vertex2.x = v1.x;
        edgeShape.m_vertex2.y = v1.y;

        return edgeShape.raycast(output, input, xf, 0);
    }

    // Writes the world-space AABB of one child edge into 'aabb'.
    public override void computeAABB(AABB aabb, Transform xf, int childIndex)
    {
        Debug.Assert(childIndex < m_count);
        Vec2 lower = aabb.lowerBound;
        Vec2 upper = aabb.upperBound;

        int i1 = childIndex;
        int i2 = childIndex + 1;
        if (i2 == m_count)
        {
            i2 = 0;
        }

        Vec2 vi1 = m_vertices[i1];
        Vec2 vi2 = m_vertices[i2];
        Rot xfq = xf.q;
        Vec2 xfp = xf.p;
        // Inlined Transform.mul for both endpoints.
        float v1x = (xfq.c*vi1.x - xfq.s*vi1.y) + xfp.x;
        float v1y = (xfq.s*vi1.x + xfq.c*vi1.y) + xfp.y;
        float v2x = (xfq.c*vi2.x - xfq.s*vi2.y) + xfp.x;
        float v2y = (xfq.s*vi2.x + xfq.c*vi2.y) + xfp.y;

        lower.x = v1x < v2x ? v1x : v2x;
        lower.y = v1y < v2y ? v1y : v2y;
        upper.x = v1x > v2x ? v1x : v2x;
        upper.y = v1y > v2y ? v1y : v2y;
    }

    // Chains carry no mass.
    public override void computeMass(MassData massData, float density)
    {
        massData.mass = 0.0f;
        massData.center.setZero();
        massData.I = 0.0f;
    }

    // Deep copy: vertices are re-copied via createChain, then the connectivity
    // state is duplicated.
    public override Shape clone()
    {
        ChainShape clone = new ChainShape();
        clone.createChain(m_vertices, m_count);
        clone.m_prevVertex.set(m_prevVertex);
        clone.m_nextVertex.set(m_nextVertex);
        clone.m_hasPrevVertex = m_hasPrevVertex;
        clone.m_hasNextVertex = m_hasNextVertex;
        return clone;
    }

    /**
     * Create a loop. This automatically adjusts connectivity.
     *
     * @param vertices an array of vertices, these are copied
     * @param count the vertex count
     */
    public void createLoop(Vec2[] vertices, int count)
    {
        Debug.Assert(m_vertices == null && m_count == 0);
        Debug.Assert(count >= 3);
        // One extra slot: the loop is closed by repeating the first vertex.
        m_count = count + 1;
        m_vertices = new Vec2[m_count];
        for (int i = 1; i < count; i++)
        {
            Vec2 v1 = vertices[i - 1];
            Vec2 v2 = vertices[i];
            // If the code crashes here, it means your vertices are too close together.
            if (MathUtils.distanceSquared(v1, v2) < Settings.linearSlop*Settings.linearSlop)
            {
                throw new InvalidOperationException("Vertices of chain shape are too close together");
            }
        }
        for (int i = 0; i < count; i++)
        {
            m_vertices[i] = new Vec2(vertices[i]);
        }
        m_vertices[count] = new Vec2(m_vertices[0]);
        // Loops are self-connected: ghost vertices come from the loop itself.
        m_prevVertex.set(m_vertices[m_count - 2]);
        m_nextVertex.set(m_vertices[1]);
        m_hasPrevVertex = true;
        m_hasNextVertex = true;
    }

    /**
     * Create a chain with isolated end vertices.
     *
     * @param vertices an array of vertices, these are copied
     * @param count the vertex count
     */
    public void createChain(Vec2[] vertices, int count)
    {
        Debug.Assert(m_vertices == null && m_count == 0);
        Debug.Assert(count >= 2);
        m_count = count;
        m_vertices = new Vec2[m_count];
        for (int i = 1; i < m_count; i++)
        {
            Vec2 v1 = vertices[i - 1];
            Vec2 v2 = vertices[i];
            // If the code crashes here, it means your vertices are too close together.
            if (MathUtils.distanceSquared(v1, v2) < Settings.linearSlop*Settings.linearSlop)
            {
                throw new InvalidOperationException("Vertices of chain shape are too close together");
            }
        }
        for (int i = 0; i < m_count; i++)
        {
            m_vertices[i] = new Vec2(vertices[i]);
        }
        // Open chains start without ghost vertices; see setPrevVertex/setNextVertex.
        m_hasPrevVertex = false;
        m_hasNextVertex = false;

        m_prevVertex.setZero();
        m_nextVertex.setZero();
    }

    /**
     * Establish connectivity to a vertex that precedes the first vertex. Don't call this for loops.
     *
     * @param prevVertex
     */
    public void setPrevVertex(Vec2 prevVertex)
    {
        m_prevVertex.set(prevVertex);
        m_hasPrevVertex = true;
    }

    /**
     * Establish connectivity to a vertex that follows the last vertex. Don't call this for loops.
     *
     * @param nextVertex
     */
    public void setNextVertex(Vec2 nextVertex)
    {
        m_nextVertex.set(nextVertex);
        m_hasNextVertex = true;
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
// Auto-generated driver for Sse.CompareNotLessThan(Vector128<Single>, Vector128<Single>):
// runs every supported scenario and throws if any validation failed.
// (This file is generated from a template — prefer changing the template.)
private static void CompareNotLessThanSingle()
{
    var test = new SimpleBinaryOpTest__CompareNotLessThanSingle();

    if (test.IsSupported)
    {
        // Validates basic functionality works, using Unsafe.Read
        test.RunBasicScenario_UnsafeRead();

        if (Sse.IsSupported)
        {
            // Validates basic functionality works, using Load
            test.RunBasicScenario_Load();

            // Validates basic functionality works, using LoadAligned
            test.RunBasicScenario_LoadAligned();
        }

        // Validates calling via reflection works, using Unsafe.Read
        test.RunReflectionScenario_UnsafeRead();

        if (Sse.IsSupported)
        {
            // Validates calling via reflection works, using Load
            test.RunReflectionScenario_Load();

            // Validates calling via reflection works, using LoadAligned
            test.RunReflectionScenario_LoadAligned();
        }

        // Validates passing a static member works
        test.RunClsVarScenario();

        if (Sse.IsSupported)
        {
            // Validates passing a static member works, using pinning and Load
            test.RunClsVarScenario_Load();
        }

        // Validates passing a local works, using Unsafe.Read
        test.RunLclVarScenario_UnsafeRead();

        if (Sse.IsSupported)
        {
            // Validates passing a local works, using Load
            test.RunLclVarScenario_Load();

            // Validates passing a local works, using LoadAligned
            test.RunLclVarScenario_LoadAligned();
        }

        // Validates passing the field of a local class works
        test.RunClassLclFldScenario();

        if (Sse.IsSupported)
        {
            // Validates passing the field of a local class works, using pinning and Load
            test.RunClassLclFldScenario_Load();
        }

        // Validates passing an instance member of a class works
        test.RunClassFldScenario();

        if (Sse.IsSupported)
        {
            // Validates passing an instance member of a class works, using pinning and Load
            test.RunClassFldScenario_Load();
        }

        // Validates passing the field of a local struct works
        test.RunStructLclFldScenario();

        if (Sse.IsSupported)
        {
            // Validates passing the field of a local struct works, using pinning and Load
            test.RunStructLclFldScenario_Load();
        }

        // Validates passing an instance member of a struct works
        test.RunStructFldScenario();

        if (Sse.IsSupported)
        {
            // Validates passing an instance member of a struct works, using pinning and Load
            test.RunStructFldScenario_Load();
        }
    }
    else
    {
        // Validates we throw on unsupported hardware
        test.RunUnsupportedScenario();
    }

    if (!test.Succeeded)
    {
        throw new Exception("One or more scenarios did not complete as expected.");
    }
}
}
public sealed unsafe class SimpleBinaryOpTest__CompareNotLessThanSingle
{
// Pinned, over-allocated input/output buffers whose exposed pointers are
// rounded up to the requested alignment (16 or 32 bytes) so the *Aligned
// load scenarios can run against them.
private struct DataTable
{
    private byte[] inArray1;
    private byte[] inArray2;
    private byte[] outArray;

    private GCHandle inHandle1;
    private GCHandle inHandle2;
    private GCHandle outHandle;

    private ulong alignment;

    public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
    {
        int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
        int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
        int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
        // Each buffer is alignment*2 bytes so the aligned pointer still has
        // room for the full payload after rounding up.
        if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
        {
            throw new ArgumentException("Invalid value of alignment");
        }

        this.inArray1 = new byte[alignment * 2];
        this.inArray2 = new byte[alignment * 2];
        this.outArray = new byte[alignment * 2];

        // Pin so the aligned raw pointers below stay valid across GCs.
        this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
        this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
        this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);

        this.alignment = (ulong)alignment;

        Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
        Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
    }

    public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
    public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
    public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);

    public void Dispose()
    {
        inHandle1.Free();
        inHandle2.Free();
        outHandle.Free();
    }

    // Round the pointer up to the next multiple of expectedAlignment
    // (expectedAlignment must be a power of two).
    private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
    {
        return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
    }
}
private struct TestStruct
{
public Vector128<Single> _fld1;
public Vector128<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__CompareNotLessThanSingle testClass)
{
var result = Sse.CompareNotLessThan(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__CompareNotLessThanSingle testClass)
{
fixed (Vector128<Single>* pFld1 = &_fld1)
fixed (Vector128<Single>* pFld2 = &_fld2)
{
var result = Sse.CompareNotLessThan(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector128<Single> _clsVar1;
private static Vector128<Single> _clsVar2;
private Vector128<Single> _fld1;
private Vector128<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__CompareNotLessThanSingle()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
}
public SimpleBinaryOpTest__CompareNotLessThanSingle()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Sse.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Sse.CompareNotLessThan(
Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Sse.CompareNotLessThan(
Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Sse.CompareNotLessThan(
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Sse).GetMethod(nameof(Sse.CompareNotLessThan), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Sse).GetMethod(nameof(Sse.CompareNotLessThan), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Sse).GetMethod(nameof(Sse.CompareNotLessThan), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Sse.CompareNotLessThan(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Single>* pClsVar1 = &_clsVar1)
fixed (Vector128<Single>* pClsVar2 = &_clsVar2)
{
var result = Sse.CompareNotLessThan(
Sse.LoadVector128((Single*)(pClsVar1)),
Sse.LoadVector128((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr);
var result = Sse.CompareNotLessThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr));
var op2 = Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr));
var result = Sse.CompareNotLessThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr));
var op2 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr));
var result = Sse.CompareNotLessThan(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__CompareNotLessThanSingle();
var result = Sse.CompareNotLessThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__CompareNotLessThanSingle();
fixed (Vector128<Single>* pFld1 = &test._fld1)
fixed (Vector128<Single>* pFld2 = &test._fld2)
{
var result = Sse.CompareNotLessThan(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Sse.CompareNotLessThan(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Single>* pFld1 = &_fld1)
fixed (Vector128<Single>* pFld2 = &_fld2)
{
var result = Sse.CompareNotLessThan(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse.CompareNotLessThan(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Sse.CompareNotLessThan(
Sse.LoadVector128((Single*)(&test._fld1)),
Sse.LoadVector128((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Single> op1, Vector128<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (BitConverter.SingleToInt32Bits(result[0]) != (!(left[0] < right[0]) ? -1 : 0))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (BitConverter.SingleToInt32Bits(result[i]) != (!(left[i] < right[i]) ? -1 : 0))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Sse)}.{nameof(Sse.CompareNotLessThan)}<Single>(Vector128<Single>, Vector128<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
// Driver for the Avx.Permute2x128 (Vector256<UInt32>, imm 2) test: runs every
// scenario the hardware allows and fails if any scenario reported a mismatch.
private static void Permute2x128UInt322()
{
var instance = new ImmBinaryOpTest__Permute2x128UInt322();
if (!instance.IsSupported)
{
// Without AVX the only valid behavior is throwing PlatformNotSupportedException.
instance.RunUnsupportedScenario();
}
else
{
// Basic functionality, operands read via Unsafe.Read.
instance.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Basic functionality via Load / LoadAligned.
instance.RunBasicScenario_Load();
instance.RunBasicScenario_LoadAligned();
}
// Invocation through reflection, operands read via Unsafe.Read.
instance.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Reflection invocation via Load / LoadAligned.
instance.RunReflectionScenario_Load();
instance.RunReflectionScenario_LoadAligned();
}
// Operands from static members.
instance.RunClsVarScenario();
// Operands from locals, read via Unsafe.Read.
instance.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Operands from locals via Load / LoadAligned.
instance.RunLclVarScenario_Load();
instance.RunLclVarScenario_LoadAligned();
}
// Operands from fields of a local class instance, then of the test instance.
instance.RunClassLclFldScenario();
instance.RunClassFldScenario();
// Operands from fields of a local struct, then of a struct instance member.
instance.RunStructLclFldScenario();
instance.RunStructFldScenario();
}
if (!instance.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
// Auto-generated test harness (GenerateTests.csx template) exercising
// Avx.Permute2x128 over Vector256<UInt32> operands with control byte 2.
// NOTE(review): this file is generated — prefer fixing the shared template
// over editing this class directly.
public sealed unsafe class ImmBinaryOpTest__Permute2x128UInt322
{
// Struct operand holder used by the "struct field" scenarios; fields are
// initialized from freshly generated random data.
private struct TestStruct
{
public Vector256<UInt32> _fld1;
public Vector256<UInt32> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref testStruct._fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref testStruct._fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
return testStruct;
}
// Operands come from this struct's instance fields.
public void RunStructFldScenario(ImmBinaryOpTest__Permute2x128UInt322 testClass)
{
var result = Avx.Permute2x128(_fld1, _fld2, 2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 32;
// Element counts are derived from the vector width (8 UInt32s per Vector256).
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt32>>() / sizeof(UInt32);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<UInt32>>() / sizeof(UInt32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt32>>() / sizeof(UInt32);
// Shared scratch arrays of randomly generated operand data.
private static UInt32[] _data1 = new UInt32[Op1ElementCount];
private static UInt32[] _data2 = new UInt32[Op2ElementCount];
// Static (class variable) operands for the ClsVar scenario.
private static Vector256<UInt32> _clsVar1;
private static Vector256<UInt32> _clsVar2;
// Instance field operands for the ClassFld scenario.
private Vector256<UInt32> _fld1;
private Vector256<UInt32> _fld2;
// Pinned, aligned buffers shared by the load/aligned-load scenarios.
private SimpleBinaryOpTest__DataTable<UInt32, UInt32, UInt32> _dataTable;
// Populates the static operands once, before any scenario runs.
static ImmBinaryOpTest__Permute2x128UInt322()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref _clsVar1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref _clsVar2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
}
// Populates the instance-field operands and the pinned data table.
public ImmBinaryOpTest__Permute2x128UInt322()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref _fld1), ref Unsafe.As<UInt32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt32>, byte>(ref _fld2), ref Unsafe.As<UInt32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
// Regenerate the scratch data so the data table holds inputs distinct from the fields.
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetUInt32(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetUInt32(); }
_dataTable = new SimpleBinaryOpTest__DataTable<UInt32, UInt32, UInt32>(_data1, _data2, new UInt32[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Avx.IsSupported;
public bool Succeeded { get; set; }
// Operands read via Unsafe.Read from the pinned buffers.
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Avx.Permute2x128(
Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray2Ptr),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Operands read via Avx.LoadVector256 (unaligned load).
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Avx.Permute2x128(
Avx.LoadVector256((UInt32*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((UInt32*)(_dataTable.inArray2Ptr)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Operands read via Avx.LoadAlignedVector256 (requires the data table's aligned pointers).
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Avx.Permute2x128(
Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray2Ptr)),
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Intrinsic invoked through reflection; operands via Unsafe.Read.
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Avx).GetMethod(nameof(Avx.Permute2x128), new Type[] { typeof(Vector256<UInt32>), typeof(Vector256<UInt32>), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray2Ptr),
(byte)2
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Intrinsic invoked through reflection; operands via unaligned load.
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Avx).GetMethod(nameof(Avx.Permute2x128), new Type[] { typeof(Vector256<UInt32>), typeof(Vector256<UInt32>), typeof(byte) })
.Invoke(null, new object[] {
Avx.LoadVector256((UInt32*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((UInt32*)(_dataTable.inArray2Ptr)),
(byte)2
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Intrinsic invoked through reflection; operands via aligned load.
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Avx).GetMethod(nameof(Avx.Permute2x128), new Type[] { typeof(Vector256<UInt32>), typeof(Vector256<UInt32>), typeof(byte) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray2Ptr)),
(byte)2
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Operands come from static fields.
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Avx.Permute2x128(
_clsVar1,
_clsVar2,
2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
// Operands come from local variables, read via Unsafe.Read.
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var left = Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector256<UInt32>>(_dataTable.inArray2Ptr);
var result = Avx.Permute2x128(left, right, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
// Operands come from local variables populated by unaligned load.
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var left = Avx.LoadVector256((UInt32*)(_dataTable.inArray1Ptr));
var right = Avx.LoadVector256((UInt32*)(_dataTable.inArray2Ptr));
var result = Avx.Permute2x128(left, right, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
// Operands come from local variables populated by aligned load.
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var left = Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray1Ptr));
var right = Avx.LoadAlignedVector256((UInt32*)(_dataTable.inArray2Ptr));
var result = Avx.Permute2x128(left, right, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
// Operands come from the fields of a freshly constructed local class instance.
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ImmBinaryOpTest__Permute2x128UInt322();
var result = Avx.Permute2x128(test._fld1, test._fld2, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Operands come from this instance's fields.
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Avx.Permute2x128(_fld1, _fld2, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
// Operands come from the fields of a local struct.
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Avx.Permute2x128(test._fld1, test._fld2, 2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
// Delegates to the struct so the intrinsic runs on a struct instance member.
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
// On unsupported hardware, the intrinsic must throw PlatformNotSupportedException.
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
// Validates from vector operands: copies them into arrays and defers to the array overload.
private void ValidateResult(Vector256<UInt32> left, Vector256<UInt32> right, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), left);
Unsafe.WriteUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), right);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
// Validates from raw pointers: copies the pointed-to data and defers to the array overload.
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
UInt32[] inArray1 = new UInt32[Op1ElementCount];
UInt32[] inArray2 = new UInt32[Op2ElementCount];
UInt32[] outArray = new UInt32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
// With control byte 2, the low 128-bit lane of the result is right's low lane
// and the high lane is left's low lane, so the expected output is
// result[0..3] == right[0..3] and result[4..7] == left[0..3].
private void ValidateResult(UInt32[] left, UInt32[] right, UInt32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != right[0])
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != (i < 4 ? right[i] : left[i-4]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Avx)}.{nameof(Avx.Permute2x128)}<UInt32>(Vector256<UInt32>.2, Vector256<UInt32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Xml;
using Microsoft.Build.BuildEngine.Shared;
using Microsoft.Win32;
namespace Microsoft.Build.BuildEngine
{
internal class VCWrapperProject
{
/// <summary>
/// Add a target for a VC project file into the XML doc that's being generated.
/// This is used only when building standalone VC projects
/// </summary>
/// <param name="msbuildProject"></param>
/// <param name="projectPath"></param>
/// <param name="targetName"></param>
/// <param name="subTargetName"></param>
/// <owner>RGoel</owner>
static private void AddVCBuildTarget
(
Project msbuildProject,
string projectPath,
string targetName,
string subTargetName
)
{
Target wrapperTarget = msbuildProject.Targets.AddNewTarget(targetName);
if (subTargetName != "Publish")
{
// Project-to-project references cannot be resolved when a VC project is
// built standalone, so always emit a warning about that first.
SolutionWrapperProject.AddErrorWarningMessageElement(wrapperTarget, XMakeElements.warning, true, "StandaloneVCProjectP2PRefsBroken");
string projectFullPath = Path.GetFullPath(projectPath);
AddVCBuildTaskElement(msbuildProject, wrapperTarget, "$(VCBuildSolutionFile)", projectFullPath, subTargetName, "$(PlatformName)", "$(ConfigurationName)");
}
else
{
// VCBuild has no notion of a "Publish" operation, so the best we can do
// is emit an error element that says so.
SolutionWrapperProject.AddErrorWarningMessageElement(wrapperTarget, XMakeElements.error, true, "SolutionVCProjectNoPublish");
}
}
/// <summary>
/// Adds a new VCBuild task element to the specified target
/// </summary>
/// <param name="msbuildProject">The wrapper project; its ToolsVersion decides whether post-2.0 task parameters are emitted</param>
/// <param name="target">The target to add the VCBuild task to</param>
/// <param name="solutionPath">Path to the solution if any</param>
/// <param name="projectPath">Path to the VC project file</param>
/// <param name="vcbuildTargetName">The VCBuild target name</param>
/// <param name="platformName">The platform parameter to VCBuild</param>
/// <param name="fullConfigurationName">Configuration property value</param>
/// <returns>The newly created VCBuild task element</returns>
static internal BuildTask AddVCBuildTaskElement
(
Project msbuildProject,
Target target,
string solutionPath,
string projectPath,
string vcbuildTargetName,
string platformName,
string fullConfigurationName
)
{
// The VCBuild task (which we already shipped) has a bug - it cannot
// find vcbuild.exe when running in MSBuild 64 bit unless it's on the path.
// So, pass it here, unless some explicit path was passed.
// Note, we have to do this even if we're in a 32 bit MSBuild, because we save the .sln.cache
// file, and the next build of the solution could be a 64 bit MSBuild.
if (VCBuildLocationHint != null) // Should only be null if vcbuild truly isn't installed; in that case, let the task log its error
{
// Emit a CreateProperty task that seeds VCBuildToolPath with the hint,
// but only when the user hasn't already supplied a value.
BuildTask createProperty = target.AddNewTask("CreateProperty");
createProperty.SetParameterValue("Value", VCBuildLocationHint);
createProperty.Condition = "'$(VCBuildToolPath)' == ''";
createProperty.AddOutputProperty("Value", "VCBuildToolPath");
}
BuildTask newTask = target.AddNewTask("VCBuild");
newTask.SetParameterValue("Projects", projectPath, true /* treat as literal */);
// Add the toolpath so that the user can override if necessary
newTask.SetParameterValue("ToolPath", "$(VCBuildToolPath)");
newTask.SetParameterValue("Configuration", fullConfigurationName);
if (!string.IsNullOrEmpty(platformName))
{
newTask.SetParameterValue("Platform", platformName);
}
newTask.SetParameterValue("SolutionFile", solutionPath);
if (!string.IsNullOrEmpty(vcbuildTargetName))
{
// The VCBuild target (e.g. "Rebuild"/"Clean") is expressed as a boolean
// task parameter named after the target.
newTask.SetParameterValue(vcbuildTargetName, "true");
}
// Add the override switch so that the user can supply one if necessary
newTask.SetParameterValue("Override", "$(VCBuildOverride)");
// Add any additional lib paths
newTask.SetParameterValue("AdditionalLibPaths", "$(VCBuildAdditionalLibPaths)");
// Only use new properties if we're not emitting a 2.0 project
if (!String.Equals(msbuildProject.ToolsVersion, "2.0", StringComparison.OrdinalIgnoreCase))
{
// Add any additional link library paths
newTask.SetParameterValue("AdditionalLinkLibraryPaths", "$(VCBuildAdditionalLinkLibraryPaths)");
// Add the useenv switch so that the user can supply one if necessary
// Note: "VCBuildUserEnvironment" is included for backwards-compatibility; the correct
// property name is "VCBuildUseEnvironment" to match the task parameter. When the old name is
// used the task will emit a warning.
newTask.SetParameterValue("UseEnvironment", "$(VCBuildUseEnvironment)");
}
// Deprecated spelling, emitted unconditionally for backwards compatibility
// (see the note above about VCBuildUserEnvironment).
newTask.SetParameterValue("UserEnvironment", "$(VCBuildUserEnvironment)");
// Add the additional options switches
newTask.SetParameterValue("AdditionalOptions", "$(VCBuildAdditionalOptions)");
return newTask;
}
/// <summary>
/// Generates an XmlDocument representing an in-memory MSBuild project that wraps the
/// given VC project, so it can be built via the VCBuild task as part of a solution build.
/// </summary>
/// <owner>LukaszG</owner>
static internal XmlDocument GenerateVCWrapperProject(Engine parentEngine, string vcProjectFilename, string toolsVersion)
{
    string projectPath = Path.GetFullPath(vcProjectFilename);
    Project wrapperProject;
    try
    {
        wrapperProject = new Project(parentEngine, toolsVersion);
    }
    catch (InvalidOperationException)
    {
        // An unrecognized tools version surfaces as InvalidOperationException;
        // rethrow it as an InvalidProjectFileException pointing at the VC project file.
        BuildEventFileInfo fileInfo = new BuildEventFileInfo(projectPath);
        string errorCode;
        string helpKeyword;
        string message = ResourceUtilities.FormatResourceString(out errorCode, out helpKeyword, "UnrecognizedToolsVersion", toolsVersion);
        throw new InvalidProjectFileException(projectPath, fileInfo.Line, fileInfo.Column, fileInfo.EndLine, fileInfo.EndColumn, message, null, errorCode, helpKeyword);
    }
    wrapperProject.IsLoadedByHost = false;
    wrapperProject.DefaultTargets = "Build";
    string wrapperProjectToolsVersion = SolutionWrapperProject.DetermineWrapperProjectToolsVersion(toolsVersion);
    wrapperProject.DefaultToolsVersion = wrapperProjectToolsVersion;
    // ConfigurationName is "<Configuration>" when no platform was supplied...
    BuildPropertyGroup configurationOnlyGroup = wrapperProject.AddNewPropertyGroup(true /* insertAtEndOfProject = true */);
    configurationOnlyGroup.Condition = " ('$(Configuration)' != '') and ('$(Platform)' == '') ";
    configurationOnlyGroup.AddNewProperty("ConfigurationName", "$(Configuration)");
    // ...and "<Configuration>|<Platform>" when both are present.
    BuildPropertyGroup configurationAndPlatformGroup = wrapperProject.AddNewPropertyGroup(true /* insertAtEndOfProject = true */);
    configurationAndPlatformGroup.Condition = " ('$(Configuration)' != '') and ('$(Platform)' != '') ";
    configurationAndPlatformGroup.AddNewProperty("ConfigurationName", "$(Configuration)|$(Platform)");
    // only use PlatformName if we only have the platform part
    BuildPropertyGroup platformOnlyGroup = wrapperProject.AddNewPropertyGroup(true /* insertAtEndOfProject = true */);
    platformOnlyGroup.Condition = " ('$(Configuration)' == '') and ('$(Platform)' != '') ";
    platformOnlyGroup.AddNewProperty("PlatformName", "$(Platform)");
    // One wrapper target per supported solution action.
    AddVCBuildTarget(wrapperProject, projectPath, "Build", null);
    AddVCBuildTarget(wrapperProject, projectPath, "Clean", "Clean");
    AddVCBuildTarget(wrapperProject, projectPath, "Rebuild", "Rebuild");
    AddVCBuildTarget(wrapperProject, projectPath, "Publish", "Publish");
    // Special environment variable to allow people to see the in-memory MSBuild project generated
    // to represent the VC project.
    if (Environment.GetEnvironmentVariable("MSBuildEmitSolution") != null)
    {
        wrapperProject.Save(vcProjectFilename + ".proj");
    }
    return wrapperProject.XmlDocument;
}
/// <summary>
/// Hint to give the VCBuild task to help it find vcbuild.exe.
/// Backing field for <see cref="VCBuildLocationHint"/>; computed on first access.
/// </summary>
static private string path;
/// <summary>
/// Hint to give the VCBuild task to help it find vcbuild.exe.
/// Directory in which vcbuild.exe is found.
/// </summary>
static internal string VCBuildLocationHint
{
get
{
// Lazily computed and cached for the lifetime of the process.
// NOTE(review): this lazy init is not synchronized; concurrent first reads may both
// probe the registry. Harmless if the computed result is stable - confirm.
if (path == null)
{
path = GenerateFullPathToTool(RegistryView.Default);
// If not found via the default view of a 64-bit process, retry the 32-bit
// registry view (presumably because VS9/VC9 register there - confirm).
if (path == null && Environment.Is64BitProcess)
{
path = GenerateFullPathToTool(RegistryView.Registry32);
}
// The hint is the containing directory, not the .exe path itself.
if (path != null)
{
path = Path.GetDirectoryName(path);
}
}
return path;
}
}
// The code below is mostly copied from the VCBuild task that we shipped in 3.5.
// It is the logic it uses to find vcbuild.exe. That logic had a flaw -
// in 64 bit MSBuild, in a vanilla command window (like in Team Build) it would not
// find vcbuild.exe. We use the logic below to predict whether VCBuild will find it,
// and if it won't, we will pass the "hint" to use the 64 bit program files location.
/// <summary>
/// constants for VS9 Pro and above SKUs
/// </summary>
// root registry key for VS9
private const string vs9RegKey = @"SOFTWARE\Microsoft\VisualStudio\9.0";
// the name of the value containing disk install directory for the IDE components
// ("...\common7\ide" for layouts)
private const string vs9InstallDirValueName = "InstallDir";
// relative path from the above directory to vcbuild.exe on layouts
// (climbs from common7\ide up to the VC packages directory)
private const string vs9RelativePathToVCBuildLayouts = @"..\..\vc\vcpackages\vcbuild.exe";
// relative path from the above directory to vcbuild.exe on batch
private const string vs9RelativePathToVCBuildBatch = @"vcbuild.exe";
/// <summary>
/// constants for the VC9 Express SKU
/// </summary>
// root registry key for VC9
private const string vc9RegKey = @"SOFTWARE\Microsoft\VCExpress\9.0";
// the name of the value containing disk install directory for the IDE components
// ("...\common7\ide" for layouts)
private const string vc9InstallDirValueName = "InstallDir";
// relative path from the above directory to vcbuild.exe on layouts
private const string vc9RelativePathToVCBuildLayouts = @"..\..\vc\vcpackages\vcbuild.exe";
// relative path from the above directory to vcbuild.exe on batch
private const string vc9RelativePathToVCBuildBatch = @"vcbuild.exe";
// name of the tool
private const string vcbuildName = "vcbuild.exe";
/// <summary>
/// Determining the path to vcbuild.exe
/// </summary>
/// <param name="registryView">Registry view (default, or explicit 32-bit) under which to probe HKLM</param>
/// <returns>path to vcbuild.exe, or null if it's not found</returns>
private static string GenerateFullPathToTool(RegistryView registryView)
{
using (RegistryKey baseKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, registryView))
{
// try VS9 professional and above SKUs first
string location = TryLocationFromRegistry(baseKey, vs9RegKey, vs9InstallDirValueName,
vs9RelativePathToVCBuildLayouts, vs9RelativePathToVCBuildBatch);
if (location != null)
{
return location;
}
// fall back to the VC Express SKU
location = TryLocationFromRegistry(baseKey, vc9RegKey, vc9InstallDirValueName,
vc9RelativePathToVCBuildLayouts, vc9RelativePathToVCBuildBatch);
if (location != null)
{
return location;
}
// finally, try looking in the system path
if (Microsoft.Build.BuildEngine.Shared.NativeMethods.FindOnPath(vcbuildName) == null)
{
// If SearchPath didn't find the file, it's not on the system path and we have no chance of finding it.
return null;
}
// NOTE(review): both branches return null. When vcbuild.exe IS on the PATH, no hint is
// needed (the VCBuild task can find it itself), so null here may be deliberate - but
// that makes the FindOnPath probe above redundant. Confirm intent.
return null;
}
}
/// <summary>
/// Looks up an install directory in the registry, if present, and probes for vcbuild.exe beneath it.
/// </summary>
/// <param name="root">Base registry key (HKLM opened under the desired registry view)</param>
/// <param name="subKeyName">Registry subkey to open</param>
/// <param name="valueName">Value under that key holding the install directory</param>
/// <param name="relativePathFromValueOnLayout">Relative path from the install directory to vcbuild.exe for layout installs</param>
/// <param name="relativePathFromValueOnBatch">Relative path from the install directory to vcbuild.exe for batch installs, or null</param>
/// <returns>Path to vcbuild.exe, or null if it's not found</returns>
/// <owner>danmose</owner>
private static string TryLocationFromRegistry(RegistryKey root, string subKeyName, string valueName,
string relativePathFromValueOnLayout, string relativePathFromValueOnBatch)
{
    using (RegistryKey subKey = root.OpenSubKey(subKeyName))
    {
        // Missing subkey means the product we were looking for isn't installed.
        if (subKey == null)
        {
            return null;
        }

        string installDirectory = (string)subKey.GetValue(valueName);
        if (installDirectory == null)
        {
            return null;
        }

        // Probe the layout-install location first.
        string layoutCandidate = Path.Combine(installDirectory, relativePathFromValueOnLayout);
        if (File.Exists(layoutCandidate))
        {
            return layoutCandidate;
        }

        // Then the alternate directory used by batch installs, when one is provided.
        if (relativePathFromValueOnBatch != null)
        {
            string batchCandidate = Path.Combine(installDirectory, relativePathFromValueOnBatch);
            if (File.Exists(batchCandidate))
            {
                return batchCandidate;
            }
        }

        // Didn't find it
        return null;
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Reflection.Internal;
using System.Reflection.Metadata;
namespace System.Reflection.PortableExecutable
{
public abstract class PEBuilder
{
// Values used to populate the COFF and PE (optional) headers.
public PEHeaderBuilder Header { get; }
// Computes the content id (GUID + timestamp) from the serialized image blobs; see Serialize.
public Func<IEnumerable<Blob>, BlobContentId> IdProvider { get; }
// True iff an explicit deterministic id provider was supplied to the constructor.
public bool IsDeterministic { get; }
// Sections are created on demand by the derived type's CreateSections.
private readonly Lazy<ImmutableArray<Section>> _lazySections;
// Reserved 4-byte checksum field of the PE header; written in WritePEHeader, patched in Sign.
private Blob _lazyChecksum;
/// <summary>
/// Describes a section to be emitted: its name and characteristics.
/// The content itself is produced later by <see cref="SerializeSection"/>.
/// </summary>
protected struct Section
{
// Section name as written into the section header (zero-padded/truncated to 8 bytes on write).
public readonly string Name;
public readonly SectionCharacteristics Characteristics;
public Section(string name, SectionCharacteristics characteristics)
{
if (name == null)
{
Throw.ArgumentNull(nameof(name));
}
Name = name;
Characteristics = characteristics;
}
}
/// <summary>
/// A section together with its serialized content and its computed file/memory layout.
/// </summary>
private struct SerializedSection
{
// Serialized content of the section.
public readonly BlobBuilder Builder;
public readonly string Name;
public readonly SectionCharacteristics Characteristics;
// RVA at which the section will be mapped.
public readonly int RelativeVirtualAddress;
// Content size rounded up to the file alignment.
public readonly int SizeOfRawData;
// File offset of the section content.
public readonly int PointerToRawData;
public SerializedSection(BlobBuilder builder, string name, SectionCharacteristics characteristics, int relativeVirtualAddress, int sizeOfRawData, int pointerToRawData)
{
Name = name;
Characteristics = characteristics;
Builder = builder;
RelativeVirtualAddress = relativeVirtualAddress;
SizeOfRawData = sizeOfRawData;
PointerToRawData = pointerToRawData;
}
// Unpadded size of the content (SizeOfRawData is this value aligned to FileAlignment).
public int VirtualSize => Builder.Count;
}
/// <summary>
/// Initializes the builder. A null <paramref name="deterministicIdProvider"/> selects a
/// time-based content-id provider and marks the output as non-deterministic.
/// </summary>
protected PEBuilder(PEHeaderBuilder header, Func<IEnumerable<Blob>, BlobContentId> deterministicIdProvider)
{
if (header == null)
{
Throw.ArgumentNull(nameof(header));
}
IdProvider = deterministicIdProvider ?? BlobContentId.GetTimeBasedProvider();
IsDeterministic = deterministicIdProvider != null;
Header = header;
// Sections are produced at most once, on first use, by the derived type.
_lazySections = new Lazy<ImmutableArray<Section>>(CreateSections);
}
/// <summary>
/// Returns the sections defined by the derived builder, validating that
/// <see cref="CreateSections"/> did not return a default (uninitialized) array.
/// </summary>
protected ImmutableArray<Section> GetSections()
{
var sections = _lazySections.Value;
if (sections.IsDefault)
{
throw new InvalidOperationException(SR.Format(SR.MustNotReturnNull, nameof(CreateSections)));
}
return sections;
}
// Defines the sections of the image; invoked lazily via _lazySections.
protected abstract ImmutableArray<Section> CreateSections();
// Produces the content of the named section given its final RVA/file location.
protected abstract BlobBuilder SerializeSection(string name, SectionLocation location);
// Supplies the data-directory table written into the PE header.
protected internal abstract PEDirectoriesBuilder GetDirectories();
/// <summary>
/// Serializes the PE image into <paramref name="builder"/> and returns the content id
/// computed by <see cref="IdProvider"/> over the serialized content.
/// </summary>
public BlobContentId Serialize(BlobBuilder builder)
{
// Define and serialize sections in two steps.
// We need to know about all sections before serializing them.
var serializedSections = SerializeSections();
// The positions and sizes of directories are calculated during section serialization.
var directories = GetDirectories();
Blob stampFixup;
WritePESignature(builder);
WriteCoffHeader(builder, serializedSections, out stampFixup);
WritePEHeader(builder, directories, serializedSections);
WriteSectionHeaders(builder, serializedSections);
builder.Align(Header.FileAlignment);
// Section contents follow the headers, each padded out to the file alignment.
foreach (var section in serializedSections)
{
builder.LinkSuffix(section.Builder);
builder.Align(Header.FileAlignment);
}
// Content id is computed over the whole image (the stamp bytes are still the reserved
// placeholder at this point).
var contentId = IdProvider(builder.GetBlobs());
// patch timestamp in COFF header:
var stampWriter = new BlobWriter(stampFixup);
stampWriter.WriteUInt32(contentId.Stamp);
Debug.Assert(stampWriter.RemainingBytes == 0);
return contentId;
}
/// <summary>
/// Serializes each section's content and computes its RVA, raw-data size and file offset.
/// </summary>
private ImmutableArray<SerializedSection> SerializeSections()
{
var sections = GetSections();
var result = ImmutableArray.CreateBuilder<SerializedSection>(sections.Length);
int sizeOfPeHeaders = Header.ComputeSizeOfPEHeaders(sections.Length);
// The first section starts where the headers end, rounded up to the respective alignments.
var nextRva = BitArithmetic.Align(sizeOfPeHeaders, Header.SectionAlignment);
var nextPointer = BitArithmetic.Align(sizeOfPeHeaders, Header.FileAlignment);
foreach (var section in sections)
{
var builder = SerializeSection(section.Name, new SectionLocation(nextRva, nextPointer));
var serialized = new SerializedSection(
builder,
section.Name,
section.Characteristics,
relativeVirtualAddress: nextRva,
sizeOfRawData: BitArithmetic.Align(builder.Count, Header.FileAlignment),
pointerToRawData: nextPointer);
result.Add(serialized);
// Memory layout is aligned to SectionAlignment; file layout to FileAlignment.
nextRva = BitArithmetic.Align(serialized.RelativeVirtualAddress + serialized.VirtualSize, Header.SectionAlignment);
nextPointer = serialized.PointerToRawData + serialized.SizeOfRawData;
}
return result.MoveToImmutable();
}
/// <summary>
/// Writes the MS-DOS stub followed by the "PE\0\0" signature.
/// </summary>
private void WritePESignature(BlobBuilder builder)
{
// MS-DOS stub (128 bytes)
builder.WriteBytes(s_dosHeader);
// PE Signature "PE\0\0"
builder.WriteUInt32(PEHeaders.PESignature);
}
// Standard 128-byte MS-DOS stub whose payload prints "This program cannot be run in DOS mode."
private static readonly byte[] s_dosHeader = new byte[]
{
0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x80, 0x00, 0x00, 0x00, // NT Header offset (0x80 == s_dosHeader.Length)
0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd,
0x21, 0xb8, 0x01, 0x4c, 0xcd, 0x21, 0x54, 0x68,
0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72,
0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f,
0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e,
0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20,
0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a,
0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
};
// NOTE(review): mutable static field; only initialized here as far as this file shows -
// consider making it readonly if no external writer exists.
internal static int DosHeaderSize = s_dosHeader.Length;
/// <summary>
/// Writes the COFF file header. The TimeDateStamp is not known yet (it derives from the
/// content id), so its 4 bytes are reserved and returned via <paramref name="stampFixup"/>
/// to be patched by <see cref="Serialize"/>.
/// </summary>
private void WriteCoffHeader(BlobBuilder builder, ImmutableArray<SerializedSection> sections, out Blob stampFixup)
{
// Machine (an unspecified value of 0 defaults to I386)
builder.WriteUInt16((ushort)(Header.Machine == 0 ? Machine.I386 : Header.Machine));
// NumberOfSections
builder.WriteUInt16((ushort)sections.Length);
// TimeDateStamp:
stampFixup = builder.ReserveBytes(sizeof(uint));
// PointerToSymbolTable (TODO: not supported):
// The file pointer to the COFF symbol table, or zero if no COFF symbol table is present.
// This value should be zero for a PE image.
builder.WriteUInt32(0);
// NumberOfSymbols (TODO: not supported):
// The number of entries in the symbol table. This data can be used to locate the string table,
// which immediately follows the symbol table. This value should be zero for a PE image.
builder.WriteUInt32(0);
// SizeOfOptionalHeader:
// The size of the optional header, which is required for executable files but not for object files.
// This value should be zero for an object file (TODO).
builder.WriteUInt16((ushort)PEHeader.Size(Header.Is32Bit));
// Characteristics
builder.WriteUInt16((ushort)Header.ImageCharacteristics);
}
/// <summary>
/// Writes the PE optional header: standard fields, NT-specific fields and the 16
/// data-directory entries. The 4-byte checksum field is reserved (written as zero)
/// and remembered in _lazyChecksum so <see cref="Sign"/> can patch it later.
/// </summary>
private void WritePEHeader(BlobBuilder builder, PEDirectoriesBuilder directories, ImmutableArray<SerializedSection> sections)
{
// Magic: PE32 for 32-bit images, PE32+ for 64-bit.
builder.WriteUInt16((ushort)(Header.Is32Bit ? PEMagic.PE32 : PEMagic.PE32Plus));
// Linker version:
builder.WriteByte(Header.MajorLinkerVersion);
builder.WriteByte(Header.MinorLinkerVersion);
// SizeOfCode:
builder.WriteUInt32((uint)SumRawDataSizes(sections, SectionCharacteristics.ContainsCode));
// SizeOfInitializedData:
builder.WriteUInt32((uint)SumRawDataSizes(sections, SectionCharacteristics.ContainsInitializedData));
// SizeOfUninitializedData:
builder.WriteUInt32((uint)SumRawDataSizes(sections, SectionCharacteristics.ContainsUninitializedData));
// AddressOfEntryPoint:
builder.WriteUInt32((uint)directories.AddressOfEntryPoint);
// BaseOfCode (RVA of the first code section, or 0 if none):
int codeSectionIndex = IndexOfSection(sections, SectionCharacteristics.ContainsCode);
builder.WriteUInt32((uint)(codeSectionIndex != -1 ? sections[codeSectionIndex].RelativeVirtualAddress : 0));
if (Header.Is32Bit)
{
// BaseOfData:
int dataSectionIndex = IndexOfSection(sections, SectionCharacteristics.ContainsInitializedData);
builder.WriteUInt32((uint)(dataSectionIndex != -1 ? sections[dataSectionIndex].RelativeVirtualAddress : 0));
// ImageBase is a 4-byte field in PE32:
builder.WriteUInt32((uint)Header.ImageBase);
}
else
{
// PE32+ has no BaseOfData; ImageBase is an 8-byte field:
builder.WriteUInt64(Header.ImageBase);
}
// NT additional fields:
builder.WriteUInt32((uint)Header.SectionAlignment);
builder.WriteUInt32((uint)Header.FileAlignment);
builder.WriteUInt16(Header.MajorOperatingSystemVersion);
builder.WriteUInt16(Header.MinorOperatingSystemVersion);
builder.WriteUInt16(Header.MajorImageVersion);
builder.WriteUInt16(Header.MinorImageVersion);
builder.WriteUInt16(Header.MajorSubsystemVersion);
builder.WriteUInt16(Header.MinorSubsystemVersion);
// Win32VersionValue (reserved, should be 0)
builder.WriteUInt32(0);
// SizeOfImage: end of the last section (unpadded size), rounded up to the section alignment.
var lastSection = sections[sections.Length - 1];
builder.WriteUInt32((uint)BitArithmetic.Align(lastSection.RelativeVirtualAddress + lastSection.VirtualSize, Header.SectionAlignment));
// SizeOfHeaders:
builder.WriteUInt32((uint)BitArithmetic.Align(Header.ComputeSizeOfPEHeaders(sections.Length), Header.FileAlignment));
// Checksum:
// Shall be zero for strong name signing.
_lazyChecksum = builder.ReserveBytes(sizeof(uint));
new BlobWriter(_lazyChecksum).WriteUInt32(0);
builder.WriteUInt16((ushort)Header.Subsystem);
builder.WriteUInt16((ushort)Header.DllCharacteristics);
// Stack/heap reserve and commit sizes are 4 bytes each in PE32, 8 bytes each in PE32+:
if (Header.Is32Bit)
{
builder.WriteUInt32((uint)Header.SizeOfStackReserve);
builder.WriteUInt32((uint)Header.SizeOfStackCommit);
builder.WriteUInt32((uint)Header.SizeOfHeapReserve);
builder.WriteUInt32((uint)Header.SizeOfHeapCommit);
}
else
{
builder.WriteUInt64(Header.SizeOfStackReserve);
builder.WriteUInt64(Header.SizeOfStackCommit);
builder.WriteUInt64(Header.SizeOfHeapReserve);
builder.WriteUInt64(Header.SizeOfHeapCommit);
}
// LoaderFlags
builder.WriteUInt32(0);
// The number of data-directory entries in the remainder of the header.
builder.WriteUInt32(16);
// directory entries (RVA + size pairs):
builder.WriteUInt32((uint)directories.ExportTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.ExportTable.Size);
builder.WriteUInt32((uint)directories.ImportTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.ImportTable.Size);
builder.WriteUInt32((uint)directories.ResourceTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.ResourceTable.Size);
builder.WriteUInt32((uint)directories.ExceptionTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.ExceptionTable.Size);
// Authenticode CertificateTable directory. Shall be zero before the PE is signed.
builder.WriteUInt32(0);
builder.WriteUInt32(0);
builder.WriteUInt32((uint)directories.BaseRelocationTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.BaseRelocationTable.Size);
builder.WriteUInt32((uint)directories.DebugTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.DebugTable.Size);
builder.WriteUInt32((uint)directories.CopyrightTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.CopyrightTable.Size);
builder.WriteUInt32((uint)directories.GlobalPointerTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.GlobalPointerTable.Size);
builder.WriteUInt32((uint)directories.ThreadLocalStorageTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.ThreadLocalStorageTable.Size);
builder.WriteUInt32((uint)directories.LoadConfigTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.LoadConfigTable.Size);
builder.WriteUInt32((uint)directories.BoundImportTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.BoundImportTable.Size);
builder.WriteUInt32((uint)directories.ImportAddressTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.ImportAddressTable.Size);
builder.WriteUInt32((uint)directories.DelayImportTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.DelayImportTable.Size);
builder.WriteUInt32((uint)directories.CorHeaderTable.RelativeVirtualAddress);
builder.WriteUInt32((uint)directories.CorHeaderTable.Size);
// Reserved, should be 0
builder.WriteUInt64(0);
}
/// <summary>
/// Writes a section-header entry for each serialized section, in order.
/// </summary>
private void WriteSectionHeaders(BlobBuilder builder, ImmutableArray<SerializedSection> serializedSections)
{
    for (int i = 0; i < serializedSections.Length; i++)
    {
        WriteSectionHeader(builder, serializedSections[i]);
    }
}
/// <summary>
/// Writes a single section-header entry. Sections with no content are skipped.
/// </summary>
private static void WriteSectionHeader(BlobBuilder builder, SerializedSection serializedSection)
{
    if (serializedSection.VirtualSize == 0)
    {
        return;
    }

    // Name: ASCII characters, zero-padded (or truncated) to exactly 8 bytes.
    string name = serializedSection.Name;
    for (int i = 0; i < 8; i++)
    {
        builder.WriteByte(i < name.Length ? (byte)name[i] : (byte)0);
    }

    builder.WriteUInt32((uint)serializedSection.VirtualSize);
    builder.WriteUInt32((uint)serializedSection.RelativeVirtualAddress);
    builder.WriteUInt32((uint)serializedSection.SizeOfRawData);
    builder.WriteUInt32((uint)serializedSection.PointerToRawData);

    // PointerToRelocations, PointerToLinenumbers, NumberOfRelocations and
    // NumberOfLinenumbers are not supported (TODO) and are always zero.
    builder.WriteUInt32(0);
    builder.WriteUInt32(0);
    builder.WriteUInt16(0);
    builder.WriteUInt16(0);

    builder.WriteUInt32((uint)serializedSection.Characteristics);
}
/// <summary>
/// Returns the index of the first section having all of the given characteristics, or -1.
/// </summary>
private static int IndexOfSection(ImmutableArray<SerializedSection> sections, SectionCharacteristics characteristics)
{
    int index = 0;
    foreach (var section in sections)
    {
        if ((section.Characteristics & characteristics) == characteristics)
        {
            return index;
        }
        index++;
    }
    return -1;
}
/// <summary>
/// Sums SizeOfRawData over all sections having all of the given characteristics.
/// </summary>
private static int SumRawDataSizes(ImmutableArray<SerializedSection> sections, SectionCharacteristics characteristics)
{
    int total = 0;
    foreach (var section in sections)
    {
        if ((section.Characteristics & characteristics) == characteristics)
        {
            total += section.SizeOfRawData;
        }
    }
    return total;
}
// internal for testing
/// <summary>
/// Enumerates the byte ranges of the image that a strong-name signature covers:
/// the PE headers without their alignment padding, plus all section content
/// excluding the strong name signature blob itself.
/// </summary>
internal static IEnumerable<Blob> GetContentToSign(BlobBuilder peImage, int peHeadersSize, int peHeaderAlignment, Blob strongNameSignatureFixup)
{
// Signed content includes
// - PE header without its alignment padding
// - all sections including their alignment padding and excluding strong name signature blob
// PE specification:
// To calculate the PE image hash, Authenticode orders the sections that are specified in the section table
// by address range, then hashes the resulting sequence of bytes, passing over the exclusion ranges.
//
// Note that sections are by construction ordered by their address, so there is no need to reorder.
int remainingHeaderToSign = peHeadersSize;
int remainingHeader = BitArithmetic.Align(peHeadersSize, peHeaderAlignment);
foreach (var blob in peImage.GetBlobs())
{
int blobStart = blob.Start;
int blobLength = blob.Length;
while (blobLength > 0)
{
// Still inside the (aligned) header region.
if (remainingHeader > 0)
{
int length;
if (remainingHeaderToSign > 0)
{
// Header proper: sign it.
length = Math.Min(remainingHeaderToSign, blobLength);
yield return new Blob(blob.Buffer, blobStart, length);
remainingHeaderToSign -= length;
}
else
{
// Header alignment padding: skip it.
length = Math.Min(remainingHeader, blobLength);
}
remainingHeader -= length;
blobStart += length;
blobLength -= length;
}
else if (blob.Buffer == strongNameSignatureFixup.Buffer)
{
// This blob holds the signature fixup: sign around it, then move to the next blob.
yield return GetPrefixBlob(new Blob(blob.Buffer, blobStart, blobLength), strongNameSignatureFixup);
yield return GetSuffixBlob(new Blob(blob.Buffer, blobStart, blobLength), strongNameSignatureFixup);
break;
}
else
{
// Regular section content: sign the rest of the blob whole.
yield return new Blob(blob.Buffer, blobStart, blobLength);
break;
}
}
}
}
// internal for testing
// The part of container preceding blob (both share the same buffer).
internal static Blob GetPrefixBlob(Blob container, Blob blob) => new Blob(container.Buffer, container.Start, blob.Start - container.Start);
// The part of container following blob (both share the same buffer).
internal static Blob GetSuffixBlob(Blob container, Blob blob) => new Blob(container.Buffer, blob.Start + blob.Length, container.Start + container.Length - blob.Start - blob.Length);
// internal for testing
/// <summary>
/// Enumerates the image content with the 4-byte checksum field excluded
/// (the PE checksum is computed over everything but itself).
/// </summary>
internal static IEnumerable<Blob> GetContentToChecksum(BlobBuilder peImage, Blob checksumFixup)
{
foreach (var blob in peImage.GetBlobs())
{
if (blob.Buffer == checksumFixup.Buffer)
{
// Split around the checksum field.
yield return GetPrefixBlob(blob, checksumFixup);
yield return GetSuffixBlob(blob, checksumFixup);
}
else
{
yield return blob;
}
}
}
/// <summary>
/// Signs the PE image: writes the signature produced by <paramref name="signatureProvider"/>
/// into the reserved strong name signature blob, then computes and patches the PE checksum.
/// </summary>
/// <exception cref="InvalidOperationException">The provider returned a null or oversized signature.</exception>
internal void Sign(BlobBuilder peImage, Blob strongNameSignatureFixup, Func<IEnumerable<Blob>, byte[]> signatureProvider)
{
Debug.Assert(peImage != null);
Debug.Assert(signatureProvider != null);
int peHeadersSize = Header.ComputeSizeOfPEHeaders(GetSections().Length);
byte[] signature = signatureProvider(GetContentToSign(peImage, peHeadersSize, Header.FileAlignment, strongNameSignatureFixup));
// signature may be shorter (the rest of the reserved space is padding):
if (signature == null || signature.Length > strongNameSignatureFixup.Length)
{
throw new InvalidOperationException(SR.SignatureProviderReturnedInvalidSignature);
}
// Write the signature into the image first...
var writer = new BlobWriter(strongNameSignatureFixup);
writer.WriteBytes(signature);
// ...and only then calculate and patch the checksum, so that the stored checksum
// covers the final image content including the signature bytes. (Computing the
// checksum before writing the signature would produce a stale, invalid checksum.)
uint checksum = CalculateChecksum(peImage, _lazyChecksum);
new BlobWriter(_lazyChecksum).WriteUInt32(checksum);
}
// internal for testing
/// <summary>
/// PE image checksum: the folded 16-bit sum over the image content (excluding the
/// checksum field itself) plus the total image size in bytes.
/// </summary>
internal static uint CalculateChecksum(BlobBuilder peImage, Blob checksumFixup)
{
return CalculateChecksum(GetContentToChecksum(peImage, checksumFixup)) + (uint)peImage.Count;
}
/// <summary>
/// Computes the folded 16-bit rolling checksum over a sequence of blobs, pairing
/// bytes into little-endian 16-bit words even across blob boundaries.
/// </summary>
private static unsafe uint CalculateChecksum(IEnumerable<Blob> blobs)
{
uint checksum = 0;
// Holds an unpaired trailing byte (-1 = none) so that a 16-bit word can span
// the boundary between two consecutive blobs.
int pendingByte = -1;
foreach (var blob in blobs)
{
var segment = blob.GetBytes();
fixed (byte* arrayPtr = segment.Array)
{
Debug.Assert(segment.Count > 0);
byte* ptr = arrayPtr + segment.Offset;
byte* end = ptr + segment.Count;
// Complete a word left over from the previous blob, if any.
if (pendingByte >= 0)
{
// little-endian encoding:
checksum = AggregateChecksum(checksum, (ushort)(*ptr << 8 | pendingByte));
ptr++;
}
// If an odd byte remains, carry it over to the next blob (or the final fold-in).
if ((end - ptr) % 2 != 0)
{
end--;
pendingByte = *end;
}
else
{
pendingByte = -1;
}
// Fold in the remaining even-length run one 16-bit word at a time.
while (ptr < end)
{
checksum = AggregateChecksum(checksum, *(ushort*)ptr);
ptr += sizeof(ushort);
}
}
}
// A final unpaired byte is folded in as-is.
if (pendingByte >= 0)
{
checksum = AggregateChecksum(checksum, (ushort)pendingByte);
}
return checksum;
}
/// <summary>
/// Folds one 16-bit value into the PE checksum accumulator: add the value,
/// then fold any carry out of the low 16 bits back in.
/// </summary>
private static uint AggregateChecksum(uint checksum, ushort value)
{
    uint total = checksum + value;
    uint carry = total >> 16;
    return carry + unchecked((ushort)total);
}
}
}
| |
namespace KabMan.Forms
{
partial class DasdCuTypeManagerForm
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // Dispose designer-owned components only when releasing managed state.
        if (components != null)
        {
            components.Dispose();
        }
    }
    base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule1 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule2 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule3 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
this.CManager = new KabMan.Controls.C_ControlManagerForm();
this.layoutControl1 = new DevExpress.XtraLayout.LayoutControl();
this.CStartNo = new DevExpress.XtraEditors.SpinEdit();
this.CPortCount = new DevExpress.XtraEditors.SpinEdit();
this.CName = new DevExpress.XtraEditors.TextEdit();
this.layoutControlGroup1 = new DevExpress.XtraLayout.LayoutControlGroup();
this.layoutControlItem1 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem2 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem3 = new DevExpress.XtraLayout.LayoutControlItem();
this.CManagerValidator = new DevExpress.XtraEditors.DXErrorProvider.DXValidationProvider(this.components);
this.CManager.LayoutPanel.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).BeginInit();
this.layoutControl1.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.CStartNo.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.CPortCount.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.CName.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.CManagerValidator)).BeginInit();
this.SuspendLayout();
//
// CManager
//
this.CManager.DeleteProcedure = null;
this.CManager.Dock = System.Windows.Forms.DockStyle.Fill;
this.CManager.InsertProcedure = null;
this.CManager.IsCancel = true;
this.CManager.IsEdit = false;
this.CManager.IsNew = true;
//
// CManager.layoutControlPanel
//
this.CManager.LayoutPanel.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.CManager.LayoutPanel.Controls.Add(this.layoutControl1);
this.CManager.LayoutPanel.Location = new System.Drawing.Point(0, 0);
this.CManager.LayoutPanel.MinimumSize = new System.Drawing.Size(400, 20);
this.CManager.LayoutPanel.Name = "layoutControlPanel";
this.CManager.LayoutPanel.Size = new System.Drawing.Size(404, 97);
this.CManager.LayoutPanel.TabIndex = 6;
this.CManager.Location = new System.Drawing.Point(0, 0);
this.CManager.MinimumSize = new System.Drawing.Size(400, 460);
this.CManager.Name = "CManager";
this.CManager.SelectParameters = null;
this.CManager.Size = new System.Drawing.Size(404, 462);
this.CManager.TabIndex = 0;
this.CManager.UpdateProcedure = null;
//
// layoutControl1
//
this.layoutControl1.Appearance.DisabledLayoutGroupCaption.ForeColor = System.Drawing.SystemColors.GrayText;
this.layoutControl1.Appearance.DisabledLayoutGroupCaption.Options.UseForeColor = true;
this.layoutControl1.Appearance.DisabledLayoutItem.ForeColor = System.Drawing.SystemColors.GrayText;
this.layoutControl1.Appearance.DisabledLayoutItem.Options.UseForeColor = true;
this.layoutControl1.Controls.Add(this.CStartNo);
this.layoutControl1.Controls.Add(this.CPortCount);
this.layoutControl1.Controls.Add(this.CName);
this.layoutControl1.Dock = System.Windows.Forms.DockStyle.Fill;
this.layoutControl1.Location = new System.Drawing.Point(0, 0);
this.layoutControl1.Name = "layoutControl1";
this.layoutControl1.Root = this.layoutControlGroup1;
this.layoutControl1.Size = new System.Drawing.Size(404, 97);
this.layoutControl1.TabIndex = 0;
this.layoutControl1.Text = "layoutControl1";
//
// CStartNo
//
this.CStartNo.EditValue = new decimal(new int[] {
1,
0,
0,
0});
this.CStartNo.Location = new System.Drawing.Point(64, 69);
this.CStartNo.Name = "CStartNo";
this.CStartNo.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
new DevExpress.XtraEditors.Controls.EditorButton()});
this.CStartNo.Properties.MaxValue = new decimal(new int[] {
99999999,
0,
0,
0});
this.CStartNo.Properties.MinValue = new decimal(new int[] {
1,
0,
0,
0});
this.CStartNo.Size = new System.Drawing.Size(334, 20);
this.CStartNo.StyleController = this.layoutControl1;
this.CStartNo.TabIndex = 6;
conditionValidationRule1.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.NotEquals;
conditionValidationRule1.ErrorText = "This value is not valid";
conditionValidationRule1.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
conditionValidationRule1.Value1 = "0";
this.CManagerValidator.SetValidationRule(this.CStartNo, conditionValidationRule1);
//
// CPortCount
//
this.CPortCount.EditValue = new decimal(new int[] {
1,
0,
0,
0});
this.CPortCount.Location = new System.Drawing.Point(64, 38);
this.CPortCount.Name = "CPortCount";
this.CPortCount.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
new DevExpress.XtraEditors.Controls.EditorButton()});
this.CPortCount.Properties.MaxValue = new decimal(new int[] {
99999999,
0,
0,
0});
this.CPortCount.Properties.MinValue = new decimal(new int[] {
1,
0,
0,
0});
this.CPortCount.Size = new System.Drawing.Size(334, 20);
this.CPortCount.StyleController = this.layoutControl1;
this.CPortCount.TabIndex = 5;
conditionValidationRule2.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.NotEquals;
conditionValidationRule2.ErrorText = "This value is not valid";
conditionValidationRule2.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
conditionValidationRule2.Value1 = "0";
this.CManagerValidator.SetValidationRule(this.CPortCount, conditionValidationRule2);
//
// CName
//
this.CName.Location = new System.Drawing.Point(64, 7);
this.CName.Name = "CName";
this.CName.Size = new System.Drawing.Size(334, 20);
this.CName.StyleController = this.layoutControl1;
this.CName.TabIndex = 4;
conditionValidationRule3.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
conditionValidationRule3.ErrorText = "This value is not valid";
conditionValidationRule3.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
this.CManagerValidator.SetValidationRule(this.CName, conditionValidationRule3);
//
// layoutControlGroup1
//
this.layoutControlGroup1.CustomizationFormText = "layoutControlGroup1";
this.layoutControlGroup1.Items.AddRange(new DevExpress.XtraLayout.BaseLayoutItem[] {
this.layoutControlItem1,
this.layoutControlItem2,
this.layoutControlItem3});
this.layoutControlGroup1.Location = new System.Drawing.Point(0, 0);
this.layoutControlGroup1.Name = "layoutControlGroup1";
this.layoutControlGroup1.Size = new System.Drawing.Size(404, 97);
this.layoutControlGroup1.Spacing = new DevExpress.XtraLayout.Utils.Padding(0, 0, 0, 0);
this.layoutControlGroup1.Text = "layoutControlGroup1";
this.layoutControlGroup1.TextVisible = false;
//
// layoutControlItem1
//
this.layoutControlItem1.Control = this.CName;
this.layoutControlItem1.CustomizationFormText = "Name";
this.layoutControlItem1.Location = new System.Drawing.Point(0, 0);
this.layoutControlItem1.Name = "layoutControlItem1";
this.layoutControlItem1.Size = new System.Drawing.Size(402, 31);
this.layoutControlItem1.Text = "Name";
this.layoutControlItem1.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem1.TextSize = new System.Drawing.Size(52, 13);
//
// layoutControlItem2
//
this.layoutControlItem2.Control = this.CPortCount;
this.layoutControlItem2.CustomizationFormText = "Port Count";
this.layoutControlItem2.Location = new System.Drawing.Point(0, 31);
this.layoutControlItem2.Name = "layoutControlItem2";
this.layoutControlItem2.Size = new System.Drawing.Size(402, 31);
this.layoutControlItem2.Text = "Port Count";
this.layoutControlItem2.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem2.TextSize = new System.Drawing.Size(52, 13);
//
// layoutControlItem3
//
this.layoutControlItem3.Control = this.CStartNo;
this.layoutControlItem3.CustomizationFormText = "Start No";
this.layoutControlItem3.Location = new System.Drawing.Point(0, 62);
this.layoutControlItem3.Name = "layoutControlItem3";
this.layoutControlItem3.Size = new System.Drawing.Size(402, 33);
this.layoutControlItem3.Text = "Start No";
this.layoutControlItem3.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem3.TextSize = new System.Drawing.Size(52, 13);
//
// DasdCuTypeManagerForm
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(404, 462);
this.Controls.Add(this.CManager);
this.MaximizeBox = false;
this.MinimizeBox = false;
this.MinimumSize = new System.Drawing.Size(420, 500);
this.Name = "DasdCuTypeManagerForm";
this.ShowInTaskbar = false;
this.Text = "Dasd CuType Manager";
this.CManager.LayoutPanel.ResumeLayout(false);
((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).EndInit();
this.layoutControl1.ResumeLayout(false);
((System.ComponentModel.ISupportInitialize)(this.CStartNo.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.CPortCount.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.CName.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.CManagerValidator)).EndInit();
this.ResumeLayout(false);
}
#endregion
// Designer-generated control fields for DasdCuTypeManagerForm.
// Maintained by the Windows Forms designer; do not edit by hand.
private KabMan.Controls.C_ControlManagerForm CManager;
private DevExpress.XtraLayout.LayoutControl layoutControl1;
// Editors hosted inside layoutControl1 (see InitializeComponent for wiring).
private DevExpress.XtraEditors.SpinEdit CStartNo;
private DevExpress.XtraEditors.SpinEdit CPortCount;
private DevExpress.XtraEditors.TextEdit CName;
private DevExpress.XtraLayout.LayoutControlGroup layoutControlGroup1;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem1;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem2;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem3;
// Validator that holds the NotEquals/IsNotBlank rules attached to the editors above.
private DevExpress.XtraEditors.DXErrorProvider.DXValidationProvider CManagerValidator;
}
}
| |
using System;
using System.Runtime.InteropServices;
using System.Text;
using System.Collections.Generic;
using OpenHome.Net.Core;
namespace OpenHome.Net.Device.Providers
{
public interface IDvProviderUpnpOrgSwitchPower1 : IDisposable
{
    /// <summary>
    /// Set the value of the Status property
    /// </summary>
    /// <param name="aValue">New value for the property</param>
    /// <returns>true if the value has been updated; false if aValue was the same as the previous value</returns>
    bool SetPropertyStatus(bool aValue);
    /// <summary>
    /// Get a copy of the value of the Status property
    /// </summary>
    /// <returns>Value of the Status property.</returns>
    bool PropertyStatus();
}
/// <summary>
/// Provider for the upnp.org:SwitchPower:1 UPnP service
/// </summary>
public class DvProviderUpnpOrgSwitchPower1 : DvProvider, IDisposable, IDvProviderUpnpOrgSwitchPower1
{
// Pins this instance so the static native callbacks below can recover it via GCHandle.FromIntPtr.
private GCHandle iGch;
// Delegates handed to the native layer; stored in fields so they are not garbage
// collected while native code still holds function pointers to them.
private ActionDelegate iDelegateSetTarget;
private ActionDelegate iDelegateGetTarget;
private ActionDelegate iDelegateGetStatus;
// Backing store for the evented Status state variable; null until EnablePropertyStatus is called.
private PropertyBool iPropertyStatus;
/// <summary>
/// Constructor
/// </summary>
/// <param name="aDevice">Device which owns this provider</param>
protected DvProviderUpnpOrgSwitchPower1(DvDevice aDevice)
: base(aDevice, "upnp.org", "SwitchPower", 1)
{
iGch = GCHandle.Alloc(this);
}
/// <summary>
/// Enable the Status property.
/// </summary>
public void EnablePropertyStatus()
{
iPropertyStatus = new PropertyBool(new ParameterBool("Status"));
AddProperty(iPropertyStatus);
}
/// <summary>
/// Set the value of the Status property
/// </summary>
/// <remarks>Can only be called if EnablePropertyStatus has previously been called.</remarks>
/// <param name="aValue">New value for the property</param>
/// <returns>true if the value has been updated; false if aValue was the same as the previous value</returns>
public bool SetPropertyStatus(bool aValue)
{
if (iPropertyStatus == null)
throw new PropertyDisabledError();
return SetPropertyBool(iPropertyStatus, aValue);
}
/// <summary>
/// Get a copy of the value of the Status property
/// </summary>
/// <remarks>Can only be called if EnablePropertyStatus has previously been called.</remarks>
/// <returns>Value of the Status property.</returns>
public bool PropertyStatus()
{
if (iPropertyStatus == null)
throw new PropertyDisabledError();
return iPropertyStatus.Value();
}
/// <summary>
/// Signal that the action SetTarget is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// SetTarget must be overridden if this is called.</remarks>
protected void EnableActionSetTarget()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("SetTarget");
action.AddInputParameter(new ParameterBool("newTargetValue"));
iDelegateSetTarget = new ActionDelegate(DoSetTarget);
EnableAction(action, iDelegateSetTarget, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action GetTarget is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// GetTarget must be overridden if this is called.</remarks>
protected void EnableActionGetTarget()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("GetTarget");
action.AddOutputParameter(new ParameterBool("RetTargetValue"));
iDelegateGetTarget = new ActionDelegate(DoGetTarget);
EnableAction(action, iDelegateGetTarget, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action GetStatus is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// GetStatus must be overridden if this is called.</remarks>
protected void EnableActionGetStatus()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("GetStatus");
// NOTE(review): references iPropertyStatus, so EnablePropertyStatus appears to be a
// prerequisite of this call — confirm the intended ordering with callers.
action.AddOutputParameter(new ParameterRelated("ResultStatus", iPropertyStatus));
iDelegateGetStatus = new ActionDelegate(DoGetStatus);
EnableAction(action, iDelegateGetStatus, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// SetTarget action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// SetTarget action for the owning device.
///
/// Must be implemented iff EnableActionSetTarget was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="anewTargetValue"></param>
protected virtual void SetTarget(IDvInvocation aInvocation, bool anewTargetValue)
{
throw (new ActionDisabledError());
}
/// <summary>
/// GetTarget action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// GetTarget action for the owning device.
///
/// Must be implemented iff EnableActionGetTarget was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aRetTargetValue"></param>
protected virtual void GetTarget(IDvInvocation aInvocation, out bool aRetTargetValue)
{
throw (new ActionDisabledError());
}
/// <summary>
/// GetStatus action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// GetStatus action for the owning device.
///
/// Must be implemented iff EnableActionGetStatus was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aResultStatus"></param>
protected virtual void GetStatus(IDvInvocation aInvocation, out bool aResultStatus)
{
throw (new ActionDisabledError());
}
// Native callback for the SetTarget action: recovers the provider instance from the
// pinned GCHandle, reads the input argument, dispatches to the virtual SetTarget
// override, then writes the (empty) response. Returns 0 on success, -1 on error.
private static int DoSetTarget(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderUpnpOrgSwitchPower1 self = (DvProviderUpnpOrgSwitchPower1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
bool newTargetValue;
try
{
invocation.ReadStart();
newTargetValue = invocation.ReadBool("newTargetValue");
invocation.ReadEnd();
self.SetTarget(invocation, newTargetValue);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "SetTarget");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", "SetTarget"));
return -1;
}
catch (Exception e)
{
Console.WriteLine("WARNING: unexpected exception {0}(\"{1}\") thrown by {2} in {3}", e.GetType(), e.Message, "SetTarget", e.TargetSite.Name);
Console.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
Console.WriteLine("ERROR: unexpected exception {0}(\"{1}\") thrown by {2} in {3}", e.GetType(), e.Message, "SetTarget", e.TargetSite.Name);
Console.WriteLine("       Only ActionError can be thrown by action response writer");
}
return 0;
}
// Native callback for the GetTarget action: dispatches to the virtual GetTarget
// override and writes the single boolean result. Returns 0 on success, -1 on error.
private static int DoGetTarget(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderUpnpOrgSwitchPower1 self = (DvProviderUpnpOrgSwitchPower1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
bool retTargetValue;
try
{
invocation.ReadStart();
invocation.ReadEnd();
self.GetTarget(invocation, out retTargetValue);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "GetTarget");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", "GetTarget"));
return -1;
}
catch (Exception e)
{
Console.WriteLine("WARNING: unexpected exception {0}(\"{1}\") thrown by {2} in {3}", e.GetType(), e.Message, "GetTarget", e.TargetSite.Name);
Console.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteBool("RetTargetValue", retTargetValue);
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
Console.WriteLine("ERROR: unexpected exception {0}(\"{1}\") thrown by {2} in {3}", e.GetType(), e.Message, "GetTarget", e.TargetSite.Name);
Console.WriteLine("       Only ActionError can be thrown by action response writer");
}
return 0;
}
// Native callback for the GetStatus action: dispatches to the virtual GetStatus
// override and writes the single boolean result. Returns 0 on success, -1 on error.
private static int DoGetStatus(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderUpnpOrgSwitchPower1 self = (DvProviderUpnpOrgSwitchPower1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
bool resultStatus;
try
{
invocation.ReadStart();
invocation.ReadEnd();
self.GetStatus(invocation, out resultStatus);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "GetStatus");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", "GetStatus"));
return -1;
}
catch (Exception e)
{
Console.WriteLine("WARNING: unexpected exception {0}(\"{1}\") thrown by {2} in {3}", e.GetType(), e.Message, "GetStatus", e.TargetSite.Name);
Console.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteBool("ResultStatus", resultStatus);
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
Console.WriteLine("ERROR: unexpected exception {0}(\"{1}\") thrown by {2} in {3}", e.GetType(), e.Message, "GetStatus", e.TargetSite.Name);
Console.WriteLine("       Only ActionError can be thrown by action response writer");
}
return 0;
}
/// <summary>
/// Must be called for each class instance.  Must be called before Core.Library.Close().
/// </summary>
public virtual void Dispose()
{
// Free the pinning handle once the native provider is disposed.
// NOTE(review): assumes DisposeProvider() returns true only on the first call,
// guarding against a double free — confirm in DvProvider.
if (DisposeProvider())
iGch.Free();
}
}
}
| |
using System;
using UnityEngine;
using UnityStandardAssets.CrossPlatformInput;
using Random = UnityEngine.Random;
namespace UnityStandardAssets.Characters.FirstPerson
{
[RequireComponent(typeof (CharacterController))]
[RequireComponent(typeof (AudioSource))]
public class FirstPersonController : MonoBehaviour
{
    // Tuning values exposed to the inspector. Field names are kept as-is because Unity
    // serializes by field name; renaming them would silently reset scene/prefab values.
    [SerializeField] private bool m_IsWalking;
    [SerializeField] private float m_WalkSpeed;
    [SerializeField] private float m_RunSpeed;
    [SerializeField] [Range(0f, 1f)] private float m_RunstepLenghten;
    [SerializeField] private float m_JumpSpeed;
    [SerializeField] private float m_StickToGroundForce;
    [SerializeField] private float m_GravityMultiplier;
    [SerializeField] private MouseLook m_MouseLook;
    [SerializeField] private float m_StepInterval;
    [SerializeField] private AudioClip[] m_FootstepSounds;    // an array of footstep sounds that will be randomly selected from.
    [SerializeField] private AudioClip m_JumpSound;           // the sound played when character leaves the ground.
    [SerializeField] private AudioClip m_LandSound;           // the sound played when character touches back on ground.

    private Camera m_Camera;
    private bool m_Jump;                       // latched jump request, consumed in FixedUpdate
    private float m_YRotation;                 // NOTE(review): never used in this class — candidate for removal; confirm no external reflection use.
    private Vector2 m_Input;                   // raw (horizontal, vertical) input, normalized if > 1
    private Vector3 m_MoveDir = Vector3.zero;  // current velocity passed to CharacterController.Move
    private CharacterController m_CharacterController;
    private CollisionFlags m_CollisionFlags;
    private bool m_PreviouslyGrounded;
    private float m_StepCycle;                 // accumulated distance used to time footsteps
    private float m_NextStep;                  // m_StepCycle threshold at which the next footstep plays
    private bool m_Jumping;
    private AudioSource m_AudioSource;

    /// <summary>Caches components and initialises the step cycle and mouse look.</summary>
    private void Start()
    {
        m_CharacterController = GetComponent<CharacterController>();
        m_Camera = Camera.main;
        m_StepCycle = 0f;
        m_NextStep = m_StepCycle/2f;
        m_Jumping = false;
        m_AudioSource = GetComponent<AudioSource>();
        m_MouseLook.Init(transform , m_Camera.transform);
    }

    /// <summary>Per-frame input sampling and grounded-state transitions.</summary>
    private void Update()
    {
        RotateView();
        // the jump state needs to read here to make sure it is not missed
        if (!m_Jump)
        {
            m_Jump = CrossPlatformInputManager.GetButtonDown("Jump");
        }

        // Just landed: play the landing sound and cancel vertical motion.
        if (!m_PreviouslyGrounded && m_CharacterController.isGrounded)
        {
            PlayLandingSound();
            m_MoveDir.y = 0f;
            m_Jumping = false;
        }
        // Walked off a ledge (left the ground without jumping): stop accumulating upward motion.
        if (!m_CharacterController.isGrounded && !m_Jumping && m_PreviouslyGrounded)
        {
            m_MoveDir.y = 0f;
        }

        m_PreviouslyGrounded = m_CharacterController.isGrounded;
    }

    /// <summary>Plays the landing clip and delays the next footstep slightly.</summary>
    private void PlayLandingSound()
    {
        m_AudioSource.clip = m_LandSound;
        m_AudioSource.Play();
        m_NextStep = m_StepCycle + .5f;
    }

    /// <summary>Physics-step movement: slope projection, gravity, jumping and stepping.</summary>
    private void FixedUpdate()
    {
        float speed;
        GetInput(out speed);
        // always move along the camera forward as it is the direction that it being aimed at
        Vector3 desiredMove = transform.forward*m_Input.y + transform.right*m_Input.x;

        // get a normal for the surface that is being touched to move along it
        RaycastHit hitInfo;
        Physics.SphereCast(transform.position, m_CharacterController.radius, Vector3.down, out hitInfo,
                           m_CharacterController.height/2f);
        desiredMove = Vector3.ProjectOnPlane(desiredMove, hitInfo.normal).normalized;

        m_MoveDir.x = desiredMove.x*speed;
        m_MoveDir.z = desiredMove.z*speed;

        if (m_CharacterController.isGrounded)
        {
            // small constant downward force keeps the controller glued to slopes
            m_MoveDir.y = -m_StickToGroundForce;

            if (m_Jump)
            {
                m_MoveDir.y = m_JumpSpeed;
                PlayJumpSound();
                m_Jump = false;
                m_Jumping = true;
            }
        }
        else
        {
            m_MoveDir += Physics.gravity*m_GravityMultiplier*Time.fixedDeltaTime;
        }
        m_CollisionFlags = m_CharacterController.Move(m_MoveDir*Time.fixedDeltaTime);

        ProgressStepCycle(speed);
    }

    /// <summary>Plays the jump clip.</summary>
    private void PlayJumpSound()
    {
        m_AudioSource.clip = m_JumpSound;
        m_AudioSource.Play();
    }

    /// <summary>Advances the footstep cycle by distance travelled and triggers footsteps.</summary>
    private void ProgressStepCycle(float speed)
    {
        if (m_CharacterController.velocity.sqrMagnitude > 0 && (m_Input.x != 0 || m_Input.y != 0))
        {
            // running lengthens the stride by blending in m_RunstepLenghten
            m_StepCycle += (m_CharacterController.velocity.magnitude + (speed*(m_IsWalking ? 1f : m_RunstepLenghten)))*
                           Time.fixedDeltaTime;
        }

        if (!(m_StepCycle > m_NextStep))
        {
            return;
        }

        m_NextStep = m_StepCycle + m_StepInterval;

        PlayFootStepAudio();
    }

    /// <summary>
    /// Plays a random footstep clip, avoiding immediate repeats by rotating the most
    /// recently played clip into index 0.
    /// </summary>
    private void PlayFootStepAudio()
    {
        if (!m_CharacterController.isGrounded)
        {
            return;
        }
        // FIX: guard against null/empty/single-clip arrays. Random.Range(1, length) with
        // length <= 1 yields index 1 (or an empty range), which indexed past the array.
        if (m_FootstepSounds == null || m_FootstepSounds.Length == 0)
        {
            return;
        }
        if (m_FootstepSounds.Length == 1)
        {
            m_AudioSource.PlayOneShot(m_FootstepSounds[0]);
            return;
        }
        // pick & play a random footstep sound from the array,
        // excluding sound at index 0
        int n = Random.Range(1, m_FootstepSounds.Length);
        m_AudioSource.clip = m_FootstepSounds[n];
        m_AudioSource.PlayOneShot(m_AudioSource.clip);
        // move picked sound to index 0 so it's not picked next time
        m_FootstepSounds[n] = m_FootstepSounds[0];
        m_FootstepSounds[0] = m_AudioSource.clip;
    }

    /// <summary>Reads movement axes, resolves walk/run speed and normalizes diagonal input.</summary>
    private void GetInput(out float speed)
    {
        // Read input
        float horizontal = CrossPlatformInputManager.GetAxis("Horizontal");
        float vertical = CrossPlatformInputManager.GetAxis("Vertical");

#if !MOBILE_INPUT
        // On standalone builds, walk/run speed is modified by a key press.
        // keep track of whether or not the character is walking or running
        m_IsWalking = !Input.GetKey(KeyCode.LeftShift);
#endif
        // set the desired speed to be walking or running
        speed = m_IsWalking ? m_WalkSpeed : m_RunSpeed;
        m_Input = new Vector2(horizontal, vertical);

        // normalize input if it exceeds 1 in combined length:
        if (m_Input.sqrMagnitude > 1)
        {
            m_Input.Normalize();
        }
    }

    /// <summary>Applies mouse-look rotation to the body and camera transforms.</summary>
    private void RotateView()
    {
        m_MouseLook.LookRotation (transform, m_Camera.transform);
    }

    /// <summary>Pushes non-kinematic rigidbodies the controller walks into.</summary>
    private void OnControllerColliderHit(ControllerColliderHit hit)
    {
        Rigidbody body = hit.collider.attachedRigidbody;
        //dont move the rigidbody if the character is on top of it
        if (m_CollisionFlags == CollisionFlags.Below)
        {
            return;
        }

        if (body == null || body.isKinematic)
        {
            return;
        }
        body.AddForceAtPosition(m_CharacterController.velocity*0.1f, hit.point, ForceMode.Impulse);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using Microsoft.Win32.SafeHandles;
namespace Internal.Cryptography.Pal
{
// Helpers for detecting and reading PKCS#7 and PKCS#12 (PFX) payloads through the
// OpenSSL-backed Interop.Crypto shim. Each format is probed both as a direct (DER)
// decode and as PEM, with overloads accepting either raw bytes or an existing BIO.
internal static class PkcsFormatReader
{
// Returns true if rawData parses as PKCS#7, trying a direct decode first and
// falling back to a PEM read through a temporary in-memory BIO.
internal static bool IsPkcs7(byte[] rawData)
{
using (SafePkcs7Handle pkcs7 = Interop.Crypto.DecodePkcs7(rawData, rawData.Length))
{
if (!pkcs7.IsInvalid)
{
return true;
}
}
using (SafeBioHandle bio = Interop.Crypto.CreateMemoryBio())
{
Interop.Crypto.CheckValidOpenSslHandle(bio);
Interop.Crypto.BioWrite(bio, rawData, rawData.Length);
using (SafePkcs7Handle pkcs7 = Interop.Crypto.PemReadBioPkcs7(bio))
{
return !pkcs7.IsInvalid;
}
}
}
// DER-only PKCS#7 probe against a file BIO.
internal static bool IsPkcs7Der(SafeBioHandle fileBio)
{
using (SafePkcs7Handle pkcs7 = Interop.Crypto.D2IPkcs7Bio(fileBio))
{
return !pkcs7.IsInvalid;
}
}
// PEM-only PKCS#7 probe against a file BIO.
internal static bool IsPkcs7Pem(SafeBioHandle fileBio)
{
using (SafePkcs7Handle pkcs7 = Interop.Crypto.PemReadBioPkcs7(fileBio))
{
return !pkcs7.IsInvalid;
}
}
// Convenience overloads: "single" mode yields one ICertificatePal, list mode yields all
// certificates in the bundle. The unused out value is discarded by each wrapper.
internal static bool TryReadPkcs7Der(byte[] rawData, out ICertificatePal certPal)
{
List<ICertificatePal> ignored;
return TryReadPkcs7Der(rawData, true, out certPal, out ignored);
}
internal static bool TryReadPkcs7Der(SafeBioHandle bio, out ICertificatePal certPal)
{
List<ICertificatePal> ignored;
return TryReadPkcs7Der(bio, true, out certPal, out ignored);
}
internal static bool TryReadPkcs7Der(byte[] rawData, out List<ICertificatePal> certPals)
{
ICertificatePal ignored;
return TryReadPkcs7Der(rawData, false, out ignored, out certPals);
}
internal static bool TryReadPkcs7Der(SafeBioHandle bio, out List<ICertificatePal> certPals)
{
ICertificatePal ignored;
return TryReadPkcs7Der(bio, false, out ignored, out certPals);
}
// Decodes DER bytes into a PKCS#7 handle, then defers to TryReadPkcs7.
// Returns false (with null outputs) when the bytes are not DER PKCS#7.
private static bool TryReadPkcs7Der(
byte[] rawData,
bool single,
out ICertificatePal certPal,
out List<ICertificatePal> certPals)
{
using (SafePkcs7Handle pkcs7 = Interop.Crypto.DecodePkcs7(rawData, rawData.Length))
{
if (pkcs7.IsInvalid)
{
certPal = null;
certPals = null;
return false;
}
return TryReadPkcs7(pkcs7, single, out certPal, out certPals);
}
}
// Same as above, reading DER from an existing BIO.
private static bool TryReadPkcs7Der(
SafeBioHandle bio,
bool single,
out ICertificatePal certPal,
out List<ICertificatePal> certPals)
{
using (SafePkcs7Handle pkcs7 = Interop.Crypto.D2IPkcs7Bio(bio))
{
if (pkcs7.IsInvalid)
{
certPal = null;
certPals = null;
return false;
}
return TryReadPkcs7(pkcs7, single, out certPal, out certPals);
}
}
internal static bool TryReadPkcs7Pem(byte[] rawData, out ICertificatePal certPal)
{
List<ICertificatePal> ignored;
return TryReadPkcs7Pem(rawData, true, out certPal, out ignored);
}
internal static bool TryReadPkcs7Pem(SafeBioHandle bio, out ICertificatePal certPal)
{
List<ICertificatePal> ignored;
return TryReadPkcs7Pem(bio, true, out certPal, out ignored);
}
internal static bool TryReadPkcs7Pem(byte[] rawData, out List<ICertificatePal> certPals)
{
ICertificatePal ignored;
return TryReadPkcs7Pem(rawData, false, out ignored, out certPals);
}
internal static bool TryReadPkcs7Pem(SafeBioHandle bio, out List<ICertificatePal> certPals)
{
ICertificatePal ignored;
return TryReadPkcs7Pem(bio, false, out ignored, out certPals);
}
// PEM variant: copies the bytes into an in-memory BIO, then defers to the BIO overload.
private static bool TryReadPkcs7Pem(
byte[] rawData,
bool single,
out ICertificatePal certPal,
out List<ICertificatePal> certPals)
{
using (SafeBioHandle bio = Interop.Crypto.CreateMemoryBio())
{
Interop.Crypto.CheckValidOpenSslHandle(bio);
Interop.Crypto.BioWrite(bio, rawData, rawData.Length);
return TryReadPkcs7Pem(bio, single, out certPal, out certPals);
}
}
private static bool TryReadPkcs7Pem(
SafeBioHandle bio,
bool single,
out ICertificatePal certPal,
out List<ICertificatePal> certPals)
{
using (SafePkcs7Handle pkcs7 = Interop.Crypto.PemReadBioPkcs7(bio))
{
if (pkcs7.IsInvalid)
{
certPal = null;
certPals = null;
return false;
}
return TryReadPkcs7(pkcs7, single, out certPal, out certPals);
}
}
// Extracts certificates from an already-decoded PKCS#7 structure.
// Single mode currently always throws: see the inline TODO about locating the signer.
private static bool TryReadPkcs7(
SafePkcs7Handle pkcs7,
bool single,
out ICertificatePal certPal,
out List<ICertificatePal> certPals)
{
List<ICertificatePal> readPals = single ? null : new List<ICertificatePal>();
using (SafeSharedX509StackHandle certs = Interop.Crypto.GetPkcs7Certificates(pkcs7))
{
int count = Interop.Crypto.GetX509StackFieldCount(certs);
if (single)
{
// In single mode for a PKCS#7 signed or signed-and-enveloped file we're supposed to return
// the certificate which signed the PKCS#7 file.
//
// X509Certificate2Collection::Export(X509ContentType.Pkcs7) claims to be a signed PKCS#7,
// but doesn't emit a signature block. So this is hard to test.
//
// TODO(2910): Figure out how to extract the signing certificate, when it's present.
throw new CryptographicException(SR.Cryptography_X509_PKCS7_NoSigner);
}
for (int i = 0; i < count; i++)
{
// Use FromHandle to duplicate the handle since it would otherwise be freed when the PKCS7
// is Disposed.
IntPtr certHandle = Interop.Crypto.GetX509StackField(certs, i);
ICertificatePal pal = CertificatePal.FromHandle(certHandle);
readPals.Add(pal);
}
}
certPal = null;
certPals = readPals;
return true;
}
// PKCS#12 (PFX) overloads mirror the PKCS#7 ones: single certificate vs full list,
// from raw bytes or from a BIO.
internal static bool TryReadPkcs12(byte[] rawData, string password, out ICertificatePal certPal)
{
List<ICertificatePal> ignored;
return TryReadPkcs12(rawData, password, true, out certPal, out ignored);
}
internal static bool TryReadPkcs12(SafeBioHandle bio, string password, out ICertificatePal certPal)
{
List<ICertificatePal> ignored;
return TryReadPkcs12(bio, password, true, out certPal, out ignored);
}
internal static bool TryReadPkcs12(byte[] rawData, string password, out List<ICertificatePal> certPals)
{
ICertificatePal ignored;
return TryReadPkcs12(rawData, password, false, out ignored, out certPals);
}
internal static bool TryReadPkcs12(SafeBioHandle bio, string password, out List<ICertificatePal> certPals)
{
ICertificatePal ignored;
return TryReadPkcs12(bio, password, false, out ignored, out certPals);
}
private static bool TryReadPkcs12(
byte[] rawData,
string password,
bool single,
out ICertificatePal readPal,
out List<ICertificatePal> readCerts)
{
// DER-PKCS12
OpenSslPkcs12Reader pfx;
if (!OpenSslPkcs12Reader.TryRead(rawData, out pfx))
{
readPal = null;
readCerts = null;
return false;
}
using (pfx)
{
return TryReadPkcs12(pfx, password, single, out readPal, out readCerts);
}
}
private static bool TryReadPkcs12(
SafeBioHandle bio,
string password,
bool single,
out ICertificatePal readPal,
out List<ICertificatePal> readCerts)
{
// DER-PKCS12
OpenSslPkcs12Reader pfx;
if (!OpenSslPkcs12Reader.TryRead(bio, out pfx))
{
readPal = null;
readCerts = null;
return false;
}
using (pfx)
{
return TryReadPkcs12(pfx, password, single, out readPal, out readCerts);
}
}
// Decrypts the PFX and enumerates its certificates. In single mode, keeps the first
// entry (upgrading to a later entry that has a private key when the first does not)
// and disposes the rest; in list mode, returns every entry.
private static bool TryReadPkcs12(
OpenSslPkcs12Reader pfx,
string password,
bool single,
out ICertificatePal readPal,
out List<ICertificatePal> readCerts)
{
pfx.Decrypt(password);
ICertificatePal first = null;
List<ICertificatePal> certs = null;
if (!single)
{
certs = new List<ICertificatePal>();
}
foreach (OpenSslX509CertificateReader certPal in pfx.ReadCertificates())
{
if (single)
{
// When requesting an X509Certificate2 from a PFX only the first entry is
// returned.  Other entries should be disposed.
if (first == null)
{
first = certPal;
}
else if (certPal.HasPrivateKey && !first.HasPrivateKey)
{
first.Dispose();
first = certPal;
}
else
{
certPal.Dispose();
}
}
else
{
certs.Add(certPal);
}
}
readPal = first;
readCerts = certs;
return true;
}
}
}
| |
using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using SteamKit2;
using SteamTrade.Exceptions;
namespace SteamTrade
{
public class TradeManager
{
// Default time limits (seconds) and polling interval (milliseconds),
// applied by the constructor via SetTradeTimeLimits.
private const int MaxGapTimeDefault = 15;
private const int MaxTradeTimeDefault = 180;
private const int TradePollingIntervalDefault = 800;
private readonly string ApiKey;
private readonly SteamWeb SteamWeb;
// Timestamps used for timeout tracking.
// NOTE(review): only declared here — the code that reads/updates them is outside this view.
private DateTime tradeStartTime;
private DateTime lastOtherActionTime;
private DateTime lastTimeoutMessage;
// Background inventory fetches for both trade participants; started in InitializeTrade,
// cleared by StopTrade.
private Task<Inventory> myInventoryTask;
private Task<Inventory> otherInventoryTask;
/// <summary>
/// Initializes a new instance of the <see cref="SteamTrade.TradeManager"/> class,
/// applying the default trade time limits.
/// </summary>
/// <param name='apiKey'>
/// The Steam Web API key. Cannot be null.
/// </param>
/// <param name="steamWeb">
/// The SteamWeb instances for this bot. Cannot be null.
/// </param>
public TradeManager (string apiKey, SteamWeb steamWeb)
{
    if (apiKey == null)
    {
        throw new ArgumentNullException ("apiKey");
    }
    if (steamWeb == null)
    {
        throw new ArgumentNullException ("steamWeb");
    }

    SetTradeTimeLimits (MaxTradeTimeDefault, MaxGapTimeDefault, TradePollingIntervalDefault);

    ApiKey = apiKey;
    SteamWeb = steamWeb;
}
#region Public Properties
/// <summary>
/// Gets the maximum trading time the bot will take, in seconds.
/// </summary>
/// <value>
/// The maximum trade time.
/// </value>
public int MaxTradeTimeSec
{
get;
private set;
}
/// <summary>
/// Gets the maximum amount of time the bot will wait between actions, in seconds.
/// </summary>
/// <value>
/// The maximum action gap.
/// </value>
public int MaxActionGapSec
{
get;
private set;
}
/// <summary>
/// Gets the Trade polling interval in milliseconds.
/// </summary>
public int TradePollingInterval
{
get;
private set;
}
/// <summary>
/// Gets the inventory of the bot.
/// </summary>
/// <value>
/// The bot's inventory fetched via Steam Web API, or null if
/// <see cref="InitializeTrade"/> has not been called yet.
/// </value>
/// <remarks>
/// NOTE(review): this getter blocks the calling thread until the background
/// fetch completes (Task.Wait) — avoid calling it from latency-sensitive code.
/// </remarks>
public Inventory MyInventory
{
get
{
if(myInventoryTask == null)
return null;
myInventoryTask.Wait();
return myInventoryTask.Result;
}
}
/// <summary>
/// Gets the inventory of the other trade partner.
/// </summary>
/// <value>
/// The other trade partner's inventory fetched via Steam Web API, or null if
/// <see cref="InitializeTrade"/> has not been called yet.
/// </value>
/// <remarks>
/// NOTE(review): blocks until the background fetch completes, like <see cref="MyInventory"/>.
/// </remarks>
public Inventory OtherInventory
{
get
{
if(otherInventoryTask == null)
return null;
otherInventoryTask.Wait();
return otherInventoryTask.Result;
}
}
/// <summary>
/// Gets or sets a value indicating whether the trade thread running.
/// </summary>
/// <value>
/// <c>true</c> if the trade thread running; otherwise, <c>false</c>.
/// </value>
public bool IsTradeThreadRunning
{
get;
internal set;
}
#endregion Public Properties
#region Public Events
/// <summary>
/// Occurs when the trade times out because either the user didn't complete an
/// action in a set amount of time, or they took too long with the whole trade.
/// </summary>
// NOTE(review): declared as a public delegate field rather than an `event`, so external
// code can overwrite the whole invocation list with `=`. Converting it to an event would
// be safer but would break any callers that assign instead of subscribing — confirm first.
public EventHandler OnTimeout;
#endregion Public Events
#region Public Methods
/// <summary>
/// Configures the timing limits used while trading.
/// </summary>
/// <param name='maxTradeTime'>
/// Total time allowed for the whole trade, in seconds.
/// </param>
/// <param name='maxActionGap'>
/// Longest pause allowed between partner actions, in seconds.
/// </param>
/// <param name='pollingInterval'>How often the trade state is polled, in milliseconds.</param>
public void SetTradeTimeLimits (int maxTradeTime, int maxActionGap, int pollingInterval)
{
    TradePollingInterval = pollingInterval;
    MaxActionGapSec = maxActionGap;
    MaxTradeTimeSec = maxTradeTime;
}
/// <summary>
/// Creates a trade object and returns it for use.
/// Call <see cref="InitializeTrade"/> before using this method.
/// </summary>
/// <returns>
/// The trade object to use to interact with the Steam trade.
/// </returns>
/// <param name='me'>
/// The <see cref="SteamID"/> of the bot.
/// </param>
/// <param name='other'>
/// The <see cref="SteamID"/> of the other trade partner.
/// </param>
/// <remarks>
/// If the needed inventories are <c>null</c> then they will be fetched.
/// </remarks>
public Trade CreateTrade (SteamID me, SteamID other)
{
    // Lazily kick off the inventory fetches if InitializeTrade wasn't called first.
    bool inventoriesMissing = otherInventoryTask == null || myInventoryTask == null;
    if (inventoriesMissing)
        InitializeTrade (me, other);

    var trade = new Trade (me, other, SteamWeb, myInventoryTask, otherInventoryTask);

    // Clear the running flag whenever the trade closes, however it ends.
    trade.OnClose += delegate
    {
        IsTradeThreadRunning = false;
    };

    return trade;
}
/// <summary>
/// Stops the trade thread.
/// </summary>
/// <remarks>
/// Also, nulls out the inventory objects so they have to be fetched
/// again if a new trade is started.
/// </remarks>
public void StopTrade ()
{
    // TODO: verify the caller passes the Trade returned from CreateTrade
    IsTradeThreadRunning = false;

    // Drop the cached inventory tasks so the next trade re-fetches them.
    myInventoryTask = null;
    otherInventoryTask = null;
}
/// <summary>
/// Fetchs the inventories of both the bot and the other user as well as the TF2 item schema.
/// </summary>
/// <param name='me'>
/// The <see cref="SteamID"/> of the bot.
/// </param>
/// <param name='other'>
/// The <see cref="SteamID"/> of the other trade partner.
/// </param>
/// <remarks>
/// This should be done anytime a new user is traded with or the inventories are out of date. It should
/// be done sometime before calling <see cref="CreateTrade"/>.
/// </remarks>
public void InitializeTrade (SteamID me, SteamID other)
{
    // Start both inventory fetches via the Steam Web API; they run in
    // parallel and are awaited lazily by the inventory properties.
    myInventoryTask = Task.Factory.StartNew(() => Inventory.FetchInventory(me.ConvertToUInt64(), ApiKey, SteamWeb));
    otherInventoryTask = Task.Factory.StartNew(() => Inventory.FetchInventory(other.ConvertToUInt64(), ApiKey, SteamWeb));

    // Fetch the item schema once and cache it on Trade for all future trades.
    if (Trade.CurrentSchema == null)
    {
        Trade.CurrentSchema = Schema.FetchSchema (ApiKey);
        if (Trade.CurrentSchema == null)
            throw new TradeException ("Could not download the latest item schema.");
    }
}
#endregion Public Methods
/// <summary>
/// Starts the actual trade-polling thread.
/// </summary>
/// <param name="trade">The trade to poll; should be the object returned by <see cref="CreateTrade"/>.</param>
public void StartTradeThread (Trade trade)
{
    // initialize data to use in thread
    tradeStartTime = DateTime.Now;
    lastOtherActionTime = DateTime.Now;
    // Seed far in the past so the first AFK warning is never rate-limited.
    lastTimeoutMessage = DateTime.Now.AddSeconds(-1000);
    var pollThread = new Thread (() =>
    {
        IsTradeThreadRunning = true;
        DebugPrint ("Trade thread starting.");
        // main thread loop for polling
        try
        {
            while(IsTradeThreadRunning)
            {
                // Poll() returns true when the partner performed an action,
                // which resets the per-action timeout window.
                bool action = trade.Poll();
                if(action)
                    lastOtherActionTime = DateTime.Now;
                if (trade.HasTradeEnded || CheckTradeTimeout(trade))
                {
                    IsTradeThreadRunning = false;
                    break;
                }
                Thread.Sleep(TradePollingInterval);
            }
        }
        catch(Exception ex)
        {
            // TODO: find a new way to do this w/o the trade events
            //if (OnError != null)
            //    OnError("Error Polling Trade: " + e);
            // ok then we should stop polling...
            IsTradeThreadRunning = false;
            DebugPrint("[TRADEMANAGER] general error caught: " + ex);
            trade.FireOnErrorEvent("Unknown error occurred: " + ex.ToString());
        }
        finally
        {
            DebugPrint("Trade thread shutting down.");
            try
            {
                try //Yikes, that's a lot of nested 'try's. Is there some way to clean this up?
                {
                    if(trade.HasTradeCompletedOk)
                        trade.FireOnSuccessEvent();
                    else if(trade.IsTradeAwaitingEmailConfirmation)
                        trade.FireOnAwaitingEmailConfirmation();
                }
                finally
                {
                    //Make sure OnClose is always fired after OnSuccess, even if OnSuccess throws an exception
                    //(which it NEVER should, but...)
                    trade.FireOnCloseEvent();
                }
            }
            catch(Exception ex)
            {
                trade.FireOnErrorEvent("Unknown error occurred DURING CLEANUP(!?): " + ex.ToString());
            }
        }
    });
    pollThread.Start();
}
/// <summary>
/// Checks whether the trade has exceeded either the per-action gap
/// (<c>MaxActionGapSec</c>) or the overall trade limit (<c>MaxTradeTimeSec</c>),
/// warning the partner shortly before cancelling.
/// </summary>
/// <param name="trade">The trade being polled.</param>
/// <returns><c>true</c> if the trade timed out and was cancelled; otherwise <c>false</c>.</returns>
private bool CheckTradeTimeout (Trade trade)
{
    // User has accepted the trade. Disregard time out.
    if (trade.OtherUserAccepted)
        return false;

    var now = DateTime.Now;

    DateTime actionTimeout = lastOtherActionTime.AddSeconds (MaxActionGapSec);
    int untilActionTimeout = (int)Math.Round ((actionTimeout - now).TotalSeconds);

    DebugPrint (String.Format ("{0} {1}", actionTimeout, untilActionTimeout));

    DateTime tradeTimeout = tradeStartTime.AddSeconds (MaxTradeTimeSec);
    int untilTradeTimeout = (int)Math.Round ((tradeTimeout - now).TotalSeconds);

    double secsSinceLastTimeoutMessage = (now - lastTimeoutMessage).TotalSeconds;

    if (untilActionTimeout <= 0 || untilTradeTimeout <= 0)
    {
        DebugPrint ("timed out...");

        if (OnTimeout != null)
        {
            // Fix: pass EventArgs.Empty instead of null so handlers that touch
            // the args argument don't hit a NullReferenceException.
            OnTimeout (this, EventArgs.Empty);
        }

        trade.CancelTrade ();
        return true;
    }
    else if (untilActionTimeout <= 20 && secsSinceLastTimeoutMessage >= 10)
    {
        // Warn the partner; best-effort — a failed chat message must not
        // kill the poll loop, hence the deliberate empty catch.
        try
        {
            trade.SendMessage("Are You AFK? The trade will be canceled in " + untilActionTimeout + " seconds if you don't do something.");
        }
        catch { }
        lastTimeoutMessage = now;
    }
    return false;
}
/// <summary>
/// Debug-only console logger; calls compile away unless DEBUG_TRADE_MANAGER is defined.
/// </summary>
/// <param name="output">The message to print.</param>
[Conditional ("DEBUG_TRADE_MANAGER")]
private static void DebugPrint (string output)
{
    // I don't really want to add the Logger as a dependecy to TradeManager so I
    // print using the console directly. To enable this for debugging put this:
    // #define DEBUG_TRADE_MANAGER
    // at the first line of this file.
    System.Console.WriteLine (output);
}
}
}
| |
//
// Copyright (C) DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Generic;
using System.Linq;
using Cassandra.DataStax.Search;
using Cassandra.IntegrationTests.TestBase;
using Cassandra.Tests;
using Newtonsoft.Json;
using NUnit.Framework;
namespace Cassandra.IntegrationTests.DataStax.Search
{
[Category(TestCategory.Short), TestDseVersion(5, 1), Category(TestCategory.ServerApi)]
public class DateRangeTests : SharedClusterTest
{
    // Sample DateRange literals covering min/max supported bounds, open bounds
    // ("*"), fully unbounded ranges, and precisions from year down to millisecond.
    private static readonly string[] Values = new[]
    {
        "[0001-01-01T00:00:00.000Z TO 9999-12-31T23:59:59.999Z]", // min TO max dates supported
        "[0021-04-20T01 TO 0077-10-13T02]", // 4 digits years
        "2010",
        "[2017-02 TO *]",
        "[1 TO 2]",
        "[1200 TO 2017-07-04T16]",
        "[2015-02 TO 2016-02]",
        "[2016-10-01T08 TO *]",
        "[* TO 2016-03-01T16:56:39.999]",
        "[2016-03-01T16:56 TO *]",
        "[* TO *]",
        "*"
    };

    // Schema created once for the fixture: a plain 'DateRangeType' column, the
    // type nested in a UDT and tuples, in collections, and as a primary key.
    protected override string[] SetupQueries
    {
        get
        {
            return new[]
            {
                "CREATE TABLE tbl_date_range (pk uuid PRIMARY KEY, c1 'DateRangeType')",
                "INSERT INTO tbl_date_range (pk, c1) VALUES (uuid(), '[2010-12-03 TO 2010-12-04]')",
                "CREATE TYPE IF NOT EXISTS test_udt (i int, range 'DateRangeType')",
                "CREATE TABLE tbl_udt_tuple (k uuid PRIMARY KEY, u test_udt, uf frozen<test_udt>," +
                " t tuple<'DateRangeType', int>, tf frozen<tuple<'DateRangeType', int>>)",
                "CREATE TABLE tbl_collection (k uuid PRIMARY KEY, l list<'DateRangeType'>," +
                " s set<'DateRangeType'>, m0 map<text, 'DateRangeType'>, m1 map<'DateRangeType', text>)",
                "CREATE TABLE tbl_date_range_pk (k 'DateRangeType' PRIMARY KEY, v int)"
            };
        }
    }

    /// <summary>
    /// Round-trips every sample literal through insert/select and checks both
    /// value equality and the string representation.
    /// </summary>
    [Test]
    public void Should_Deserialize_And_Serialize_DateRange()
    {
        const string insertQuery = "INSERT INTO tbl_date_range (pk, c1) VALUES (?, ?)";
        const string selectQuery = "SELECT pk, c1 FROM tbl_date_range WHERE pk = ?";
        foreach (var stringValue in DateRangeTests.Values)
        {
            var id = Guid.NewGuid();
            var value = DateRange.Parse(stringValue);
            Session.Execute(new SimpleStatement(insertQuery, id, value));
            var rs = Session.Execute(new SimpleStatement(selectQuery, id));
            var row = rs.First();
            Assert.AreEqual(value, row.GetValue<DateRange>("c1"));
            Assert.AreEqual(value.ToString(), row.GetValue<DateRange>("c1").ToString());
        }
    }

    /// <summary>
    /// Test if the driver throws Exception when using a wrong order of dates in DataRange
    /// </summary>
    [Test]
    public void Should_Disallow_Invalid_DateRange_Order()
    {
        const string insertQuery = "INSERT INTO tbl_date_range (pk, c1) VALUES (?, ?)";
        var id = Guid.NewGuid();
        // Bounds intentionally reversed: the client parses it, the server rejects it.
        var value = DateRange.Parse("[0077-10-13T02 TO 0021-04-20T01]");
        //should throw ServerErrorException :
        // java.lang.IllegalArgumentException: Wrong order: 0077-10-13T02 TO 0021-04-20T01
        Assert.Throws<ServerErrorException>(() => Session.Execute(new SimpleStatement(insertQuery, id, value)),
            "java.lang.IllegalArgumentException: Wrong order: 0077-10-13T02 TO 0021-04-20T01");
    }

    /// <summary>
    /// Test if the DateRange returned in a JSON is parsable
    /// </summary>
    [Test]
    public void Should_Parse_DateRange_In_JSON()
    {
        const string insertQuery = "INSERT INTO tbl_date_range (pk, c1) VALUES (?, ?)";
        const string selectQuery = "SELECT JSON c1 FROM tbl_date_range WHERE pk = ?";
        foreach (var stringValue in DateRangeTests.Values)
        {
            var id = Guid.NewGuid();
            var value = DateRange.Parse(stringValue);
            Session.Execute(new SimpleStatement(insertQuery, id, value));
            var rs = Session.Execute(new SimpleStatement(selectQuery, id));
            var row = rs.First();
            // SELECT JSON exposes the whole row as a single "[json]" column.
            var jsonString = row.GetValue<string>("[json]");
            dynamic dynamicJsonObj = JsonConvert.DeserializeObject(jsonString);
            Assert.AreEqual(value, DateRange.Parse(dynamicJsonObj.c1.ToString()));
        }
    }

    /// <summary>
    /// Round-trips a DateRange nested inside a UDT and inside tuples
    /// (both frozen and non-frozen columns).
    /// </summary>
    [Test]
    public void Should_Allow_DataRange_In_Udt_And_Tuple()
    {
        const string insertQuery = "INSERT INTO tbl_udt_tuple (k, u, uf, t, tf) VALUES (?,?,?,?,?)";
        const string selectQuery = "SELECT * FROM tbl_udt_tuple WHERE k = ?";
        var id = Guid.NewGuid();
        var dtExpected = DateRange.Parse("[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]");
        Session.UserDefinedTypes.Define(
            UdtMap.For<UdtDataRange>("test_udt")
                  .Map(v => v.Id, "i")
                  .Map(v => v.DateRange, "range")
        );
        var udtRangeValue = new UdtDataRange
        {
            Id = 1,
            DateRange = dtExpected
        };
        var tuple1 = new Tuple<DateRange, int>(dtExpected, 30);
        var tuple2 = new Tuple<DateRange, int>(dtExpected, 40);
        Session.Execute(new SimpleStatement(insertQuery, id, udtRangeValue, udtRangeValue, tuple1, tuple2));
        var rs = Session.Execute(new SimpleStatement(selectQuery, id));
        var row = rs.First();
        Assert.AreEqual(udtRangeValue, row.GetValue<UdtDataRange>("u"));
        Assert.AreEqual(udtRangeValue, row.GetValue<UdtDataRange>("uf"));
        Assert.AreEqual(tuple1, row.GetValue<Tuple<DateRange, int>>("t"));
        Assert.AreEqual(tuple2, row.GetValue<Tuple<DateRange, int>>("tf"));
    }

    /// <summary>
    /// Round-trips DateRange values inside list, set and both map orientations.
    /// </summary>
    [Test]
    public void Should_Allow_DataRange_In_Collections()
    {
        const string insertQuery = "INSERT INTO tbl_collection (k, l, s, m0, m1) VALUES (?,?,?,?,?)";
        const string selectQuery = "SELECT * FROM tbl_collection WHERE k = ?";
        var id = Guid.NewGuid();
        var dtExpected = DateRange.Parse("[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]");
        var dtExpected2 = DateRange.Parse("[0021-04-20T01 TO 0077-10-13T02]");
        var set = new HashSet<DateRange> { dtExpected, dtExpected2 };
        var list = new List<DateRange> { dtExpected, dtExpected2 };
        var map = new Dictionary<string, DateRange>();
        var mapReverse = new Dictionary<DateRange, string>();
        map.Add("key", dtExpected);
        mapReverse.Add(dtExpected, "value");
        Session.Execute(new SimpleStatement(insertQuery, id, list, set, map, mapReverse));
        var rs = Session.Execute(new SimpleStatement(selectQuery, id));
        var row = rs.First();
        Assert.AreEqual(id, row.GetValue<Guid>("k"));
        Assert.AreEqual(list, row.GetValue<List<DateRange>>("l"));
        Assert.AreEqual(set, row.GetValue<HashSet<DateRange>>("s"));
        Assert.AreEqual(map, row.GetValue<IDictionary<string, DateRange>>("m0"));
        Assert.AreEqual(mapReverse, row.GetValue<IDictionary<DateRange, string>>("m1"));
    }

    /// <summary>
    /// Uses a DateRange column as the partition key for insert and lookup.
    /// </summary>
    [Test]
    public void Should_Allow_DataRange_In_as_Primary_Key()
    {
        //CREATE TABLE tbl_date_range_pk (k 'DateRangeType' PRIMARY KEY, v int)
        var dtExpected = DateRange.Parse("[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]");
        const string insertQuery = "INSERT INTO tbl_date_range_pk (k, v) VALUES (?,?)";
        const string selectQuery = "SELECT * FROM tbl_date_range_pk WHERE k = ?";
        Session.Execute(new SimpleStatement(insertQuery, dtExpected, 1));
        var rs = Session.Execute(new SimpleStatement(selectQuery, dtExpected));
        var row = rs.First();
        Assert.AreEqual(dtExpected, row.GetValue<DateRange>("k"));
        Assert.AreEqual(1, row.GetValue<int>("v"));
    }

    /// <summary>
    /// Round-trips a DateRange bound to prepared insert and select statements.
    /// </summary>
    [Test]
    public void Should_Allow_DataRange_In_Prepared_Statements()
    {
        //"INSERT INTO tbl_date_range (pk, c1) VALUES (uuid(), '[2010-12-03 TO 2010-12-04]')"
        var dtExpected = DateRange.Parse("[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]");
        var id = Guid.NewGuid();
        const string insertQuery = "INSERT INTO tbl_date_range (pk, c1) VALUES (?,?)";
        const string selectQuery = "SELECT * FROM tbl_date_range WHERE pk = ?";
        var preparedStatement = Session.Prepare(insertQuery);
        var preparedSelectStatement = Session.Prepare(selectQuery);
        Session.Execute(preparedStatement.Bind(id, dtExpected));
        var rs = Session.Execute(preparedSelectStatement.Bind(id));
        var row = rs.First();
        Assert.AreEqual(id, row.GetValue<Guid>("pk"));
        Assert.AreEqual(dtExpected, row.GetValue<DateRange>("c1"));
    }
}
/// <summary>
/// Test POCO mapped to the test_udt user-defined type (an int plus a
/// 'DateRangeType' field); see Should_Allow_DataRange_In_Udt_And_Tuple.
/// </summary>
internal class UdtDataRange
{
    public int Id { get; set; }

    public DateRange DateRange { get; set; }

    /// <summary>
    /// Value equality over both mapped fields.
    /// </summary>
    public override bool Equals(object obj)
    {
        if (!(obj is UdtDataRange))
        {
            return false;
        }
        var dataRange = (UdtDataRange)obj;
        return dataRange.Id == this.Id &&
               dataRange.DateRange == this.DateRange;
    }

    /// <summary>
    /// Hash code combining the same fields <see cref="Equals(object)"/> compares.
    /// </summary>
    /// <remarks>
    /// Bug fix: this previously returned <c>base.GetHashCode()</c>, which breaks
    /// the Equals/GetHashCode contract — equal instances could hash differently,
    /// corrupting hash-based collections keyed by this type.
    /// </remarks>
    public override int GetHashCode()
    {
        unchecked
        {
            var hash = 17;
            hash = hash * 31 + Id;
            // EqualityComparer handles a null DateRange safely.
            hash = hash * 31 + EqualityComparer<DateRange>.Default.GetHashCode(DateRange);
            return hash;
        }
    }
}
}
| |
using Lucene.Net.Support;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Lucene.Net.Codecs.Lucene40
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
using Directory = Lucene.Net.Store.Directory;
using FieldInfo = Lucene.Net.Index.FieldInfo;
using FieldInfos = Lucene.Net.Index.FieldInfos;
using IndexFileNames = Lucene.Net.Index.IndexFileNames;
using IndexInput = Lucene.Net.Store.IndexInput;
using IOContext = Lucene.Net.Store.IOContext;
using IOUtils = Lucene.Net.Util.IOUtils;
using SegmentInfo = Lucene.Net.Index.SegmentInfo;
using StoredFieldVisitor = Lucene.Net.Index.StoredFieldVisitor;
/// <summary>
/// Class responsible for access to stored document fields.
/// <para/>
/// It uses <segment>.fdt and <segment>.fdx; files.
/// <para/>
/// @lucene.internal
/// </summary>
/// <seealso cref="Lucene40StoredFieldsFormat"/>
public sealed class Lucene40StoredFieldsReader : StoredFieldsReader, IDisposable
#if FEATURE_CLONEABLE
    , System.ICloneable
#endif
{
    private readonly FieldInfos fieldInfos;
    // Data stream (.fdt): the stored field values.
    private readonly IndexInput fieldsStream;
    // Index stream (.fdx): one 8-byte pointer into .fdt per document.
    private readonly IndexInput indexStream;
    private int numTotalDocs;
    private int size;
    private bool closed;

    /// <summary>
    /// Returns a cloned FieldsReader that shares open
    /// <see cref="IndexInput"/>s with the original one. It is the caller's
    /// job not to dispose the original FieldsReader until all
    /// clones are called (eg, currently <see cref="Index.SegmentReader"/> manages
    /// this logic).
    /// </summary>
    public override object Clone()
    {
        EnsureOpen();
        return new Lucene40StoredFieldsReader(fieldInfos, numTotalDocs, size, (IndexInput)fieldsStream.Clone(), (IndexInput)indexStream.Clone());
    }

    /// <summary>
    /// Used only by clone. </summary>
    private Lucene40StoredFieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, IndexInput fieldsStream, IndexInput indexStream)
    {
        this.fieldInfos = fieldInfos;
        this.numTotalDocs = numTotalDocs;
        this.size = size;
        this.fieldsStream = fieldsStream;
        this.indexStream = indexStream;
    }

    /// <summary>
    /// Sole constructor. Opens the segment's .fdt/.fdx files, validates their
    /// codec headers, and cross-checks the doc count against the SegmentInfo. </summary>
    /// <exception cref="CorruptIndexException">if the index doc count disagrees with <paramref name="si"/>.</exception>
    public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
    {
        string segment = si.Name;
        bool success = false;
        fieldInfos = fn;
        try
        {
            fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
            string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
            indexStream = d.OpenInput(indexStreamFN, context);

            CodecUtil.CheckHeader(indexStream, Lucene40StoredFieldsWriter.CODEC_NAME_IDX, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
            CodecUtil.CheckHeader(fieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
            Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
            Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer());
            long indexSize = indexStream.Length - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX;
            // Each index entry is one 8-byte long, hence the >> 3.
            this.size = (int)(indexSize >> 3);
            // Verify two sources of "maxDoc" agree:
            if (this.size != si.DocCount)
            {
                throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.size + " but segmentInfo shows " + si.DocCount);
            }
            numTotalDocs = (int)(indexSize >> 3);
            success = true;
        }
        finally
        {
            // With lock-less commits, it's entirely possible (and
            // fine) to hit a FileNotFound exception above. In
            // this case, we want to explicitly close any subset
            // of things that were opened so that we don't have to
            // wait for a GC to do so.
            if (!success)
            {
                try
                {
                    Dispose();
                } // ensure we throw our original exception
                catch (Exception)
                {
                }
            }
        }
    }

    /// <exception cref="ObjectDisposedException"> if this FieldsReader is disposed. </exception>
    private void EnsureOpen()
    {
        if (closed)
        {
            throw new ObjectDisposedException(this.GetType().FullName, "this FieldsReader is closed");
        }
    }

    /// <summary>
    /// Closes the underlying <see cref="Lucene.Net.Store.IndexInput"/> streams.
    /// This means that the <see cref="Index.Fields"/> values will not be accessible.
    /// </summary>
    /// <exception cref="System.IO.IOException"> If an I/O error occurs. </exception>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            if (!closed)
            {
                IOUtils.Dispose(fieldsStream, indexStream);
                closed = true;
            }
        }
    }

    /// <summary>
    /// Returns number of documents.
    /// <para/>
    /// NOTE: This was size() in Lucene.
    /// </summary>
    public int Count
    {
        get { return size; }
    }

    // Positions the index stream at the 8-byte pointer entry for docID.
    private void SeekIndex(int docID)
    {
        indexStream.Seek(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX + docID * 8L);
    }

    /// <summary>
    /// Reads the stored fields of document <paramref name="n"/>, asking
    /// <paramref name="visitor"/> per field whether to read, skip, or stop.
    /// </summary>
    public override void VisitDocument(int n, StoredFieldVisitor visitor)
    {
        SeekIndex(n);
        fieldsStream.Seek(indexStream.ReadInt64());

        int numFields = fieldsStream.ReadVInt32();
        for (int fieldIDX = 0; fieldIDX < numFields; fieldIDX++)
        {
            int fieldNumber = fieldsStream.ReadVInt32();
            FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);

            // The bits byte encodes binary-vs-string and the numeric subtype.
            int bits = fieldsStream.ReadByte() & 0xFF;
            Debug.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), "bits=" + bits.ToString("x"));

            switch (visitor.NeedsField(fieldInfo))
            {
                case StoredFieldVisitor.Status.YES:
                    ReadField(visitor, fieldInfo, bits);
                    break;

                case StoredFieldVisitor.Status.NO:
                    // Must still consume the field bytes to stay aligned.
                    SkipField(bits);
                    break;

                case StoredFieldVisitor.Status.STOP:
                    return;
            }
        }
    }

    // Decodes one field value according to its bits flags and hands it to the visitor.
    private void ReadField(StoredFieldVisitor visitor, FieldInfo info, int bits)
    {
        int numeric = bits & Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK;
        if (numeric != 0)
        {
            switch (numeric)
            {
                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_INT:
                    visitor.Int32Field(info, fieldsStream.ReadInt32());
                    return;

                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_LONG:
                    visitor.Int64Field(info, fieldsStream.ReadInt64());
                    return;

                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_FLOAT:
                    visitor.SingleField(info, Number.Int32BitsToSingle(fieldsStream.ReadInt32()));
                    return;

                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_DOUBLE:
                    visitor.DoubleField(info, BitConverter.Int64BitsToDouble(fieldsStream.ReadInt64()));
                    return;

                default:
                    throw new CorruptIndexException("Invalid numeric type: " + numeric.ToString("x"));
            }
        }
        else
        {
            // Non-numeric fields are stored as a VInt length followed by raw bytes.
            int length = fieldsStream.ReadVInt32();
            var bytes = new byte[length];
            fieldsStream.ReadBytes(bytes, 0, length);
            if ((bits & Lucene40StoredFieldsWriter.FIELD_IS_BINARY) != 0)
            {
                visitor.BinaryField(info, bytes);
            }
            else
            {
#pragma warning disable 612, 618
                visitor.StringField(info, IOUtils.CHARSET_UTF_8.GetString(bytes));
#pragma warning restore 612, 618
            }
        }
    }

    // Advances the fields stream past one field value without decoding it.
    private void SkipField(int bits)
    {
        int numeric = bits & Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK;
        if (numeric != 0)
        {
            switch (numeric)
            {
                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_INT:
                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_FLOAT:
                    fieldsStream.ReadInt32();
                    return;

                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_LONG:
                case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_DOUBLE:
                    fieldsStream.ReadInt64();
                    return;

                default:
                    throw new CorruptIndexException("Invalid numeric type: " + numeric.ToString("x"));
            }
        }
        else
        {
            int length = fieldsStream.ReadVInt32();
            fieldsStream.Seek(fieldsStream.GetFilePointer() + length);
        }
    }

    /// <summary>
    /// Returns the length in bytes of each raw document in a
    /// contiguous range of length <paramref name="numDocs"/> starting with
    /// <paramref name="startDocID"/>. Returns the <see cref="IndexInput"/> (the fieldStream),
    /// already seeked to the starting point for <paramref name="startDocID"/>.
    /// </summary>
    public IndexInput RawDocs(int[] lengths, int startDocID, int numDocs)
    {
        SeekIndex(startDocID);
        long startOffset = indexStream.ReadInt64();
        long lastOffset = startOffset;
        int count = 0;
        while (count < numDocs)
        {
            long offset;
            int docID = startDocID + count + 1;
            Debug.Assert(docID <= numTotalDocs);
            if (docID < numTotalDocs)
            {
                offset = indexStream.ReadInt64();
            }
            else
            {
                // Last document: its length runs to the end of the data file.
                offset = fieldsStream.Length;
            }
            lengths[count++] = (int)(offset - lastOffset);
            lastOffset = offset;
        }

        fieldsStream.Seek(startOffset);

        return fieldsStream;
    }

    public override long RamBytesUsed()
    {
        // Nothing is buffered in heap memory beyond the streams themselves.
        return 0;
    }

    public override void CheckIntegrity()
    {
    }
}
}
| |
using System;
using UnityEngine.Assertions;
namespace UnityEngine.Rendering.PostProcessing
{
/// <summary>
/// Quality presets for <see cref="ScreenSpaceReflections"/>; <c>Custom</c>
/// leaves the individual settings under user control.
/// Do not reorder: the numeric values index into the renderer's preset table.
/// </summary>
public enum ScreenSpaceReflectionPreset
{
    Lower, Low, Medium, High, Higher, Ultra, Overkill, Custom
}
/// <summary>
/// Size of the SSR buffer relative to the screen.
/// </summary>
public enum ScreenSpaceReflectionResolution
{
    // Half size: fastest, lowest quality.
    Downsampled,
    // Same size as the source.
    FullSize,
    // Double size: slowest, highest quality.
    Supersampled
}
/// <summary>
/// Overridable volume parameter holding a <see cref="ScreenSpaceReflectionPreset"/>.
/// </summary>
[Serializable]
public sealed class ScreenSpaceReflectionPresetParameter : ParameterOverride<ScreenSpaceReflectionPreset> { }
/// <summary>
/// Overridable volume parameter holding a <see cref="ScreenSpaceReflectionResolution"/>.
/// </summary>
[Serializable]
public sealed class ScreenSpaceReflectionResolutionParameter : ParameterOverride<ScreenSpaceReflectionResolution> { }
/// <summary>
/// Settings for the screen-space reflections effect. When <see cref="preset"/>
/// is not <c>Custom</c>, the renderer overwrites the tunable values from its
/// preset table each frame.
/// </summary>
[Serializable]
[PostProcess(typeof(ScreenSpaceReflectionsRenderer), "Unity/Screen-space reflections")]
public sealed class ScreenSpaceReflections : PostProcessEffectSettings
{
    [Tooltip("Choose a quality preset, or use \"Custom\" to fine tune it. Don't use a preset higher than \"Medium\" if you care about performances on consoles.")]
    public ScreenSpaceReflectionPresetParameter preset = new ScreenSpaceReflectionPresetParameter { value = ScreenSpaceReflectionPreset.Medium };

    [Range(0, 256), Tooltip("Maximum iteration count.")]
    public IntParameter maximumIterationCount = new IntParameter { value = 16 };

    [Tooltip("Changes the size of the SSR buffer. Downsample it to maximize performances or supersample it to get slow but higher quality results.")]
    public ScreenSpaceReflectionResolutionParameter resolution = new ScreenSpaceReflectionResolutionParameter { value = ScreenSpaceReflectionResolution.Downsampled };

    [Range(1f, 64f), Tooltip("Ray thickness. Lower values are more expensive but allow the effect to detect smaller details.")]
    public FloatParameter thickness = new FloatParameter { value = 8f };

    [Tooltip("Maximum distance to traverse after which it will stop drawing reflections.")]
    public FloatParameter maximumMarchDistance = new FloatParameter { value = 100f };

    [Range(0f, 1f), Tooltip("Fades reflections close to the near planes.")]
    public FloatParameter distanceFade = new FloatParameter { value = 0.5f };

    [Range(0f, 1f), Tooltip("Fades reflections close to the screen edges.")]
    public FloatParameter vignette = new FloatParameter { value = 0.5f };

    /// <summary>
    /// The effect requires deferred shading, motion vectors, compute shaders,
    /// texture copy support, and its shader/compute resources to be present.
    /// </summary>
    public override bool IsEnabledAndSupported(PostProcessRenderContext context)
    {
        return enabled
            && context.camera.actualRenderingPath == RenderingPath.DeferredShading
            && SystemInfo.supportsMotionVectors
            && SystemInfo.supportsComputeShaders
            && SystemInfo.copyTextureSupport > CopyTextureSupport.None
            && context.resources.shaders.screenSpaceReflections
            && context.resources.shaders.screenSpaceReflections.isSupported
            && context.resources.computeShaders.gaussianDownsample;
    }
}
/// <summary>
/// Renderer for <see cref="ScreenSpaceReflections"/>: ray-marches reflections,
/// temporally reprojects against a history buffer, builds a gaussian mip
/// pyramid on the resolve target, and composites onto the destination.
/// </summary>
public sealed class ScreenSpaceReflectionsRenderer : PostProcessEffectRenderer<ScreenSpaceReflections>
{
    // Persistent resolve target (with mips, used as the blur pyramid).
    RenderTexture m_Resolve;
    // Previous-frame resolve, used for temporal reprojection.
    RenderTexture m_History;
    // Cached shader property IDs for the pyramid mip temporaries.
    int[] m_MipIDs;

    // One row of the quality preset table indexed by ScreenSpaceReflectionPreset.
    class QualityPreset
    {
        public int maximumIterationCount;
        public float thickness;
        public ScreenSpaceReflectionResolution downsampling;
    }

    // Indexed by (int)ScreenSpaceReflectionPreset; Custom has no entry.
    readonly QualityPreset[] m_Presets =
    {
        new QualityPreset { maximumIterationCount = 10, thickness = 32, downsampling = ScreenSpaceReflectionResolution.Downsampled }, // Lower
        new QualityPreset { maximumIterationCount = 16, thickness = 32, downsampling = ScreenSpaceReflectionResolution.Downsampled }, // Low
        new QualityPreset { maximumIterationCount = 32, thickness = 16, downsampling = ScreenSpaceReflectionResolution.Downsampled }, // Medium
        new QualityPreset { maximumIterationCount = 48, thickness = 8,  downsampling = ScreenSpaceReflectionResolution.Downsampled }, // High
        new QualityPreset { maximumIterationCount = 16, thickness = 32, downsampling = ScreenSpaceReflectionResolution.FullSize }, // Higher
        new QualityPreset { maximumIterationCount = 48, thickness = 16, downsampling = ScreenSpaceReflectionResolution.FullSize }, // Ultra
        new QualityPreset { maximumIterationCount = 128, thickness = 12, downsampling = ScreenSpaceReflectionResolution.Supersampled }, // Overkill
    };

    // Shader pass indices; must match the pass order in the SSR shader.
    enum Pass
    {
        Test,
        Resolve,
        Reproject,
        Composite
    }

    public override DepthTextureMode GetCameraFlags()
    {
        // Ray marching needs depth; reprojection needs motion vectors.
        return DepthTextureMode.Depth | DepthTextureMode.MotionVectors;
    }

    // (Re)allocates rt when missing or when its dimensions changed.
    internal void CheckRT(ref RenderTexture rt, int width, int height, RenderTextureFormat format, FilterMode filterMode, bool useMipMap)
    {
        if (rt == null || !rt.IsCreated() || rt.width != width || rt.height != height)
        {
            if (rt != null)
                rt.Release();

            rt = new RenderTexture(width, height, 0, format)
            {
                filterMode = filterMode,
                useMipMap = useMipMap,
                autoGenerateMips = false,
                hideFlags = HideFlags.HideAndDontSave
            };

            rt.Create();
        }
    }

    public override void Render(PostProcessRenderContext context)
    {
        var cmd = context.command;
        cmd.BeginSample("Screen-space Reflections");

        // Get quality settings
        if (settings.preset.value != ScreenSpaceReflectionPreset.Custom)
        {
            int id = (int)settings.preset.value;
            settings.maximumIterationCount.value = m_Presets[id].maximumIterationCount;
            settings.thickness.value = m_Presets[id].thickness;
            settings.resolution.value = m_Presets[id].downsampling;
        }

        settings.maximumMarchDistance.value = Mathf.Max(0f, settings.maximumMarchDistance.value);

        // Square POT target
        int size = Mathf.ClosestPowerOfTwo(Mathf.Min(context.width, context.height));

        if (settings.resolution.value == ScreenSpaceReflectionResolution.Downsampled)
            size >>= 1;
        else if (settings.resolution.value == ScreenSpaceReflectionResolution.Supersampled)
            size <<= 1;

        // The gaussian pyramid compute works in blocks of 8x8 so make sure the last lod has a
        // minimum size of 8x8
        const int kMaxLods = 12;
        int lodCount = Mathf.FloorToInt(Mathf.Log(size, 2f) - 3f);
        lodCount = Mathf.Min(lodCount, kMaxLods);

        CheckRT(ref m_Resolve, size, size, context.sourceFormat, FilterMode.Trilinear, true);

        var noiseTex = context.resources.blueNoise256[0];
        var sheet = context.propertySheets.Get(context.resources.shaders.screenSpaceReflections);
        sheet.properties.SetTexture(ShaderIDs.Noise, noiseTex);

        // Maps clip space to SSR-buffer pixel coordinates.
        var screenSpaceProjectionMatrix = new Matrix4x4();
        screenSpaceProjectionMatrix.SetRow(0, new Vector4(size * 0.5f, 0f, 0f, size * 0.5f));
        screenSpaceProjectionMatrix.SetRow(1, new Vector4(0f, size * 0.5f, 0f, size * 0.5f));
        screenSpaceProjectionMatrix.SetRow(2, new Vector4(0f, 0f, 1f, 0f));
        screenSpaceProjectionMatrix.SetRow(3, new Vector4(0f, 0f, 0f, 1f));

        var projectionMatrix = GL.GetGPUProjectionMatrix(context.camera.projectionMatrix, false);
        screenSpaceProjectionMatrix *= projectionMatrix;

        sheet.properties.SetMatrix(ShaderIDs.ViewMatrix, context.camera.worldToCameraMatrix);
        sheet.properties.SetMatrix(ShaderIDs.InverseViewMatrix, context.camera.worldToCameraMatrix.inverse);
        sheet.properties.SetMatrix(ShaderIDs.InverseProjectionMatrix, projectionMatrix.inverse);
        sheet.properties.SetMatrix(ShaderIDs.ScreenSpaceProjectionMatrix, screenSpaceProjectionMatrix);
        sheet.properties.SetVector(ShaderIDs.Params, new Vector4((float)settings.vignette.value, settings.distanceFade.value, settings.maximumMarchDistance.value, lodCount));
        sheet.properties.SetVector(ShaderIDs.Params2, new Vector4((float)context.width / (float)context.height, (float)size / (float)noiseTex.width, settings.thickness.value, settings.maximumIterationCount.value));

        // Ray-march pass into a temporary hit-test buffer.
        cmd.GetTemporaryRT(ShaderIDs.Test, size, size, 0, FilterMode.Point, context.sourceFormat);
        cmd.BlitFullscreenTriangle(context.source, ShaderIDs.Test, sheet, (int)Pass.Test);

        if (context.isSceneView)
        {
            // No temporal history in the scene view: resolve directly.
            cmd.BlitFullscreenTriangle(context.source, m_Resolve, sheet, (int)Pass.Resolve);
        }
        else
        {
            CheckRT(ref m_History, size, size, context.sourceFormat, FilterMode.Bilinear, false);

            if (m_ResetHistory)
            {
                context.command.BlitFullscreenTriangle(context.source, m_History);
                m_ResetHistory = false;
            }

            // Resolve, then blend with last frame's result, then save it as
            // the new history for next frame.
            cmd.GetTemporaryRT(ShaderIDs.SSRResolveTemp, size, size, 0, FilterMode.Bilinear, context.sourceFormat);
            cmd.BlitFullscreenTriangle(context.source, ShaderIDs.SSRResolveTemp, sheet, (int)Pass.Resolve);

            sheet.properties.SetTexture(ShaderIDs.History, m_History);
            cmd.BlitFullscreenTriangle(ShaderIDs.SSRResolveTemp, m_Resolve, sheet, (int)Pass.Reproject);

            cmd.CopyTexture(m_Resolve, 0, 0, m_History, 0, 0);

            cmd.ReleaseTemporaryRT(ShaderIDs.SSRResolveTemp);
        }

        cmd.ReleaseTemporaryRT(ShaderIDs.Test);

        // Pre-cache mipmaps ids
        if (m_MipIDs == null || m_MipIDs.Length == 0)
        {
            m_MipIDs = new int[kMaxLods];

            for (int i = 0; i < kMaxLods; i++)
                m_MipIDs[i] = Shader.PropertyToID("_SSRGaussianMip" + i);
        }

        var compute = context.resources.computeShaders.gaussianDownsample;
        int kernel = compute.FindKernel("KMain");

        var last = new RenderTargetIdentifier(m_Resolve);

        // Progressively downsample into each mip of m_Resolve.
        for (int i = 0; i < lodCount; i++)
        {
            size >>= 1;
            Assert.IsTrue(size > 0);

            cmd.GetTemporaryRT(m_MipIDs[i], size, size, 0, FilterMode.Bilinear, context.sourceFormat, RenderTextureReadWrite.Default, 1, true);
            cmd.SetComputeTextureParam(compute, kernel, "_Source", last);
            cmd.SetComputeTextureParam(compute, kernel, "_Result", m_MipIDs[i]);
            cmd.SetComputeVectorParam(compute, "_Size", new Vector4(size, size, 1f / size, 1f / size));
            cmd.DispatchCompute(compute, kernel, size / 8, size / 8, 1);
            cmd.CopyTexture(m_MipIDs[i], 0, 0, m_Resolve, 0, i + 1);

            last = m_MipIDs[i];
        }

        for (int i = 0; i < lodCount; i++)
            cmd.ReleaseTemporaryRT(m_MipIDs[i]);

        sheet.properties.SetTexture(ShaderIDs.Resolve, m_Resolve);
        cmd.BlitFullscreenTriangle(context.source, context.destination, sheet, (int)Pass.Composite);
        cmd.EndSample("Screen-space Reflections");
    }

    public override void Release()
    {
        RuntimeUtilities.Destroy(m_Resolve);
        RuntimeUtilities.Destroy(m_History);
        m_Resolve = null;
        m_History = null;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.DataLake.Store
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure.OData;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for AccountOperations.
/// </summary>
public static partial class AccountOperationsExtensions
{
/// <summary>
/// Deletes the specified firewall rule from the specified Data Lake Store
/// account
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account from which to delete the firewall
/// rule.
/// </param>
/// <param name='firewallRuleName'>
/// The name of the firewall rule to delete.
/// </param>
public static void DeleteFirewallRule(this IAccountOperations operations, string resourceGroupName, string accountName, string firewallRuleName)
{
Task.Factory.StartNew(s => ((IAccountOperations)s).DeleteFirewallRuleAsync(resourceGroupName, accountName, firewallRuleName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the specified firewall rule from the specified Data Lake Store
/// account
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account from which to delete the firewall
/// rule.
/// </param>
/// <param name='firewallRuleName'>
/// The name of the firewall rule to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteFirewallRuleAsync(this IAccountOperations operations, string resourceGroupName, string accountName, string firewallRuleName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteFirewallRuleWithHttpMessagesAsync(resourceGroupName, accountName, firewallRuleName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified Data Lake Store firewall rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account from which to get the firewall
/// rule.
/// </param>
/// <param name='firewallRuleName'>
/// The name of the firewall rule to retrieve.
/// </param>
public static FirewallRule GetFirewallRule(this IAccountOperations operations, string resourceGroupName, string accountName, string firewallRuleName)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).GetFirewallRuleAsync(resourceGroupName, accountName, firewallRuleName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the specified Data Lake Store firewall rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account from which to get the firewall
/// rule.
/// </param>
/// <param name='firewallRuleName'>
/// The name of the firewall rule to retrieve.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<FirewallRule> GetFirewallRuleAsync(this IAccountOperations operations, string resourceGroupName, string accountName, string firewallRuleName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetFirewallRuleWithHttpMessagesAsync(resourceGroupName, accountName, firewallRuleName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store firewall rules within the specified Data Lake
/// Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account from which to get the firewall
/// rules.
/// </param>
public static IPage<FirewallRule> ListFirewallRules(this IAccountOperations operations, string resourceGroupName, string accountName)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListFirewallRulesAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store firewall rules within the specified Data Lake
/// Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account from which to get the firewall
/// rules.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<FirewallRule>> ListFirewallRulesAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListFirewallRulesWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates or updates the specified firewall rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to which to add the firewall rule.
/// </param>
/// <param name='name'>
/// The name of the firewall rule to create or update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the create firewall rule.
/// </param>
public static FirewallRule CreateOrUpdateFirewallRule(this IAccountOperations operations, string resourceGroupName, string accountName, string name, FirewallRule parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).CreateOrUpdateFirewallRuleAsync(resourceGroupName, accountName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates the specified firewall rule.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to which to add the firewall rule.
/// </param>
/// <param name='name'>
/// The name of the firewall rule to create or update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the create firewall rule.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<FirewallRule> CreateOrUpdateFirewallRuleAsync(this IAccountOperations operations, string resourceGroupName, string accountName, string name, FirewallRule parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateFirewallRuleWithHttpMessagesAsync(resourceGroupName, accountName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount Create(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).CreateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> CreateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount BeginCreate(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).BeginCreateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> BeginCreateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.BeginCreateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount Update(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).UpdateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> UpdateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.UpdateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount BeginUpdate(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).BeginUpdateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> BeginUpdateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.BeginUpdateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
public static void Delete(this IAccountOperations operations, string resourceGroupName, string accountName)
{
Task.Factory.StartNew(s => ((IAccountOperations)s).DeleteAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
public static void BeginDelete(this IAccountOperations operations, string resourceGroupName, string accountName)
{
Task.Factory.StartNew(s => ((IAccountOperations)s).BeginDeleteAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDeleteAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to retrieve.
/// </param>
public static DataLakeStoreAccount Get(this IAccountOperations operations, string resourceGroupName, string accountName)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).GetAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to retrieve.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> GetAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account(s).
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// A Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
/// <param name='search'>
/// A free form search. A free-text search expression to match for whether a
/// particular entry should be included in the feed, e.g.
/// Categories?$search=blue OR green. Optional.
/// </param>
/// <param name='format'>
/// The desired return format. Return the response in particular formatxii
/// without access to request headers for standard content-type negotiation
/// (e.g Orders?$format=json). Optional.
/// </param>
public static IPage<DataLakeStoreAccount> ListByResourceGroup(this IAccountOperations operations, string resourceGroupName, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?), string search = default(string), string format = default(string))
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListByResourceGroupAsync(resourceGroupName, odataQuery, select, count, search, format), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account(s).
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// A Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
/// <param name='search'>
/// A free form search. A free-text search expression to match for whether a
/// particular entry should be included in the feed, e.g.
/// Categories?$search=blue OR green. Optional.
/// </param>
/// <param name='format'>
/// The desired return format. Return the response in particular formatxii
/// without access to request headers for standard content-type negotiation
/// (e.g Orders?$format=json). Optional.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListByResourceGroupAsync(this IAccountOperations operations, string resourceGroupName, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?), string search = default(string), string format = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByResourceGroupWithHttpMessagesAsync(resourceGroupName, odataQuery, select, count, search, format, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// The Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
/// <param name='search'>
/// A free form search. A free-text search expression to match for whether a
/// particular entry should be included in the feed, e.g.
/// Categories?$search=blue OR green. Optional.
/// </param>
/// <param name='format'>
/// The desired return format. Return the response in particular formatxii
/// without access to request headers for standard content-type negotiation
/// (e.g Orders?$format=json). Optional.
/// </param>
public static IPage<DataLakeStoreAccount> List(this IAccountOperations operations, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?), string search = default(string), string format = default(string))
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListAsync(odataQuery, select, count, search, format), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// The Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
/// <param name='search'>
/// A free form search. A free-text search expression to match for whether a
/// particular entry should be included in the feed, e.g.
/// Categories?$search=blue OR green. Optional.
/// </param>
/// <param name='format'>
/// The desired return format. Return the response in particular formatxii
/// without access to request headers for standard content-type negotiation
/// (e.g Orders?$format=json). Optional.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListAsync(this IAccountOperations operations, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?), string search = default(string), string format = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListWithHttpMessagesAsync(odataQuery, select, count, search, format, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store firewall rules within the specified Data Lake
/// Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<FirewallRule> ListFirewallRulesNext(this IAccountOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListFirewallRulesNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store firewall rules within the specified Data Lake
/// Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<FirewallRule>> ListFirewallRulesNextAsync(this IAccountOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListFirewallRulesNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<DataLakeStoreAccount> ListByResourceGroupNext(this IAccountOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListByResourceGroupNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListByResourceGroupNextAsync(this IAccountOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByResourceGroupNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<DataLakeStoreAccount> ListNext(this IAccountOperations operations, string nextPageLink)
{
    // Synchronous wrapper: run the async call on the default scheduler
    // (sidestepping any ambient SynchronizationContext) and block on it.
    Task<IPage<DataLakeStoreAccount>> task = Task.Factory.StartNew(
        state => ((IAccountOperations)state).ListNextAsync(nextPageLink),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default).Unwrap();
    return task.GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListNextAsync(this IAccountOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP response wrapper after extracting the page body.
    using (var httpResponse = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
    {
        IPage<DataLakeStoreAccount> page = httpResponse.Body;
        return page;
    }
}
}
}
| |
// FeedPropertiesDialog.cs
//
// Copyright (c) 2008 Ethan Osten
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
using System;
using Gtk;
using Summa.Core;
using Summa.Data;
using Summa.Gui;
namespace Summa.Gui {
/// <summary>
/// A per-feed properties window with two tabs: a "General" tab for editing
/// feed metadata (name, author, subtitle, image) and a "Tags" tab for
/// toggling the feed's tag membership.
/// </summary>
public class FeedPropertiesDialog : Window {
    // The feed whose properties are shown and edited by this dialog.
    public ISource feed;
    private VBox vbox;
    private Notebook notebook;
    private ButtonBox bbox;
    private VBox general_vbox;
    // Entries on the General tab; their text is written back to the feed
    // when the dialog closes (see OnClose).
    private Entry entry_name;
    private Entry entry_author;
    private Entry entry_subtitle;
    private Entry entry_image;
    private Entry entry_url;
    private TreeView tv_tags;
    // Tag model: column 0 = bool (tag applied to this feed), column 1 = tag name.
    public ListStore store_tags;
    private CellRendererToggle cr_toggle;
    /// <summary>
    /// Builds the dialog for the given feed: window chrome, notebook with
    /// the General and Tags tabs, and a Close button.
    /// </summary>
    /// <param name="f">The feed to show properties for.</param>
    public FeedPropertiesDialog(ISource f) : base(WindowType.Toplevel) {
        feed = f;
        Title = "\""+feed.Name+"\" Properties";
        Icon = feed.Favicon;
        BorderWidth = 5;
        // Closing via the window manager saves edits, same as the Close button.
        DeleteEvent += OnClose;
        vbox = new VBox();
        vbox.Spacing = 6;
        Add(vbox);
        notebook = new Notebook();
        vbox.PackStart(notebook, false, false, 0);
        bbox = new HButtonBox();
        bbox.Layout = ButtonBoxStyle.End;
        vbox.PackStart(bbox, false, false, 0);
        AddGeneralTab();
        AddTagsTab();
        AddCloseButton();
    }
    /// <summary>
    /// Builds the "General" notebook page: an editable Properties frame
    /// (name/author/subtitle/image) and a disabled Source frame showing the URL.
    /// </summary>
    private void AddGeneralTab() {
        general_vbox = new VBox();
        general_vbox.BorderWidth = 5;
        general_vbox.Spacing = 10;
        Frame properties_frame = new Frame();
        Label properties_label = new Label();
        properties_label.Markup = ("<b>Properties</b>");
        properties_label.UseUnderline = true;
        properties_frame.LabelWidget = properties_label;
        properties_frame.LabelXalign = 0.0f;
        properties_frame.LabelYalign = 0.5f;
        properties_frame.Shadow = ShadowType.None;
        general_vbox.PackStart(properties_frame, false, false, 0);
        Alignment properties_alignment = new Alignment(0.0f, 0.0f, 1.0f, 1.0f);
        // NOTE(review): properties_frame was just constructed, so the null
        // check is vacuous and this always yields 5.
        properties_alignment.TopPadding = (uint)(properties_frame == null ? 0 : 5);
        properties_alignment.LeftPadding = 12;
        properties_frame.Add(properties_alignment);
        // 2 columns (label, entry) x 4 rows.
        Table properties_table = new Table(2, 4, false);
        properties_table.BorderWidth = 5;
        properties_table.ColumnSpacing = 6;
        properties_table.RowSpacing = 6;
        properties_alignment.Add(properties_table);
        Label name_label = new Label("Name: ");
        name_label.SetAlignment(0.0F, 0.5F);
        properties_table.Attach(name_label, 0, 1, 0, 1);
        entry_name = new Entry(feed.Name);
        properties_table.Attach(entry_name, 1, 2, 0, 1);
        Label author_label = new Label("Author: ");
        author_label.SetAlignment(0.0F, 0.5F);
        properties_table.Attach(author_label, 0, 1, 1, 2);
        entry_author = new Entry(feed.Author);
        properties_table.Attach(entry_author, 1, 2, 1, 2);
        Label subtitle_label = new Label("Subtitle: ");
        subtitle_label.SetAlignment(0.0F, 0.5F);
        properties_table.Attach(subtitle_label, 0, 1, 2, 3);
        entry_subtitle = new Entry(feed.Subtitle);
        properties_table.Attach(entry_subtitle, 1, 2, 2, 3);
        Label image_label = new Label("Image: ");
        image_label.SetAlignment(0.0F, 0.5F);
        properties_table.Attach(image_label, 0, 1, 3, 4);
        entry_image = new Entry(feed.Image);
        properties_table.Attach(entry_image, 1, 2, 3, 4);
        Frame source_frame = new Frame();
        // The Source frame is disabled: the URL is displayed read-only.
        source_frame.Sensitive = false;
        Label source_label = new Label();
        source_label.Markup = ("<b>Source</b>");
        source_label.UseUnderline = true;
        source_frame.LabelWidget = source_label;
        source_frame.LabelXalign = 0.0f;
        source_frame.LabelYalign = 0.5f;
        source_frame.Shadow = ShadowType.None;
        general_vbox.PackStart(source_frame, false, false, 0);
        Alignment source_alignment = new Alignment(0.0f, 0.0f, 1.0f, 1.0f);
        // NOTE(review): same vacuous null check as above — always 5.
        source_alignment.TopPadding = (uint)(source_frame == null ? 0 : 5);
        source_alignment.LeftPadding = 12;
        source_frame.Add(source_alignment);
        Table source_table = new Table(2, 4, false);
        source_table.BorderWidth = 5;
        source_table.ColumnSpacing = 6;
        source_table.RowSpacing = 6;
        source_alignment.Add(source_table);
        Label url_label = new Label("URL: ");
        url_label.SetAlignment(0.0F, 0.5F);
        source_table.Attach(url_label, 0, 1, 0, 1);
        entry_url = new Entry(feed.Url);
        source_table.Attach(entry_url, 1, 2, 0, 1);
        notebook.AppendPage(general_vbox, new Label("General"));
    }
    /// <summary>
    /// Builds the "Tags" notebook page: a checkbox/name list of all known
    /// tags (checked when this feed carries the tag) plus an Add button.
    /// </summary>
    private void AddTagsTab() {
        Table tags_table = new Table(1, 2, false);
        tags_table.BorderWidth = 5;
        tags_table.ColumnSpacing = 10;
        tags_table.RowSpacing = 10;
        ScrolledWindow tags_swin = new ScrolledWindow();
        tags_swin.ShadowType = ShadowType.In;
        tags_swin.SetPolicy(PolicyType.Automatic, PolicyType.Automatic);
        tags_table.Attach(tags_swin, 0, 1, 0, 1);
        cr_toggle = new CellRendererToggle();
        cr_toggle.Activatable = true;
        cr_toggle.Toggled += new ToggledHandler(OnCrToggleToggled);
        tv_tags = new TreeView();
        tags_swin.Add(tv_tags);
        store_tags = new ListStore(typeof(bool), typeof(string));
        tv_tags.Model = store_tags;
        // set up the columns for the view
        TreeViewColumn column_Read = new TreeViewColumn("Use", cr_toggle, "active", 0);
        tv_tags.AppendColumn(column_Read);
        TreeViewColumn column_Name = new TreeViewColumn("Title", new CellRendererText(), "text", 1);
        tv_tags.AppendColumn(column_Name);
        // Populate one row per known tag, pre-checking the ones this feed has.
        foreach ( string tag in Feeds.GetTags() ) {
            TreeIter iter = store_tags.Append();
            if ( feed.Tags.Contains(tag) ) {
                store_tags.SetValue(iter, 0, true);
            } else {
                store_tags.SetValue(iter, 0, false);
            }
            store_tags.SetValue(iter, 1, tag);
        }
        ButtonBox tags_bbox = new HButtonBox();
        tags_bbox.Layout = ButtonBoxStyle.End;
        tags_table.Attach(tags_bbox, 0, 1, 1, 2);
        Button add_button = new Button(Stock.Add);
        add_button.Clicked += new EventHandler(OnAddButtonClicked);
        tags_bbox.PackStart(add_button);
        notebook.AppendPage(tags_table, new Label("Tags"));
    }
    /// <summary>
    /// Opens the AddTagDialog so the user can create a new tag.
    /// </summary>
    private void OnAddButtonClicked(object obj, EventArgs args) {
        Window win = new AddTagDialog(this);
        win.ShowAll();
    }
    /// <summary>
    /// Flips the checkbox state of a tag row and immediately adds/removes
    /// the tag on the feed (tag changes are applied live, unlike the
    /// text entries which are saved on close).
    /// </summary>
    private void OnCrToggleToggled(object obj, ToggledArgs args) {
        TreeIter iter;
        store_tags.GetIter(out iter, new TreePath(args.Path));
        if ( (bool)store_tags.GetValue(iter, 0) ) {
            store_tags.SetValue(iter, 0, false);
            feed.RemoveTag((string)store_tags.GetValue(iter, 1));
        } else {
            store_tags.SetValue(iter, 0, true);
            feed.AppendTag((string)store_tags.GetValue(iter, 1));
        }
    }
    /// <summary>
    /// Adds the Close button to the bottom button box.
    /// </summary>
    private void AddCloseButton() {
        Button close_button = new Button(Stock.Close);
        close_button.Clicked += new EventHandler(OnClose);
        bbox.PackStart(close_button);
    }
    /// <summary>
    /// Writes the edited entry texts back to the feed and destroys the window.
    /// NOTE(review): entry_url is intentionally not persisted — the Source
    /// frame is disabled (Sensitive = false), so the URL is read-only here;
    /// confirm that this matches the intended design.
    /// </summary>
    private void OnClose(object obj, EventArgs args) {
        feed.Name = entry_name.Text;
        feed.Author = entry_author.Text;
        feed.Subtitle = entry_subtitle.Text;
        feed.Image = entry_image.Text;
        Destroy();
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Runtime.InteropServices;
using Interop.VixCOM;
using System.Reflection;
namespace Vestris.VMWareLib
{
/// <summary>
/// A VMWare snapshot.
/// </summary>
public class VMWareSnapshot : VMWareVixHandle<ISnapshot>
{
    // The virtual machine this snapshot belongs to; used for all VIX
    // operations (revert, remove, replay, clone).
    private IVM2 _vm = null;
    // Lazily-built, cached collection of direct child snapshots
    // (see the ChildSnapshots getter).
    private VMWareSnapshotCollection _childSnapshots = null;
    // Parent snapshot; null for root snapshots.
    private VMWareSnapshot _parent = null;
    /// <summary>
    /// A VMWare snapshot constructor.
    /// </summary>
    /// <param name="vm">Virtual machine.</param>
    /// <param name="snapshot">Snapshot.</param>
    /// <param name="parent">Parent snapshot.</param>
    public VMWareSnapshot(IVM2 vm, ISnapshot snapshot, VMWareSnapshot parent)
        : base(snapshot)
    {
        _vm = vm;
        _parent = parent;
    }
    /// <summary>
    /// Parent snapshot.
    /// </summary>
    /// <remarks>
    /// Root snapshots have a null parent.
    /// </remarks>
    public VMWareSnapshot Parent
    {
        get
        {
            return _parent;
        }
        set
        {
            _parent = value;
        }
    }
    /// <summary>
    /// Restores the virtual machine to the state when the specified snapshot was created.
    /// </summary>
    /// <param name="powerOnOptions">
    /// Any applicable VixVMPowerOpOptions. If the virtual machine was powered on when the snapshot was created,
    /// then this will determine how the virtual machine is powered back on. To prevent the virtual machine from being
    /// powered on regardless of the power state when the snapshot was created, use the
    /// VIX_VMPOWEROP_SUPPRESS_SNAPSHOT_POWERON flag. VIX_VMPOWEROP_SUPPRESS_SNAPSHOT_POWERON is mutually exclusive to
    /// all other VixVMPowerOpOptions.
    /// </param>
    /// <param name="timeoutInSeconds">Timeout in seconds.</param>
    /// <exception cref="Exception">Wraps any underlying VIX failure with the call parameters.</exception>
    public void RevertToSnapshot(int powerOnOptions, int timeoutInSeconds)
    {
        try
        {
            VMWareJobCallback callback = new VMWareJobCallback();
            // The VIX job is disposed after waiting for completion.
            using (VMWareJob job = new VMWareJob(_vm.RevertToSnapshot(
                _handle, powerOnOptions, null, callback), callback))
            {
                job.Wait(timeoutInSeconds);
            }
        }
        catch (Exception ex)
        {
            throw new Exception(
                string.Format("Failed to revert to snapshot: powerOnOptions={0} timeoutInSeconds={1}",
                    powerOnOptions, timeoutInSeconds), ex);
        }
    }
    /// <summary>
    /// Restores the virtual machine to the state when the specified snapshot was created,
    /// using the normal power-on option (VIX_VMPOWEROP_NORMAL).
    /// </summary>
    /// <param name="timeoutInSeconds">Timeout in seconds.</param>
    public void RevertToSnapshot(int timeoutInSeconds)
    {
        RevertToSnapshot(Constants.VIX_VMPOWEROP_NORMAL, timeoutInSeconds);
    }
    /// <summary>
    /// Restores the virtual machine to the state when the specified snapshot was created,
    /// using the configured default timeout.
    /// </summary>
    public void RevertToSnapshot()
    {
        RevertToSnapshot(VMWareInterop.Timeouts.RevertToSnapshotTimeout);
    }
    /// <summary>
    /// Remove/delete this snapshot using the configured default timeout.
    /// </summary>
    public void RemoveSnapshot()
    {
        RemoveSnapshot(VMWareInterop.Timeouts.RemoveSnapshotTimeout);
    }
    /// <summary>
    /// Remove/delete this snapshot.
    /// </summary>
    /// <remarks>
    /// If the snapshot is a member of a collection, the latter is updated with orphaned
    /// snapshots appended to the parent.
    /// </remarks>
    /// <param name="timeoutInSeconds">Timeout in seconds.</param>
    /// <exception cref="Exception">Wraps any underlying VIX failure with the timeout value.</exception>
    public void RemoveSnapshot(int timeoutInSeconds)
    {
        try
        {
            // resolve child snapshots that will move one level up
            // NOTE(review): reading ChildSnapshots here populates the lazy
            // cache before the snapshot handle is removed; the local itself
            // is not used afterwards.
            IEnumerable<VMWareSnapshot> childSnapshots = ChildSnapshots;
            // remove the snapshot
            VMWareJobCallback callback = new VMWareJobCallback();
            using (VMWareJob job = new VMWareJob(_vm.RemoveSnapshot(_handle, 0, callback), callback))
            {
                job.Wait(timeoutInSeconds);
            }
            // remove from parent
            if (_parent != null)
            {
                // child snapshots from this snapshot have now moved one level up
                _parent.ChildSnapshots.Remove(this);
            }
            // Release the underlying VIX handle.
            Close();
        }
        catch (Exception ex)
        {
            throw new Exception(
                string.Format("Failed to remove snapshot: timeoutInSeconds={0}",
                    timeoutInSeconds), ex);
        }
    }
    /// <summary>
    /// Child snapshots.
    /// </summary>
    /// <remarks>
    /// Built lazily on first access by querying VIX for the number of
    /// children and wrapping each child handle; the result is cached for
    /// the lifetime of this object (until Dispose).
    /// </remarks>
    public VMWareSnapshotCollection ChildSnapshots
    {
        get
        {
            if (_childSnapshots == null)
            {
                VMWareSnapshotCollection childSnapshots = new VMWareSnapshotCollection(_vm, this);
                int nChildSnapshots = 0;
                VMWareInterop.Check(_handle.GetNumChildren(out nChildSnapshots));
                for (int i = 0; i < nChildSnapshots; i++)
                {
                    ISnapshot childSnapshot = null;
                    VMWareInterop.Check(_handle.GetChild(i, out childSnapshot));
                    childSnapshots.Add(new VMWareSnapshot(_vm, childSnapshot, this));
                }
                _childSnapshots = childSnapshots;
            }
            return _childSnapshots;
        }
    }
    /// <summary>
    /// Display name of the snapshot.
    /// </summary>
    public string DisplayName
    {
        get
        {
            return GetProperty<string>(Constants.VIX_PROPERTY_SNAPSHOT_DISPLAYNAME);
        }
    }
    /// <summary>
    /// Description of the snapshot.
    /// </summary>
    public string Description
    {
        get
        {
            return GetProperty<string>(Constants.VIX_PROPERTY_SNAPSHOT_DESCRIPTION);
        }
    }
    /// <summary>
    /// Complete snapshot path, from root.
    /// </summary>
    /// <remarks>
    /// Walks up the parent chain recursively, joining display names with
    /// the platform path separator. The VIX parent lookup distinguishes
    /// three cases: a parent exists (recurse), no parent found (this is the
    /// top-most named snapshot), or invalid-argument (root snapshot, which
    /// contributes an empty path segment).
    /// </remarks>
    public string Path
    {
        get
        {
            ISnapshot parentSnapshot = null;
            ulong ulError = 0;
            switch ((ulError = _handle.GetParent(out parentSnapshot)))
            {
                case Constants.VIX_OK:
                    return parentSnapshot == null
                        ? DisplayName
                        : System.IO.Path.Combine(new VMWareSnapshot(_vm, parentSnapshot, null).Path, DisplayName);
                case Constants.VIX_E_SNAPSHOT_NOTFOUND: // no parent
                    return DisplayName;
                case Constants.VIX_E_INVALID_ARG: // root snapshot
                    return string.Empty;
                default:
                    throw new VMWareException(ulError);
            }
        }
    }
    /// <summary>
    /// The power state of this snapshot, an OR-ed set of VIX_POWERSTATE_* values.
    /// </summary>
    public int PowerState
    {
        get
        {
            return GetProperty<int>(Constants.VIX_PROPERTY_SNAPSHOT_POWERSTATE);
        }
    }
    /// <summary>
    /// Returns true if the snapshot is replayable.
    /// </summary>
    /// <remarks>
    /// NOTE(review): the property is looked up by its string name with a
    /// default of false — presumably because the numeric constant is not
    /// available in all VixCOM versions; confirm against the installed API.
    /// </remarks>
    public bool IsReplayable
    {
        get
        {
            return GetProperty<bool>("VIX_PROPERTY_SNAPSHOT_IS_REPLAYABLE", false);
        }
    }
    /// <summary>
    /// Replay a recording of a virtual machine, using the normal power-on
    /// option and the configured default replay timeout.
    /// </summary>
    [Obsolete("Deprecated in VixCOM API 1.11")]
    public void BeginReplay()
    {
        BeginReplay(Constants.VIX_VMPOWEROP_NORMAL,
            VMWareInterop.Timeouts.ReplayTimeout);
    }
    /// <summary>
    /// Replay a recording of a virtual machine.
    /// </summary>
    /// <param name="powerOnOptions">One of VIX_VMPOWEROP_NORMAL or VIX_VMPOWEROP_LAUNCH_GUI.</param>
    /// <param name="timeoutInSeconds">Timeout in seconds.</param>
    /// <exception cref="Exception">Wraps any underlying VIX failure with the call parameters.</exception>
    [Obsolete("Deprecated in VixCOM API 1.11")]
    public void BeginReplay(int powerOnOptions, int timeoutInSeconds)
    {
        try
        {
            VMWareJobCallback callback = new VMWareJobCallback();
            using (VMWareJob job = new VMWareJob(_vm.BeginReplay(
                _handle, powerOnOptions, null, callback), callback))
            {
                job.Wait(timeoutInSeconds);
            }
        }
        catch (Exception ex)
        {
            throw new Exception(
                string.Format("Failed to begin replay: powerOnOptions={0} timeoutInSeconds={1}",
                    powerOnOptions, timeoutInSeconds), ex);
        }
    }
    /// <summary>
    /// Stop replaying a virtual machine's recording, using the configured
    /// default replay timeout.
    /// </summary>
    [Obsolete("Deprecated in VixCOM API 1.11")]
    public void EndReplay()
    {
        EndReplay(VMWareInterop.Timeouts.ReplayTimeout);
    }
    /// <summary>
    /// Stop replaying a virtual machine's recording.
    /// </summary>
    /// <param name="timeoutInSeconds">Timeout in seconds.</param>
    /// <exception cref="Exception">Wraps any underlying VIX failure with the timeout value.</exception>
    [Obsolete("Deprecated in VixCOM API 1.11")]
    public void EndReplay(int timeoutInSeconds)
    {
        try
        {
            VMWareJobCallback callback = new VMWareJobCallback();
            // EndReplay acts on the VM, not on a snapshot handle: only
            // options (0 = none) and a property list are passed.
            using (VMWareJob job = new VMWareJob(_vm.EndReplay(
                0, null, callback), callback))
            {
                job.Wait(timeoutInSeconds);
            }
        }
        catch (Exception ex)
        {
            throw new Exception(
                string.Format("Failed to end replay: timeoutInSeconds={0}",
                    timeoutInSeconds), ex);
        }
    }
    /// <summary>
    /// Creates a copy of the virtual machine at the state at which this snapshot was taken,
    /// using the configured default clone timeout.
    /// </summary>
    /// <param name="cloneType">Virtual Machine clone type.</param>
    /// <param name="destConfigPathName">The path name of the virtual machine configuration file that will be created.</param>
    public void Clone(VMWareVirtualMachineCloneType cloneType, string destConfigPathName)
    {
        Clone(cloneType, destConfigPathName, VMWareInterop.Timeouts.CloneTimeout);
    }
    /// <summary>
    /// Creates a copy of the virtual machine at the state at which this snapshot was taken.
    /// </summary>
    /// <param name="cloneType">Virtual Machine clone type.</param>
    /// <param name="destConfigPathName">The path name of the virtual machine configuration file that will be created.</param>
    /// <param name="timeoutInSeconds">Timeout in seconds.</param>
    /// <exception cref="Exception">Wraps any underlying VIX failure with the call parameters.</exception>
    public void Clone(VMWareVirtualMachineCloneType cloneType, string destConfigPathName, int timeoutInSeconds)
    {
        try
        {
            VMWareJobCallback callback = new VMWareJobCallback();
            using (VMWareJob job = new VMWareJob(_vm.Clone(
                _handle, (int)cloneType, destConfigPathName, 0, null, callback),
                callback))
            {
                job.Wait(timeoutInSeconds);
            }
        }
        catch (Exception ex)
        {
            throw new Exception(
                string.Format("Failed to clone virtual machine snapshot: cloneType=\"{0}\" destConfigPathName=\"{1}\" timeoutInSeconds={2}",
                    Enum.GetName(cloneType.GetType(), cloneType), destConfigPathName, timeoutInSeconds), ex);
        }
    }
    /// <summary>
    /// Dispose the snapshot, releasing the cached child snapshot collection
    /// before disposing the underlying VIX handle.
    /// </summary>
    public override void Dispose()
    {
        if (_childSnapshots != null)
        {
            _childSnapshots.Dispose();
            _childSnapshots = null;
        }
        base.Dispose();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Runtime.InteropServices;
#if ES_BUILD_STANDALONE
using Environment = Microsoft.Diagnostics.Tracing.Internal.Environment;
#endif
#if !ES_BUILD_AGAINST_DOTNET_V35
using Contract = System.Diagnostics.Contracts.Contract;
#else
using Contract = Microsoft.Diagnostics.Contracts.Internal.Contract;
#endif
#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
[StructLayout(LayoutKind.Explicit, Size = 16)]
#if !CORECLR && !ES_BUILD_PN
[System.Security.Permissions.HostProtection(MayLeakOnAbort = true)]
#endif // !CORECLR && !ES_BUILD_PN
/*
EventDescriptor was public in the separate System.Diagnostics.Tracing assembly(pre NS2.0),
now the move to CoreLib marked them as private.
While they are technically private (it's a contract used between the library and the ILC toolchain),
we need them to be rooted and exported from shared library for the system to work.
For now I'm simply marking them as public again.A cleaner solution might be to use.rd.xml to
root them and modify shared library definition to force export them.
*/
#if ES_BUILD_PN
public
#else
internal
#endif
struct EventDescriptor
{
    #region private
    // Explicit layout: the 32-bit tracelogging id at offset 0 overlays the
    // manifest id (offset 0), version (offset 2) and channel (offset 3) fields.
    [FieldOffset(0)]
    private int m_traceloggingId;
    [FieldOffset(0)]
    private ushort m_id;
    [FieldOffset(2)]
    private byte m_version;
    [FieldOffset(3)]
    private byte m_channel;
    [FieldOffset(4)]
    private byte m_level;
    [FieldOffset(5)]
    private byte m_opcode;
    [FieldOffset(6)]
    private ushort m_task;
    [FieldOffset(8)]
    private long m_keywords;
    #endregion
    /// <summary>
    /// Initializes a descriptor for a TraceLogging-style event, where the
    /// 32-bit tracelogging id occupies the overlaid id/version/channel fields.
    /// </summary>
    public EventDescriptor(
        int traceloggingId,
        byte level,
        byte opcode,
        long keywords
        )
    {
        // Zero the overlaid fields first; assigning m_traceloggingId then
        // writes the full 32 bits they share.
        m_id = 0;
        m_version = 0;
        m_channel = 0;
        m_traceloggingId = traceloggingId;
        m_level = level;
        m_opcode = opcode;
        m_task = 0;
        m_keywords = keywords;
    }
    /// <summary>
    /// Initializes a descriptor for a manifest-based event.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">
    /// Thrown when <paramref name="id"/> or <paramref name="task"/> is
    /// negative or exceeds <see cref="ushort.MaxValue"/>.
    /// </exception>
    public EventDescriptor(
        int id,
        byte version,
        byte channel,
        byte level,
        byte opcode,
        int task,
        long keywords
        )
    {
        // Validate both ushort-ranged arguments up front (id first, matching
        // the original exception precedence).
        if (id < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(id), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (id > ushort.MaxValue)
        {
            throw new ArgumentOutOfRangeException(nameof(id), SR.Format(SR.ArgumentOutOfRange_NeedValidId, 1, ushort.MaxValue));
        }
        if (task < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(task), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (task > ushort.MaxValue)
        {
            throw new ArgumentOutOfRangeException(nameof(task), SR.Format(SR.ArgumentOutOfRange_NeedValidId, 1, ushort.MaxValue));
        }
        m_traceloggingId = 0;
        m_id = (ushort)id;
        m_version = version;
        m_channel = channel;
        m_level = level;
        m_opcode = opcode;
        m_task = (ushort)task;
        m_keywords = keywords;
    }
    /// <summary>The manifest event id (0 for TraceLogging events).</summary>
    public int EventId
    {
        get { return m_id; }
    }
    /// <summary>The event version.</summary>
    public byte Version
    {
        get { return m_version; }
    }
    /// <summary>The event channel.</summary>
    public byte Channel
    {
        get { return m_channel; }
    }
    /// <summary>The event verbosity level.</summary>
    public byte Level
    {
        get { return m_level; }
    }
    /// <summary>The event opcode.</summary>
    public byte Opcode
    {
        get { return m_opcode; }
    }
    /// <summary>The event task.</summary>
    public int Task
    {
        get { return m_task; }
    }
    /// <summary>The event keyword bitmask.</summary>
    public long Keywords
    {
        get { return m_keywords; }
    }
    public override bool Equals(object obj)
    {
        if (obj is EventDescriptor)
        {
            return Equals((EventDescriptor)obj);
        }
        return false;
    }
    public override int GetHashCode()
    {
        return m_id ^ m_version ^ m_channel ^ m_level ^ m_opcode ^ m_task ^ (int)m_keywords;
    }
    /// <summary>Field-wise equality over all descriptor fields.</summary>
    public bool Equals(EventDescriptor other)
    {
        return m_id == other.m_id
            && m_version == other.m_version
            && m_channel == other.m_channel
            && m_level == other.m_level
            && m_opcode == other.m_opcode
            && m_task == other.m_task
            && m_keywords == other.m_keywords;
    }
    public static bool operator ==(EventDescriptor event1, EventDescriptor event2)
    {
        return event1.Equals(event2);
    }
    public static bool operator !=(EventDescriptor event1, EventDescriptor event2)
    {
        return !event1.Equals(event2);
    }
}
}
| |
using Microsoft.Zelig.Runtime;
using System;
namespace Microsoft.Zelig.Test
{
/// <summary>
/// Test-result logger for the Zelig test harness. Counts pass/fail/skip/
/// known-failure results and forwards comments to BugCheck.Log; most of the
/// original SPOT XML logging is intentionally disabled (commented out).
/// </summary>
public abstract class Log
{
    #region Member Variables
    // Running result counters for the current test (reset via ResetCounts/Initialize).
    private static int m_passCount = 0;
    private static int m_failCount = 0;
    private static int m_skipCount = 0;
    private static int m_knownFailureCount = 0;
    private enum CommentType
    {
        Comment,
        Exception
    }
    #endregion
    #region Public
    /// <summary>
    /// This method is used to log any test comments.
    /// </summary>
    /// <param name="message">A string containing the test comments.</param>
    public static void Comment(string message)
    {
        BugCheck.Log( "\t" + message );
        //LocalComment(message, CommentType.Comment);
    }
    /// <summary>
    /// This method is used to log any test comments.
    /// </summary>
    /// <param name="message">A string containing the test comments.</param>
    public static void FilteredComment(string message)
    {
        //SpotTestLog.WriteRaw("\t<Comment type=\"0\">"
        //    + "<Text>" + FilterUnsafeXml(message) + "</Text>"
        //    + "<Date>" + GetDate() + "</Date>"
        //    + "<Time>" + DateTime.Now.TimeOfDay.ToString() + "</Time>"
        //    + "</Comment>");
    }
    /// <summary>
    /// This method is used to log any exceptions that may arise during the test.
    /// </summary>
    /// <param name="message">A string containing the exception message.</param>
    public static void Exception(string message)
    {
        //LocalComment(message, CommentType.Exception);
    }
    /// <summary>
    /// This method is used to log any exceptions that may arise during the test.
    /// </summary>
    /// <param name="message">A string containing the exception message.</param>
    /// <param name="ex">The exception whose message/stack would be logged.</param>
    public static void Exception(string message, Exception ex)
    {
        //LocalComment(message, CommentType.Exception);
        //LocalComment("Message: " + ex.Message, CommentType.Exception);
        //LocalComment("Stack: " + ex.StackTrace, CommentType.Exception);
        //if (ex.InnerException != null)
        //{
        //    LocalComment("InnerException Message: " + ex.InnerException.Message, CommentType.Exception);
        //    LocalComment("InnerException Stack: " + ex.InnerException.StackTrace, CommentType.Exception);
        //}
    }
    #endregion
    #region Internal
    /// <summary>Resets all result counters to zero.</summary>
    internal static void ResetCounts()
    {
        m_passCount = 0;
        m_failCount = 0;
        m_skipCount = 0;
        m_knownFailureCount = 0;
    }
    /// <summary>Resets counters at the start of the named test.</summary>
    internal static void Initialize(string test)
    {
        m_passCount = 0;
        m_failCount = 0;
        m_skipCount = 0;
        m_knownFailureCount = 0;
        //SpotTestLog.StartTestLog(test);
        //Log.StartMethod("Initialize");
    }
    /// <summary>Finishes the named test (result emission is disabled).</summary>
    internal static void CleanUp(string test)
    {
        //Log.EndMethod("CleanUp");
        //SpotTestLog.StartNode("Results");
        //LogPassCount();
        //LogFailCount();
        //LogSkipCount();
        //LogKnownFailureCount();
        //SpotTestLog.EndNode("Results");
        //SpotTestLog.EndTestLog(test);
        //System.Threading.Thread.Sleep(2000);
    }
    internal static void TestResult(string message, TestResult result)
    {
        //if (result == Zelig.Test.TestResult.Pass)
        //{
        //    SpotTestLog.StartResultNode("Pass");
        //    Pass( message );
        //}
        //else if (result == Zelig.Test.TestResult.Fail)
        //{
        //    SpotTestLog.StartResultNode("Fail");
        //    Fail( message );
        //}
        //else if (result == Zelig.Test.TestResult.Skip)
        //{
        //    SpotTestLog.StartResultNode("Skip");
        //    Skip( message );
        //}
        //else if (result == Zelig.Test.TestResult.KnownFailure)
        //{
        //    SpotTestLog.StartResultNode("KnownFailure");
        //    KnownFailure( message );
        //}
        //SpotTestLog.EndNode("TestMethodResult");
    }
    internal static void Pass(string message)
    {
        m_passCount++;
        WriteMessage(message);
    }
    internal static void Fail(string message)
    {
        m_failCount++;
        WriteMessage(message);
    }
    internal static void Skip(string message)
    {
        m_skipCount++;
        WriteMessage(message);
    }
    internal static void KnownFailure(string message)
    {
        m_knownFailureCount++;
        WriteMessage(message);
    }
    internal static void WriteMessage(string message)
    {
        //SpotTestLog.WriteRaw("\t\t" + "<Text><![CDATA[" + message + "]]></Text>" +
        //    "<Date>" + GetDate() + "</Date>" +
        //    "<Time>" + DateTime.Now.TimeOfDay.ToString() + "</Time>");
    }
    internal static void StartTestMethod(string name)
    {
        //SpotTestLog.StartTestMethod(name);
    }
    internal static void EndTestMethod()
    {
        //SpotTestLog.EndTestMethod();
    }
    internal static void StartMethod(string name)
    {
        //SpotTestLog.StartNode(name);
    }
    internal static void EndMethod(string name)
    {
        //SpotTestLog.EndNode(name);
    }
    #endregion
    #region Private
    private static void LocalComment(string message, CommentType ct)
    {
        //SpotTestLog.WriteRaw("\t<Comment type=\"" + ct + "\">"
        //    + "<Text><![CDATA[" + message + "]]></Text>"
        //    + "<Date>" + GetDate() + "</Date>"
        //    + "<Time>" + DateTime.Now.TimeOfDay.ToString() + "</Time>"
        //    + "</Comment>");
    }
    /// <summary>
    /// Escapes XML-unsafe characters in a message. BUGFIX: the escape
    /// strings had been entity-decoded at some point (each case appended the
    /// character unchanged, and the double-quote case was an unterminated
    /// string literal); the proper XML entities are restored here.
    /// NOTE(review): the comment below mentions skipping strings longer than
    /// 200 chars, but no such length check exists — confirm intent.
    /// </summary>
    private static string FilterUnsafeXml(string message)
    {
        // The filtering code is slow since it goes char by char and
        // can lead to time outs on very long strings. Hence do not
        // filter unsafe xml from strings longer than 200 chars.
        string filtered = "";
        // Iterate through each char and replace as we go
        for (int i = 0; i < message.Length; i++)
        {
            switch (message[i])
            {
                case '&':
                    filtered += "&amp;";
                    break;
                case '>':
                    filtered += "&gt;";
                    break;
                case '<':
                    filtered += "&lt;";
                    break;
                case '"':
                    filtered += "&quot;";
                    break;
                case '\'':
                    filtered += "&apos;";
                    break;
                default:
                    int val = (int)message[i];
                    // Numeric character reference for control and non-ASCII chars.
                    if ((val > 127) || (val < 32))
                    {
                        filtered += "&#" + val + ";";
                    }
                    else
                    {
                        filtered += message[i];
                    }
                    break;
            }
        }
        return filtered;
    }
    private static void LogPassCount()
    {
        //SpotTestLog.WriteRaw("\t<PassCount>" + "<Text>" + m_passCount.ToString() + "</Text>"
        //    + "<Date>" + GetDate() + "</Date>" + "<Time>" + DateTime.Now.TimeOfDay.ToString()
        //    + "</Time>" + "</PassCount>");
    }
    private static void LogFailCount()
    {
        //SpotTestLog.WriteRaw("\t<FailCount>" + "<Text>" + m_failCount.ToString() + "</Text>"
        //    + "<Date>" + GetDate() + "</Date>" + "<Time>" + DateTime.Now.TimeOfDay.ToString()
        //    + "</Time>" + "</FailCount>");
    }
    private static void LogSkipCount()
    {
        //SpotTestLog.WriteRaw("\t<SkipCount>" + "<Text>" + m_skipCount.ToString() + "</Text>"
        //    + "<Date>" + GetDate() + "</Date>" + "<Time>" + DateTime.Now.TimeOfDay.ToString()
        //    + "</Time>" + "</SkipCount>");
    }
    private static void LogKnownFailureCount()
    {
        //SpotTestLog.WriteRaw("\t<KnownFailureCount>" + "<Text>" + m_knownFailureCount.ToString() + "</Text>"
        //    + "<Date>" + GetDate() + "</Date>" + "<Time>" + DateTime.Now.TimeOfDay.ToString()
        //    + "</Time>" + "</KnownFailureCount>");
    }
    /// <summary>Formats today's date as M/D/YYYY.</summary>
    private static string GetDate()
    {
        return DateTime.Today.Month + "/" + DateTime.Today.Day + "/" + DateTime.Today.Year;
    }
    #endregion
}
/// <summary>
/// SPOT-style XML test logger; almost all output is disabled (commented out)
/// except StartResultNode, which still forwards to Log.WriteMessage.
/// </summary>
internal abstract class SpotTestLog
{
    #region Internal
    internal static void Write(string node, string data)
    {
        //string value = "<" + RemoveInvalidCharacters(node) + ">" +
        //    RemoveInvalidCharacters(data) + "</" + RemoveInvalidCharacters(node) + ">";
        //Log.WriteMessage(value);
    }
    internal static void StartTestLog(string test)
    {
        //string value = "<TestLog Test=\"" + test + "\">";
        //Log.WriteMessage(value);
    }
    internal static void EndTestLog(string test)
    {
        //string value = "</TestLog>";
        //Log.WriteMessage(value);
    }
    /// <summary>Emits the opening TestMethodResult element for a result.</summary>
    internal static void StartResultNode(string result)
    {
        Log.WriteMessage("\t<TestMethodResult Result=\"" + result + "\">");
    }
    internal static void EndResultNode(string test)
    {
        //string value = "</TestMethodResult>";
        //Log.WriteMessage(value);
        //Log.WriteMessage(string.Empty);
    }
    internal static void StartTestMethod(string name)
    {
        //string value = "<TestMethod name=\"" + name + "\">";
        //Log.WriteMessage(value);
    }
    internal static void EndTestMethod()
    {
        //string value = "</TestMethod>";
        //Log.WriteMessage(value);
        //Log.WriteMessage(string.Empty);
    }
    internal static void StartNode(string node)
    {
        //string value;
        //if (string.Equals(node.ToLower(), "initialize") ||
        //    string.Equals(node.ToLower(), "cleanup") ||
        //    string.Equals(node.ToLower(), "results"))
        //{
        //    if (string.Equals(node.ToLower(), "initialize") ||
        //        string.Equals(node.ToLower(), "results"))
        //    {
        //        Log.WriteMessage(string.Empty);
        //    }
        //    value = "<" + RemoveInvalidCharacters(node) + ">";
        //}
        //else
        //{
        //    value = "\t<" + RemoveInvalidCharacters(node) + ">";
        //}
        //Log.WriteMessage(value);
    }
    internal static void EndNode(string node)
    {
        //string value;
        //if (string.Equals(node.ToLower(), "initialize") ||
        //    string.Equals(node.ToLower(), "cleanup") ||
        //    string.Equals(node.ToLower(), "results"))
        //{
        //    value = "</" + RemoveInvalidCharacters(node) + ">";
        //}
        //else
        //{
        //    value = "\t</" + RemoveInvalidCharacters(node) + ">";
        //}
        //Log.WriteMessage(value);
        //if (string.Equals(node.ToLower(), "initialize") ||
        //    string.Equals(node.ToLower(), "results"))
        //{
        //    Log.WriteMessage(string.Empty);
        //}
    }
    internal static void WriteDate(string date)
    {
        //string value = "<Date>" + date + "</Date>";
        //Log.WriteMessage("\t\t" + value);
    }
    internal static void WriteTime(string time)
    {
        //string value = "<Time>" + time + "</Time>";
        //Log.WriteMessage("\t\t" + value);
    }
    internal static void WriteText(string text)
    {
        //string value = "<Text><![CDATA[" + text + "]]></Text>";
        //Log.WriteMessage("\t\t" + value);
    }
    internal static void WriteRaw(string text)
    {
        //Log.WriteMessage(text);
    }
    #endregion
    #region Private
    /// <summary>
    /// Strips characters that would break the XML log from the input.
    /// Single pass, equivalent to the original replace-with-space-then-strip
    /// approach: XML-unsafe characters AND every space are dropped.
    /// </summary>
    internal static string RemoveInvalidCharacters(string input)
    {
        string result = string.Empty;
        foreach (char c in input)
        {
            // Drop markup-significant characters and spaces alike.
            if (c == '<' || c == '>' || c == '&' || c == '\'' || c == '"' || c == ' ')
            {
                continue;
            }
            result += c;
        }
        return result;
    }
    /// <summary>
    /// Keeps only characters that are legal in XML 1.0 content
    /// (tab, LF, CR, and the valid non-surrogate character ranges).
    /// </summary>
    private static string StripInValidXmlChars(string s)
    {
        if (s.Length == 0) return string.Empty;
        string validXML = string.Empty;
        foreach (char current in s)
        {
            bool isLegal = (current == 0x9) ||
                           (current == 0xA) ||
                           (current == 0xD) ||
                           ((current >= 0x20) && (current <= 0xD7FF)) ||
                           ((current >= 0xE000) && (current <= 0xFFFD));
            if (isLegal)
            {
                validXML += current;
            }
        }
        return validXML;
    }
    #endregion
}
}
| |
/*
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.SimpleWorkflow.Model
{
/// <summary>
/// Container for the parameters to the ListOpenWorkflowExecutions operation.
/// <para>Returns a list of open workflow executions in the specified domain that meet the
/// filtering criteria. The results may be split into multiple pages; to retrieve subsequent
/// pages, repeat the call with the <c>nextPageToken</c> returned by the previous call.</para>
/// <para><b>NOTE:</b> This operation is eventually consistent. The results are best effort and
/// may not exactly reflect recent updates and changes.</para>
/// <para><b>Access Control</b>: IAM policies can restrict this action as follows: use a
/// <c>Resource</c> element with the domain name to limit the action to specific domains; use an
/// <c>Action</c> element to allow or deny permission to call this action; constrain the
/// <c>swf:tagFilter.tag</c>, <c>swf:typeFilter.name</c> and <c>swf:typeFilter.version</c> keys
/// with a <c>Condition</c> element. If the caller does not have sufficient permissions, or the
/// parameter values fall outside the specified constraints, the action fails by throwing
/// <c>OperationNotPermitted</c>. For details and example IAM policies, see Using IAM to Manage
/// Access to Amazon SWF Workflows.</para>
/// </summary>
/// <seealso cref="Amazon.SimpleWorkflow.AmazonSimpleWorkflow.ListOpenWorkflowExecutions"/>
public class ListOpenWorkflowExecutionsRequest : AmazonWebServiceRequest
{
    private string domain;
    private ExecutionTimeFilter startTimeFilter;
    private WorkflowTypeFilter typeFilter;
    private TagFilter tagFilter;
    private string nextPageToken;
    private int? maximumPageSize;
    private bool? reverseOrder;
    private WorkflowExecutionFilter executionFilter;

    /// <summary>
    /// The name of the domain that contains the workflow executions to list.
    /// Length constraint: 1 - 256 characters.
    /// </summary>
    public string Domain
    {
        get { return domain; }
        set { domain = value; }
    }

    /// <summary>
    /// Sets the Domain property (fluent style).
    /// </summary>
    /// <param name="domain">The value to set for the Domain property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithDomain(string domain)
    {
        Domain = domain;
        return this;
    }

    // True when the Domain property has been assigned.
    internal bool IsSetDomain()
    {
        return domain != null;
    }

    /// <summary>
    /// Workflow executions are included in the returned results based on whether their start
    /// times are within the range specified by this filter.
    /// </summary>
    public ExecutionTimeFilter StartTimeFilter
    {
        get { return startTimeFilter; }
        set { startTimeFilter = value; }
    }

    /// <summary>
    /// Sets the StartTimeFilter property (fluent style).
    /// </summary>
    /// <param name="startTimeFilter">The value to set for the StartTimeFilter property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithStartTimeFilter(ExecutionTimeFilter startTimeFilter)
    {
        StartTimeFilter = startTimeFilter;
        return this;
    }

    // True when the StartTimeFilter property has been assigned.
    internal bool IsSetStartTimeFilter()
    {
        return startTimeFilter != null;
    }

    /// <summary>
    /// If specified, only executions of the type specified in the filter are returned.
    /// <note><c>executionFilter</c>, <c>typeFilter</c> and <c>tagFilter</c> are mutually
    /// exclusive. You can specify at most one of these in a request.</note>
    /// </summary>
    public WorkflowTypeFilter TypeFilter
    {
        get { return typeFilter; }
        set { typeFilter = value; }
    }

    /// <summary>
    /// Sets the TypeFilter property (fluent style).
    /// </summary>
    /// <param name="typeFilter">The value to set for the TypeFilter property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithTypeFilter(WorkflowTypeFilter typeFilter)
    {
        TypeFilter = typeFilter;
        return this;
    }

    // True when the TypeFilter property has been assigned.
    internal bool IsSetTypeFilter()
    {
        return typeFilter != null;
    }

    /// <summary>
    /// If specified, only executions that have the matching tag are listed.
    /// <note><c>executionFilter</c>, <c>typeFilter</c> and <c>tagFilter</c> are mutually
    /// exclusive. You can specify at most one of these in a request.</note>
    /// </summary>
    public TagFilter TagFilter
    {
        get { return tagFilter; }
        set { tagFilter = value; }
    }

    /// <summary>
    /// Sets the TagFilter property (fluent style).
    /// </summary>
    /// <param name="tagFilter">The value to set for the TagFilter property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithTagFilter(TagFilter tagFilter)
    {
        TagFilter = tagFilter;
        return this;
    }

    // True when the TagFilter property has been assigned.
    internal bool IsSetTagFilter()
    {
        return tagFilter != null;
    }

    /// <summary>
    /// If on a previous call to this method a <c>NextPageToken</c> was returned, the results
    /// are being paginated. To get the next page of results, repeat the call with the returned
    /// token and all other arguments unchanged. Length constraint: 0 - 2048 characters.
    /// </summary>
    public string NextPageToken
    {
        get { return nextPageToken; }
        set { nextPageToken = value; }
    }

    /// <summary>
    /// Sets the NextPageToken property (fluent style).
    /// </summary>
    /// <param name="nextPageToken">The value to set for the NextPageToken property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithNextPageToken(string nextPageToken)
    {
        NextPageToken = nextPageToken;
        return this;
    }

    // True when the NextPageToken property has been assigned.
    internal bool IsSetNextPageToken()
    {
        return nextPageToken != null;
    }

    /// <summary>
    /// The maximum number of results returned in each page. The default is 100, but the caller
    /// can override this value to a page size <i>smaller</i> than the default. You cannot
    /// specify a page size greater than 100. Note that the number of executions may be less
    /// than the maximum page size, in which case the returned page will have fewer results than
    /// the maximumPageSize specified. Range constraint: 0 - 1000.
    /// </summary>
    public int MaximumPageSize
    {
        // Unset reads back as 0 (the int default), matching the original nullable coalesce.
        get { return maximumPageSize.GetValueOrDefault(); }
        set { maximumPageSize = value; }
    }

    /// <summary>
    /// Sets the MaximumPageSize property (fluent style).
    /// </summary>
    /// <param name="maximumPageSize">The value to set for the MaximumPageSize property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithMaximumPageSize(int maximumPageSize)
    {
        MaximumPageSize = maximumPageSize;
        return this;
    }

    // True when the MaximumPageSize property has been assigned.
    internal bool IsSetMaximumPageSize()
    {
        return maximumPageSize != null;
    }

    /// <summary>
    /// When set to <c>true</c>, returns the results in reverse order. By default the results
    /// are returned in descending order of the start time of the executions.
    /// </summary>
    public bool ReverseOrder
    {
        // Unset reads back as false (the bool default), matching the original nullable coalesce.
        get { return reverseOrder.GetValueOrDefault(); }
        set { reverseOrder = value; }
    }

    /// <summary>
    /// Sets the ReverseOrder property (fluent style).
    /// </summary>
    /// <param name="reverseOrder">The value to set for the ReverseOrder property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithReverseOrder(bool reverseOrder)
    {
        ReverseOrder = reverseOrder;
        return this;
    }

    // True when the ReverseOrder property has been assigned.
    internal bool IsSetReverseOrder()
    {
        return reverseOrder != null;
    }

    /// <summary>
    /// If specified, only workflow executions matching the workflow id specified in the filter
    /// are returned. <note><c>executionFilter</c>, <c>typeFilter</c> and <c>tagFilter</c> are
    /// mutually exclusive. You can specify at most one of these in a request.</note>
    /// </summary>
    public WorkflowExecutionFilter ExecutionFilter
    {
        get { return executionFilter; }
        set { executionFilter = value; }
    }

    /// <summary>
    /// Sets the ExecutionFilter property (fluent style).
    /// </summary>
    /// <param name="executionFilter">The value to set for the ExecutionFilter property.</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ListOpenWorkflowExecutionsRequest WithExecutionFilter(WorkflowExecutionFilter executionFilter)
    {
        ExecutionFilter = executionFilter;
        return this;
    }

    // True when the ExecutionFilter property has been assigned.
    internal bool IsSetExecutionFilter()
    {
        return executionFilter != null;
    }
}
}
| |
// ---------------------------------------------------------------------------
// <copyright file="SubscribeRequest.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ---------------------------------------------------------------------------
//-----------------------------------------------------------------------
// <summary>Defines the SubscribeRequest class.</summary>
//-----------------------------------------------------------------------
namespace Microsoft.Exchange.WebServices.Data
{
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// Represents an abstract Subscribe request.
/// </summary>
/// <typeparam name="TSubscription">The type of the subscription.</typeparam>
internal abstract class SubscribeRequest<TSubscription> : MultiResponseServiceRequest<SubscribeResponse<TSubscription>>, IJsonSerializable
    where TSubscription : SubscriptionBase
{
    /// <summary>
    /// Validate request.
    /// </summary>
    internal override void Validate()
    {
        base.Validate();
        EwsUtilities.ValidateParam(this.FolderIds, "FolderIds");
        EwsUtilities.ValidateParamCollection(this.EventTypes, "EventTypes");
        this.FolderIds.Validate(this.Service.RequestedServerVersion);

        // Check that caller isn't trying to subscribe to Status events.
        if (this.EventTypes.Any(eventType => eventType == EventType.Status))
        {
            throw new ServiceValidationException(Strings.CannotSubscribeToStatusEvents);
        }

        // If Watermark was specified, make sure it's not a blank string.
        if (!string.IsNullOrEmpty(this.Watermark))
        {
            EwsUtilities.ValidateNonBlankStringParam(this.Watermark, "Watermark");
        }

        this.EventTypes.ForEach(eventType => EwsUtilities.ValidateEnumVersionValue(eventType, this.Service.RequestedServerVersion));
    }

    /// <summary>
    /// Gets the name of the subscription XML element.
    /// </summary>
    /// <returns>XML element name.</returns>
    internal abstract string GetSubscriptionXmlElementName();

    /// <summary>
    /// Gets the expected response message count.
    /// </summary>
    /// <returns>Number of expected response messages.</returns>
    internal override int GetExpectedResponseMessageCount()
    {
        return 1;
    }

    /// <summary>
    /// Gets the name of the XML element.
    /// </summary>
    /// <returns>XML element name.</returns>
    internal override string GetXmlElementName()
    {
        return XmlElementNames.Subscribe;
    }

    /// <summary>
    /// Gets the name of the response XML element.
    /// </summary>
    /// <returns>XML element name.</returns>
    internal override string GetResponseXmlElementName()
    {
        return XmlElementNames.SubscribeResponse;
    }

    /// <summary>
    /// Gets the name of the response message XML element.
    /// </summary>
    /// <returns>XML element name.</returns>
    internal override string GetResponseMessageXmlElementName()
    {
        return XmlElementNames.SubscribeResponseMessage;
    }

    /// <summary>
    /// Internal method to write XML elements.
    /// </summary>
    /// <param name="writer">The writer.</param>
    internal abstract void InternalWriteElementsToXml(EwsServiceXmlWriter writer);

    /// <summary>
    /// Writes XML elements.
    /// </summary>
    /// <param name="writer">The writer.</param>
    internal override void WriteElementsToXml(EwsServiceXmlWriter writer)
    {
        writer.WriteStartElement(XmlNamespace.Messages, this.GetSubscriptionXmlElementName());

        // An empty folder id list means "subscribe to all folders".
        if (this.FolderIds.Count == 0)
        {
            writer.WriteAttributeValue(
                XmlAttributeNames.SubscribeToAllFolders,
                true);
        }

        this.FolderIds.WriteToXml(
            writer,
            XmlNamespace.Types,
            XmlElementNames.FolderIds);

        writer.WriteStartElement(XmlNamespace.Types, XmlElementNames.EventTypes);
        foreach (EventType eventType in this.EventTypes)
        {
            writer.WriteElementValue(
                XmlNamespace.Types,
                XmlElementNames.EventType,
                eventType);
        }
        writer.WriteEndElement();

        if (!string.IsNullOrEmpty(this.Watermark))
        {
            writer.WriteElementValue(
                XmlNamespace.Types,
                XmlElementNames.Watermark,
                this.Watermark);
        }

        this.InternalWriteElementsToXml(writer);

        writer.WriteEndElement();
    }

    /// <summary>
    /// Creates a JSON representation of this object.
    /// </summary>
    /// <param name="service">The service.</param>
    /// <returns>
    /// A Json value (either a JsonObject, an array of Json values, or a Json primitive)
    /// </returns>
    object IJsonSerializable.ToJson(ExchangeService service)
    {
        JsonObject jsonRequest = new JsonObject();
        JsonObject jsonSubscribeRequest = new JsonObject();

        jsonSubscribeRequest.AddTypeParameter(this.GetSubscriptionXmlElementName());
        jsonSubscribeRequest.Add(XmlElementNames.EventTypes, this.EventTypes.ToArray());

        // Mirrors WriteElementsToXml: an empty folder id list means "subscribe to all folders".
        if (this.FolderIds.Count > 0)
        {
            jsonSubscribeRequest.Add(XmlElementNames.FolderIds, this.FolderIds.InternalToJson(service));
        }
        else
        {
            jsonSubscribeRequest.Add(XmlAttributeNames.SubscribeToAllFolders, true);
        }

        if (!string.IsNullOrEmpty(this.Watermark))
        {
            jsonSubscribeRequest.Add(XmlElementNames.Watermark, this.Watermark);
        }

        this.AddJsonProperties(jsonSubscribeRequest, service);

        jsonRequest.Add(XmlElementNames.SubscriptionRequest, jsonSubscribeRequest);
        return jsonRequest;
    }

    /// <summary>
    /// Adds the json properties.
    /// </summary>
    /// <param name="jsonSubscribeRequest">The json subscribe request.</param>
    /// <param name="service">The service.</param>
    internal abstract void AddJsonProperties(JsonObject jsonSubscribeRequest, ExchangeService service);

    /// <summary>
    /// Initializes a new instance of the <see cref="SubscribeRequest{TSubscription}"/> class.
    /// </summary>
    /// <param name="service">The service.</param>
    internal SubscribeRequest(ExchangeService service)
        : base(service, ServiceErrorHandling.ThrowOnError)
    {
        this.FolderIds = new FolderIdWrapperList();
        this.EventTypes = new List<EventType>();
    }

    /// <summary>
    /// Gets the folder ids.
    /// </summary>
    public FolderIdWrapperList FolderIds
    {
        get; private set;
    }

    /// <summary>
    /// Gets the event types.
    /// </summary>
    public List<EventType> EventTypes
    {
        get; private set;
    }

    /// <summary>
    /// Gets or sets the watermark.
    /// </summary>
    public string Watermark
    {
        get; set;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using Xunit;
namespace System.Linq.Expressions.Tests
{
// Tests for Expression.Equal / Expression.NotEqual over all primitive, enum,
// nullable and reference element types, run against both the interpreter and
// the compiling LambdaExpression back-ends.
public static class BinaryEqualTests
{
    // Yields { values[], useInterpreter } pairs: one representative value set per
    // element type (including boundary values like MinValue/MaxValue/NaN and, for
    // nullable variants, null), each emitted once per compilation back-end.
    public static IEnumerable<object[]> TestData()
    {
        foreach (bool useInterpreter in new bool[] { true, false })
        {
            yield return new object[] { new bool[] { true, false }, useInterpreter };
            yield return new object[] { new byte[] { 0, 1, byte.MaxValue }, useInterpreter };
            yield return new object[] { new char[] { '\0', '\b', 'A', '\uffff' }, useInterpreter };
            yield return new object[] { new decimal[] { decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue }, useInterpreter };
            yield return new object[] { new double[] { 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN }, useInterpreter };
            yield return new object[] { new float[] { 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN }, useInterpreter };
            yield return new object[] { new int[] { 0, 1, -1, int.MinValue, int.MaxValue }, useInterpreter };
            yield return new object[] { new long[] { 0, 1, -1, long.MinValue, long.MaxValue }, useInterpreter };
            yield return new object[] { new sbyte[] { 0, 1, -1, sbyte.MinValue, sbyte.MaxValue }, useInterpreter };
            yield return new object[] { new short[] { 0, 1, -1, short.MinValue, short.MaxValue }, useInterpreter };
            yield return new object[] { new uint[] { 0, 1, uint.MaxValue }, useInterpreter };
            yield return new object[] { new ulong[] { 0, 1, ulong.MaxValue }, useInterpreter };
            yield return new object[] { new ushort[] { 0, 1, ushort.MaxValue }, useInterpreter };
            yield return new object[] { new TestClass[] { new TestClass(), new TestClass() }, useInterpreter };
            yield return new object[] { new TestEnum[] { new TestEnum(), new TestEnum() }, useInterpreter };
            yield return new object[] { new E[] {E.A, E.B, (E)int.MinValue}, useInterpreter };
            yield return new object[] { new bool?[] { null, true, false }, useInterpreter };
            yield return new object[] { new byte?[] { null, 0, 1, byte.MaxValue }, useInterpreter };
            yield return new object[] { new char?[] { null, '\0', '\b', 'A', '\uffff' }, useInterpreter };
            yield return new object[] { new decimal?[] { null, decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue }, useInterpreter };
            yield return new object[] { new double?[] { null, 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN }, useInterpreter };
            yield return new object[] { new float?[] { null, 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN }, useInterpreter };
            yield return new object[] { new int?[] { null, 0, 1, -1, int.MinValue, int.MaxValue }, useInterpreter };
            yield return new object[] { new long?[] { null, 0, 1, -1, long.MinValue, long.MaxValue }, useInterpreter };
            yield return new object[] { new sbyte?[] { null, 0, 1, -1, sbyte.MinValue, sbyte.MaxValue }, useInterpreter };
            yield return new object[] { new short?[] { null, 0, 1, -1, short.MinValue, short.MaxValue }, useInterpreter };
            yield return new object[] { new uint?[] { null, 0, 1, uint.MaxValue }, useInterpreter };
            yield return new object[] { new ulong?[] { null, 0, 1, ulong.MaxValue }, useInterpreter };
            yield return new object[] { new ushort?[] { null, 0, 1, ushort.MaxValue }, useInterpreter };
            yield return new object[] { new E?[] {null, E.A, E.B, (E)int.MaxValue, (E)int.MinValue}, useInterpreter };
        }
    }

    // Cross-product check: Equal(a, b) must agree with the reference equality
    // semantics (GeneralBinaryTests.CustomEquals) for every ordered pair.
    [Theory]
    [MemberData(nameof(TestData))]
    public static void Equal(Array array, bool useInterpreter)
    {
        Type type = array.GetType().GetElementType();
        for (int i = 0; i < array.Length; i++)
        {
            for (int j = 0; j < array.Length; j++)
            {
                object a = array.GetValue(i);
                object b = array.GetValue(j);
                BinaryExpression equal = Expression.Equal(Expression.Constant(a, type), Expression.Constant(b, type));
                GeneralBinaryTests.CompileBinaryExpression(equal, useInterpreter, GeneralBinaryTests.CustomEquals(a, b));
            }
        }
    }

    // Cross-product check: NotEqual(a, b) must be the negation of the reference
    // equality semantics for every ordered pair.
    [Theory]
    [MemberData(nameof(TestData))]
    public static void NotEqual(Array array, bool useInterpreter)
    {
        Type type = array.GetType().GetElementType();
        for (int i = 0; i < array.Length; i++)
        {
            for (int j = 0; j < array.Length; j++)
            {
                object a = array.GetValue(i);
                object b = array.GetValue(j);
                BinaryExpression equal = Expression.NotEqual(Expression.Constant(a, type), Expression.Constant(b, type));
                GeneralBinaryTests.CompileBinaryExpression(equal, useInterpreter, !GeneralBinaryTests.CustomEquals(a, b));
            }
        }
    }

    // A null Constant and a Default expression of type string must compare equal
    // to each other and unequal to a non-null constant.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void Equal_Constant_DefaultString(bool useInterpreter)
    {
        var array = new Expression[] { Expression.Constant("bar", typeof(string)), Expression.Constant(null, typeof(string)), Expression.Default(typeof(string)) };
        var isNull = new bool[] { false, true, true };
        for (int i = 0; i < array.Length; i++)
        {
            for (int j = 0; j < array.Length; j++)
            {
                BinaryExpression equal = Expression.Equal(array[i], array[j]);
                GeneralBinaryTests.CompileBinaryExpression(equal, useInterpreter, isNull[i] == isNull[j]);
            }
        }
    }

    // Same as above for a nullable value type (int?).
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void Equal_Constant_DefaultNullable(bool useInterpreter)
    {
        var array = new Expression[] { Expression.Constant(42, typeof(int?)), Expression.Constant(null, typeof(int?)), Expression.Default(typeof(int?)) };
        var isNull = new bool[] { false, true, true };
        for (int i = 0; i < array.Length; i++)
        {
            for (int j = 0; j < array.Length; j++)
            {
                BinaryExpression equal = Expression.Equal(array[i], array[j]);
                GeneralBinaryTests.CompileBinaryExpression(equal, useInterpreter, isNull[i] == isNull[j]);
            }
        }
    }

    // Equal nodes are not reducible: Reduce() is the identity and
    // ReduceAndCheck() must throw.
    [Fact]
    public static void Equal_CannotReduce()
    {
        Expression exp = Expression.Equal(Expression.Constant(0), Expression.Constant(0));
        Assert.False(exp.CanReduce);
        Assert.Same(exp, exp.Reduce());
        Assert.Throws<ArgumentException>(null, () => exp.ReduceAndCheck());
    }

    // NotEqual nodes are likewise not reducible.
    [Fact]
    public static void NotEqual_CannotReduce()
    {
        Expression exp = Expression.NotEqual(Expression.Constant(0), Expression.Constant(0));
        Assert.False(exp.CanReduce);
        Assert.Same(exp, exp.Reduce());
        Assert.Throws<ArgumentException>(null, () => exp.ReduceAndCheck());
    }

    // Null operand validation: the factory methods reject a null left operand.
    [Fact]
    public static void ThrowsOnLeftNull()
    {
        Assert.Throws<ArgumentNullException>("left", () => Expression.Equal(null, Expression.Constant("")));
        Assert.Throws<ArgumentNullException>("left", () => Expression.NotEqual(null, Expression.Constant("")));
    }

    // Null operand validation: the factory methods reject a null right operand.
    [Fact]
    public static void ThrowsOnRightNull()
    {
        Assert.Throws<ArgumentNullException>("right", () => Expression.Equal(Expression.Constant(""), null));
        Assert.Throws<ArgumentNullException>("right", () => Expression.NotEqual(Expression.Constant(""), null));
    }

    // Helper exposing a write-only static property, used to build an unreadable
    // operand expression below.
    private static class Unreadable<T>
    {
        public static T WriteOnly
        {
            set { }
        }
    }

    // Operands must be readable: a write-only property on the left is rejected.
    [Fact]
    public static void ThrowsOnLeftUnreadable()
    {
        Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
        Assert.Throws<ArgumentException>("left", () => Expression.Equal(value, Expression.Constant(1)));
        Assert.Throws<ArgumentException>("left", () => Expression.NotEqual(value, Expression.Constant(1)));
    }

    // Operands must be readable: a write-only property on the right is rejected.
    [Fact]
    public static void ThrowsOnRightUnreadable()
    {
        Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
        Assert.Throws<ArgumentException>("right", () => Expression.Equal(Expression.Constant(1), value));
        Assert.Throws<ArgumentException>("right", () => Expression.NotEqual(Expression.Constant(1), value));
    }

    // Update() must produce a new node with the replaced operand while leaving
    // the original node's behavior unchanged.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void Update_ReferenceEquals(bool useInterpreter)
    {
        TestClass testClass1 = new TestClass();
        TestClass testClass2 = new TestClass();
        BinaryExpression equal = Expression.Equal(Expression.Constant(testClass1), Expression.Constant(testClass2));
        BinaryExpression newEqual = equal.Update(Expression.Constant(testClass1), equal.Conversion, Expression.Constant(testClass1));
        // Original BinaryExpression should be unchanged
        GeneralBinaryTests.CompileBinaryExpression(equal, useInterpreter, false);
        GeneralBinaryTests.CompileBinaryExpression(newEqual, useInterpreter, true);
    }

    // As above, for NotEqual.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void Update_ReferenceNotEquals(bool useInterpreter)
    {
        TestClass testClass1 = new TestClass();
        TestClass testClass2 = new TestClass();
        BinaryExpression equal = Expression.NotEqual(Expression.Constant(testClass1), Expression.Constant(testClass2));
        BinaryExpression newEqual = equal.Update(Expression.Constant(testClass1), equal.Conversion, Expression.Constant(testClass1));
        // Original BinaryExpression should be unchanged
        GeneralBinaryTests.CompileBinaryExpression(equal, useInterpreter, true);
        GeneralBinaryTests.CompileBinaryExpression(newEqual, useInterpreter, false);
    }

    // ToString() renders the infix form "(a == b)".
    [Fact]
    public static void Equal_ToString()
    {
        BinaryExpression e = Expression.Equal(Expression.Parameter(typeof(int), "a"), Expression.Parameter(typeof(int), "b"));
        Assert.Equal("(a == b)", e.ToString());
    }

    // ToString() renders the infix form "(a != b)".
    [Fact]
    public static void NotEqual_ToString()
    {
        BinaryExpression e = Expression.NotEqual(Expression.Parameter(typeof(int), "a"), Expression.Parameter(typeof(int), "b"));
        Assert.Equal("(a != b)", e.ToString());
    }

    // Value types with no user-defined equality operators cannot be compared.
    [Fact]
    public static void CannotPreformEqualityOnValueTypesWithoutOperators()
    {
        var uvConst = Expression.Constant(new UselessValue());
        Assert.Throws<InvalidOperationException>(() => Expression.Equal(uvConst, uvConst));
        Assert.Throws<InvalidOperationException>(() => Expression.NotEqual(uvConst, uvConst));
    }

    // Lifted-to-null comparison against a literal null constant is allowed even
    // without user-defined operators, and a non-null value never equals null.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CanPerformEqualityOnNullableWithoutOperatorsToConstantNull(bool useInterpreter)
    {
        var nullConst = Expression.Constant(null, typeof(UselessValue?));
        var uvConst = Expression.Constant(new UselessValue(), typeof(UselessValue?));
        var exp = Expression.Lambda<Func<bool>>(Expression.Equal(nullConst, uvConst));
        var func = exp.Compile(useInterpreter);
        Assert.False(func());
        exp = Expression.Lambda<Func<bool>>(Expression.Equal(uvConst, nullConst));
        func = exp.Compile(useInterpreter);
        Assert.False(func());
    }

    // As above for NotEqual: a non-null value is always unequal to null.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CanPerformInequalityOnNullableWithoutOperatorsToConstantNull(bool useInterpreter)
    {
        var nullConst = Expression.Constant(null, typeof(UselessValue?));
        var uvConst = Expression.Constant(new UselessValue(), typeof(UselessValue?));
        var exp = Expression.Lambda<Func<bool>>(Expression.NotEqual(nullConst, uvConst));
        var func = exp.Compile(useInterpreter);
        Assert.True(func());
        exp = Expression.Lambda<Func<bool>>(Expression.NotEqual(uvConst, nullConst));
        func = exp.Compile(useInterpreter);
        Assert.True(func());
    }

    // The null-comparison shortcut requires at least one operand to be a
    // non-null expression; null vs. null with no operators is still rejected.
    [Fact]
    public static void CannotDoNullComparisonWithoutOperatorIfBothNullConstants()
    {
        var typedNullConst = Expression.Constant(null, typeof(UselessValue?));
        Assert.Throws<InvalidOperationException>(() => Expression.Equal(typedNullConst, typedNullConst));
    }

    // DBNull having a different type code to other objects could result in bugs surrounding it if
    // that type code got incorrectly used.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CanCompareDBNullEqual(bool useInterpreter)
    {
        var x = Expression.Parameter(typeof(DBNull));
        var y = Expression.Parameter(typeof(DBNull));
        var lambda = Expression.Lambda<Func<DBNull, DBNull, bool>>(Expression.Equal(x, y), x, y);
        var func = lambda.Compile(useInterpreter);
        foreach(var xVal in new[] { DBNull.Value, null})
            foreach(var yVal in new[] { DBNull.Value, null})
                Assert.Equal(xVal == yVal, func(xVal, yVal));
    }

    // As above, for NotEqual.
    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CanCompareDBNullNotEqual(bool useInterpreter)
    {
        var x = Expression.Parameter(typeof(DBNull));
        var y = Expression.Parameter(typeof(DBNull));
        var lambda = Expression.Lambda<Func<DBNull, DBNull, bool>>(Expression.NotEqual(x, y), x, y);
        var func = lambda.Compile(useInterpreter);
        foreach(var xVal in new[] { DBNull.Value, null})
            foreach(var yVal in new[] { DBNull.Value, null})
                Assert.Equal(xVal != yVal, func(xVal, yVal));
    }

    // A struct with no members and no operators, used to exercise the
    // "no equality operator defined" paths above.
    private struct UselessValue
    {
    }

    // Reference type with default (reference) equality.
    public class TestClass { }

    // Empty enum; default(TestEnum) is its only constructible value.
    public enum TestEnum { }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using DSharpPlus.Net;
namespace DSharpPlus.Entities
{
/// <summary>
/// Constructs embeds.
/// </summary>
public sealed class DiscordEmbedBuilder
{
/// <summary>
/// Gets or sets the embed's title.
/// </summary>
public string Title
{
    get => this._title;
    set
    {
        // Discord rejects embed titles longer than 256 characters.
        if (value?.Length > 256)
            throw new ArgumentException("Title length cannot exceed 256 characters.", nameof(value));
        this._title = value;
    }
}
private string _title;
/// <summary>
/// Gets or sets the embed's description.
/// </summary>
public string Description
{
    get => this._description;
    set
    {
        // Discord rejects embed descriptions longer than 2048 characters.
        if (value?.Length > 2048)
            throw new ArgumentException("Description length cannot exceed 2048 characters.", nameof(value));
        this._description = value;
    }
}
private string _description;
/// <summary>
/// Gets or sets the url for the embed's title.
/// </summary>
public string Url
{
    get => this._url?.ToString();
    set
    {
        // A null or empty string clears the url.
        if (string.IsNullOrEmpty(value))
            this._url = null;
        else
            this._url = new Uri(value);
    }
}
private Uri _url;
/// <summary>
/// Gets or sets the embed's color.
/// </summary>
public Optional<DiscordColor> Color { get; set; }
/// <summary>
/// Gets or sets the embed's timestamp.
/// </summary>
public DateTimeOffset? Timestamp { get; set; }
/// <summary>
/// Gets or sets the embed's image url. Setting null or an empty string clears the image.
/// </summary>
public string ImageUrl
{
    get => this._imageUri?.ToString();
    set => this._imageUri = string.IsNullOrEmpty(value) ? null : new DiscordUri(value);
}
// Backing store for ImageUrl; kept as a DiscordUri so the raw value round-trips.
private DiscordUri _imageUri;
/// <summary>
/// Gets or sets the embed's author.
/// </summary>
public EmbedAuthor Author { get; set; }
/// <summary>
/// Gets or sets the embed's footer.
/// </summary>
public EmbedFooter Footer { get; set; }
/// <summary>
/// Gets or sets the embed's thumbnail.
/// </summary>
public EmbedThumbnail Thumbnail { get; set; }
/// <summary>
/// Gets the embed's fields. Read-only view over the internal field list.
/// </summary>
public IReadOnlyList<DiscordEmbedField> Fields { get; }
// Mutable backing list for Fields; the constructor wraps it in a read-only view.
private readonly List<DiscordEmbedField> _fields = new List<DiscordEmbedField>();
/// <summary>
/// Constructs a new empty embed builder.
/// </summary>
public DiscordEmbedBuilder()
{
    // Expose the mutable backing list through a read-only wrapper;
    // mutations to _fields remain visible through Fields.
    this.Fields = this._fields.AsReadOnly();
}
/// <summary>
/// Constructs a new embed builder using another embed as prototype.
/// </summary>
/// <param name="original">Embed to use as prototype.</param>
public DiscordEmbedBuilder(DiscordEmbed original)
    : this()
{
    this.Title = original.Title;
    this.Description = original.Description;
    this.Url = original.Url?.ToString();
    this.Color = original.Color;
    this.Timestamp = original.Timestamp;

    var thumbnail = original.Thumbnail;
    if (thumbnail != null)
    {
        this.Thumbnail = new EmbedThumbnail
        {
            Url = thumbnail.Url?.ToString(),
            Height = thumbnail.Height,
            Width = thumbnail.Width
        };
    }

    var author = original.Author;
    if (author != null)
    {
        this.Author = new EmbedAuthor
        {
            IconUrl = author.IconUrl?.ToString(),
            Name = author.Name,
            Url = author.Url?.ToString()
        };
    }

    var footer = original.Footer;
    if (footer != null)
    {
        this.Footer = new EmbedFooter
        {
            IconUrl = footer.IconUrl?.ToString(),
            Text = footer.Text
        };
    }

    if (original.Fields?.Any() == true)
        this._fields.AddRange(original.Fields);

    // Embeds allow at most 25 fields; drop any excess from the end.
    if (this._fields.Count > 25)
        this._fields.RemoveRange(25, this._fields.Count - 25);
}
/// <summary>
/// Sets the embed's title and returns this builder for chaining.
/// </summary>
/// <param name="title">Title to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithTitle(string title)
{
    // Route through the property so its length validation applies.
    Title = title;
    return this;
}
/// <summary>
/// Sets the embed's description and returns this builder for chaining.
/// </summary>
/// <param name="description">Description to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithDescription(string description)
{
    // Route through the property so its length validation applies.
    Description = description;
    return this;
}
/// <summary>
/// Sets the embed's title url.
/// </summary>
/// <param name="url">Title url to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithUrl(string url)
{
this.Url = url;
return this;
}
/// <summary>
/// Sets the embed's title url.
/// </summary>
/// <param name="url">Title url to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithUrl(Uri url)
{
this._url = url;
return this;
}
/// <summary>
/// Sets the embed's color.
/// </summary>
/// <param name="color">Embed color to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithColor(DiscordColor color)
{
this.Color = color;
return this;
}
/// <summary>
/// Sets the embed's timestamp.
/// </summary>
/// <param name="timestamp">Timestamp to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithTimestamp(DateTimeOffset? timestamp)
{
this.Timestamp = timestamp;
return this;
}
/// <summary>
/// Sets the embed's timestamp.
/// </summary>
/// <param name="timestamp">Timestamp to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithTimestamp(DateTime? timestamp)
{
if (timestamp == null)
this.Timestamp = null;
else
this.Timestamp = new DateTimeOffset(timestamp.Value);
return this;
}
/// <summary>
/// Sets the embed's timestamp based on a snowflake.
/// </summary>
/// <param name="snowflake">Snowflake to calculate timestamp from.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithTimestamp(ulong snowflake)
{
this.Timestamp = new DateTimeOffset(2015, 1, 1, 0, 0, 0, TimeSpan.Zero).AddMilliseconds(snowflake >> 22);
return this;
}
/// <summary>
/// Sets the embed's image url.
/// </summary>
/// <param name="url">Image url to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithImageUrl(string url)
{
this.ImageUrl = url;
return this;
}
/// <summary>
/// Sets the embed's image url.
/// </summary>
/// <param name="url">Image url to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithImageUrl(Uri url)
{
this._imageUri = new DiscordUri(url);
return this;
}
/// <summary>
/// Sets the embed's thumbnail.
/// </summary>
/// <param name="url">Thumbnail url to set.</param>
/// <param name="height">The height of the thumbnail to set.</param>
/// <param name="width">The width of the thumbnail to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithThumbnail(string url, int height = 0, int width = 0)
{
    var thumbnail = new EmbedThumbnail
    {
        Url = url,
        Height = height,
        Width = width
    };
    this.Thumbnail = thumbnail;
    return this;
}
/// <summary>
/// Sets the embed's thumbnail.
/// </summary>
/// <param name="url">Thumbnail url to set.</param>
/// <param name="height">The height of the thumbnail to set.</param>
/// <param name="width">The width of the thumbnail to set.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithThumbnail(Uri url, int height = 0, int width = 0)
{
this.Thumbnail = new EmbedThumbnail
{
// Assigns the internal uri field directly, bypassing the string-parsing Url setter.
_uri = new DiscordUri(url),
Height = height,
Width = width
};
return this;
}
/// <summary>
/// Sets the embed's author.
/// </summary>
/// <param name="name">Author's name.</param>
/// <param name="url">Author's url.</param>
/// <param name="iconUrl">Author icon's url.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithAuthor(string name = null, string url = null, string iconUrl = null)
{
    // When every value is absent, clear the author instead of storing an empty one.
    var allEmpty = string.IsNullOrEmpty(name)
        && string.IsNullOrEmpty(url)
        && string.IsNullOrEmpty(iconUrl);

    this.Author = allEmpty
        ? null
        : new EmbedAuthor
        {
            Name = name,
            Url = url,
            IconUrl = iconUrl
        };
    return this;
}
/// <summary>
/// Sets the embed's footer.
/// </summary>
/// <param name="text">Footer's text.</param>
/// <param name="iconUrl">Footer icon's url.</param>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder WithFooter(string text = null, string iconUrl = null)
{
    // Same 2048-character limit that EmbedFooter.Text enforces; checked here
    // so the exception points at this method's parameter.
    if (text?.Length > 2048)
        throw new ArgumentException("Footer text length cannot exceed 2048 characters.", nameof(text));

    // When both values are absent, clear the footer instead of storing an empty one.
    this.Footer = string.IsNullOrEmpty(text) && string.IsNullOrEmpty(iconUrl)
        ? null
        : new EmbedFooter
        {
            Text = text,
            IconUrl = iconUrl
        };
    return this;
}
/// <summary>
/// Adds a field to this embed.
/// </summary>
/// <param name="name">Name of the field to add.</param>
/// <param name="value">Value of the field to add.</param>
/// <param name="inline">Whether the field is to be inline or not.</param>
/// <returns>This embed builder.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="name"/> or <paramref name="value"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="name"/> or <paramref name="value"/> is empty, whitespace, or exceeds the length limit.</exception>
/// <exception cref="InvalidOperationException">Thrown when the embed already has 25 fields.</exception>
public DiscordEmbedBuilder AddField(string name, string value, bool inline = false)
{
    if (string.IsNullOrWhiteSpace(name))
    {
        if (name == null)
            throw new ArgumentNullException(nameof(name));
        throw new ArgumentException("Name cannot be empty or whitespace.", nameof(name));
    }
    if (string.IsNullOrWhiteSpace(value))
    {
        if (value == null)
            throw new ArgumentNullException(nameof(value));
        throw new ArgumentException("Value cannot be empty or whitespace.", nameof(value));
    }
    // Fix: pass the offending parameter name, consistent with the checks above.
    if (name.Length > 256)
        throw new ArgumentException("Embed field name length cannot exceed 256 characters.", nameof(name));
    if (value.Length > 1024)
        throw new ArgumentException("Embed field value length cannot exceed 1024 characters.", nameof(value));
    // Embeds are capped at 25 fields.
    if (this._fields.Count >= 25)
        throw new InvalidOperationException("Cannot add more than 25 fields.");
    this._fields.Add(new DiscordEmbedField
    {
        Inline = inline,
        Name = name,
        Value = value
    });
    return this;
}
/// <summary>
/// Removes a field of the specified index from this embed.
/// </summary>
/// <param name="index">Index of the field to remove.</param>
/// <returns>This embed builder.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="index"/> is outside the field list.</exception>
public DiscordEmbedBuilder RemoveFieldAt(int index)
{
this._fields.RemoveAt(index);
return this;
}
/// <summary>
/// Removes fields of the specified range from this embed.
/// </summary>
/// <param name="index">Index of the first field to remove.</param>
/// <param name="count">Number of fields to remove.</param>
/// <returns>This embed builder.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="index"/> or <paramref name="count"/> is negative.</exception>
/// <exception cref="ArgumentException">Thrown when the range does not denote a valid span of fields.</exception>
public DiscordEmbedBuilder RemoveFieldRange(int index, int count)
{
this._fields.RemoveRange(index, count);
return this;
}
/// <summary>
/// Removes all fields from this embed.
/// </summary>
/// <returns>This embed builder.</returns>
public DiscordEmbedBuilder ClearFields()
{
this._fields.Clear();
return this;
}
/// <summary>
/// Constructs a new embed from data supplied to this builder.
/// </summary>
/// <returns>New discord embed.</returns>
public DiscordEmbed Build()
{
// Copy scalar values; internal backing fields are used where the embed
// exposes them (url, color) so no re-parsing occurs.
var embed = new DiscordEmbed
{
Title = this._title,
Description = this._description,
Url = this._url,
_color = this.Color.IfPresent(e => e.Value),
Timestamp = this.Timestamp
};
// Map each optional sub-object only when it was set on the builder.
if (this.Footer != null)
embed.Footer = new DiscordEmbedFooter
{
Text = this.Footer.Text,
IconUrl = this.Footer._iconUri
};
if (this.Author != null)
embed.Author = new DiscordEmbedAuthor
{
Name = this.Author.Name,
Url = this.Author._uri,
IconUrl = this.Author._iconUri
};
if (this._imageUri != null)
embed.Image = new DiscordEmbedImage { Url = this._imageUri };
if (this.Thumbnail != null)
embed.Thumbnail = new DiscordEmbedThumbnail
{
Url = this.Thumbnail._uri,
Height = this.Thumbnail.Height,
Width = this.Thumbnail.Width
};
embed.Fields = new ReadOnlyCollection<DiscordEmbedField>(new List<DiscordEmbedField>(this._fields)); // copy the list, don't wrap it, prevents mutation
return embed;
}
/// <summary>
/// Implicitly converts this builder to an embed.
/// </summary>
/// <param name="builder">Builder to convert.</param>
public static implicit operator DiscordEmbed(DiscordEmbedBuilder builder)
=> builder?.Build();
/// <summary>
/// Represents an embed author.
/// </summary>
public class EmbedAuthor
{
    private string _name;
    internal Uri _uri;
    internal DiscordUri _iconUri;

    /// <summary>
    /// Gets or sets the name of the author. Limited to 256 characters.
    /// </summary>
    public string Name
    {
        get { return this._name; }
        set
        {
            if (value?.Length > 256)
                throw new ArgumentException("Author name length cannot exceed 256 characters.", nameof(value));
            this._name = value;
        }
    }

    /// <summary>
    /// Gets or sets the Url to which the author's link leads.
    /// </summary>
    public string Url
    {
        get { return this._uri?.ToString(); }
        set { this._uri = string.IsNullOrEmpty(value) ? null : new Uri(value); }
    }

    /// <summary>
    /// Gets or sets the Author's icon url.
    /// </summary>
    public string IconUrl
    {
        get { return this._iconUri?.ToString(); }
        set { this._iconUri = string.IsNullOrEmpty(value) ? null : new DiscordUri(value); }
    }
}
/// <summary>
/// Represents an embed footer.
/// </summary>
public class EmbedFooter
{
/// <summary>
/// Gets or sets the text of the footer. Limited to 2048 characters.
/// </summary>
public string Text
{
get => this._text;
set
{
if (value != null && value.Length > 2048)
throw new ArgumentException("Footer text length cannot exceed 2048 characters.", nameof(value));
this._text = value;
}
}
private string _text;
/// <summary>
/// Gets or sets the footer's icon url.
/// </summary>
public string IconUrl
{
get => this._iconUri?.ToString();
set => this._iconUri = string.IsNullOrEmpty(value) ? null : new DiscordUri(value);
}
internal DiscordUri _iconUri;
}
/// <summary>
/// Represents an embed thumbnail.
/// </summary>
public class EmbedThumbnail
{
    internal DiscordUri _uri;
    private int _height;
    private int _width;

    /// <summary>
    /// Gets or sets the thumbnail's image url.
    /// </summary>
    public string Url
    {
        get { return this._uri?.ToString(); }
        set { this._uri = string.IsNullOrEmpty(value) ? null : new DiscordUri(value); }
    }

    /// <summary>
    /// Gets or sets the thumbnail's height. Negative values are clamped to 0.
    /// </summary>
    public int Height
    {
        get { return this._height; }
        set { this._height = Math.Max(value, 0); }
    }

    /// <summary>
    /// Gets or sets the thumbnail's width. Negative values are clamped to 0.
    /// </summary>
    public int Width
    {
        get { return this._width; }
        set { this._width = Math.Max(value, 0); }
    }
}
}
}
| |
namespace EIDSS.Reports.Parameterized.Uni.EventLog
{
// Designer-generated layout for the EventLogReport (DevExpress XtraReports).
// The code below is maintained by the report designer; do not hand-edit it.
partial class EventLogReport
{
#region Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(EventLogReport));
this.tableHeader = new DevExpress.XtraReports.UI.XRTable();
this.rowHeader1 = new DevExpress.XtraReports.UI.XRTableRow();
this.cellDateTime = new DevExpress.XtraReports.UI.XRTableCell();
this.cellType = new DevExpress.XtraReports.UI.XRTableCell();
this.cellPerson = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableRow3 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell3 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell4 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell5 = new DevExpress.XtraReports.UI.XRTableCell();
this.DetailReport = new DevExpress.XtraReports.UI.DetailReportBand();
this.Detail1 = new DevExpress.XtraReports.UI.DetailBand();
this.tableData = new DevExpress.XtraReports.UI.XRTable();
this.RowData = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell1 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell2 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell6 = new DevExpress.XtraReports.UI.XRTableCell();
this.sp_rep_UNI_EventLogTableAdapter1 = new EIDSS.Reports.Parameterized.Uni.EventLog.EventLogDataSetTableAdapters.sp_rep_UNI_EventLogTableAdapter();
this.eventLogDataSet1 = new EIDSS.Reports.Parameterized.Uni.EventLog.EventLogDataSet();
((System.ComponentModel.ISupportInitialize)(this.tableInterval)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.m_BaseDataSet)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.tableBaseHeader)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.tableHeader)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.tableData)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.eventLogDataSet1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this)).BeginInit();
//
// tableInterval
//
resources.ApplyResources(this.tableInterval, "tableInterval");
this.tableInterval.StylePriority.UseBorders = false;
this.tableInterval.StylePriority.UseFont = false;
this.tableInterval.StylePriority.UsePadding = false;
//
// cellLanguage
//
this.cellLanguage.StylePriority.UseTextAlignment = false;
//
// lblReportName
//
resources.ApplyResources(this.lblReportName, "lblReportName");
this.lblReportName.StylePriority.UseBorders = false;
this.lblReportName.StylePriority.UseBorderWidth = false;
this.lblReportName.StylePriority.UseFont = false;
this.lblReportName.StylePriority.UseTextAlignment = false;
//
// Detail
//
resources.ApplyResources(this.Detail, "Detail");
this.Detail.StylePriority.UseFont = false;
this.Detail.StylePriority.UsePadding = false;
//
// PageHeader
//
this.PageHeader.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
this.tableHeader});
this.PageHeader.StylePriority.UseFont = false;
this.PageHeader.StylePriority.UsePadding = false;
//
// PageFooter
//
this.PageFooter.StylePriority.UseBorders = false;
//
// ReportHeader
//
resources.ApplyResources(this.ReportHeader, "ReportHeader");
//
// xrPageInfo1
//
resources.ApplyResources(this.xrPageInfo1, "xrPageInfo1");
this.xrPageInfo1.StylePriority.UseBorders = false;
//
// cellReportHeader
//
this.cellReportHeader.StylePriority.UseBorders = false;
this.cellReportHeader.StylePriority.UseFont = false;
this.cellReportHeader.StylePriority.UseTextAlignment = false;
this.cellReportHeader.Weight = 2.2994485515985108D;
//
// cellBaseSite
//
this.cellBaseSite.StylePriority.UseBorders = false;
this.cellBaseSite.StylePriority.UseFont = false;
this.cellBaseSite.StylePriority.UseTextAlignment = false;
this.cellBaseSite.Weight = 2.2994485515985108D;
//
// cellBaseCountry
//
this.cellBaseCountry.StylePriority.UseFont = false;
this.cellBaseCountry.Weight = 1.6891816919536407D;
//
// cellBaseLeftHeader
//
this.cellBaseLeftHeader.Weight = 1.6891816919536407D;
//
// tableBaseHeader
//
resources.ApplyResources(this.tableBaseHeader, "tableBaseHeader");
this.tableBaseHeader.StylePriority.UseBorders = false;
this.tableBaseHeader.StylePriority.UseBorderWidth = false;
this.tableBaseHeader.StylePriority.UseFont = false;
this.tableBaseHeader.StylePriority.UsePadding = false;
this.tableBaseHeader.StylePriority.UseTextAlignment = false;
//
// tableHeader
//
this.tableHeader.Borders = ((DevExpress.XtraPrinting.BorderSide)((((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Top)
| DevExpress.XtraPrinting.BorderSide.Right)
| DevExpress.XtraPrinting.BorderSide.Bottom)));
resources.ApplyResources(this.tableHeader, "tableHeader");
this.tableHeader.Name = "tableHeader";
this.tableHeader.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
this.rowHeader1,
this.xrTableRow3});
this.tableHeader.StylePriority.UseBorders = false;
this.tableHeader.StylePriority.UseFont = false;
this.tableHeader.StylePriority.UseTextAlignment = false;
//
// rowHeader1
//
this.rowHeader1.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.cellDateTime,
this.cellType,
this.cellPerson});
resources.ApplyResources(this.rowHeader1, "rowHeader1");
this.rowHeader1.Name = "rowHeader1";
this.rowHeader1.Padding = new DevExpress.XtraPrinting.PaddingInfo(2, 2, 0, 0, 100F);
this.rowHeader1.StylePriority.UseFont = false;
this.rowHeader1.StylePriority.UsePadding = false;
this.rowHeader1.StylePriority.UseTextAlignment = false;
this.rowHeader1.Weight = 0.4329004329004329D;
//
// cellDateTime
//
resources.ApplyResources(this.cellDateTime, "cellDateTime");
this.cellDateTime.Name = "cellDateTime";
this.cellDateTime.StylePriority.UseFont = false;
this.cellDateTime.Weight = 0.68740112079440219D;
//
// cellType
//
resources.ApplyResources(this.cellType, "cellType");
this.cellType.Name = "cellType";
this.cellType.StylePriority.UseFont = false;
this.cellType.StylePriority.UseTextAlignment = false;
this.cellType.Weight = 0.68740112079440219D;
//
// cellPerson
//
resources.ApplyResources(this.cellPerson, "cellPerson");
this.cellPerson.Name = "cellPerson";
this.cellPerson.StylePriority.UseFont = false;
this.cellPerson.StylePriority.UseTextAlignment = false;
this.cellPerson.Weight = 0.69030245026717763D;
//
// xrTableRow3
//
this.xrTableRow3.Borders = ((DevExpress.XtraPrinting.BorderSide)((((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Top)
| DevExpress.XtraPrinting.BorderSide.Right)
| DevExpress.XtraPrinting.BorderSide.Bottom)));
this.xrTableRow3.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell3,
this.xrTableCell4,
this.xrTableCell5});
this.xrTableRow3.Name = "xrTableRow3";
this.xrTableRow3.StylePriority.UseBorders = false;
this.xrTableRow3.Weight = 0.28860028860028863D;
//
// xrTableCell3
//
resources.ApplyResources(this.xrTableCell3, "xrTableCell3");
this.xrTableCell3.Name = "xrTableCell3";
this.xrTableCell3.StylePriority.UseFont = false;
this.xrTableCell3.Weight = 0.68740112079440219D;
//
// xrTableCell4
//
resources.ApplyResources(this.xrTableCell4, "xrTableCell4");
this.xrTableCell4.Name = "xrTableCell4";
this.xrTableCell4.StylePriority.UseFont = false;
this.xrTableCell4.Weight = 0.68740112079440219D;
//
// xrTableCell5
//
resources.ApplyResources(this.xrTableCell5, "xrTableCell5");
this.xrTableCell5.Name = "xrTableCell5";
this.xrTableCell5.StylePriority.UseFont = false;
this.xrTableCell5.Weight = 0.69030245026717763D;
//
// DetailReport
//
this.DetailReport.Bands.AddRange(new DevExpress.XtraReports.UI.Band[] {
this.Detail1});
this.DetailReport.DataAdapter = this.sp_rep_UNI_EventLogTableAdapter1;
this.DetailReport.DataMember = "spRepUniEventLog";
this.DetailReport.DataSource = this.eventLogDataSet1;
this.DetailReport.Level = 0;
this.DetailReport.Name = "DetailReport";
//
// Detail1
//
this.Detail1.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
this.tableData});
resources.ApplyResources(this.Detail1, "Detail1");
this.Detail1.Name = "Detail1";
//
// tableData
//
this.tableData.Borders = ((DevExpress.XtraPrinting.BorderSide)(((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Right)
| DevExpress.XtraPrinting.BorderSide.Bottom)));
resources.ApplyResources(this.tableData, "tableData");
this.tableData.Name = "tableData";
this.tableData.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
this.RowData});
this.tableData.StylePriority.UseBorders = false;
this.tableData.StylePriority.UseFont = false;
this.tableData.StylePriority.UseTextAlignment = false;
//
// RowData
//
this.RowData.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell1,
this.xrTableCell2,
this.xrTableCell6});
resources.ApplyResources(this.RowData, "RowData");
this.RowData.Name = "RowData";
this.RowData.Padding = new DevExpress.XtraPrinting.PaddingInfo(2, 2, 0, 0, 100F);
this.RowData.StylePriority.UseFont = false;
this.RowData.StylePriority.UsePadding = false;
this.RowData.StylePriority.UseTextAlignment = false;
this.RowData.Weight = 0.4329004329004329D;
//
// xrTableCell1
//
this.xrTableCell1.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "spRepUniEventLog.datEventDatatime", "{0:dd/MM/yyyy}")});
resources.ApplyResources(this.xrTableCell1, "xrTableCell1");
this.xrTableCell1.Name = "xrTableCell1";
this.xrTableCell1.StylePriority.UseFont = false;
this.xrTableCell1.Weight = 0.68740112079440219D;
//
// xrTableCell2
//
this.xrTableCell2.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "spRepUniEventLog.strEventTypeName")});
resources.ApplyResources(this.xrTableCell2, "xrTableCell2");
this.xrTableCell2.Name = "xrTableCell2";
this.xrTableCell2.StylePriority.UseFont = false;
this.xrTableCell2.StylePriority.UseTextAlignment = false;
this.xrTableCell2.Weight = 0.68740112079440219D;
//
// xrTableCell6
//
this.xrTableCell6.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "spRepUniEventLog.strPersonName")});
resources.ApplyResources(this.xrTableCell6, "xrTableCell6");
this.xrTableCell6.Name = "xrTableCell6";
this.xrTableCell6.StylePriority.UseFont = false;
this.xrTableCell6.StylePriority.UseTextAlignment = false;
this.xrTableCell6.Weight = 0.69030245026717763D;
//
// sp_rep_UNI_EventLogTableAdapter1
//
this.sp_rep_UNI_EventLogTableAdapter1.ClearBeforeFill = true;
//
// eventLogDataSet1
//
this.eventLogDataSet1.DataSetName = "EventLogDataSet";
this.eventLogDataSet1.SchemaSerializationMode = System.Data.SchemaSerializationMode.IncludeSchema;
//
// EventLogReport
//
this.Bands.AddRange(new DevExpress.XtraReports.UI.Band[] {
this.Detail,
this.PageHeader,
this.PageFooter,
this.DetailReport,
this.ReportHeader});
this.Landscape = false;
this.PageHeight = 1169;
this.PageWidth = 827;
this.Version = "11.1";
this.Controls.SetChildIndex(this.ReportHeader, 0);
this.Controls.SetChildIndex(this.DetailReport, 0);
this.Controls.SetChildIndex(this.PageFooter, 0);
this.Controls.SetChildIndex(this.PageHeader, 0);
this.Controls.SetChildIndex(this.Detail, 0);
((System.ComponentModel.ISupportInitialize)(this.tableInterval)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.m_BaseDataSet)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.tableBaseHeader)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.tableHeader)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.tableData)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.eventLogDataSet1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this)).EndInit();
}
#endregion
// Designer-managed control fields; names correspond to the components
// wired up in InitializeComponent above.
private DevExpress.XtraReports.UI.XRTable tableHeader;
private DevExpress.XtraReports.UI.XRTableRow rowHeader1;
private DevExpress.XtraReports.UI.XRTableCell cellType;
private DevExpress.XtraReports.UI.XRTableCell cellPerson;
private DevExpress.XtraReports.UI.DetailReportBand DetailReport;
private DevExpress.XtraReports.UI.DetailBand Detail1;
private DevExpress.XtraReports.UI.XRTableCell cellDateTime;
private DevExpress.XtraReports.UI.XRTableRow xrTableRow3;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell3;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell4;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell5;
private DevExpress.XtraReports.UI.XRTable tableData;
private DevExpress.XtraReports.UI.XRTableRow RowData;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell1;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell2;
private DevExpress.XtraReports.UI.XRTableCell xrTableCell6;
private EventLogDataSet eventLogDataSet1;
private EventLogDataSetTableAdapters.sp_rep_UNI_EventLogTableAdapter sp_rep_UNI_EventLogTableAdapter1;
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace EZOper.TechTester.OWINOAuthWebSI.Areas.ZApi
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
// Modify this to add more default documentations.
// Human-readable descriptions for simple CLR types; membership in this
// dictionary is also what classifies a type as "simple" in
// GetOrCreateModelDescription.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
// Resolved lazily because the documentation provider is looked up from the
// HttpConfiguration service container and may not be registered.
private Lazy<IModelDocumentationProvider> _documentationProvider;
/// <summary>
/// Creates a generator bound to the given Web API configuration.
/// </summary>
/// <param name="config">Configuration whose services supply the documentation provider.</param>
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
// Cache of model descriptions produced so far, keyed case-insensitively by model name.
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
// May be null when the configuration has no IModelDocumentationProvider registered.
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
/// <summary>
/// Returns a (possibly cached) description of the given model type,
/// classifying it as simple, enum, collection, dictionary, key/value pair,
/// or complex type. The order of the checks below matters: cache lookup
/// first, then simple/enum, then generic collection shapes, then
/// non-generic fallbacks, and finally the complex-type catch-all.
/// </summary>
/// <param name="modelType">Type to describe.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="modelType"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when two distinct types map to the same model name.</exception>
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
// Describe Nullable<T> as its underlying T.
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
// A cache hit for the same name but a different type means two types
// collide on one model name - that would silently merge their docs.
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
// IEnumerable<T> implementors become collections of T.
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
// Check IDictionary<K,V> before KeyValuePair<K,V>.
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
// Non-generic fallbacks: element types are unknown, so use string/object.
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
// Change this to provide different name for the member.
// Precedence: [JsonProperty] name, then [DataMember] name (only inside a
// [DataContract] type), then the CLR member name.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
    JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
    bool hasJsonName = jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName);
    if (hasJsonName)
    {
        return jsonProperty.PropertyName;
    }
    if (!hasDataContractAttribute)
    {
        return member.Name;
    }
    DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
    bool hasDataMemberName = dataMember != null && !String.IsNullOrEmpty(dataMember.Name);
    return hasDataMemberName ? dataMember.Name : member.Name;
}
// Display member only if all the followings are true:
//     no JsonIgnoreAttribute
//     no XmlIgnoreAttribute
//     no IgnoreDataMemberAttribute
//     no NonSerializedAttribute
//     no ApiExplorerSettingsAttribute with IgnoreApi set to true
//     no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
    // Any explicit "ignore" marker from the supported serializers hides the member.
    if (member.GetCustomAttribute<JsonIgnoreAttribute>() != null ||
        member.GetCustomAttribute<XmlIgnoreAttribute>() != null ||
        member.GetCustomAttribute<IgnoreDataMemberAttribute>() != null ||
        member.GetCustomAttribute<NonSerializedAttribute>() != null)
    {
        return false;
    }

    ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
    if (apiExplorerSetting != null && apiExplorerSetting.IgnoreApi)
    {
        return false;
    }

    if (!hasDataContractAttribute)
    {
        return true;
    }

    // Inside a [DataContract] type, only explicitly opted-in members are shown.
    bool hasMemberAttribute = member.DeclaringType.IsEnum
        ? member.GetCustomAttribute<EnumMemberAttribute>() != null
        : member.GetCustomAttribute<DataMemberAttribute>() != null;
    return hasMemberAttribute;
}
// Returns the canned description for a simple type, otherwise falls back to
// the configured documentation provider; null when neither has anything.
private string CreateDefaultDocumentation(Type type)
{
    string documentation;
    if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
    {
        return documentation;
    }
    IModelDocumentationProvider provider = DocumentationProvider;
    return provider == null ? null : provider.GetDocumentation(type);
}
/// <summary>
/// Collects documentation annotations for a member from its data-annotation
/// attributes (via AnnotationTextGenerator) and appends them, sorted, to the
/// parameter description.
/// </summary>
/// <param name="property">Member whose attributes are examined.</param>
/// <param name="propertyModel">Description that receives the annotations.</param>
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
// Attributes with no registered generator are silently skipped.
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
/// <summary>
/// Describes a collection type by first describing its element type.
/// Returns null when no description could be produced for the element type.
/// </summary>
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
    ModelDescription elementDescription = GetOrCreateModelDescription(elementType);
    if (elementDescription == null)
    {
        return null;
    }

    return new CollectionModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        ElementDescription = elementDescription
    };
}
/// <summary>
/// Builds a ComplexTypeModelDescription for <paramref name="modelType"/> with one
/// ParameterDescription per displayable public instance property and field.
/// </summary>
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
    ComplexTypeModelDescription description = new ComplexTypeModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        Documentation = CreateDefaultDocumentation(modelType)
    };

    // Registered before member types are resolved (presumably so recursive /
    // self-referential types can be looked up while still being generated).
    GeneratedModels.Add(description.Name, description);

    bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;

    foreach (PropertyInfo propertyInfo in modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance))
    {
        if (!ShouldDisplayMember(propertyInfo, hasDataContractAttribute))
        {
            continue;
        }

        ParameterDescription memberModel = new ParameterDescription
        {
            Name = GetMemberName(propertyInfo, hasDataContractAttribute)
        };
        if (DocumentationProvider != null)
        {
            memberModel.Documentation = DocumentationProvider.GetDocumentation(propertyInfo);
        }
        GenerateAnnotations(propertyInfo, memberModel);
        description.Properties.Add(memberModel);
        memberModel.TypeDescription = GetOrCreateModelDescription(propertyInfo.PropertyType);
    }

    foreach (FieldInfo fieldInfo in modelType.GetFields(BindingFlags.Public | BindingFlags.Instance))
    {
        if (!ShouldDisplayMember(fieldInfo, hasDataContractAttribute))
        {
            continue;
        }

        // Note: fields intentionally mirror the property handling above, except
        // that annotations are not generated for them (as in the original).
        ParameterDescription memberModel = new ParameterDescription
        {
            Name = GetMemberName(fieldInfo, hasDataContractAttribute)
        };
        if (DocumentationProvider != null)
        {
            memberModel.Documentation = DocumentationProvider.GetDocumentation(fieldInfo);
        }
        description.Properties.Add(memberModel);
        memberModel.TypeDescription = GetOrCreateModelDescription(fieldInfo.FieldType);
    }

    return description;
}
/// <summary>
/// Describes a dictionary type in terms of its key and value type descriptions.
/// </summary>
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
    ModelDescription keyDescription = GetOrCreateModelDescription(keyType);
    ModelDescription valueDescription = GetOrCreateModelDescription(valueType);

    return new DictionaryModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        KeyModelDescription = keyDescription,
        ValueModelDescription = valueDescription
    };
}
/// <summary>
/// Builds an EnumTypeModelDescription for <paramref name="modelType"/> with one
/// EnumValueDescription per displayable public static field (i.e. per enum member).
/// </summary>
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
    EnumTypeModelDescription description = new EnumTypeModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        Documentation = CreateDefaultDocumentation(modelType)
    };

    bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
    foreach (FieldInfo member in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
    {
        if (!ShouldDisplayMember(member, hasDataContractAttribute))
        {
            continue;
        }

        EnumValueDescription valueDescription = new EnumValueDescription
        {
            Name = member.Name,
            Value = member.GetRawConstantValue().ToString()
        };
        if (DocumentationProvider != null)
        {
            valueDescription.Documentation = DocumentationProvider.GetDocumentation(member);
        }
        description.Values.Add(valueDescription);
    }

    GeneratedModels.Add(description.Name, description);
    return description;
}
/// <summary>
/// Describes a KeyValuePair type in terms of its key and value type descriptions.
/// </summary>
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
    ModelDescription keyDescription = GetOrCreateModelDescription(keyType);
    ModelDescription valueDescription = GetOrCreateModelDescription(valueType);

    return new KeyValuePairModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        KeyModelDescription = keyDescription,
        ValueModelDescription = valueDescription
    };
}
/// <summary>
/// Builds and registers a SimpleTypeModelDescription for <paramref name="modelType"/>.
/// </summary>
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
    SimpleTypeModelDescription description = new SimpleTypeModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        Documentation = CreateDefaultDocumentation(modelType)
    };
    GeneratedModels.Add(description.Name, description);
    return description;
}
}
}
| |
using System;
using System.Diagnostics;
using System.Text;
namespace Community.CsharpSqlite
{
public partial class Sqlite3
{
/*
** 2006 June 10
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
**
*************************************************************************
** This file contains code used to help implement virtual tables.
*************************************************************************
** Included in SQLite3 port to C#-SQLite; 2008 Noah B Hart
** C#-SQLite is an independent reimplementation of the SQLite software library
**
** SQLITE_SOURCE_ID: 2010-12-07 20:14:09 a586a4deeb25330037a49df295b36aaf624d0f45
**
*************************************************************************
*/
#if !SQLITE_OMIT_VIRTUALTABLE
//#include "sqliteInt.h"
// NOTE(review): the code below still uses C pointer syntax ("sqlite3 *db",
// "Module *pMod", function-pointer parameters, offsetof, ...). It appears to be
// untranslated upstream SQLite C source carried along in the C# port -- confirm
// whether this region is actually compiled or retained only as a translation
// reference before changing anything here.
/*
** The actual function that does the work of creating a new module.
** This function implements the sqlite3_create_module() and
** sqlite3_create_module_v2() interfaces.
*/
static int createModule(
sqlite3 *db, /* Database in which module is registered */
const char *zName, /* Name assigned to this module */
const sqlite3_module *pModule, /* The definition of the module */
void *pAux, /* Context pointer for xCreate/xConnect */
void (*xDestroy)(void *) /* Module destructor function */
){
int rc, nName;
Module *pMod;
sqlite3_mutex_enter(db.mutex);
nName = sqlite3Strlen30(zName);
/* The Module struct and a copy of the module name are allocated in one block;
** the name copy lives immediately after the Module. */
pMod = (Module *)sqlite3DbMallocRaw(db, sizeof(Module) + nName + 1);
if( pMod ){
Module *pDel;
char *zCopy = (char *)(&pMod[1]);
memcpy(zCopy, zName, nName+1);
pMod.zName = zCopy;
pMod.pModule = pModule;
pMod.pAux = pAux;
pMod.xDestroy = xDestroy;
/* Replace any previously registered module of the same name. */
pDel = (Module *)sqlite3HashInsert(&db.aModule, zCopy, nName, (void*)pMod);
if( pDel && pDel.xDestroy ){
pDel.xDestroy(pDel.pAux);
}
sqlite3DbFree(db, pDel);
if( pDel==pMod ){
/* HashInsert handed back the new entry itself: malloc failed inside the
** hash insert (same convention as in sqlite3VtabFinishParse below). */
db.mallocFailed = 1;
}
sqlite3ResetInternalSchema(db, -1);
}else if( xDestroy ){
/* Allocation failed: the contract still requires the destructor to run. */
xDestroy(pAux);
}
rc = sqlite3ApiExit(db, SQLITE_OK);
sqlite3_mutex_leave(db.mutex);
return rc;
}
/*
** External API function used to create a new virtual-table module.
*/
int sqlite3_create_module(
sqlite3 *db, /* Database in which module is registered */
const char *zName, /* Name assigned to this module */
const sqlite3_module *pModule, /* The definition of the module */
void *pAux /* Context pointer for xCreate/xConnect */
){
return createModule(db, zName, pModule, pAux, 0);
}
/*
** External API function used to create a new virtual-table module.
*/
int sqlite3_create_module_v2(
sqlite3 *db, /* Database in which module is registered */
const char *zName, /* Name assigned to this module */
const sqlite3_module *pModule, /* The definition of the module */
void *pAux, /* Context pointer for xCreate/xConnect */
void (*xDestroy)(void *) /* Module destructor function */
){
return createModule(db, zName, pModule, pAux, xDestroy);
}
/*
** Lock the virtual table so that it cannot be disconnected.
** Locks nest. Every lock should have a corresponding unlock.
** If an unlock is omitted, resource leaks will occur.
**
** If a disconnect is attempted while a virtual table is locked,
** the disconnect is deferred until all locks have been removed.
*/
void sqlite3VtabLock(VTable *pVTab){
pVTab.nRef++;
}
/*
** pTab is a pointer to a Table structure representing a virtual-table.
** Return a pointer to the VTable object used by connection db to access
** this virtual-table, if one has been created, or NULL otherwise.
*/
VTable *sqlite3GetVTable(sqlite3 *db, Table *pTab){
VTable *pVtab;
Debug.Assert( IsVirtual(pTab) );
/* Linear scan of the per-table VTable list for the entry owned by db. */
for(pVtab=pTab.pVTable; pVtab && pVtab.db!=db; pVtab=pVtab.pNext);
return pVtab;
}
/*
** Decrement the ref-count on a virtual table object. When the ref-count
** reaches zero, call the xDisconnect() method to delete the object.
*/
void sqlite3VtabUnlock(VTable *pVTab){
sqlite3 *db = pVTab.db;
Debug.Assert( db );
Debug.Assert( pVTab.nRef>0 );
Debug.Assert( sqlite3SafetyCheckOk(db) );
pVTab.nRef--;
if( pVTab.nRef==0 ){
sqlite3_vtab *p = pVTab.pVtab;
if( p ){
p.pModule.xDisconnect(p);
}
sqlite3DbFree(db, pVTab);
}
}
/*
** Table p is a virtual table. This function moves all elements in the
** p.pVTable list to the sqlite3.pDisconnect lists of their associated
** database connections to be disconnected at the next opportunity.
** Except, if argument db is not NULL, then the entry associated with
** connection db is left in the p.pVTable list.
*/
static VTable *vtabDisconnectAll(sqlite3 *db, Table *p){
VTable *pRet = 0;
VTable *pVTable = p.pVTable;
p.pVTable = 0;
/* Assert that the mutex (if any) associated with the BtShared database
** that contains table p is held by the caller. See header comments
** above function sqlite3VtabUnlockList() for an explanation of why
** this makes it safe to access the sqlite3.pDisconnect list of any
** database connection that may have an entry in the p.pVTable list.
*/
Debug.Assert( db==0 || sqlite3SchemaMutexHeld(db, 0, p.pSchema) );
while( pVTable ){
sqlite3 *db2 = pVTable.db;
VTable *pNext = pVTable.pNext;
Debug.Assert( db2 );
if( db2==db ){
/* Keep the caller's own entry on the table; it becomes the sole
** element of the rebuilt p.pVTable list. */
pRet = pVTable;
p.pVTable = pRet;
pRet.pNext = 0;
}else{
pVTable.pNext = db2.pDisconnect;
db2.pDisconnect = pVTable;
}
pVTable = pNext;
}
Debug.Assert( !db || pRet );
return pRet;
}
/*
** Disconnect all the virtual table objects in the sqlite3.pDisconnect list.
**
** This function may only be called when the mutexes associated with all
** shared b-tree databases opened using connection db are held by the
** caller. This is done to protect the sqlite3.pDisconnect list. The
** sqlite3.pDisconnect list is accessed only as follows:
**
** 1) By this function. In this case, all BtShared mutexes and the mutex
** associated with the database handle itself must be held.
**
** 2) By function vtabDisconnectAll(), when it adds a VTable entry to
** the sqlite3.pDisconnect list. In this case either the BtShared mutex
** associated with the database the virtual table is stored in is held
** or, if the virtual table is stored in a non-sharable database, then
** the database handle mutex is held.
**
** As a result, a sqlite3.pDisconnect cannot be accessed simultaneously
** by multiple threads. It is thread-safe.
*/
void sqlite3VtabUnlockList(sqlite3 *db){
VTable *p = db.pDisconnect;
db.pDisconnect = 0;
Debug.Assert( sqlite3BtreeHoldsAllMutexes(db) );
Debug.Assert( sqlite3_mutex_held(db.mutex) );
if( p ){
sqlite3ExpirePreparedStatements(db);
do {
VTable *pNext = p.pNext;
sqlite3VtabUnlock(p);
p = pNext;
}while( p );
}
}
/*
** Clear any and all virtual-table information from the Table record.
** This routine is called, for example, just before deleting the Table
** record.
**
** Since it is a virtual-table, the Table structure contains a pointer
** to the head of a linked list of VTable structures. Each VTable
** structure is associated with a single sqlite3* user of the schema.
** The reference count of the VTable structure associated with database
** connection db is decremented immediately (which may lead to the
** structure being xDisconnected and freed). Any other VTable structures
** in the list are moved to the sqlite3.pDisconnect list of the associated
** database connection.
*/
void sqlite3VtabClear(sqlite3 *db, Table *p){
if( !db || db.pnBytesFreed==0 ) vtabDisconnectAll(0, p);
if( p.azModuleArg ){
int i;
for(i=0; i<p.nModuleArg; i++){
sqlite3DbFree(db, p.azModuleArg[i]);
}
sqlite3DbFree(db, p.azModuleArg);
}
}
/*
** Add a new module argument to pTable.azModuleArg[].
** The string is not copied - the pointer is stored. The
** string will be freed automatically when the table is
** deleted.
*/
static void addModuleArgument(sqlite3 *db, Table *pTable, char *zArg){
int i = pTable.nModuleArg++;
int nBytes = sizeof(char *)*(1+pTable.nModuleArg);
char **azModuleArg;
azModuleArg = sqlite3DbRealloc(db, pTable.azModuleArg, nBytes);
if( azModuleArg==0 ){
/* Realloc failed: free every argument collected so far plus the new one,
** and leave the table with an empty argument list. */
int j;
for(j=0; j<i; j++){
sqlite3DbFree(db, pTable.azModuleArg[j]);
}
sqlite3DbFree(db, zArg);
sqlite3DbFree(db, pTable.azModuleArg);
pTable.nModuleArg = 0;
}else{
azModuleArg[i] = zArg;
azModuleArg[i+1] = 0;
}
pTable.azModuleArg = azModuleArg;
}
/*
** The parser calls this routine when it first sees a CREATE VIRTUAL TABLE
** statement. The module name has been parsed, but the optional list
** of parameters that follow the module name are still pending.
*/
void sqlite3VtabBeginParse(
Parse *pParse, /* Parsing context */
Token *pName1, /* Name of new table, or database name */
Token *pName2, /* Name of new table or NULL */
Token *pModuleName /* Name of the module for the virtual table */
){
int iDb; /* The database the table is being created in */
Table *pTable; /* The new virtual table */
sqlite3 *db; /* Database connection */
sqlite3StartTable(pParse, pName1, pName2, 0, 0, 1, 0);
pTable = pParse.pNewTable;
if( pTable==0 ) return;
Debug.Assert( 0==pTable.pIndex );
db = pParse.db;
iDb = sqlite3SchemaToIndex(db, pTable.pSchema);
Debug.Assert( iDb>=0 );
pTable.tabFlags |= TF_Virtual;
pTable.nModuleArg = 0;
/* The first three module arguments are always: module name, database name,
** table name (in that order). */
addModuleArgument(db, pTable, sqlite3NameFromToken(db, pModuleName));
addModuleArgument(db, pTable, sqlite3DbStrDup(db, db.aDb[iDb].zName));
addModuleArgument(db, pTable, sqlite3DbStrDup(db, pTable.zName));
pParse.sNameToken.n = (int)(&pModuleName.z[pModuleName.n] - pName1.z);
#if !SQLITE_OMIT_AUTHORIZATION
/* Creating a virtual table invokes the authorization callback twice.
** The first invocation, to obtain permission to INSERT a row into the
** sqlite_master table, has already been made by sqlite3StartTable().
** The second call, to obtain permission to create the table, is made now.
*/
if( pTable.azModuleArg ){
sqlite3AuthCheck(pParse, SQLITE_CREATE_VTABLE, pTable.zName,
pTable.azModuleArg[0], pParse.db.aDb[iDb].zName);
}
#endif
}
/*
** This routine takes the module argument that has been accumulating
** in pParse.zArg[] and appends it to the list of arguments on the
** virtual table currently under construction in pParse.pTable.
*/
static void addArgumentToVtab(Parse *pParse){
if( pParse.sArg.z && ALWAYS(pParse.pNewTable) ){
const char *z = (const char*)pParse.sArg.z;
int n = pParse.sArg.n;
sqlite3 *db = pParse.db;
addModuleArgument(db, pParse.pNewTable, sqlite3DbStrNDup(db, z, n));
}
}
/*
** The parser calls this routine after the CREATE VIRTUAL TABLE statement
** has been completely parsed.
*/
void sqlite3VtabFinishParse(Parse *pParse, Token *pEnd){
Table *pTab = pParse.pNewTable; /* The table being constructed */
sqlite3 *db = pParse.db; /* The database connection */
if( pTab==0 ) return;
addArgumentToVtab(pParse);
pParse.sArg.z = 0;
if( pTab.nModuleArg<1 ) return;
/* If the CREATE VIRTUAL TABLE statement is being entered for the
** first time (in other words if the virtual table is actually being
** created now instead of just being read out of sqlite_master) then
** do additional initialization work and store the statement text
** in the sqlite_master table.
*/
if( !db.init.busy ){
char *zStmt;
char *zWhere;
int iDb;
Vdbe *v;
/* Compute the complete text of the CREATE VIRTUAL TABLE statement */
if( pEnd ){
pParse.sNameToken.n = (int)(pEnd.z - pParse.sNameToken.z) + pEnd.n;
}
zStmt = sqlite3MPrintf(db, "CREATE VIRTUAL TABLE %T", &pParse.sNameToken);
/* A slot for the record has already been allocated in the
** SQLITE_MASTER table. We just need to update that slot with all
** the information we've collected.
**
** The VM register number pParse.regRowid holds the rowid of an
** entry in the sqlite_master table that was created for this vtab
** by sqlite3StartTable().
*/
iDb = sqlite3SchemaToIndex(db, pTab.pSchema);
sqlite3NestedParse(pParse,
"UPDATE %Q.%s "
"SET type='table', name=%Q, tbl_name=%Q, rootpage=0, sql=%Q "
"WHERE rowid=#%d",
db.aDb[iDb].zName, SCHEMA_TABLE(iDb),
pTab.zName,
pTab.zName,
zStmt,
pParse.regRowid
);
sqlite3DbFree(db, zStmt);
v = sqlite3GetVdbe(pParse);
sqlite3ChangeCookie(pParse, iDb);
sqlite3VdbeAddOp2(v, OP_Expire, 0, 0);
zWhere = sqlite3MPrintf(db, "name='%q' AND type='table'", pTab.zName);
sqlite3VdbeAddOp4(v, OP_ParseSchema, iDb, 0, 0, zWhere, P4_DYNAMIC);
sqlite3VdbeAddOp4(v, OP_VCreate, iDb, 0, 0,
pTab.zName, sqlite3Strlen30(pTab.zName) + 1);
}
/* If we are rereading the sqlite_master table create the in-memory
** record of the table. The xConnect() method is not called until
** the first time the virtual table is used in an SQL statement. This
** allows a schema that contains virtual tables to be loaded before
** the required virtual table implementations are registered. */
else {
Table *pOld;
Schema *pSchema = pTab.pSchema;
const char *zName = pTab.zName;
int nName = sqlite3Strlen30(zName);
Debug.Assert( sqlite3SchemaMutexHeld(db, 0, pSchema) );
pOld = sqlite3HashInsert(&pSchema.tblHash, zName, nName, pTab);
if( pOld ){
db.mallocFailed = 1;
Debug.Assert( pTab==pOld ); /* Malloc must have failed inside HashInsert() */
return;
}
pParse.pNewTable = 0;
}
}
/*
** The parser calls this routine when it sees the first token
** of an argument to the module name in a CREATE VIRTUAL TABLE statement.
*/
void sqlite3VtabArgInit(Parse *pParse){
addArgumentToVtab(pParse);
pParse.sArg.z = 0;
pParse.sArg.n = 0;
}
/*
** The parser calls this routine for each token after the first token
** in an argument to the module name in a CREATE VIRTUAL TABLE statement.
*/
void sqlite3VtabArgExtend(Parse *pParse, Token *p){
Token *pArg = &pParse.sArg;
if( pArg.z==0 ){
pArg.z = p.z;
pArg.n = p.n;
}else{
/* Extend sArg to span from its start through the end of token p. */
Debug.Assert(pArg.z < p.z);
pArg.n = (int)(&p.z[p.n] - pArg.z);
}
}
/*
** Invoke a virtual table constructor (either xCreate or xConnect). The
** pointer to the function to invoke is passed as the fourth parameter
** to this procedure.
*/
static int vtabCallConstructor(
sqlite3 *db,
Table *pTab,
Module *pMod,
int (*xConstruct)(sqlite3*,void*,int,const char*const*,sqlite3_vtab**,char**),
char **pzErr
){
VTable *pVTable;
int rc;
const char *const*azArg = (const char *const*)pTab.azModuleArg;
int nArg = pTab.nModuleArg;
char *zErr = 0;
char *zModuleName = sqlite3MPrintf(db, "%s", pTab.zName);
if( !zModuleName ){
return SQLITE_NOMEM;
}
pVTable = sqlite3DbMallocZero(db, sizeof(VTable));
if( !pVTable ){
sqlite3DbFree(db, zModuleName);
return SQLITE_NOMEM;
}
pVTable.db = db;
pVTable.pMod = pMod;
Debug.Assert( !db.pVTab );
Debug.Assert( xConstruct );
/* db.pVTab communicates the target Table to sqlite3_declare_vtab(), which
** clears it again; if it is still set after the constructor returns, the
** constructor never declared a schema (checked below). */
db.pVTab = pTab;
/* Invoke the virtual table constructor */
rc = xConstruct(db, pMod.pAux, nArg, azArg, &pVTable.pVtab, &zErr);
if( rc==SQLITE_NOMEM ) db.mallocFailed = 1;
if( SQLITE_OK!=rc ){
if( zErr==0 ){
*pzErr = sqlite3MPrintf(db, "vtable constructor failed: %s", zModuleName);
}else {
*pzErr = sqlite3MPrintf(db, "%s", zErr);
sqlite3_free(zErr);
}
sqlite3DbFree(db, pVTable);
}else if( ALWAYS(pVTable.pVtab) ){
/* Justification of ALWAYS(): A correct vtab constructor must allocate
** the sqlite3_vtab object if successful. */
pVTable.pVtab.pModule = pMod.pModule;
pVTable.nRef = 1;
if( db.pVTab ){
const char *zFormat = "vtable constructor did not declare schema: %s";
*pzErr = sqlite3MPrintf(db, zFormat, pTab.zName);
sqlite3VtabUnlock(pVTable);
rc = SQLITE_ERROR;
}else{
int iCol;
/* If everything went according to plan, link the new VTable structure
** into the linked list headed by pTab.pVTable. Then loop through the
** columns of the table to see if any of them contain the token "hidden".
** If so, set the Column.isHidden flag and remove the token from
** the type string. */
pVTable.pNext = pTab.pVTable;
pTab.pVTable = pVTable;
for(iCol=0; iCol<pTab.nCol; iCol++){
char *zType = pTab.aCol[iCol].zType;
int nType;
int i = 0;
if( !zType ) continue;
nType = sqlite3Strlen30(zType);
if( sqlite3StrNICmp("hidden", zType, 6)||(zType[6] && zType[6]!=' ') ){
/* "hidden" is not a prefix of the type; search for " hidden" as a
** whole word elsewhere in the string. */
for(i=0; i<nType; i++){
if( (0==sqlite3StrNICmp(" hidden", &zType[i], 7))
&& (zType[i+7]=='\0' || zType[i+7]==' ')
){
i++;
break;
}
}
}
if( i<nType ){
/* Splice the "hidden" token (and one following separator, if any)
** out of the type string in place. */
int j;
int nDel = 6 + (zType[i+6] ? 1 : 0);
for(j=i; (j+nDel)<=nType; j++){
zType[j] = zType[j+nDel];
}
if( zType[i]=='\0' && i>0 ){
Debug.Assert(zType[i-1]==' ');
zType[i-1] = '\0';
}
pTab.aCol[iCol].isHidden = 1;
}
}
}
}
sqlite3DbFree(db, zModuleName);
db.pVTab = 0;
return rc;
}
/*
** This function is invoked by the parser to call the xConnect() method
** of the virtual table pTab. If an error occurs, an error code is returned
** and an error left in pParse.
**
** This call is a no-op if table pTab is not a virtual table.
*/
int sqlite3VtabCallConnect(Parse *pParse, Table *pTab){
sqlite3 *db = pParse.db;
const char *zMod;
Module *pMod;
int rc;
Debug.Assert( pTab );
if( (pTab.tabFlags & TF_Virtual)==0 || sqlite3GetVTable(db, pTab) ){
return SQLITE_OK;
}
/* Locate the required virtual table module */
zMod = pTab.azModuleArg[0];
pMod = (Module*)sqlite3HashFind(&db.aModule, zMod, sqlite3Strlen30(zMod));
if( !pMod ){
const char *zModule = pTab.azModuleArg[0];
sqlite3ErrorMsg(pParse, "no such module: %s", zModule);
rc = SQLITE_ERROR;
}else{
char *zErr = 0;
rc = vtabCallConstructor(db, pTab, pMod, pMod.pModule.xConnect, &zErr);
if( rc!=SQLITE_OK ){
sqlite3ErrorMsg(pParse, "%s", zErr);
}
sqlite3DbFree(db, zErr);
}
return rc;
}
/*
** Add the virtual table pVTab to the array sqlite3.aVTrans[].
*/
static int addToVTrans(sqlite3 *db, VTable *pVTab){
const int ARRAY_INCR = 5;
/* Grow the sqlite3.aVTrans array if required */
if( (db.nVTrans%ARRAY_INCR)==0 ){
VTable **aVTrans;
int nBytes = sizeof(sqlite3_vtab *) * (db.nVTrans + ARRAY_INCR);
aVTrans = sqlite3DbRealloc(db, (void *)db.aVTrans, nBytes);
if( !aVTrans ){
return SQLITE_NOMEM;
}
memset(&aVTrans[db.nVTrans], 0, sizeof(sqlite3_vtab *)*ARRAY_INCR);
db.aVTrans = aVTrans;
}
/* Add pVtab to the end of sqlite3.aVTrans */
db.aVTrans[db.nVTrans++] = pVTab;
sqlite3VtabLock(pVTab);
return SQLITE_OK;
}
/*
** This function is invoked by the vdbe to call the xCreate method
** of the virtual table named zTab in database iDb.
**
** If an error occurs, *pzErr is set to point to an English language
** description of the error and an SQLITE_XXX error code is returned.
** In this case the caller must call sqlite3DbFree(db, ) on *pzErr.
*/
int sqlite3VtabCallCreate(sqlite3 *db, int iDb, const char *zTab, char **pzErr){
int rc = SQLITE_OK;
Table *pTab;
Module *pMod;
const char *zMod;
pTab = sqlite3FindTable(db, zTab, db.aDb[iDb].zName);
Debug.Assert( pTab && (pTab.tabFlags & TF_Virtual)!=0 && !pTab.pVTable );
/* Locate the required virtual table module */
zMod = pTab.azModuleArg[0];
pMod = (Module*)sqlite3HashFind(&db.aModule, zMod, sqlite3Strlen30(zMod));
/* If the module has been registered and includes a Create method,
** invoke it now. If the module has not been registered, return an
** error. Otherwise, do nothing.
*/
if( !pMod ){
*pzErr = sqlite3MPrintf(db, "no such module: %s", zMod);
rc = SQLITE_ERROR;
}else{
rc = vtabCallConstructor(db, pTab, pMod, pMod.pModule.xCreate, pzErr);
}
/* Justification of ALWAYS(): The xConstructor method is required to
** create a valid sqlite3_vtab if it returns SQLITE_OK. */
if( rc==SQLITE_OK && ALWAYS(sqlite3GetVTable(db, pTab)) ){
rc = addToVTrans(db, sqlite3GetVTable(db, pTab));
}
return rc;
}
/*
** This function is used to set the schema of a virtual table. It is only
** valid to call this function from within the xCreate() or xConnect() of a
** virtual table module.
*/
int sqlite3_declare_vtab(sqlite3 *db, const char *zCreateTable){
Parse *pParse;
int rc = SQLITE_OK;
Table *pTab;
char *zErr = 0;
sqlite3_mutex_enter(db.mutex);
/* db.pVTab is set by vtabCallConstructor() for the duration of the
** xCreate/xConnect call; if it is clear, this API was misused. */
pTab = db.pVTab;
if( !pTab ){
sqlite3Error(db, SQLITE_MISUSE, 0);
sqlite3_mutex_leave(db.mutex);
return SQLITE_MISUSE_BKPT;
}
Debug.Assert( (pTab.tabFlags & TF_Virtual)!=0 );
pParse = sqlite3StackAllocZero(db, sizeof(*pParse));
if( pParse==0 ){
rc = SQLITE_NOMEM;
}else{
pParse.declareVtab = 1;
pParse.db = db;
pParse.nQueryLoop = 1;
if( SQLITE_OK==sqlite3RunParser(pParse, zCreateTable, &zErr)
&& pParse.pNewTable
&& !db.mallocFailed
&& !pParse.pNewTable.pSelect
&& (pParse.pNewTable.tabFlags & TF_Virtual)==0
){
if( !pTab.aCol ){
/* Steal the parsed column definitions from the temporary table. */
pTab.aCol = pParse.pNewTable.aCol;
pTab.nCol = pParse.pNewTable.nCol;
pParse.pNewTable.nCol = 0;
pParse.pNewTable.aCol = 0;
}
db.pVTab = 0;
}else{
sqlite3Error(db, SQLITE_ERROR, (zErr ? "%s" : 0), zErr);
sqlite3DbFree(db, zErr);
rc = SQLITE_ERROR;
}
pParse.declareVtab = 0;
if( pParse.pVdbe ){
sqlite3VdbeFinalize(pParse.pVdbe);
}
sqlite3DeleteTable(db, pParse.pNewTable);
sqlite3StackFree(db, pParse);
}
Debug.Assert( (rc&0xff)==rc );
rc = sqlite3ApiExit(db, rc);
sqlite3_mutex_leave(db.mutex);
return rc;
}
/*
** This function is invoked by the vdbe to call the xDestroy method
** of the virtual table named zTab in database iDb. This occurs
** when a DROP TABLE is mentioned.
**
** This call is a no-op if zTab is not a virtual table.
*/
int sqlite3VtabCallDestroy(sqlite3 *db, int iDb, const char *zTab){
int rc = SQLITE_OK;
Table *pTab;
pTab = sqlite3FindTable(db, zTab, db.aDb[iDb].zName);
if( ALWAYS(pTab!=0 && pTab.pVTable!=0) ){
VTable *p = vtabDisconnectAll(db, pTab);
Debug.Assert( rc==SQLITE_OK );
rc = p.pMod.pModule.xDestroy(p.pVtab);
/* Remove the sqlite3_vtab* from the aVTrans[] array, if applicable */
if( rc==SQLITE_OK ){
Debug.Assert( pTab.pVTable==p && p.pNext==0 );
p.pVtab = 0;
pTab.pVTable = 0;
sqlite3VtabUnlock(p);
}
}
return rc;
}
/*
** This function invokes either the xRollback or xCommit method
** of each of the virtual tables in the sqlite3.aVTrans array. The method
** called is identified by the second argument, "offset", which is
** the offset of the method to call in the sqlite3_module structure.
**
** The array is cleared after invoking the callbacks.
*/
static void callFinaliser(sqlite3 *db, int offset){
int i;
if( db.aVTrans ){
for(i=0; i<db.nVTrans; i++){
VTable *pVTab = db.aVTrans[i];
sqlite3_vtab *p = pVTab.pVtab;
if( p ){
/* Fetch the method at byte "offset" within the module struct. */
int (*x)(sqlite3_vtab *);
x = *(int (**)(sqlite3_vtab *))((char *)p.pModule + offset);
if( x ) x(p);
}
sqlite3VtabUnlock(pVTab);
}
sqlite3DbFree(db, db.aVTrans);
db.nVTrans = 0;
db.aVTrans = 0;
}
}
/*
** Invoke the xSync method of all virtual tables in the sqlite3.aVTrans
** array. Return the error code for the first error that occurs, or
** SQLITE_OK if all xSync operations are successful.
**
** Set *pzErrmsg to point to a buffer that should be released using
** sqlite3DbFree() containing an error message, if one is available.
*/
int sqlite3VtabSync(sqlite3 *db, char **pzErrmsg){
int i;
int rc = SQLITE_OK;
VTable **aVTrans = db.aVTrans;
/* db.aVTrans is cleared for the duration of the xSync calls; see the
** "special case" note in sqlite3VtabBegin() for why. */
db.aVTrans = 0;
for(i=0; rc==SQLITE_OK && i<db.nVTrans; i++){
int (*x)(sqlite3_vtab *);
sqlite3_vtab *pVtab = aVTrans[i].pVtab;
if( pVtab && (x = pVtab.pModule.xSync)!=0 ){
rc = x(pVtab);
sqlite3DbFree(db, *pzErrmsg);
*pzErrmsg = sqlite3DbStrDup(db, pVtab.zErrMsg);
sqlite3_free(pVtab.zErrMsg);
}
}
db.aVTrans = aVTrans;
return rc;
}
/*
** Invoke the xRollback method of all virtual tables in the
** sqlite3.aVTrans array. Then clear the array itself.
*/
int sqlite3VtabRollback(sqlite3 *db){
callFinaliser(db, offsetof(sqlite3_module,xRollback));
return SQLITE_OK;
}
/*
** Invoke the xCommit method of all virtual tables in the
** sqlite3.aVTrans array. Then clear the array itself.
*/
int sqlite3VtabCommit(sqlite3 *db){
callFinaliser(db, offsetof(sqlite3_module,xCommit));
return SQLITE_OK;
}
/*
** If the virtual table pVtab supports the transaction interface
** (xBegin/xRollback/xCommit and optionally xSync) and a transaction is
** not currently open, invoke the xBegin method now.
**
** If the xBegin call is successful, place the sqlite3_vtab pointer
** in the sqlite3.aVTrans array.
*/
int sqlite3VtabBegin(sqlite3 *db, VTable *pVTab){
int rc = SQLITE_OK;
const sqlite3_module *pModule;
/* Special case: If db.aVTrans is NULL and db.nVTrans is greater
** than zero, then this function is being called from within a
** virtual module xSync() callback. It is illegal to write to
** virtual module tables in this case, so return SQLITE_LOCKED.
*/
if( sqlite3VtabInSync(db) ){
return SQLITE_LOCKED;
}
if( !pVTab ){
return SQLITE_OK;
}
pModule = pVTab.pVtab.pModule;
if( pModule.xBegin ){
int i;
/* If pVtab is already in the aVTrans array, return early */
for(i=0; i<db.nVTrans; i++){
if( db.aVTrans[i]==pVTab ){
return SQLITE_OK;
}
}
/* Invoke the xBegin method */
rc = pModule.xBegin(pVTab.pVtab);
if( rc==SQLITE_OK ){
rc = addToVTrans(db, pVTab);
}
}
return rc;
}
/*
** The first parameter (pDef) is a function implementation. The
** second parameter (pExpr) is the first argument to this function.
** If pExpr is a column in a virtual table, then let the virtual
** table implementation have an opportunity to overload the function.
**
** This routine is used to allow virtual table implementations to
** overload MATCH, LIKE, GLOB, and REGEXP operators.
**
** Return either the pDef argument (indicating no change) or a
** new FuncDef structure that is marked as ephemeral using the
** SQLITE_FUNC_EPHEM flag.
*/
FuncDef *sqlite3VtabOverloadFunction(
sqlite3 *db, /* Database connection for reporting malloc problems */
FuncDef *pDef, /* Function to possibly overload */
int nArg, /* Number of arguments to the function */
Expr *pExpr /* First argument to the function */
){
Table *pTab;
sqlite3_vtab *pVtab;
sqlite3_module *pMod;
void (*xFunc)(sqlite3_context*,int,sqlite3_value**) = 0;
void *pArg = 0;
FuncDef *pNew;
int rc = 0;
char *zLowerName;
unsigned char *z;
/* Check to see the left operand is a column in a virtual table */
if( NEVER(pExpr==0) ) return pDef;
if( pExpr.op!=TK_COLUMN ) return pDef;
pTab = pExpr.pTab;
if( NEVER(pTab==0) ) return pDef;
if( (pTab.tabFlags & TF_Virtual)==0 ) return pDef;
pVtab = sqlite3GetVTable(db, pTab).pVtab;
Debug.Assert( pVtab!=0 );
Debug.Assert( pVtab.pModule!=0 );
pMod = (sqlite3_module *)pVtab.pModule;
if( pMod.xFindFunction==0 ) return pDef;
/* Call the xFindFunction method on the virtual table implementation
** to see if the implementation wants to overload this function.
** The function name is lower-cased first via sqlite3UpperToLower.
*/
zLowerName = sqlite3DbStrDup(db, pDef.zName);
if( zLowerName ){
for(z=(unsigned char*)zLowerName; *z; z++){
*z = sqlite3UpperToLower[*z];
}
rc = pMod.xFindFunction(pVtab, nArg, zLowerName, &xFunc, &pArg);
sqlite3DbFree(db, zLowerName);
}
if( rc==0 ){
return pDef;
}
/* Create a new ephemeral function definition for the overloaded
** function */
pNew = sqlite3DbMallocZero(db, sizeof(*pNew)
+ sqlite3Strlen30(pDef.zName) + 1);
if( pNew==0 ){
return pDef;
}
*pNew = *pDef;
pNew.zName = (char *)&pNew[1];
memcpy(pNew.zName, pDef.zName, sqlite3Strlen30(pDef.zName)+1);
pNew.xFunc = xFunc;
pNew.pUserData = pArg;
pNew.flags |= SQLITE_FUNC_EPHEM;
return pNew;
}
/*
** Make sure virtual table pTab is contained in the pParse.apVirtualLock[]
** array so that an OP_VBegin will get generated for it. Add pTab to the
** array if it is missing. If pTab is already in the array, this routine
** is a no-op.
*/
void sqlite3VtabMakeWritable(Parse *pParse, Table *pTab){
Parse *pToplevel = sqlite3ParseToplevel(pParse);
int i, n;
Table **apVtabLock;
Debug.Assert( IsVirtual(pTab) );
for(i=0; i<pToplevel.nVtabLock; i++){
if( pTab==pToplevel.apVtabLock[i] ) return;
}
n = (pToplevel.nVtabLock+1)*sizeof(pToplevel.apVtabLock[0]);
apVtabLock = sqlite3_realloc(pToplevel.apVtabLock, n);
if( apVtabLock ){
pToplevel.apVtabLock = apVtabLock;
pToplevel.apVtabLock[pToplevel.nVtabLock++] = pTab;
}else{
pToplevel.db.mallocFailed = 1;
}
}
#endif //* SQLITE_OMIT_VIRTUALTABLE */
}
}
| |
// Artificial Intelligence for Humans
// Volume 2: Nature-Inspired Algorithms
// C# Version
// http://www.aifh.org
// http://www.jeffheaton.com
//
// Code repository:
// https://github.com/jeffheaton/aifh
//
// Copyright 2014 by Jeff Heaton
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
namespace AIFH_Vol2_Capstone_Plants.Milestone1
{
/// <summary>
/// This class holds the grid that is the universe that a single plant grows in. Each plant has its
/// own universe. Each cell in the grid is either alive or dead. An alive cell has an energy above
/// the specified threshold.
/// This class will be used in each of the milestones. There are several helper functions that provide
/// information about the universe.
/// </summary>
public class PlantUniverse
{
    /// <summary>
    /// The width of the universe, in terms of cells.
    /// Any actual "on screen" display is scaled from this.
    /// </summary>
    public const int UniverseWidth = 50;

    /// <summary>
    /// The height of the universe, in terms of cells.
    /// Any actual "on screen" display is scaled from this.
    /// </summary>
    public const int UniverseHeight = 100;

    /// <summary>
    /// The location of the ground line. Anything >= to this is underground.
    /// </summary>
    public const int GroundLine = UniverseHeight - (UniverseHeight/3);

    /// <summary>
    /// The size of a cell "info vector". This vector identifies a cell's state, and is used to encode instructions
    /// in the genome. All of these are normalized to [0,1]. There are currently four elements:
    /// 0: The row that the cell is in.
    /// 1: The amount of sunlight the cell is exposed to.
    /// 2: The degree of crowding, from neighbors.
    /// 3: The amount of nourishment the cell is exposed to.
    /// </summary>
    public const int CellVectorLength = 4;

    /// <summary>
    /// The size of a GENOME vector. A genome vector is made up of four "info vectors". These give instructions
    /// on how to grow a plant.
    /// Vector 0: Growth template #1
    /// Vector 1: Growth template #2
    /// Vector 2: Leaf template
    /// Vector 3: Stem template
    /// For more information on how these are used, refer to the PlantGrowth class.
    /// </summary>
    public const int GenomeSize = CellVectorLength*4;

    /// <summary>
    /// The rate that sunlight decays based on shade, or
    /// the rate that nourishment decays based on roots absorbing.
    /// </summary>
    public const double Decay = 0.1;

    /// <summary>
    /// The rate at which leafy material turns to wooden stem.
    /// </summary>
    public const double StemTransition = 0.8;

    /// <summary>
    /// The threshold to allow growth.
    /// </summary>
    public const double GrowthThreshold = 0.25;

    /// <summary>
    /// The minimum distance to a genome template to execute that instruction.
    /// Used to control how growth happens.
    /// </summary>
    public const double MinGrowthDist = 0.9;

    /// <summary>
    /// The minimum energy that a living cell is allowed to drop to.
    /// </summary>
    public const double MinLivingEnergy = 0.1;

    /// <summary>
    /// The population size for the genetic algorithm.
    /// </summary>
    public const int PopulationSize = 1000;

    /// <summary>
    /// How many cycles should we allow the plant to grow for?
    /// </summary>
    public const int EvaluationCycles = 100;

    /// <summary>
    /// The required root ratio between how leafy the surface is and how nourished the roots are.
    /// </summary>
    public const double RequiredRootRatio = 0.5;

    /// <summary>
    /// The actual grid, that holds the universe. Indexed [row][col].
    /// </summary>
    private readonly PlantUniverseCell[][] _grid;

    /// <summary>
    /// Construct the universe and create the grid, filling every cell with a
    /// fresh (dead) PlantUniverseCell.
    /// </summary>
    public PlantUniverse()
    {
        _grid = new PlantUniverseCell[UniverseHeight][];
        for (int row = 0; row < _grid.Length; row++)
        {
            _grid[row] = new PlantUniverseCell[UniverseWidth];
            for (int col = 0; col < _grid[row].Length; col++)
            {
                _grid[row][col] = new PlantUniverseCell();
            }
        }
    }

    /// <summary>
    /// The amount of nourishment that is held inside of the roots. This is used to calculate the
    /// root ratio, that limits growth.
    /// </summary>
    public double RootCount { get; set; }

    /// <summary>
    /// The amount of leafy material above the surface. This is used to calculate the root ratio,
    /// that limits growth.
    /// </summary>
    public double SurfaceCount { get; set; }

    /// <summary>
    /// Get a cell, using row and column index.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The cell.</returns>
    public PlantUniverseCell GetCell(int row, int col)
    {
        return _grid[row][col];
    }

    /// <summary>
    /// Calculate the degree of crowding in a cell. Leafy cells
    /// produce more crowding than stem. Out-of-bounds and dead cells
    /// contribute no crowding.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The crowd imposed by this cell.</returns>
    public double CalculateCrowd(int row, int col)
    {
        if (!IsValid(row, col))
            return 0;

        PlantUniverseCell cell = GetCell(row, col);
        if (!cell.IsAlive)
        {
            return 0;
        }

        return cell.Leafyness;
    }

    /// <summary>
    /// Calculate the degree of crowding around a cell.
    /// This is the mean crowding of the eight neighbors.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The mean crowdedness of the cell.</returns>
    public double CalculateMeanNeighborsCrowd(int row, int col)
    {
        double sum = 0;
        sum += CalculateCrowd(row - 1, col - 1);
        sum += CalculateCrowd(row - 1, col);
        sum += CalculateCrowd(row - 1, col + 1);

        sum += CalculateCrowd(row, col - 1);
        sum += CalculateCrowd(row, col + 1);

        sum += CalculateCrowd(row + 1, col - 1);
        sum += CalculateCrowd(row + 1, col);
        sum += CalculateCrowd(row + 1, col + 1);

        return sum/8.0;
    }

    /// <summary>
    /// Return an info vector about a cell. This allows cells to be identified by instructions in the genome.
    /// The vector contains four doubles. All doubles range from [0,1].
    /// Element 0: The height of the cell. Approaches 1.0 for the last row and is 0.0 for the first row.
    /// Element 1: The amount of sunlight (for surface cells) or water (for underground cells) exposure for this cell.
    /// Element 2: Crowding by neighbors.
    /// Element 3: Nourishment for this cell.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The info vector.</returns>
    public double[] GetCellInfoVector(int row, int col)
    {
        var result = new double[CellVectorLength];
        PlantUniverseCell cell = GetCell(row, col);

        // Height, normalized to [0,1). A floating-point division is required here:
        // the previous integer division (row/UniverseHeight) always produced 0
        // because row < UniverseHeight, leaving element 0 useless to the genome.
        result[0] = row / (double) UniverseHeight;
        // Sunlight for surface cells, water exposure for underground cells.
        if (row < GroundLine)
        {
            result[1] = cell.CalculatedSunlight;
        }
        else
        {
            result[1] = cell.CalculatedWater;
        }
        // Crowd
        result[2] = CalculateMeanNeighborsCrowd(row, col);
        // Nourishment
        result[3] = cell.Nourishment;
        //
        return result;
    }

    /// <summary>
    /// Reset the entire grid to a single seed: one root cell at the ground line,
    /// a stem cell above it, and a leaf cell above that, all centered horizontally.
    /// </summary>
    public void Reset()
    {
        foreach (var aGrid in _grid)
        {
            foreach (PlantUniverseCell cell in aGrid)
            {
                cell.Leafyness = 0;
                cell.Energy = 0;
                cell.Nourishment = 0;
            }
        }

        const int center = UniverseWidth/2;
        const int groundLevel = GroundLine;

        // root
        _grid[groundLevel][center].Leafyness = 0;
        _grid[groundLevel][center].Nourishment = 1;
        _grid[groundLevel][center].Energy = 1;

        // stem
        _grid[groundLevel - 1][center].Leafyness = 0.5;
        _grid[groundLevel - 1][center].Nourishment = 1;
        _grid[groundLevel - 1][center].Energy = 1;

        // leaf
        _grid[groundLevel - 2][center].Leafyness = 1;
        _grid[groundLevel - 2][center].Nourishment = 1;
        _grid[groundLevel - 2][center].Energy = 1;
    }

    /// <summary>
    /// Returns true if a cell is valid. Invalid cells are off the bounds of a grid.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>True, if valid.</returns>
    public bool IsValid(int row, int col)
    {
        if (row < 0 || col < 0)
        {
            return false;
        }

        if (row >= _grid.Length)
        {
            return false;
        }

        if (col >= _grid[row].Length)
        {
            return false;
        }
        return true;
    }

    /// <summary>
    /// Calculate the energy for a cell. Out-of-bounds cells report zero energy.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The cell's energy, or 0 when the coordinates are invalid.</returns>
    public double CalculateEnergy(int row, int col)
    {
        if (!IsValid(row, col))
        {
            return 0;
        }
        return _grid[row][col].Energy;
    }

    /// <summary>
    /// Calculate the transfer energy for a cell. This is the amount of energy transferred into a cell,
    /// taken as the maximum energy among the three cells directly above it.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The amount of energy transferred in.</returns>
    public double CalculateTransferEnergy(int row, int col)
    {
        double result = 0;
        result = Math.Max(result, CalculateEnergy(row - 1, col - 1));
        result = Math.Max(result, CalculateEnergy(row - 1, col));
        result = Math.Max(result, CalculateEnergy(row - 1, col + 1));
        return result;
    }

    /// <summary>
    /// Calculate the transfer nourishment for a cell. This is the amount of nourishment transferred into a cell,
    /// taken as the maximum among the three cells directly below it.
    /// NOTE(review): this intentionally mirrors the original implementation, which samples
    /// CalculateEnergy (not a nourishment value) from the cells below — confirm this is the
    /// intended model before changing it.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The amount of energy transferred in.</returns>
    public double CalculateTransferNourishment(int row, int col)
    {
        double result = 0;
        result = Math.Max(result, CalculateEnergy(row + 1, col - 1));
        result = Math.Max(result, CalculateEnergy(row + 1, col));
        result = Math.Max(result, CalculateEnergy(row + 1, col + 1));
        return result;
    }

    /// <summary>
    /// Count the number of live cells among the eight neighbors of a cell.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>The neighbor count.</returns>
    public int CountNeighbors(int row, int col)
    {
        int sum = 0;

        if (IsAlive(row - 1, col))
        {
            sum++;
        }
        if (IsAlive(row + 1, col))
        {
            sum++;
        }
        if (IsAlive(row, col - 1))
        {
            sum++;
        }
        if (IsAlive(row, col + 1))
        {
            sum++;
        }

        if (IsAlive(row - 1, col - 1))
        {
            sum++;
        }
        if (IsAlive(row + 1, col + 1))
        {
            sum++;
        }
        if (IsAlive(row - 1, col + 1))
        {
            sum++;
        }
        if (IsAlive(row + 1, col - 1))
        {
            sum++;
        }

        return sum;
    }

    /// <summary>
    /// Returns true, if the specified cell can grow. Underground cells grow while they
    /// have fewer than four live neighbors; surface cells grow when both their energy
    /// and nourishment exceed the growth threshold.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>True, if the specified cell is allowed to grow.</returns>
    public bool CanGrow(int row, int col)
    {
        PlantUniverseCell cell = GetCell(row, col);
        if (cell.IsAlive)
        {
            if (row >= GroundLine)
            {
                return CountNeighbors(row, col) < 4;
            }
            return cell.Energy > GrowthThreshold && cell.Nourishment > GrowthThreshold;
        }
        return false;
    }

    /// <summary>
    /// Returns true, if the specified cell is alive.
    /// Alive cells have energy.
    /// </summary>
    /// <param name="row">The row.</param>
    /// <param name="col">The column.</param>
    /// <returns>True, if the specified cell is alive to grow.</returns>
    public bool IsAlive(int row, int col)
    {
        return IsValid(row, col) && (_grid[row][col].IsAlive);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net.Cache;
using System.Net.Security;
using System.Runtime.Serialization;
using System.Security.Principal;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net
{
// Abstract base class for URI-based requests. Concrete request types (HTTP, FTP, file)
// are registered per scheme prefix; Create() dispatches on the longest matching prefix.
public abstract class WebRequest : MarshalByRefObject, ISerializable
{
// One entry of the prefix table: a scheme/URI prefix paired with the factory
// that creates requests for it.
internal class WebRequestPrefixElement
{
public readonly string Prefix;
public readonly IWebRequestCreate Creator;
public WebRequestPrefixElement(string prefix, IWebRequestCreate creator)
{
Prefix = prefix;
Creator = creator;
}
}
// Copy-on-write prefix table, sorted longest-prefix-first. Readers take a snapshot;
// writers clone, modify, then swap under s_internalSyncObject.
private static List<WebRequestPrefixElement> s_prefixList;
private static object s_internalSyncObject = new object();
internal const int DefaultTimeoutMilliseconds = 100 * 1000;
protected WebRequest() { }
protected WebRequest(SerializationInfo serializationInfo, StreamingContext streamingContext) { }
void ISerializable.GetObjectData(SerializationInfo serializationInfo, StreamingContext streamingContext)
{
GetObjectData(serializationInfo, streamingContext);
}
protected virtual void GetObjectData(SerializationInfo serializationInfo, StreamingContext streamingContext) { }
// Create a WebRequest.
//
// This is the main creation routine. We take a Uri object, look
// up the Uri in the prefix match table, and invoke the appropriate
// handler to create the object. We also have a parameter that
// tells us whether or not to use the whole Uri or just the
// scheme portion of it.
//
// Input:
// requestUri - Uri object for request.
// useUriBase - True if we're only to look at the scheme portion of the Uri.
//
// Returns:
// Newly created WebRequest.
private static WebRequest Create(Uri requestUri, bool useUriBase)
{
if (NetEventSource.IsEnabled) NetEventSource.Enter(null, requestUri);
string LookupUri;
WebRequestPrefixElement Current = null;
bool Found = false;
if (!useUriBase)
{
LookupUri = requestUri.AbsoluteUri;
}
else
{
// schemes are registered as <schemeName>":", so add the separator
// to the string returned from the Uri object
LookupUri = requestUri.Scheme + ':';
}
int LookupLength = LookupUri.Length;
// Copy the prefix list so that if it is updated it will
// not affect us on this thread.
List<WebRequestPrefixElement> prefixList = PrefixList;
// Look for the longest matching prefix.
// Walk down the list of prefixes. The prefixes are kept longest
// first. When we find a prefix that is shorter or the same size
// as this Uri, we'll do a compare to see if they match. If they
// do we'll break out of the loop and call the creator.
for (int i = 0; i < prefixList.Count; i++)
{
Current = prefixList[i];
// See if this prefix is short enough.
if (LookupLength >= Current.Prefix.Length)
{
// It is. See if these match.
if (String.Compare(Current.Prefix,
0,
LookupUri,
0,
Current.Prefix.Length,
StringComparison.OrdinalIgnoreCase) == 0)
{
// These match. Remember that we found it and break
// out.
Found = true;
break;
}
}
}
WebRequest webRequest = null;
if (Found)
{
// We found a match, so just call the creator and return what it does.
webRequest = Current.Creator.Create(requestUri);
if (NetEventSource.IsEnabled) NetEventSource.Exit(null, webRequest);
return webRequest;
}
// Otherwise no match, throw an exception.
if (NetEventSource.IsEnabled) NetEventSource.Exit(null);
throw new NotSupportedException(SR.net_unknown_prefix);
}
// Create - Create a WebRequest.
//
// An overloaded utility version of the real Create that takes a
// string instead of an Uri object.
//
// Input:
// RequestString - Uri string to create.
//
// Returns:
// Newly created WebRequest.
public static WebRequest Create(string requestUriString)
{
if (requestUriString == null)
{
throw new ArgumentNullException(nameof(requestUriString));
}
return Create(new Uri(requestUriString), false);
}
// Create - Create a WebRequest.
//
// Another overloaded version of the Create function that doesn't
// take the UseUriBase parameter.
//
// Input:
// requestUri - Uri object for request.
//
// Returns:
// Newly created WebRequest.
public static WebRequest Create(Uri requestUri)
{
if (requestUri == null)
{
throw new ArgumentNullException(nameof(requestUri));
}
return Create(requestUri, false);
}
// CreateDefault - Create a default WebRequest.
//
// This is the creation routine that creates a default WebRequest.
// We take a Uri object and pass it to the base create routine,
// setting the useUriBase parameter to true.
//
// Input:
// RequestUri - Uri object for request.
//
// Returns:
// Newly created WebRequest.
public static WebRequest CreateDefault(Uri requestUri)
{
if (requestUri == null)
{
throw new ArgumentNullException(nameof(requestUri));
}
return Create(requestUri, true);
}
// Convenience factory for HTTP/HTTPS requests; rejects any other scheme.
public static HttpWebRequest CreateHttp(string requestUriString)
{
if (requestUriString == null)
{
throw new ArgumentNullException(nameof(requestUriString));
}
return CreateHttp(new Uri(requestUriString));
}
public static HttpWebRequest CreateHttp(Uri requestUri)
{
if (requestUri == null)
{
throw new ArgumentNullException(nameof(requestUri));
}
if ((requestUri.Scheme != "http") && (requestUri.Scheme != "https"))
{
throw new NotSupportedException(SR.net_unknown_prefix);
}
return (HttpWebRequest)CreateDefault(requestUri);
}
// RegisterPrefix - Register an Uri prefix for creating WebRequests.
//
// This function registers a prefix for creating WebRequests. When an
// user wants to create a WebRequest, we scan a table looking for a
// longest prefix match for the Uri they're passing. We then invoke
// the sub creator for that prefix. This function puts entries in
// that table.
//
// We don't allow duplicate entries, so if there is a dup this call
// will fail.
//
// Input:
// Prefix - Represents Uri prefix being registered.
// Creator - Interface for sub creator.
//
// Returns:
// True if the registration worked, false otherwise.
public static bool RegisterPrefix(string prefix, IWebRequestCreate creator)
{
bool Error = false;
int i;
WebRequestPrefixElement Current;
if (prefix == null)
{
throw new ArgumentNullException(nameof(prefix));
}
if (creator == null)
{
throw new ArgumentNullException(nameof(creator));
}
// Lock this object, then walk down PrefixList looking for a place to
// to insert this prefix.
lock (s_internalSyncObject)
{
// Clone the object and update the clone, thus
// allowing other threads to still read from the original.
List<WebRequestPrefixElement> prefixList = new List<WebRequestPrefixElement>(PrefixList);
// As AbsoluteUri is used later for Create, account for formating changes
// like Unicode escaping, default ports, etc.
Uri tempUri;
if (Uri.TryCreate(prefix, UriKind.Absolute, out tempUri))
{
String cookedUri = tempUri.AbsoluteUri;
// Special case for when a partial host matching is requested, drop the added trailing slash
// IE: http://host could match host or host.domain
if (!prefix.EndsWith("/", StringComparison.Ordinal)
&& tempUri.GetComponents(UriComponents.PathAndQuery | UriComponents.Fragment, UriFormat.UriEscaped)
.Equals("/"))
{
cookedUri = cookedUri.Substring(0, cookedUri.Length - 1);
}
prefix = cookedUri;
}
i = 0;
// The prefix list is sorted with longest entries at the front. We
// walk down the list until we find a prefix shorter than this
// one, then we insert in front of it. Along the way we check
// equal length prefixes to make sure this isn't a dupe.
while (i < prefixList.Count)
{
Current = prefixList[i];
// See if the new one is longer than the one we're looking at.
if (prefix.Length > Current.Prefix.Length)
{
// It is. Break out of the loop here.
break;
}
// If these are of equal length, compare them.
if (prefix.Length == Current.Prefix.Length)
{
// They're the same length.
if (string.Equals(Current.Prefix, prefix, StringComparison.OrdinalIgnoreCase))
{
// ...and the strings are identical. This is an error.
Error = true;
break;
}
}
i++;
}
// When we get here either i contains the index to insert at or
// we've had an error, in which case Error is true.
if (!Error)
{
// No error, so insert.
prefixList.Insert(i, new WebRequestPrefixElement(prefix, creator));
// Assign the clone to the static object. Other threads using it
// will have copied the original object already.
PrefixList = prefixList;
}
}
return !Error;
}
internal class HttpRequestCreator : IWebRequestCreate
{
// Create - Create an HttpWebRequest.
//
// This is our method to create an HttpWebRequest. We register
// for HTTP and HTTPS Uris, and this method is called when a request
// needs to be created for one of those.
//
//
// Input:
// uri - Uri for request being created.
//
// Returns:
// The newly created HttpWebRequest.
public WebRequest Create(Uri Uri)
{
return new HttpWebRequest(Uri);
}
}
// PrefixList - Returns And Initialize our prefix list.
//
//
// This is the method that initializes the prefix list. We create
// an List for the PrefixList, then each of the request creators,
// and then we register them with the associated prefixes.
//
// Returns:
// true
internal static List<WebRequestPrefixElement> PrefixList
{
get
{
// GetConfig() might use us, so we have a circular dependency issue
// that causes us to nest here. We grab the lock only if we haven't
// initialized.
return LazyInitializer.EnsureInitialized(ref s_prefixList, ref s_internalSyncObject, () =>
{
var httpRequestCreator = new HttpRequestCreator();
var ftpRequestCreator = new FtpWebRequestCreator();
var fileRequestCreator = new FileWebRequestCreator();
const int Count = 4;
var prefixList = new List<WebRequestPrefixElement>(Count)
{
new WebRequestPrefixElement("http:", httpRequestCreator),
new WebRequestPrefixElement("https:", httpRequestCreator),
new WebRequestPrefixElement("ftp:", ftpRequestCreator),
new WebRequestPrefixElement("file:", fileRequestCreator),
};
Debug.Assert(prefixList.Count == Count, $"Expected {Count}, got {prefixList.Count}");
return prefixList;
});
}
set
{
// Volatile write so readers that skipped the lock observe the fully
// constructed list (publication safety for the copy-on-write swap).
Volatile.Write(ref s_prefixList, value);
}
}
public static RequestCachePolicy DefaultCachePolicy { get; set; } = new RequestCachePolicy(RequestCacheLevel.BypassCache);
public virtual RequestCachePolicy CachePolicy { get; set; }
public AuthenticationLevel AuthenticationLevel { get; set; } = AuthenticationLevel.MutualAuthRequested;
public TokenImpersonationLevel ImpersonationLevel { get; set; } = TokenImpersonationLevel.Delegation;
// The virtual members below throw by design; derived classes override the
// ones they support.
public virtual string ConnectionGroupName
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual string Method
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual Uri RequestUri
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual WebHeaderCollection Headers
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual long ContentLength
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual string ContentType
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual ICredentials Credentials
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual int Timeout
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual bool UseDefaultCredentials
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual Stream GetRequestStream()
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
public virtual WebResponse GetResponse()
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
public virtual IAsyncResult BeginGetResponse(AsyncCallback callback, object state)
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
public virtual WebResponse EndGetResponse(IAsyncResult asyncResult)
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
public virtual IAsyncResult BeginGetRequestStream(AsyncCallback callback, object state)
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
public virtual Stream EndGetRequestStream(IAsyncResult asyncResult)
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
public virtual Task<Stream> GetRequestStreamAsync()
{
// Offload to a different thread to avoid blocking the caller during request submission.
// We use Task.Run rather than Task.Factory.StartNew even though StartNew would let us pass 'this'
// as a state argument to avoid the closure to capture 'this' and the associated delegate.
// This is because the task needs to call FromAsync and marshal the inner Task out, and
// Task.Run's implementation of this is sufficiently more efficient than what we can do with
// Unwrap() that it's worth it to just rely on Task.Run and accept the closure/delegate.
return Task.Run(() =>
Task<Stream>.Factory.FromAsync(
(callback, state) => ((WebRequest)state).BeginGetRequestStream(callback, state),
iar => ((WebRequest)iar.AsyncState).EndGetRequestStream(iar),
this));
}
public virtual Task<WebResponse> GetResponseAsync()
{
// See comment in GetRequestStreamAsync(). Same logic applies here.
return Task.Run(() =>
Task<WebResponse>.Factory.FromAsync(
(callback, state) => ((WebRequest)state).BeginGetResponse(callback, state),
iar => ((WebRequest)iar.AsyncState).EndGetResponse(iar),
this));
}
public virtual void Abort()
{
throw NotImplemented.ByDesignWithMessage(SR.net_MethodNotImplementedException);
}
// Default Web Proxy implementation.
private static IWebProxy s_DefaultWebProxy;
private static bool s_DefaultWebProxyInitialized;
public static IWebProxy GetSystemWebProxy() => SystemWebProxy.Get();
public static IWebProxy DefaultWebProxy
{
get
{
// Lazily initialized under s_internalSyncObject; a caller may also have
// assigned null via the setter, which the initialized flag preserves.
return LazyInitializer.EnsureInitialized(ref s_DefaultWebProxy, ref s_DefaultWebProxyInitialized, ref s_internalSyncObject, () => SystemWebProxy.Get());
}
set
{
lock (s_internalSyncObject)
{
s_DefaultWebProxy = value;
}
}
}
public virtual bool PreAuthenticate
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
public virtual IWebProxy Proxy
{
get
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
set
{
throw NotImplemented.ByDesignWithMessage(SR.net_PropertyNotImplementedException);
}
}
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Colour;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Cursor;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Framework.Graphics.UserInterface;
using osu.Game.Graphics;
using osu.Game.Graphics.Sprites;
using osu.Game.Graphics.UserInterface;
using osu.Game.Online;
using osu.Game.Online.API;
using osu.Game.Online.Multiplayer;
using osu.Game.Online.Rooms;
using osu.Game.Rulesets;
using osu.Game.Screens.Play.HUD;
using osu.Game.Users;
using osu.Game.Users.Drawables;
using osuTK;
using osuTK.Graphics;
namespace osu.Game.Screens.OnlinePlay.Multiplayer.Participants
{
public class ParticipantPanel : MultiplayerRoomComposite, IHasContextMenu
{
public readonly MultiplayerRoomUser User;
[Resolved]
private IAPIProvider api { get; set; }
[Resolved]
private IRulesetStore rulesets { get; set; }
private SpriteIcon crown;
private OsuSpriteText userRankText;
private ModDisplay userModsDisplay;
private StateDisplay userStateDisplay;
private IconButton kickButton;
public ParticipantPanel(MultiplayerRoomUser user)
{
User = user;
RelativeSizeAxes = Axes.X;
Height = 40;
}
[BackgroundDependencyLoader]
private void load()
{
var user = User.User;
var backgroundColour = Color4Extensions.FromHex("#33413C");
InternalChild = new GridContainer
{
RelativeSizeAxes = Axes.Both,
ColumnDimensions = new[]
{
new Dimension(GridSizeMode.Absolute, 18),
new Dimension(GridSizeMode.AutoSize),
new Dimension(),
new Dimension(GridSizeMode.AutoSize),
},
Content = new[]
{
new Drawable[]
{
crown = new SpriteIcon
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
Icon = FontAwesome.Solid.Crown,
Size = new Vector2(14),
Colour = Color4Extensions.FromHex("#F7E65D"),
Alpha = 0
},
new TeamDisplay(User),
new Container
{
RelativeSizeAxes = Axes.Both,
Masking = true,
CornerRadius = 5,
Children = new Drawable[]
{
new Box
{
RelativeSizeAxes = Axes.Both,
Colour = backgroundColour
},
new UserCoverBackground
{
Anchor = Anchor.CentreRight,
Origin = Anchor.CentreRight,
RelativeSizeAxes = Axes.Both,
Width = 0.75f,
User = user,
Colour = ColourInfo.GradientHorizontal(Color4.White.Opacity(0), Color4.White.Opacity(0.25f))
},
new FillFlowContainer
{
RelativeSizeAxes = Axes.Both,
Spacing = new Vector2(10),
Direction = FillDirection.Horizontal,
Children = new Drawable[]
{
new UpdateableAvatar
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
RelativeSizeAxes = Axes.Both,
FillMode = FillMode.Fit,
User = user
},
new UpdateableFlag
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
Size = new Vector2(30, 20),
Country = user?.Country
},
new OsuSpriteText
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
Font = OsuFont.GetFont(weight: FontWeight.Bold, size: 18),
Text = user?.Username
},
userRankText = new OsuSpriteText
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
Font = OsuFont.GetFont(size: 14),
}
}
},
new Container
{
Anchor = Anchor.CentreRight,
Origin = Anchor.CentreRight,
AutoSizeAxes = Axes.Both,
Margin = new MarginPadding { Right = 70 },
Child = userModsDisplay = new ModDisplay
{
Scale = new Vector2(0.5f),
ExpansionMode = ExpansionMode.AlwaysContracted,
}
},
userStateDisplay = new StateDisplay
{
Anchor = Anchor.CentreRight,
Origin = Anchor.CentreRight,
Margin = new MarginPadding { Right = 10 },
}
}
},
kickButton = new KickButton
{
Anchor = Anchor.Centre,
Origin = Anchor.Centre,
Alpha = 0,
Margin = new MarginPadding(4),
Action = () => Client.KickUser(User.UserID),
},
},
}
};
}
protected override void OnRoomUpdated()
{
base.OnRoomUpdated();
if (Room == null || Client.LocalUser == null)
return;
const double fade_time = 50;
var currentItem = Playlist.GetCurrentItem();
Debug.Assert(currentItem != null);
var ruleset = rulesets.GetRuleset(currentItem.RulesetID)?.CreateInstance();
int? currentModeRank = ruleset != null ? User.User?.RulesetsStatistics?.GetValueOrDefault(ruleset.ShortName)?.GlobalRank : null;
userRankText.Text = currentModeRank != null ? $"#{currentModeRank.Value:N0}" : string.Empty;
userStateDisplay.UpdateStatus(User.State, User.BeatmapAvailability);
if ((User.BeatmapAvailability.State == DownloadState.LocallyAvailable) && (User.State != MultiplayerUserState.Spectating))
userModsDisplay.FadeIn(fade_time);
else
userModsDisplay.FadeOut(fade_time);
if (Client.IsHost && !User.Equals(Client.LocalUser))
kickButton.FadeIn(fade_time);
else
kickButton.FadeOut(fade_time);
if (Room.Host?.Equals(User) == true)
crown.FadeIn(fade_time);
else
crown.FadeOut(fade_time);
// If the mods are updated at the end of the frame, the flow container will skip a reflow cycle: https://github.com/ppy/osu-framework/issues/4187
// This looks particularly jarring here, so re-schedule the update to that start of our frame as a fix.
Schedule(() => userModsDisplay.Current.Value = User.Mods.Select(m => m.ToMod(ruleset)).ToList());
}
public MenuItem[] ContextMenuItems
{
    get
    {
        if (Room == null)
            return null;

        // Actions only exist when the local user is host and the target is someone else.
        if (User.UserID == api.LocalUser.Value.Id || Room.Host?.UserID != api.LocalUser.Value.Id)
            return null;

        int targetUser = User.UserID;

        return new MenuItem[]
        {
            new OsuMenuItem("Give host", MenuItemType.Standard, () =>
            {
                // Host may have changed between opening the menu and clicking.
                if (!Client.IsHost)
                    return;

                Client.TransferHost(targetUser);
            }),
            new OsuMenuItem("Kick", MenuItemType.Destructive, () =>
            {
                // Host may have changed between opening the menu and clicking.
                if (!Client.IsHost)
                    return;

                Client.KickUser(targetUser);
            })
        };
    }
}
/// <summary>
/// Icon button shown to the room host to kick the targetted user.
/// The click action is assigned by the owning panel.
/// </summary>
public class KickButton : IconButton
{
    public KickButton()
    {
        Icon = FontAwesome.Solid.UserTimes;
        TooltipText = "Kick";
    }

    [BackgroundDependencyLoader]
    private void load(OsuColour colours)
    {
        // Red hover tint to signal a destructive action.
        IconHoverColour = colours.Red;
    }
}
}
}
| |
/*
* ******************************************************************************
* Copyright 2014-2016 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
using Ds3.Models;
using System;
using System.Net;
namespace Ds3.Calls
{
/// <summary>
/// GET /_rest_/ds3_target_read_preference - lists DS3 target read preferences.
/// Each With* method records the value and keeps QueryParams in sync: a
/// non-null argument adds the query parameter, a null argument removes it.
/// NOTE(review): if QueryParams is a plain Dictionary, Add will throw when the
/// same parameter is set twice - confirm Ds3Request's map semantics.
/// </summary>
public class GetDs3TargetReadPreferencesSpectraS3Request : Ds3Request
{
    private string _bucketId;
    public string BucketId
    {
        get { return _bucketId; }
        set { WithBucketId(value); }
    }

    private bool? _lastPage;
    public bool? LastPage
    {
        get { return _lastPage; }
        set { WithLastPage(value); }
    }

    private int? _pageLength;
    public int? PageLength
    {
        get { return _pageLength; }
        set { WithPageLength(value); }
    }

    private int? _pageOffset;
    public int? PageOffset
    {
        get { return _pageOffset; }
        set { WithPageOffset(value); }
    }

    private string _pageStartMarker;
    public string PageStartMarker
    {
        get { return _pageStartMarker; }
        set { WithPageStartMarker(value); }
    }

    private TargetReadPreference? _readPreference;
    public TargetReadPreference? ReadPreference
    {
        get { return _readPreference; }
        set { WithReadPreference(value); }
    }

    private string _targetId;
    public string TargetId
    {
        get { return _targetId; }
        set { WithTargetId(value); }
    }

    /// <summary>Filters the listing by bucket id.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithBucketId(Guid? bucketId)
    {
        // Bug fix: Nullable<Guid>.ToString() returns "" when the value is null,
        // which previously left _bucketId as an empty string while the query
        // parameter was removed. Store null instead, matching the string overload.
        this._bucketId = bucketId == null ? null : bucketId.ToString();
        if (bucketId != null)
        {
            this.QueryParams.Add("bucket_id", bucketId.ToString());
        }
        else
        {
            this.QueryParams.Remove("bucket_id");
        }
        return this;
    }

    /// <summary>Filters the listing by bucket id.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithBucketId(string bucketId)
    {
        this._bucketId = bucketId;
        if (bucketId != null)
        {
            this.QueryParams.Add("bucket_id", bucketId);
        }
        else
        {
            this.QueryParams.Remove("bucket_id");
        }
        return this;
    }

    /// <summary>Requests only the last page of results.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithLastPage(bool? lastPage)
    {
        this._lastPage = lastPage;
        if (lastPage != null)
        {
            this.QueryParams.Add("last_page", lastPage.ToString());
        }
        else
        {
            this.QueryParams.Remove("last_page");
        }
        return this;
    }

    /// <summary>Sets the maximum number of results per page.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithPageLength(int? pageLength)
    {
        this._pageLength = pageLength;
        if (pageLength != null)
        {
            this.QueryParams.Add("page_length", pageLength.ToString());
        }
        else
        {
            this.QueryParams.Remove("page_length");
        }
        return this;
    }

    /// <summary>Sets the zero-based page offset.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithPageOffset(int? pageOffset)
    {
        this._pageOffset = pageOffset;
        if (pageOffset != null)
        {
            this.QueryParams.Add("page_offset", pageOffset.ToString());
        }
        else
        {
            this.QueryParams.Remove("page_offset");
        }
        return this;
    }

    /// <summary>Sets the marker after which paging starts.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithPageStartMarker(Guid? pageStartMarker)
    {
        // Bug fix: same null-handling issue as WithBucketId(Guid?) - store null
        // rather than the empty string Nullable<Guid>.ToString() produces.
        this._pageStartMarker = pageStartMarker == null ? null : pageStartMarker.ToString();
        if (pageStartMarker != null)
        {
            this.QueryParams.Add("page_start_marker", pageStartMarker.ToString());
        }
        else
        {
            this.QueryParams.Remove("page_start_marker");
        }
        return this;
    }

    /// <summary>Sets the marker after which paging starts.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithPageStartMarker(string pageStartMarker)
    {
        this._pageStartMarker = pageStartMarker;
        if (pageStartMarker != null)
        {
            this.QueryParams.Add("page_start_marker", pageStartMarker);
        }
        else
        {
            this.QueryParams.Remove("page_start_marker");
        }
        return this;
    }

    /// <summary>Filters the listing by read preference.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithReadPreference(TargetReadPreference? readPreference)
    {
        this._readPreference = readPreference;
        if (readPreference != null)
        {
            this.QueryParams.Add("read_preference", readPreference.ToString());
        }
        else
        {
            this.QueryParams.Remove("read_preference");
        }
        return this;
    }

    /// <summary>Filters the listing by target id.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithTargetId(Guid? targetId)
    {
        // Bug fix: same null-handling issue as WithBucketId(Guid?) - store null
        // rather than the empty string Nullable<Guid>.ToString() produces.
        this._targetId = targetId == null ? null : targetId.ToString();
        if (targetId != null)
        {
            this.QueryParams.Add("target_id", targetId.ToString());
        }
        else
        {
            this.QueryParams.Remove("target_id");
        }
        return this;
    }

    /// <summary>Filters the listing by target id.</summary>
    public GetDs3TargetReadPreferencesSpectraS3Request WithTargetId(string targetId)
    {
        this._targetId = targetId;
        if (targetId != null)
        {
            this.QueryParams.Add("target_id", targetId);
        }
        else
        {
            this.QueryParams.Remove("target_id");
        }
        return this;
    }

    public GetDs3TargetReadPreferencesSpectraS3Request()
    {
    }

    internal override HttpVerb Verb
    {
        get
        {
            return HttpVerb.GET;
        }
    }

    internal override string Path
    {
        get
        {
            return "/_rest_/ds3_target_read_preference";
        }
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace CancerLabWeb.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
    // Number of elements produced for generated arrays, collections and dictionaries.
    internal const int DefaultCollectionSize = 2;
    private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

    /// <summary>
    /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
    /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
    /// Complex types: POCO types.
    /// Nullables: <see cref="Nullable{T}"/>.
    /// Arrays: arrays of simple types or complex types.
    /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
    /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
    /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
    /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
    /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>An object of the given type.</returns>
    public object GenerateObject(Type type)
    {
        // Fresh reference map per top-level call; it breaks circular references
        // inside GenerateComplexObject.
        return GenerateObject(type, new Dictionary<Type, object>());
    }

    /// <summary>
    /// Core dispatcher: tries each supported category in a fixed order (simple
    /// types, arrays, generics, dictionaries, collections, queryables, enums,
    /// then public POCOs). The check order matters - e.g. generic types must be
    /// handled before the non-generic IDictionary/IList checks.
    /// Returns null when the type is unsupported or generation fails.
    /// </summary>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
    private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        try
        {
            if (SimpleTypeObjectGenerator.CanGenerateObject(type))
            {
                return SimpleObjectGenerator.GenerateObject(type);
            }

            if (type.IsArray)
            {
                return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type.IsGenericType)
            {
                return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IDictionary))
            {
                return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
            }

            if (typeof(IDictionary).IsAssignableFrom(type))
            {
                return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IList) ||
                type == typeof(IEnumerable) ||
                type == typeof(ICollection))
            {
                return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
            }

            if (typeof(IList).IsAssignableFrom(type))
            {
                return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IQueryable))
            {
                return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type.IsEnum)
            {
                return GenerateEnum(type);
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
        }
        catch
        {
            // Returns null if anything fails
            return null;
        }

        return null;
    }

    /// <summary>
    /// Handles constructed generic types: Nullable, KeyValuePair, Tuple, the
    /// well-known collection/dictionary/queryable interfaces, and finally any
    /// other public generic type as a complex object.
    /// </summary>
    private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
    {
        Type genericTypeDefinition = type.GetGenericTypeDefinition();
        if (genericTypeDefinition == typeof(Nullable<>))
        {
            return GenerateNullable(type, createdObjectReferences);
        }

        if (genericTypeDefinition == typeof(KeyValuePair<,>))
        {
            return GenerateKeyValuePair(type, createdObjectReferences);
        }

        if (IsTuple(genericTypeDefinition))
        {
            return GenerateTuple(type, createdObjectReferences);
        }

        Type[] genericArguments = type.GetGenericArguments();
        if (genericArguments.Length == 1)
        {
            if (genericTypeDefinition == typeof(IList<>) ||
                genericTypeDefinition == typeof(IEnumerable<>) ||
                genericTypeDefinition == typeof(ICollection<>))
            {
                // Interfaces get a concrete List<T> instance.
                Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(IQueryable<>))
            {
                return GenerateQueryable(type, collectionSize, createdObjectReferences);
            }

            // Concrete types implementing ICollection<T> are filled via Add.
            Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
            if (closedCollectionType.IsAssignableFrom(type))
            {
                return GenerateCollection(type, collectionSize, createdObjectReferences);
            }
        }

        if (genericArguments.Length == 2)
        {
            if (genericTypeDefinition == typeof(IDictionary<,>))
            {
                Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
            }

            Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
            if (closedDictionaryType.IsAssignableFrom(type))
            {
                return GenerateDictionary(type, collectionSize, createdObjectReferences);
            }
        }

        if (type.IsPublic || type.IsNestedPublic)
        {
            return GenerateComplexObject(type, createdObjectReferences);
        }

        return null;
    }

    /// <summary>
    /// Builds a Tuple by generating each type argument; returns null only when
    /// every element failed to generate.
    /// </summary>
    private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = type.GetGenericArguments();
        object[] parameterValues = new object[genericArgs.Length];
        bool failedToCreateTuple = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < genericArgs.Length; i++)
        {
            parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
            failedToCreateTuple &= parameterValues[i] == null;
        }
        if (failedToCreateTuple)
        {
            return null;
        }
        object result = Activator.CreateInstance(type, parameterValues);
        return result;
    }

    // True for any of the eight Tuple<> arities.
    private static bool IsTuple(Type genericTypeDefinition)
    {
        return genericTypeDefinition == typeof(Tuple<>) ||
            genericTypeDefinition == typeof(Tuple<,>) ||
            genericTypeDefinition == typeof(Tuple<,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,,>);
    }

    /// <summary>
    /// Builds a KeyValuePair; returns null only when both key and value failed
    /// to generate.
    /// </summary>
    private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = keyValuePairType.GetGenericArguments();
        Type typeK = genericArgs[0];
        Type typeV = genericArgs[1];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
        object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
        if (keyObject == null && valueObject == null)
        {
            // Failed to create key and values
            return null;
        }
        object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
        return result;
    }

    /// <summary>
    /// Creates an array of <paramref name="size"/> generated elements; returns
    /// null when no element could be generated.
    /// </summary>
    private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = arrayType.GetElementType();
        Array result = Array.CreateInstance(type, size);
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            result.SetValue(element, i);
            areAllElementsNull &= element == null;
        }
        if (areAllElementsNull)
        {
            return null;
        }
        return result;
    }

    /// <summary>
    /// Fills a dictionary via its Add/TryAdd method, skipping duplicate keys
    /// found through Contains/ContainsKey. Key/value types default to object
    /// for non-generic dictionaries.
    /// </summary>
    private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type typeK = typeof(object);
        Type typeV = typeof(object);
        if (dictionaryType.IsGenericType)
        {
            Type[] genericArgs = dictionaryType.GetGenericArguments();
            typeK = genericArgs[0];
            typeV = genericArgs[1];
        }

        object result = Activator.CreateInstance(dictionaryType);
        MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
        MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            if (newKey == null)
            {
                // Cannot generate a valid key
                return null;
            }

            bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
            if (!containsKey)
            {
                object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                addMethod.Invoke(result, new object[] { newKey, newValue });
            }
        }

        return result;
    }

    // Picks the first declared enum member, or null for an empty enum.
    private static object GenerateEnum(Type enumType)
    {
        Array possibleValues = Enum.GetValues(enumType);
        if (possibleValues.Length > 0)
        {
            return possibleValues.GetValue(0);
        }
        return null;
    }

    /// <summary>
    /// Generates a backing collection (List&lt;T&gt; or object[]) and wraps it
    /// with Queryable.AsQueryable, using reflection for the generic overload.
    /// </summary>
    private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        bool isGeneric = queryableType.IsGenericType;
        object list;
        if (isGeneric)
        {
            Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
            list = GenerateCollection(listType, size, createdObjectReferences);
        }
        else
        {
            list = GenerateArray(typeof(object[]), size, createdObjectReferences);
        }
        if (list == null)
        {
            return null;
        }
        if (isGeneric)
        {
            Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
            MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
            return asQueryableMethod.Invoke(null, new[] { list });
        }

        return Queryable.AsQueryable((IEnumerable)list);
    }

    /// <summary>
    /// Fills a collection via its Add method; returns null when no element
    /// could be generated.
    /// </summary>
    private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = collectionType.IsGenericType ?
            collectionType.GetGenericArguments()[0] :
            typeof(object);
        object result = Activator.CreateInstance(collectionType);
        MethodInfo addMethod = collectionType.GetMethod("Add");
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            addMethod.Invoke(result, new object[] { element });
            areAllElementsNull &= element == null;
        }

        if (areAllElementsNull)
        {
            return null;
        }

        return result;
    }

    // Nullable<T> is represented by a (boxed) generated T.
    private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = nullableType.GetGenericArguments()[0];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type, createdObjectReferences);
    }

    /// <summary>
    /// Instantiates a POCO via its default constructor and recursively fills
    /// its settable public properties and fields. Registers the instance in
    /// <paramref name="createdObjectReferences"/> before recursing so circular
    /// references resolve to the same instance instead of looping.
    /// </summary>
    private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        object result = null;

        if (createdObjectReferences.TryGetValue(type, out result))
        {
            // The object has been created already, just return it. This will handle the circular reference case.
            return result;
        }

        if (type.IsValueType)
        {
            result = Activator.CreateInstance(type);
        }
        else
        {
            ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
            if (defaultCtor == null)
            {
                // Cannot instantiate the type because it doesn't have a default constructor
                return null;
            }

            result = defaultCtor.Invoke(new object[0]);
        }
        createdObjectReferences.Add(type, result);
        SetPublicProperties(type, result, createdObjectReferences);
        SetPublicFields(type, result, createdObjectReferences);
        return result;
    }

    // Populates every writable public instance property with generated data.
    private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (PropertyInfo property in properties)
        {
            if (property.CanWrite)
            {
                object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                property.SetValue(obj, propertyValue, null);
            }
        }
    }

    // Populates every public instance field with generated data.
    private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (FieldInfo field in fields)
        {
            object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
            field.SetValue(obj, fieldValue);
        }
    }

    /// <summary>
    /// Produces sample values for primitive/framework types. Values derived
    /// from the incrementing _index vary per call; the generator itself is
    /// not thread-safe (plain long increment).
    /// </summary>
    private class SimpleTypeObjectGenerator
    {
        private long _index = 0;
        private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

        [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
        private static Dictionary<Type, Func<long, object>> InitializeGenerators()
        {
            return new Dictionary<Type, Func<long, object>>
            {
                { typeof(Boolean), index => true },
                { typeof(Byte), index => (Byte)64 },
                { typeof(Char), index => (Char)65 },
                { typeof(DateTime), index => DateTime.Now },
                { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                { typeof(DBNull), index => DBNull.Value },
                { typeof(Decimal), index => (Decimal)index },
                { typeof(Double), index => (Double)(index + 0.1) },
                { typeof(Guid), index => Guid.NewGuid() },
                { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                { typeof(Int64), index => (Int64)index },
                { typeof(Object), index => new object() },
                { typeof(SByte), index => (SByte)64 },
                { typeof(Single), index => (Single)(index + 0.1) },
                {
                    typeof(String), index =>
                    {
                        return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                    }
                },
                {
                    typeof(TimeSpan), index =>
                    {
                        return TimeSpan.FromTicks(1234567);
                    }
                },
                { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                { typeof(UInt64), index => (UInt64)index },
                {
                    typeof(Uri), index =>
                    {
                        return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                    }
                },
            };
        }

        public static bool CanGenerateObject(Type type)
        {
            return DefaultGenerators.ContainsKey(type);
        }

        public object GenerateObject(Type type)
        {
            return DefaultGenerators[type](++_index);
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    /// <summary>
    /// Driver for the ShiftLeftLogical(Vector256&lt;UInt64&gt;, 1) test: runs every
    /// scenario when the intrinsic is supported, otherwise verifies the
    /// unsupported-hardware path, and throws if any scenario failed.
    /// </summary>
    private static void ShiftLeftLogicalUInt641()
    {
        var test = new ImmUnaryOpTest__ShiftLeftLogicalUInt641();

        if (test.IsSupported)
        {
            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();

            if (Avx.IsSupported)
            {
                // Validates basic functionality works, using Load
                test.RunBasicScenario_Load();

                // Validates basic functionality works, using LoadAligned
                test.RunBasicScenario_LoadAligned();
            }

            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();

            if (Avx.IsSupported)
            {
                // Validates calling via reflection works, using Load
                test.RunReflectionScenario_Load();

                // Validates calling via reflection works, using LoadAligned
                test.RunReflectionScenario_LoadAligned();
            }

            // Validates passing a static member works
            test.RunClsVarScenario();

            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();

            if (Avx.IsSupported)
            {
                // Validates passing a local works, using Load
                test.RunLclVarScenario_Load();

                // Validates passing a local works, using LoadAligned
                test.RunLclVarScenario_LoadAligned();
            }

            // Validates passing the field of a local class works
            test.RunClassLclFldScenario();

            // Validates passing an instance member of a class works
            test.RunClassFldScenario();

            // Validates passing the field of a local struct works
            test.RunStructLclFldScenario();

            // Validates passing an instance member of a struct works
            test.RunStructFldScenario();
        }
        else
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
/// <summary>
/// Test harness for Avx2.ShiftLeftLogical(Vector256&lt;UInt64&gt;, 1). Each Run*
/// scenario computes the shift through a different code path (direct call,
/// reflection, static/instance/struct fields, Load/LoadAligned) and validates
/// the result element-wise against a managed (value &lt;&lt; 1) reference.
/// </summary>
public sealed unsafe class ImmUnaryOpTest__ShiftLeftLogicalUInt641
{
    private struct TestStruct
    {
        public Vector256<UInt64> _fld;

        public static TestStruct Create()
        {
            var testStruct = new TestStruct();

            // Reuses the class-level static _data scratch buffer to seed _fld.
            for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt64(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref testStruct._fld), ref Unsafe.As<UInt64, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

            return testStruct;
        }

        public void RunStructFldScenario(ImmUnaryOpTest__ShiftLeftLogicalUInt641 testClass)
        {
            var result = Avx2.ShiftLeftLogical(_fld, 1);

            Unsafe.Write(testClass._dataTable.outArrayPtr, result);
            testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr);
        }
    }

    // Alignment (in bytes) requested from the data table so LoadAligned works.
    private static readonly int LargestVectorSize = 32;

    private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);
    private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<UInt64>>() / sizeof(UInt64);

    // Shared scratch buffer of random operands, refilled before each use.
    private static UInt64[] _data = new UInt64[Op1ElementCount];

    private static Vector256<UInt64> _clsVar;

    private Vector256<UInt64> _fld;

    private SimpleUnaryOpTest__DataTable<UInt64, UInt64> _dataTable;

    static ImmUnaryOpTest__ShiftLeftLogicalUInt641()
    {
        // Seeds the static operand used by RunClsVarScenario.
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt64(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _clsVar), ref Unsafe.As<UInt64, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
    }

    public ImmUnaryOpTest__ShiftLeftLogicalUInt641()
    {
        Succeeded = true;

        // Seeds the instance field, then separate data for the pointer table.
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt64(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<UInt64>, byte>(ref _fld), ref Unsafe.As<UInt64, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetUInt64(); }
        _dataTable = new SimpleUnaryOpTest__DataTable<UInt64, UInt64>(_data, new UInt64[RetElementCount], LargestVectorSize);
    }

    public bool IsSupported => Avx2.IsSupported;

    public bool Succeeded { get; set; }

    public void RunBasicScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

        var result = Avx2.ShiftLeftLogical(
            Unsafe.Read<Vector256<UInt64>>(_dataTable.inArrayPtr),
            1
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    public void RunBasicScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

        var result = Avx2.ShiftLeftLogical(
            Avx.LoadVector256((UInt64*)(_dataTable.inArrayPtr)),
            1
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    public void RunBasicScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

        var result = Avx2.ShiftLeftLogical(
            Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArrayPtr)),
            1
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

        // The immediate overload takes (Vector256<UInt64>, byte).
        var result = typeof(Avx2).GetMethod(nameof(Avx2.ShiftLeftLogical), new Type[] { typeof(Vector256<UInt64>), typeof(byte) })
                                 .Invoke(null, new object[] {
                                     Unsafe.Read<Vector256<UInt64>>(_dataTable.inArrayPtr),
                                     (byte)1
                                 });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt64>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

        var result = typeof(Avx2).GetMethod(nameof(Avx2.ShiftLeftLogical), new Type[] { typeof(Vector256<UInt64>), typeof(byte) })
                                 .Invoke(null, new object[] {
                                     Avx.LoadVector256((UInt64*)(_dataTable.inArrayPtr)),
                                     (byte)1
                                 });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt64>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

        var result = typeof(Avx2).GetMethod(nameof(Avx2.ShiftLeftLogical), new Type[] { typeof(Vector256<UInt64>), typeof(byte) })
                                 .Invoke(null, new object[] {
                                     Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArrayPtr)),
                                     (byte)1
                                 });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector256<UInt64>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }

    public void RunClsVarScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

        var result = Avx2.ShiftLeftLogical(
            _clsVar,
            1
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_clsVar, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

        var firstOp = Unsafe.Read<Vector256<UInt64>>(_dataTable.inArrayPtr);
        var result = Avx2.ShiftLeftLogical(firstOp, 1);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

        var firstOp = Avx.LoadVector256((UInt64*)(_dataTable.inArrayPtr));
        var result = Avx2.ShiftLeftLogical(firstOp, 1);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

        var firstOp = Avx.LoadAlignedVector256((UInt64*)(_dataTable.inArrayPtr));
        var result = Avx2.ShiftLeftLogical(firstOp, 1);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }

    public void RunClassLclFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

        var test = new ImmUnaryOpTest__ShiftLeftLogicalUInt641();
        var result = Avx2.ShiftLeftLogical(test._fld, 1);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld, _dataTable.outArrayPtr);
    }

    public void RunClassFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

        var result = Avx2.ShiftLeftLogical(_fld, 1);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld, _dataTable.outArrayPtr);
    }

    public void RunStructLclFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

        var test = TestStruct.Create();
        var result = Avx2.ShiftLeftLogical(test._fld, 1);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld, _dataTable.outArrayPtr);
    }

    public void RunStructFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

        var test = TestStruct.Create();
        test.RunStructFldScenario(this);
    }

    public void RunUnsupportedScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

        bool succeeded = false;

        try
        {
            // Expected to throw PlatformNotSupportedException on this hardware.
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            succeeded = true;
        }

        if (!succeeded)
        {
            Succeeded = false;
        }
    }

    private void ValidateResult(Vector256<UInt64> firstOp, void* result, [CallerMemberName] string method = "")
    {
        UInt64[] inArray = new UInt64[Op1ElementCount];
        UInt64[] outArray = new UInt64[RetElementCount];

        Unsafe.WriteUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray[0]), firstOp);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

        ValidateResult(inArray, outArray, method);
    }

    private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
    {
        UInt64[] inArray = new UInt64[Op1ElementCount];
        UInt64[] outArray = new UInt64[RetElementCount];

        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector256<UInt64>>());
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<UInt64>>());

        ValidateResult(inArray, outArray, method);
    }

    // Element-wise reference check: each result element must equal input << 1.
    private void ValidateResult(UInt64[] firstOp, UInt64[] result, [CallerMemberName] string method = "")
    {
        bool succeeded = true;

        if ((ulong)(firstOp[0] << 1) != result[0])
        {
            succeeded = false;
        }
        else
        {
            for (var i = 1; i < RetElementCount; i++)
            {
                if ((ulong)(firstOp[i] << 1) != result[i])
                {
                    succeeded = false;
                    break;
                }
            }
        }

        if (!succeeded)
        {
            TestLibrary.TestFramework.LogInformation($"{nameof(Avx2)}.{nameof(Avx2.ShiftLeftLogical)}<UInt64>(Vector256<UInt64><9>): {method} failed:");
            TestLibrary.TestFramework.LogInformation($"  firstOp: ({string.Join(", ", firstOp)})");
            TestLibrary.TestFramework.LogInformation($"   result: ({string.Join(", ", result)})");
            TestLibrary.TestFramework.LogInformation(string.Empty);

            Succeeded = false;
        }
    }
}
}
| |
//
// PreviewPopup.cs
//
// Author:
// Ruben Vermeersch <ruben@savanne.be>
// Larry Ewing <lewing@novell.com>
//
// Copyright (C) 2004-2010 Novell, Inc.
// Copyright (C) 2008, 2010 Ruben Vermeersch
// Copyright (C) 2004-2006 Larry Ewing
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using Cairo;
using Gdk;
using FSpot.Core;
using FSpot.Widgets;
using FSpot.Utils;
using FSpot.Gui;
namespace FSpot {
    // Borderless popup window that follows the pointer over a photo grid and
    // shows an enlarged preview of the photo under the cursor, with an optional
    // histogram composited into the corner and a caption below.
    public class PreviewPopup : Gtk.Window {
        private CollectionGridView view;   // the grid whose cells this popup tracks
        private Gtk.Image image;           // displays the preview pixbuf
        private Gtk.Label label;           // caption: description, date and name
        private bool show_histogram;

        // Whether a histogram is composited onto the preview.  Toggling the
        // value invalidates the current item (item = -1) so the image is
        // regenerated the next time Item is set.
        public bool ShowHistogram {
            get {
                return show_histogram;
            }
            set {
                if (value != show_histogram)
                    item = -1;
                show_histogram = value;
            }
        }

        private FSpot.Histogram hist;

        // Previews keyed by "<local path><show_histogram>" so the plain and
        // histogram variants are cached separately; holds up to 50 pixbufs.
        private DisposableCache<string, Pixbuf> preview_cache = new DisposableCache<string, Pixbuf> (50);

        private int item = -1;  // collection index currently previewed; -1 = none

        // Index of the collection item being previewed.  Setting a different
        // value regenerates the image; the popup is repositioned either way.
        public new int Item {
            get {
                return item;
            }
            set {
                if (value != item) {
                    item = value;
                    UpdateImage ();
                }
                UpdatePosition ();
            }
        }

        // Composites the histogram for 'pixbuf' into its own bottom-right
        // corner at half scale, inset 10px, with ~78% opacity (alpha 200/255).
        // No-op when ShowHistogram is off.
        private void AddHistogram (Gdk.Pixbuf pixbuf)
        {
            if (show_histogram) {
                Gdk.Pixbuf image = hist.Generate (pixbuf);
                double scalex = 0.5;
                double scaley = 0.5;
                int width = (int)(image.Width * scalex);
                int height = (int)(image.Height * scaley);
                image.Composite (pixbuf,
                                 pixbuf.Width - width - 10, pixbuf.Height - height - 10,
                                 width, height,
                                 pixbuf.Width - width - 10, pixbuf.Height - height - 10,
                                 scalex, scaley,
                                 Gdk.InterpType.Bilinear, 200);
            }
        }

        // Requests an RGBA colormap when the screen supports compositing so
        // the rounded translucent backdrop painted in OnExposeEvent works;
        // AppPaintable must be set before the window is realized.
        protected override void OnRealized ()
        {
            bool composited = CompositeUtils.IsComposited (Screen) && CompositeUtils.SetRgbaColormap (this);
            AppPaintable = composited;
            base.OnRealized ();
        }

        // Paints the popup background: clears to fully transparent, then fills
        // a rounded rectangle (12px corner radius) in translucent black.
        protected override bool OnExposeEvent (Gdk.EventExpose args)
        {
            int round = 12;
            Context g = Gdk.CairoHelper.Create (GdkWindow);
            g.Operator = Operator.Source;
            g.Source = new SolidPattern (new Cairo.Color (0, 0, 0, 0));
            g.Paint ();
            g.Operator = Operator.Over;
            g.Source = new SolidPattern (new Cairo.Color (0, 0, 0, .7));
            // Trace the rounded-rect outline; the straight edges between arcs
            // are drawn implicitly by cairo, so the explicit LineTo calls that
            // follow are redundant and stay commented out.
            g.MoveTo (round, 0);
            //g.LineTo (Allocation.Width - round, 0);
            g.Arc (Allocation.Width - round, round, round, - Math.PI * 0.5, 0);
            //g.LineTo (Allocation.Width, Allocation.Height - round);
            g.Arc (Allocation.Width - round, Allocation.Height - round, round, 0, Math.PI * 0.5);
            //g.LineTo (round, Allocation.Height);
            g.Arc (round, Allocation.Height - round, round, Math.PI * 0.5, Math.PI);
            g.Arc (round, round, round, Math.PI, Math.PI * 1.5);
            g.ClosePath ();
            g.Fill ();
            ((IDisposable)g).Dispose ();
            return base.OnExposeEvent (args);
        }

        // Loads (or pulls from cache) the preview for the current Item and
        // updates the image and caption.  On load failure an error pixbuf is
        // shown instead.
        private void UpdateImage ()
        {
            // NOTE: local 'item' (the IPhoto) shadows the 'item' index field.
            IPhoto item = view.Collection [Item];

            string orig_path = item.DefaultVersion.Uri.LocalPath;

            // Cache key appends show_histogram so both variants coexist.
            // ShallowCopy is assumed to pass null through on a cache miss —
            // TODO(review): confirm PixbufUtils.ShallowCopy is null-safe.
            Gdk.Pixbuf pixbuf = FSpot.Utils.PixbufUtils.ShallowCopy (preview_cache.Get (orig_path + show_histogram.ToString ()));
            if (pixbuf == null) {
                // Cache miss: size the preview to one third of the average
                // screen dimension — a rough hack to cope with very wide
                // (cinematic) displays.
                int preview_size = ((this.Screen.Width + this.Screen.Height)/2)/3;
                try {
                    pixbuf = FSpot.PhotoLoader.LoadAtMaxSize (item, preview_size, preview_size);
                } catch (Exception) {
                    // Best effort: any load failure falls through to the
                    // error pixbuf below.
                    pixbuf = null;
                }

                if (pixbuf != null) {
                    preview_cache.Add (orig_path + show_histogram.ToString (), pixbuf);
                    // Histogram is drawn onto the cached pixbuf itself, so
                    // cached entries already carry it.
                    AddHistogram (pixbuf);
                    image.Pixbuf = pixbuf;
                } else {
                    image.Pixbuf = PixbufUtils.ErrorPixbuf;
                }
            } else {
                image.Pixbuf = pixbuf;
                // Gtk.Image keeps its own reference, so our shallow copy can
                // be released immediately — presumably; verify against the
                // Gtk# binding if this is ever changed.
                pixbuf.Dispose ();
            }

            // Caption: optional description on its own line, then timestamp
            // and file name.
            string desc = String.Empty;
            if (item.Description != null && item.Description.Length > 0)
                desc = item.Description + Environment.NewLine;

            desc += item.Time.ToString () + " " + item.Name;

            label.Text = desc;
        }

        // Centers the popup over the current item's grid cell, clamped so the
        // window stays fully on screen.
        private void UpdatePosition ()
        {
            int x, y;
            Gdk.Rectangle bounds = view.CellBounds (this.Item);

            Gtk.Requisition requisition = this.SizeRequest ();
            this.Resize (requisition.Width, requisition.Height);

            view.GdkWindow.GetOrigin (out x, out y);

            // Account for scrolling
            bounds.X -= (int)view.Hadjustment.Value;
            bounds.Y -= (int)view.Vadjustment.Value;

            // calculate the cell center
            x += bounds.X + (bounds.Width / 2);
            y += bounds.Y + (bounds.Height / 2);

            // find the window's x location limiting it to the screen
            x = Math.Max (0, x - requisition.Width / 2);
            x = Math.Min (x, this.Screen.Width - requisition.Width);

            // find the window's y location offset above or below depending on space
            y = Math.Max (0, y - requisition.Height / 2);
            y = Math.Min (y, this.Screen.Height - requisition.Height);

            this.Move (x, y);
        }

        // Shows the preview for the cell at view coordinates (x, y), or hides
        // the popup when the pointer is not over a cell.
        private void UpdateItem (int x, int y)
        {
            int item = view.CellAtPosition (x, y);
            if (item >= 0) {
                this.Item = item;
                Show ();
            } else {
                this.Hide ();
            }
        }

        // Variant that reads the pointer position from the view, translating
        // it into content coordinates via the scroll adjustments.
        private void UpdateItem ()
        {
            int x, y;
            view.GetPointer (out x, out y);
            x += (int) view.Hadjustment.Value;
            y += (int) view.Vadjustment.Value;
            UpdateItem (x, y);
        }

        // Tracks pointer motion over the grid while the popup is visible.
        private void HandleIconViewMotion (object sender, Gtk.MotionNotifyEventArgs args)
        {
            if (!this.Visible)
                return;

            int x = (int) args.Event.X;
            int y = (int) args.Event.Y;
            view.GrabFocus ();
            UpdateItem (x, y);
        }

        // 'v' shows the plain preview, 'V' (shift+v) the histogram variant.
        private void HandleIconViewKeyPress (object sender, Gtk.KeyPressEventArgs args)
        {
            switch (args.Event.Key) {
            case Gdk.Key.v:
                ShowHistogram = false;
                UpdateItem ();
                args.RetVal = true;
                break;
            case Gdk.Key.V:
                ShowHistogram = true;
                UpdateItem ();
                args.RetVal = true;
                break;
            }
        }

        // Releasing the trigger keys dismisses the popup.
        private void HandleKeyRelease (object sender, Gtk.KeyReleaseEventArgs args)
        {
            switch (args.Event.Key) {
            case Gdk.Key.v:
            case Gdk.Key.V:
            case Gdk.Key.h:
                this.Hide ();
                break;
            }
        }

        // Any click dismisses the popup.
        private void HandleButtonPress (object sender, Gtk.ButtonPressEventArgs args)
        {
            this.Hide ();
        }

        // Tear the popup down together with the grid it tracks.
        private void HandleIconViewDestroy (object sender, Gtk.DestroyEventArgs args)
        {
            this.Destroy ();
        }

        // Release all cached pixbufs when the popup itself is destroyed.
        private void HandleDestroyed (object sender, System.EventArgs args)
        {
            this.preview_cache.Dispose ();
        }

        protected override bool OnMotionNotifyEvent (Gdk.EventMotion args)
        {
            //
            // We look for motion events on the popup window so that
            // if the pointer manages to get over the window we can
            // Update the image properly and/or get out of the way.
            //
            UpdateItem ();
            return false;
        }

        // Builds the undecorated top-level window, wires up the grid's
        // pointer/keyboard events, and creates the image, caption and
        // histogram widgets.
        public PreviewPopup (SelectionCollectionGridView view) : base (Gtk.WindowType.Toplevel)
        {
            Gtk.VBox vbox = new Gtk.VBox ();
            this.Add (vbox);
            this.AddEvents ((int) (Gdk.EventMask.PointerMotionMask |
                                   Gdk.EventMask.KeyReleaseMask |
                                   Gdk.EventMask.ButtonPressMask));

            // Frameless tooltip-style window: no decorations, hidden from the
            // taskbar/pager, positioned manually by UpdatePosition.
            this.Decorated = false;
            this.SkipTaskbarHint = true;
            this.SkipPagerHint = true;
            this.SetPosition (Gtk.WindowPosition.None);

            this.KeyReleaseEvent += HandleKeyRelease;
            this.ButtonPressEvent += HandleButtonPress;
            this.Destroyed += HandleDestroyed;

            this.view = view;
            view.MotionNotifyEvent += HandleIconViewMotion;
            view.KeyPressEvent += HandleIconViewKeyPress;
            view.KeyReleaseEvent += HandleKeyRelease;
            view.DestroyEvent += HandleIconViewDestroy;

            this.BorderWidth = 6;

            // Mid-gray histogram on a white background.
            hist = new FSpot.Histogram ();
            hist.RedColorHint = 127;
            hist.GreenColorHint = 127;
            hist.BlueColorHint = 127;
            hist.BackgroundColorHint = 0xff;

            image = new Gtk.Image ();
            image.CanFocus = false;

            label = new Gtk.Label (String.Empty);
            label.CanFocus = false;
            // Gray-on-black text to match the translucent black backdrop.
            label.ModifyFg (Gtk.StateType.Normal, new Gdk.Color (127, 127, 127));
            label.ModifyBg (Gtk.StateType.Normal, new Gdk.Color (0, 0, 0));

            this.ModifyFg (Gtk.StateType.Normal, new Gdk.Color (127, 127, 127));
            this.ModifyBg (Gtk.StateType.Normal, new Gdk.Color (0, 0, 0));

            vbox.PackStart (image, true, true, 0);
            vbox.PackStart (label, true, false, 0);
            vbox.ShowAll ();
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.