context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
// ***********************************************************************
// Copyright (c) 2009-2015 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal;
using NUnit.TestData.TestCaseSourceAttributeFixture;
using NUnit.TestUtilities;
namespace NUnit.Framework.Attributes
{
/// <summary>
/// Tests of <c>TestCaseSourceAttribute</c>: data may come from static
/// properties, methods and fields, from an IEnumerable type in another class,
/// and individual cases may carry Ignore/Explicit/Returns/Category settings.
/// Instance (non-static) sources in the same class are expected to make the
/// test NotRunnable.
/// </summary>
[TestFixture]
public class TestCaseSourceTests
{
    [Test, TestCaseSource("StaticProperty")]
    public void SourceCanBeStaticProperty(string source)
    {
        Assert.AreEqual("StaticProperty", source);
    }

    static IEnumerable StaticProperty
    {
        get { return new object[] { new object[] { "StaticProperty" } }; }
    }

    [Test]
    public void SourceUsingInstancePropertyIsNotRunnable()
    {
        var result = TestBuilder.RunParameterizedMethodSuite(typeof(TestCaseSourceAttributeFixture), "MethodWithInstancePropertyAsSource");
        // NUnit's Assert.AreEqual signature is (expected, actual) - expected goes first.
        Assert.AreEqual(ResultState.NotRunnable, result.Children[0].ResultState);
    }

    [Test, TestCaseSource("StaticMethod")]
    public void SourceCanBeStaticMethod(string source)
    {
        Assert.AreEqual("StaticMethod", source);
    }

    static IEnumerable StaticMethod()
    {
        return new object[] { new object[] { "StaticMethod" } };
    }

    [Test]
    public void SourceUsingInstanceMethodIsNotRunnable()
    {
        var result = TestBuilder.RunParameterizedMethodSuite(typeof(TestCaseSourceAttributeFixture), "MethodWithInstanceMethodAsSource");
        // (expected, actual) order fixed - see SourceUsingInstancePropertyIsNotRunnable.
        Assert.AreEqual(ResultState.NotRunnable, result.Children[0].ResultState);
    }

    // Deliberately non-static: demonstrates the kind of member that is NOT a
    // valid source. It is never referenced by an attribute in this fixture.
    IEnumerable InstanceMethod()
    {
        return new object[] { new object[] { "InstanceMethod" } };
    }

    [Test, TestCaseSource("StaticField")]
    public void SourceCanBeStaticField(string source)
    {
        Assert.AreEqual("StaticField", source);
    }

    static object[] StaticField =
        { new object[] { "StaticField" } };

    [Test]
    public void SourceUsingInstanceFieldIsNotRunnable()
    {
        var result = TestBuilder.RunParameterizedMethodSuite(typeof(TestCaseSourceAttributeFixture), "MethodWithInstanceFieldAsSource");
        // (expected, actual) order fixed - see SourceUsingInstancePropertyIsNotRunnable.
        Assert.AreEqual(ResultState.NotRunnable, result.Children[0].ResultState);
    }

    // A source may also be a type implementing IEnumerable; each yielded item
    // becomes one test case.
    [Test, TestCaseSource(typeof(DataSourceClass))]
    public void SourceCanBeInstanceOfIEnumerable(string source)
    {
        Assert.AreEqual("DataSourceClass", source);
    }

    class DataSourceClass : IEnumerable
    {
        public DataSourceClass()
        {
        }

        public IEnumerator GetEnumerator()
        {
            yield return "DataSourceClass";
        }
    }

    [Test, TestCaseSource("MyData")]
    public void SourceMayReturnArgumentsAsObjectArray(int n, int d, int q)
    {
        Assert.AreEqual(q, n / d);
    }

    // The [Test] attribute may be omitted when [TestCaseSource] is present.
    [TestCaseSource("MyData")]
    public void TestAttributeIsOptional(int n, int d, int q)
    {
        Assert.AreEqual(q, n / d);
    }

    [Test, TestCaseSource("MyIntData")]
    public void SourceMayReturnArgumentsAsIntArray(int n, int d, int q)
    {
        Assert.AreEqual(q, n / d);
    }

    // A single primitive argument does not need to be wrapped in an array.
    [Test, TestCaseSource("EvenNumbers")]
    public void SourceMayReturnSinglePrimitiveArgumentAlone(int n)
    {
        Assert.AreEqual(0, n % 2);
    }

    // TestCaseData.Returns() allows the test method to return its result for
    // comparison against the expected value.
    [Test, TestCaseSource("Params")]
    public int SourceMayReturnArgumentsAsParamSet(int n, int d)
    {
        return n / d;
    }

    [Test]
    [TestCaseSource("MyData")]
    [TestCaseSource("MoreData", Category = "Extra")]
    [TestCase(12, 2, 6)]
    public void TestMayUseMultipleSourceAttributes(int n, int d, int q)
    {
        Assert.AreEqual(q, n / d);
    }

    [Test, TestCaseSource("FourArgs")]
    public void TestWithFourArguments(int n, int d, int q, int r)
    {
        Assert.AreEqual(q, n / d);
        Assert.AreEqual(r, n % d);
    }

    [Test, Category("Top"), TestCaseSource(typeof(DivideDataProvider), "HereIsTheData")]
    public void SourceMayBeInAnotherClass(int n, int d, int q)
    {
        Assert.AreEqual(q, n / d);
    }

    [Test, TestCaseSource(typeof(DivideDataProviderWithReturnValue), "TestCases")]
    public int SourceMayBeInAnotherClassWithReturn(int n, int d)
    {
        return n / d;
    }

    [Test]
    public void IgnoreTakesPrecedenceOverExpectedException()
    {
        ITestResult result = TestBuilder.RunParameterizedMethodSuite(
            typeof(TestCaseSourceAttributeFixture), "MethodCallsIgnore").Children[0];
        Assert.AreEqual(ResultState.Ignored, result.ResultState);
        Assert.AreEqual("Ignore this", result.Message);
    }

    [Test]
    public void CanIgnoreIndividualTestCases()
    {
        TestSuite suite = TestBuilder.MakeParameterizedMethodSuite(
            typeof(TestCaseSourceAttributeFixture), "MethodWithIgnoredTestCases");

        Test testCase = TestFinder.Find("MethodWithIgnoredTestCases(1)", suite, false);
        Assert.That(testCase.RunState, Is.EqualTo(RunState.Runnable));

        testCase = TestFinder.Find("MethodWithIgnoredTestCases(2)", suite, false);
        Assert.That(testCase.RunState, Is.EqualTo(RunState.Ignored));
        Assert.That(testCase.Properties.Get(PropertyNames.SkipReason), Is.EqualTo("Don't Run Me!"));
    }

    [Test]
    public void CanMarkIndividualTestCasesExplicit()
    {
        TestSuite suite = TestBuilder.MakeParameterizedMethodSuite(
            typeof(TestCaseSourceAttributeFixture), "MethodWithExplicitTestCases");

        Test testCase = TestFinder.Find("MethodWithExplicitTestCases(1)", suite, false);
        Assert.That(testCase.RunState, Is.EqualTo(RunState.Runnable));

        testCase = TestFinder.Find("MethodWithExplicitTestCases(2)", suite, false);
        Assert.That(testCase.RunState, Is.EqualTo(RunState.Explicit));

        testCase = TestFinder.Find("MethodWithExplicitTestCases(3)", suite, false);
        Assert.That(testCase.RunState, Is.EqualTo(RunState.Explicit));
        Assert.That(testCase.Properties.Get(PropertyNames.SkipReason), Is.EqualTo("Connection failing"));
    }

    // A source that throws makes the test NotRunnable and surfaces the
    // exception message as the result message.
    [Test]
    public void HandlesExceptionInTestCaseSource()
    {
        var testMethod = (TestMethod)TestBuilder.MakeParameterizedMethodSuite(
            typeof(TestCaseSourceAttributeFixture), "MethodWithSourceThrowingException").Tests[0];
        Assert.AreEqual(RunState.NotRunnable, testMethod.RunState);

        ITestResult result = TestBuilder.RunTest(testMethod, null);
        Assert.AreEqual(ResultState.NotRunnable, result.ResultState);
        Assert.AreEqual("System.Exception : my message", result.Message);
    }

    // Explicit so it only runs when selected interactively, e.g. in a GUI runner.
    [TestCaseSource("exception_source"), Explicit]
    public void HandlesExceptionInTestCaseSource_GuiDisplay(string lhs, string rhs)
    {
        Assert.AreEqual(lhs, rhs);
    }

    static object[] testCases =
    {
        new TestCaseData(
            new string[] { "A" },
            new string[] { "B" })
    };

    [Test, TestCaseSource("testCases")]
    public void MethodTakingTwoStringArrays(string[] a, string[] b)
    {
        Assert.That(a, Is.TypeOf(typeof(string[])));
        Assert.That(b, Is.TypeOf(typeof(string[])));
    }

    #region Sources used by the tests

    static object[] MyData = new object[] {
        new object[] { 12, 3, 4 },
        new object[] { 12, 4, 3 },
        new object[] { 12, 6, 2 } };

    static object[] MyIntData = new object[] {
        new int[] { 12, 3, 4 },
        new int[] { 12, 4, 3 },
        new int[] { 12, 6, 2 } };

    static object[] FourArgs = new object[] {
        new TestCaseData( 12, 3, 4, 0 ),
        new TestCaseData( 12, 4, 3, 0 ),
        new TestCaseData( 12, 5, 2, 2 ) };

    static int[] EvenNumbers = new int[] { 2, 4, 6, 8 };

    static object[] MoreData = new object[] {
        new object[] { 12, 1, 12 },
        new object[] { 12, 2, 6 } };

    static object[] Params = new object[] {
        new TestCaseData(24, 3).Returns(8),
        new TestCaseData(24, 2).Returns(12) };

    private class DivideDataProvider
    {
        public static IEnumerable HereIsTheData
        {
            get
            {
                //yield return new TestCaseData(0, 0, 0)
                //    .SetName("ThisOneShouldThrow")
                //    .SetDescription("Demonstrates use of ExpectedException")
                //    .SetCategory("Junk")
                //    .SetProperty("MyProp", "zip")
                //    .Throws(typeof(System.DivideByZeroException));
                yield return new object[] { 100, 20, 5 };
                yield return new object[] { 100, 4, 25 };
            }
        }
    }

    public class DivideDataProviderWithReturnValue
    {
        public static IEnumerable TestCases
        {
            get
            {
                return new object[] {
                    new TestCaseData(12, 3).Returns(4).SetName("TC1"),
                    new TestCaseData(12, 2).Returns(6).SetName("TC2"),
                    new TestCaseData(12, 4).Returns(3).SetName("TC3")
                };
            }
        }
    }

    // Yields two good cases, then throws: used by the two
    // HandlesExceptionInTestCaseSource tests above.
    private static IEnumerable exception_source
    {
        get
        {
            yield return new TestCaseData("a", "a");
            yield return new TestCaseData("b", "b");

            throw new System.Exception("my message");
        }
    }

    #endregion
}
}
| |
/**************************************************************************\
Copyright Microsoft Corporation. All Rights Reserved.
\**************************************************************************/
namespace Standard
{
using System;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Security;
using System.Text;
using MS.Internal.Interop;
using FILETIME = System.Runtime.InteropServices.ComTypes.FILETIME;
#region Enums and Static Property Classes
/// <summary>ShellItem attribute flags. SIATTRIBFLAGS_*</summary>
internal enum SIATTRIBFLAGS
{
    // Combine item attributes with AND: a bit is set only if every item has it.
    AND = 0x00000001,
    // Combine item attributes with OR: a bit is set if any item has it.
    OR = 0x00000002,
    // App-compat mode: query each item's folder directly for its attributes.
    APPCOMPAT = 0x00000003,
}
/// <summary>Which application document list to retrieve. APPDOCLISTTYPE / ADLT_*.</summary>
internal enum APPDOCLISTTYPE
{
    ADLT_RECENT = 0, // The recently used documents list
    ADLT_FREQUENT, // The frequently used documents list
}
/// <summary>
/// Flags for SetTabProperties. STPF_*
/// </summary>
/// <remarks>The native enum was called STPFLAG.</remarks>
[Flags]
internal enum STPF
{
    // Default: no special thumbnail or peek treatment for the tab.
    NONE = 0x00000000,
    // Always use the main application window's thumbnail for this tab.
    USEAPPTHUMBNAILALWAYS = 0x00000001,
    // Use the application thumbnail only while this tab is the active one.
    USEAPPTHUMBNAILWHENACTIVE = 0x00000002,
    // Always use the main application window for the peek (Aero Peek) preview.
    USEAPPPEEKALWAYS = 0x00000004,
    // Use the application window for peek only while this tab is active.
    USEAPPPEEKWHENACTIVE = 0x00000008,
}
/// <summary>
/// Flags for Setting Taskbar Progress state. TBPF_*
/// </summary>
/// <remarks>
/// The native enum was called TBPFLAG.
/// </remarks>
internal enum TBPF
{
    // No progress indicator is displayed on the taskbar button.
    NOPROGRESS = 0x00000000,
    // Marquee-style (indeterminate) progress indicator.
    INDETERMINATE = 0x00000001,
    // Normal determinate progress.
    NORMAL = 0x00000002,
    // Progress shown in the error state.
    ERROR = 0x00000004,
    // Progress shown in the paused state.
    PAUSED = 0x00000008,
}
/// <summary>
/// THUMBBUTTON mask. THB_*
/// </summary>
/// <remarks>
/// Used in THUMBBUTTON.dwMask to indicate which fields of the struct are valid.
/// </remarks>
[Flags]
internal enum THB : uint
{
    // iBitmap field is valid.
    BITMAP = 0x0001,
    // hIcon field is valid.
    ICON = 0x0002,
    // szTip field is valid.
    TOOLTIP = 0x0004,
    // dwFlags field is valid.
    FLAGS = 0x0008,
}
/// <summary>
/// THUMBBUTTON flags. THBF_*
/// </summary>
/// <remarks>
/// State/behavior flags carried in THUMBBUTTON.dwFlags. Note ENABLED is 0,
/// so it cannot be distinguished by a bit test.
/// </remarks>
[Flags]
internal enum THBF : uint
{
    ENABLED = 0x0000,
    DISABLED = 0x0001,
    // Button is dismissed (flyout closes) when clicked.
    DISMISSONCLICK = 0x0002,
    // Draw the button without a border/background.
    NOBACKGROUND = 0x0004,
    HIDDEN = 0x0008,
    // Added post-beta
    // Button is shown but does not react to clicks.
    NONINTERACTIVE = 0x0010,
}
/// <summary>
/// GetPropertyStoreFlags. GPS_*.
/// </summary>
/// <remarks>
/// These are new for Vista, but are used in downlevel components.
/// The values are combinable bit flags (see MASK_VALID), hence [Flags].
/// </remarks>
[Flags]
internal enum GPS
{
    // If no flags are specified (GPS_DEFAULT), a read-only property store is returned that includes properties for the file or item.
    // In the case that the shell item is a file, the property store contains:
    //     1. properties about the file from the file system
    //     2. properties from the file itself provided by the file's property handler, unless that file is offline,
    //        see GPS_OPENSLOWITEM
    //     3. if requested by the file's property handler and supported by the file system, properties stored in the
    //        alternate property store.
    //
    // Non-file shell items should return a similar read-only store
    //
    // Specifying other GPS_ flags modifies the store that is returned
    DEFAULT = 0x00000000,
    HANDLERPROPERTIESONLY = 0x00000001, // only include properties directly from the file's property handler
    READWRITE = 0x00000002, // Writable stores will only include handler properties
    TEMPORARY = 0x00000004, // A read/write store that only holds properties for the lifetime of the IShellItem object
    FASTPROPERTIESONLY = 0x00000008, // do not include any properties from the file's property handler (because the file's property handler will hit the disk)
    OPENSLOWITEM = 0x00000010, // include properties from a file's property handler, even if it means retrieving the file from offline storage.
    DELAYCREATION = 0x00000020, // delay the creation of the file's property handler until those properties are read, written, or enumerated
    BESTEFFORT = 0x00000040, // For readonly stores, succeed and return all available properties, even if one or more sources of properties fails. Not valid with GPS_READWRITE.
    NO_OPLOCK = 0x00000080, // some data sources protect the read property store with an oplock, this disables that
    MASK_VALID = 0x000000FF, // union of all valid GPS_ bits above
}
/// <summary>
/// KNOWNDESTCATEGORY. KDC_*
/// </summary>
/// <remarks>
/// Selects which known destination category to retrieve (e.g. for jump
/// lists) - NOTE(review): usage site not visible here, confirm with callers.
/// </remarks>
internal enum KDC
{
    FREQUENT = 1,
    RECENT,
}
// IShellFolder::GetAttributesOf flags
[Flags]
internal enum SFGAO : uint
{
    /// <summary>Objects can be copied</summary>
    /// <remarks>DROPEFFECT_COPY</remarks>
    CANCOPY = 0x1,
    /// <summary>Objects can be moved</summary>
    /// <remarks>DROPEFFECT_MOVE</remarks>
    CANMOVE = 0x2,
    /// <summary>Objects can be linked</summary>
    /// <remarks>
    /// DROPEFFECT_LINK.
    ///
    /// If this bit is set on an item in the shell folder, a
    /// 'Create Shortcut' menu item will be added to the File
    /// menu and context menus for the item.  If the user selects
    /// that command, your IContextMenu::InvokeCommand() will be called
    /// with 'link'.
    /// That flag will also be used to determine if 'Create Shortcut'
    /// should be added when the item in your folder is dragged to another
    /// folder.
    /// </remarks>
    CANLINK = 0x4,
    /// <summary>supports BindToObject(IID_IStorage)</summary>
    STORAGE = 0x00000008,
    /// <summary>Objects can be renamed</summary>
    CANRENAME = 0x00000010,
    /// <summary>Objects can be deleted</summary>
    CANDELETE = 0x00000020,
    /// <summary>Objects have property sheets</summary>
    HASPROPSHEET = 0x00000040,
    // unused = 0x00000080,
    /// <summary>Objects are drop target</summary>
    DROPTARGET = 0x00000100,
    /// <summary>Mask of the capability bits above (CANCOPY..DROPTARGET)</summary>
    CAPABILITYMASK = 0x00000177,
    // unused = 0x00000200,
    // unused = 0x00000400,
    // unused = 0x00000800,
    // unused = 0x00001000,
    /// <summary>Object is encrypted (use alt color)</summary>
    ENCRYPTED = 0x00002000,
    /// <summary>'Slow' object</summary>
    ISSLOW = 0x00004000,
    /// <summary>Ghosted icon</summary>
    GHOSTED = 0x00008000,
    /// <summary>Shortcut (link)</summary>
    LINK = 0x00010000,
    /// <summary>Shared</summary>
    SHARE = 0x00020000,
    /// <summary>Read-only</summary>
    READONLY = 0x00040000,
    /// <summary> Hidden object</summary>
    HIDDEN = 0x00080000,
    /// <summary>Mask of the display-attribute bits (ENCRYPTED..HIDDEN)</summary>
    DISPLAYATTRMASK = 0x000FC000,
    /// <summary> May contain children with SFGAO_FILESYSTEM</summary>
    FILESYSANCESTOR = 0x10000000,
    /// <summary>Support BindToObject(IID_IShellFolder)</summary>
    FOLDER = 0x20000000,
    /// <summary>Is a win32 file system object (file/folder/root)</summary>
    FILESYSTEM = 0x40000000,
    /// <summary>May contain children with SFGAO_FOLDER (may be slow)</summary>
    HASSUBFOLDER = 0x80000000,
    /// <summary>Alias of HASSUBFOLDER (same bit, 0x80000000)</summary>
    CONTENTSMASK = 0x80000000,
    /// <summary>Invalidate cached information (may be slow)</summary>
    VALIDATE = 0x01000000,
    /// <summary>Is this removeable media?</summary>
    REMOVABLE = 0x02000000,
    /// <summary> Object is compressed (use alt color)</summary>
    COMPRESSED = 0x04000000,
    /// <summary>Supports IShellFolder, but only implements CreateViewObject() (non-folder view)</summary>
    BROWSABLE = 0x08000000,
    /// <summary>Is a non-enumerated object (should be hidden)</summary>
    NONENUMERATED = 0x00100000,
    /// <summary>Should show bold in explorer tree</summary>
    NEWCONTENT = 0x00200000,
    // NOTE: CANMONIKER, HASSTORAGE and STREAM all share the value 0x00400000;
    // the first two are obsolete aliases of STREAM.
    /// <summary>Obsolete</summary>
    CANMONIKER = 0x00400000,
    /// <summary>Obsolete</summary>
    HASSTORAGE = 0x00400000,
    /// <summary>Supports BindToObject(IID_IStream)</summary>
    STREAM = 0x00400000,
    /// <summary>May contain children with SFGAO_STORAGE or SFGAO_STREAM</summary>
    STORAGEANCESTOR = 0x00800000,
    /// <summary>For determining storage capabilities, ie for open/save semantics</summary>
    STORAGECAPMASK = 0x70C50008,
    /// <summary>
    /// Attributes that are masked out for PKEY_SFGAOFlags because they are considered
    /// to cause slow calculations or lack context
    /// (SFGAO_VALIDATE | SFGAO_ISSLOW | SFGAO_HASSUBFOLDER and others)
    /// </summary>
    PKEYSFGAOMASK = 0x81044000,
}
/// <summary>
/// IShellFolder::EnumObjects grfFlags bits.  Also called SHCONT
/// </summary>
/// <remarks>
/// Power-of-two values combined by callers, so marked [Flags] for
/// consistency with SHGDN/SFGAO in this file.
/// </remarks>
[Flags]
internal enum SHCONTF
{
    CHECKING_FOR_CHILDREN = 0x0010, // hint that client is checking if (what) child items the folder contains - not all details (e.g. short file name) are needed
    FOLDERS = 0x0020, // only want folders enumerated (SFGAO_FOLDER)
    NONFOLDERS = 0x0040, // include non folders (items without SFGAO_FOLDER)
    INCLUDEHIDDEN = 0x0080, // show items normally hidden (items with SFGAO_HIDDEN)
    INIT_ON_FIRST_NEXT = 0x0100, // DEFUNCT - this is always assumed
    NETPRINTERSRCH = 0x0200, // hint that client is looking for printers
    SHAREABLE = 0x0400, // hint that client is looking sharable resources (local drives or hidden root shares)
    STORAGE = 0x0800, // include all items with accessible storage and their ancestors
    NAVIGATION_ENUM = 0x1000, // mark child folders to indicate that they should provide a "navigation" enumeration by default
    FASTITEMS = 0x2000, // hint that client is only interested in items that can be enumerated quickly
    FLATLIST = 0x4000, // enumerate items as flat list even if folder is stacked
    ENABLE_ASYNC = 0x8000, // inform enumerator that client is listening for change notifications so enumerator does not need to be complete, items can be reported via change notifications
}
/// <summary>
/// IShellFolder::GetDisplayNameOf/SetNameOf uFlags.  Also called SHGDNF.
/// </summary>
/// <remarks>
/// For compatibility with SIGDN, these bits must all sit in the LOW word.
/// Note SHGDN_NORMAL is 0 and therefore cannot be detected by a bit test.
/// </remarks>
[Flags]
internal enum SHGDN
{
    SHGDN_NORMAL = 0x0000, // default (display purpose)
    SHGDN_INFOLDER = 0x0001, // displayed under a folder (relative)
    SHGDN_FOREDITING = 0x1000, // for in-place editing
    SHGDN_FORADDRESSBAR = 0x4000, // UI friendly parsing name (remove ugly stuff)
    SHGDN_FORPARSING = 0x8000, // parsing name for ParseDisplayName()
}
/// <summary>
/// SHELLITEMCOMPAREHINTF.  SICHINT_*.
/// </summary>
/// <remarks>
/// Hints for IShellItem.Compare.  Backed by uint because ALLFIELDS uses the
/// high bit (0x80000000).
/// </remarks>
internal enum SICHINT : uint
{
    /// <summary>iOrder based on display in a folder view</summary>
    DISPLAY = 0x00000000,
    /// <summary>exact instance compare</summary>
    ALLFIELDS = 0x80000000,
    /// <summary>iOrder based on canonical name (better performance)</summary>
    CANONICAL = 0x10000000,
    // If canonical comparison reports inequality, fall back to comparing file system paths.
    TEST_FILESYSPATH_IF_NOT_EQUAL = 0x20000000,
};
/// <summary>
/// ShellItem enum.  SIGDN_*.
/// </summary>
/// <remarks>
/// Each value encodes the equivalent SHGDN_* bits in its low word (see the
/// trailing comments); the high word distinguishes the SIGDN forms.
/// </remarks>
internal enum SIGDN : uint
{ // lower word (& with 0xFFFF)
    NORMALDISPLAY = 0x00000000, // SHGDN_NORMAL
    PARENTRELATIVEPARSING = 0x80018001, // SHGDN_INFOLDER | SHGDN_FORPARSING
    DESKTOPABSOLUTEPARSING = 0x80028000, // SHGDN_FORPARSING
    PARENTRELATIVEEDITING = 0x80031001, // SHGDN_INFOLDER | SHGDN_FOREDITING
    DESKTOPABSOLUTEEDITING = 0x8004c000, // SHGDN_FORPARSING | SHGDN_FORADDRESSBAR
    FILESYSPATH = 0x80058000, // SHGDN_FORPARSING
    URL = 0x80068000, // SHGDN_FORPARSING
    PARENTRELATIVEFORADDRESSBAR = 0x8007c001, // SHGDN_INFOLDER | SHGDN_FORPARSING | SHGDN_FORADDRESSBAR
    PARENTRELATIVE = 0x80080001, // SHGDN_INFOLDER
}
/// <summary>
/// STR_GPS_*
/// </summary>
/// <remarks>
/// When requesting a property store through IShellFolder, you can specify the equivalent of
/// GPS_DEFAULT by passing in a null IBindCtx parameter.
///
/// You can specify the equivalent of GPS_READWRITE by passing a mode of STGM_READWRITE | STGM_EXCLUSIVE
/// in the bind context
///
/// Here are the string versions of GPS_ flags, passed to IShellFolder::BindToObject() via IBindCtx::RegisterObjectParam()
/// These flags are valid when requesting an IPropertySetStorage or IPropertyStore handler
///
/// The meaning of these flags are described above.
///
/// There is no STR_ equivalent for GPS_TEMPORARY because temporary property stores
/// are provided by IShellItem2 only -- not by the underlying IShellFolder.
/// </remarks>
internal static class STR_GPS
{
    // Each constant is the string form of the same-named member of the GPS enum above.
    public const string HANDLERPROPERTIESONLY = "GPS_HANDLERPROPERTIESONLY";
    public const string FASTPROPERTIESONLY = "GPS_FASTPROPERTIESONLY";
    public const string OPENSLOWITEM = "GPS_OPENSLOWITEM";
    public const string DELAYCREATION = "GPS_DELAYCREATION";
    public const string BESTEFFORT = "GPS_BESTEFFORT";
    public const string NO_OPLOCK = "GPS_NO_OPLOCK";
}
#endregion
#region Structs
/// <summary>
/// Managed mirror of the native THUMBBUTTON structure used with
/// ITaskbarList3 thumbnail-toolbar buttons. Layout (Pack = 8, Unicode,
/// fixed 260-char szTip) must match the native definition exactly.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 8, CharSet = CharSet.Unicode)]
internal struct THUMBBUTTON
{
    /// <summary>
    /// WPARAM value for a THUMBBUTTON being clicked.
    /// </summary>
    public const int THBN_CLICKED = 0x1800;

    // THB_* bits saying which of the following fields are valid.
    public THB dwMask;
    // Application-defined button id, returned with THBN_CLICKED.
    public uint iId;
    // Index into the toolbar's image list (valid when THB.BITMAP is set).
    public uint iBitmap;
    // Icon handle (valid when THB.ICON is set).
    public IntPtr hIcon;
    // Tooltip text, fixed-length inline buffer of 260 characters.
    [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 260)]
    public string szTip;
    // THBF_* state/behavior flags (valid when THB.FLAGS is set).
    public THBF dwFlags;
}
/// <summary>
/// Managed mirror of a native property key: a format-id GUID plus a
/// property id. Layout (Pack = 4) must match the native definition.
/// Immutable after construction.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 4)]
internal struct PKEY
{
    /// <summary>fmtid</summary>
    private readonly Guid _fmtid;
    /// <summary>pid</summary>
    private readonly uint _pid;

    public PKEY(Guid fmtid, uint pid)
    {
        _fmtid = fmtid;
        _pid = pid;
    }

    // Well-known keys. The AppUserModel_* keys share one fmtid and differ by pid.
    /// <summary>PKEY_Title</summary>
    public static readonly PKEY Title = new PKEY(new Guid("F29F85E0-4FF9-1068-AB91-08002B27B3D9"), 2);
    /// <summary>PKEY_AppUserModel_ID</summary>
    public static readonly PKEY AppUserModel_ID = new PKEY(new Guid("9F4C2855-9F79-4B39-A8D0-E1D42DE1D5F3"), 5);
    /// <summary>PKEY_AppUserModel_IsDestListSeparator</summary>
    public static readonly PKEY AppUserModel_IsDestListSeparator = new PKEY(new Guid("9F4C2855-9F79-4B39-A8D0-E1D42DE1D5F3"), 6);
    /// <summary>PKEY_AppUserModel_RelaunchCommand</summary>
    public static readonly PKEY AppUserModel_RelaunchCommand = new PKEY(new Guid("9F4C2855-9F79-4B39-A8D0-E1D42DE1D5F3"), 2);
    /// <summary>PKEY_AppUserModel_RelaunchDisplayNameResource</summary>
    public static readonly PKEY AppUserModel_RelaunchDisplayNameResource = new PKEY(new Guid("9F4C2855-9F79-4B39-A8D0-E1D42DE1D5F3"), 4);
    /// <summary>PKEY_AppUserModel_RelaunchIconResource</summary>
    public static readonly PKEY AppUserModel_RelaunchIconResource = new PKEY(new Guid("9F4C2855-9F79-4B39-A8D0-E1D42DE1D5F3"), 3);
}
#endregion
#region Interfaces
/// <summary>
/// COM import of IEnumIDList (enumerates item ID lists). Member order is
/// the native vtable order and must not be changed.
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.EnumIdList),
]
internal interface IEnumIDList
{
    // PreserveSig: S_FALSE (fewer items than requested) must not throw.
    [PreserveSig()]
    HRESULT Next(uint celt, out IntPtr rgelt, out int pceltFetched);

    [PreserveSig()]
    HRESULT Skip(uint celt);

    void Reset();

    void Clone([Out, MarshalAs(UnmanagedType.Interface)] out IEnumIDList ppenum);
}
/// <summary>
/// COM import of IEnumObjects. Member order is the native vtable order and
/// must not be changed.
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.EnumObjects),
]
internal interface IEnumObjects
{
    //[local]
    // NOTE(review): This marshaling signature for Next is untested (the
    // original author flagged it too) - verify before relying on it.
    void Next(uint celt, [In] ref Guid riid, [Out, MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.IUnknown, IidParameterIndex = 1, SizeParamIndex = 0)] object[] rgelt, [Out] out uint pceltFetched);
    /*
    [call_as(Next)] HRESULT RemoteNext(
        [in] ULONG celt,
        [in] REFIID riid,
        [out, size_is(celt), length_is(*pceltFetched), iid_is(riid)] void **rgelt,
        [out] ULONG *pceltFetched);
    */

    void Skip(uint celt);

    void Reset();

    IEnumObjects Clone();
}
/// <summary>Unknown Object Array</summary>
/// <remarks>
/// COM import of IObjectArray. Member order is the native vtable order and
/// must not be changed.
/// </remarks>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ObjectArray),
]
internal interface IObjectArray
{
    // Number of objects in the array.
    uint GetCount();

    // Returns the object at uiIndex, cast to the interface identified by riid.
    [return: MarshalAs(UnmanagedType.IUnknown)]
    object GetAt([In] uint uiIndex, [In] ref Guid riid);
}
/// <summary>
/// COM import of IObjectCollection: a mutable IObjectArray.
/// </summary>
/// <remarks>
/// NOTE(review): the Guid attribute uses IID.ObjectArray, the same IID as the
/// IObjectArray interface above. IObjectCollection has its own native IID, so
/// this looks like a copy/paste slip - confirm against the IID class and
/// IID_IObjectCollection before shipping QueryInterface calls through this type.
/// </remarks>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ObjectArray),
]
interface IObjectCollection : IObjectArray
{
    #region IObjectArray redeclarations
    // Redeclared so the managed vtable matches the native interface, which
    // inherits these slots from IObjectArray.
    new uint GetCount();
    [return: MarshalAs(UnmanagedType.IUnknown)]
    new object GetAt([In] uint uiIndex, [In] ref Guid riid);
    #endregion

    void AddObject([MarshalAs(UnmanagedType.IUnknown)] object punk);

    void AddFromArray(IObjectArray poaSource);

    void RemoveObjectAt(uint uiIndex);

    // Removes all objects from the collection.
    void Clear();
}
/// <summary>
/// COM import of IPropertyStore (read/write access to a set of PKEY-keyed
/// property values). Member order is the native vtable order.
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.PropertyStore)
]
internal interface IPropertyStore
{
    uint GetCount();

    // Returns the key of the property at ordinal iProp.
    PKEY GetAt(uint iProp);

    /// <SecurityNote>
    /// Critical : Accepts critical PROPVARIANT argument
    /// </SecurityNote>
    [SecurityCritical]
    void GetValue([In] ref PKEY pkey, [In, Out] PROPVARIANT pv);

    /// <SecurityNote>
    /// Critical : Accepts critical PROPVARIANT argument
    /// </SecurityNote>
    [SecurityCritical]
    void SetValue([In] ref PKEY pkey, PROPVARIANT pv);

    // Flushes pending SetValue changes to the underlying store.
    void Commit();
}
/// <summary>
/// COM import of IShellFolder. Member order is the native vtable order and
/// must not be changed.
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ShellFolder),
]
internal interface IShellFolder
{
    // Translates a display name into an item ID list (pidl).
    void ParseDisplayName(
        [In] IntPtr hwnd,
        [In] IBindCtx pbc,
        [In, MarshalAs(UnmanagedType.LPWStr)] string pszDisplayName,
        [In, Out] ref int pchEaten,
        [Out] out IntPtr ppidl,
        [In, Out] ref uint pdwAttributes);

    // Enumerates the folder's contents, filtered by grfFlags.
    IEnumIDList EnumObjects(
        [In] IntPtr hwnd,
        [In] SHCONTF grfFlags);

    // returns an instance of a sub-folder which is specified by the IDList (pidl).
    // IShellFolder or derived interfaces
    [return: MarshalAs(UnmanagedType.Interface)]
    object BindToObject(
        [In] IntPtr pidl,
        [In] IBindCtx pbc,
        [In] ref Guid riid);

    // produces the same result as BindToObject()
    [return: MarshalAs(UnmanagedType.Interface)]
    object BindToStorage([In] IntPtr pidl, [In] IBindCtx pbc, [In] ref Guid riid);

    // compares two IDLists and returns the result. The shell
    // explorer always passes 0 as lParam, which indicates 'sort by name'.
    // It should return 0 (as CODE of the scode), if two id indicates the
    // same object; negative value if pidl1 should be placed before pidl2;
    // positive value if pidl2 should be placed before pidl1.
    // use the macro ResultFromShort() to extract the result comparison
    // it deals with the casting and type conversion issues for you
    [PreserveSig]
    HRESULT CompareIDs([In] IntPtr lParam, [In] IntPtr pidl1, [In] IntPtr pidl2);

    // creates a view object of the folder itself. The view
    // object is a difference instance from the shell folder object.
    // 'hwndOwner' can be used as the owner window of its dialog box or
    // menu during the lifetime of the view object.
    // This member function should always create a new
    // instance which has only one reference count. The explorer may create
    // more than one instances of view object from one shell folder object
    // and treat them as separate instances.
    // returns IShellView derived interface
    [return: MarshalAs(UnmanagedType.Interface)]
    object CreateViewObject([In] IntPtr hwndOwner, [In] ref Guid riid);

    // returns the attributes of specified objects in that
    // folder. 'cidl' and 'apidl' specifies objects. 'apidl' contains only
    // simple IDLists. The explorer initializes *prgfInOut with a set of
    // flags to be evaluated. The shell folder may optimize the operation
    // by not returning unspecified flags.
    void GetAttributesOf(
        [In] uint cidl,
        [In] IntPtr apidl,
        [In, Out] ref SFGAO rgfInOut);

    // creates a UI object to be used for specified objects.
    // The shell explorer passes either IID_IDataObject (for transfer operation)
    // or IID_IContextMenu (for context menu operation) as riid
    // and many other interfaces
    [return: MarshalAs(UnmanagedType.Interface)]
    object GetUIObjectOf(
        [In] IntPtr hwndOwner,
        [In] uint cidl,
        [In, MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.SysInt, SizeParamIndex = 2)] IntPtr apidl,
        [In] ref Guid riid,
        [In, Out] ref uint rgfReserved);

    // returns the display name of the specified object.
    // If the ID contains the display name (in the locale character set),
    // it returns the offset to the name. Otherwise, it returns a pointer
    // to the display name string (UNICODE), which is allocated by the
    // task allocator, or fills in a buffer.
    // use the helper APIS StrRetToStr() or StrRetToBuf() to deal with the different
    // forms of the STRRET structure
    void GetDisplayNameOf([In] IntPtr pidl, [In] SHGDN uFlags, [Out] out IntPtr pName);

    // sets the display name of the specified object.
    // If it changes the ID as well, it returns the new ID which is
    // allocated by the task allocator.
    void SetNameOf([In] IntPtr hwnd,
        [In] IntPtr pidl,
        [In, MarshalAs(UnmanagedType.LPWStr)] string pszName,
        [In] SHGDN uFlags,
        [Out] out IntPtr ppidlOut);
}
/// <summary>
/// Shell Namespace helper
/// </summary>
/// <remarks>
/// COM import of IShellItem. Member order is the native vtable order and
/// must not be changed.
/// </remarks>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ShellItem),
]
internal interface IShellItem
{
    // Binds to a handler (bhid selects which) and returns it as riid.
    [return: MarshalAs(UnmanagedType.Interface)]
    object BindToHandler(IBindCtx pbc, [In] ref Guid bhid, [In] ref Guid riid);

    IShellItem GetParent();

    // Returns the item's name in the form selected by sigdnName.
    [return: MarshalAs(UnmanagedType.LPWStr)]
    string GetDisplayName(SIGDN sigdnName);

    // Returns the item's SFGAO attributes, limited to the bits in sfgaoMask.
    SFGAO GetAttributes(SFGAO sfgaoMask);

    // <0, 0 or >0 ordering result; hint selects the comparison strategy.
    int Compare(IShellItem psi, SICHINT hint);
}
/// <summary>
/// COM import of IShellItemArray (a collection of IShellItem). Member order
/// is the native vtable order and must not be changed.
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ShellItemArray),
]
internal interface IShellItemArray
{
    [return: MarshalAs(UnmanagedType.Interface)]
    object BindToHandler(IBindCtx pbc, [In] ref Guid rbhid, [In] ref Guid riid);

    [return: MarshalAs(UnmanagedType.Interface)]
    object GetPropertyStore(int flags, [In] ref Guid riid);

    [return: MarshalAs(UnmanagedType.Interface)]
    object GetPropertyDescriptionList([In] ref PKEY keyType, [In] ref Guid riid);

    // Combines the items' SFGAO attributes per dwAttribFlags (AND/OR/APPCOMPAT).
    uint GetAttributes(SIATTRIBFLAGS dwAttribFlags, uint sfgaoMask);

    uint GetCount();

    IShellItem GetItemAt(uint dwIndex);

    [return: MarshalAs(UnmanagedType.Interface)]
    object EnumItems();
}
/// <summary>
/// Shell Namespace helper 2
/// </summary>
/// <remarks>
/// COM import of IShellItem2. Member order is the native vtable order and
/// must not be changed. The IShellItem members are redeclared so the managed
/// vtable matches the native interface inheritance.
/// </remarks>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ShellItem2),
]
interface IShellItem2 : IShellItem
{
    #region IShellItem redeclarations
    [return: MarshalAs(UnmanagedType.Interface)]
    new object BindToHandler([In] IBindCtx pbc, [In] ref Guid bhid, [In] ref Guid riid);

    new IShellItem GetParent();

    [return: MarshalAs(UnmanagedType.LPWStr)]
    new string GetDisplayName(SIGDN sigdnName);

    new SFGAO GetAttributes(SFGAO sfgaoMask);

    new int Compare(IShellItem psi, SICHINT hint);
    #endregion

    [return: MarshalAs(UnmanagedType.Interface)]
    object GetPropertyStore(
        GPS flags,
        [In] ref Guid riid);

    [return: MarshalAs(UnmanagedType.Interface)]
    object GetPropertyStoreWithCreateObject(
        GPS flags,
        [MarshalAs(UnmanagedType.IUnknown)] object punkCreateObject, // factory for low-rights creation of type ICreateObject
        [In] ref Guid riid);

    [return: MarshalAs(UnmanagedType.Interface)]
    object GetPropertyStoreForKeys(
        IntPtr rgKeys,
        uint cKeys,
        GPS flags,
        [In] ref Guid riid);

    [return: MarshalAs(UnmanagedType.Interface)]
    object GetPropertyDescriptionList(
        IntPtr keyType,
        [In] ref Guid riid);

    // Ensures any cached information in this item is up to date, or returns __HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND) if the item does not exist.
    void Update(IBindCtx pbc);

    /// <SecurityNote>
    /// Critical : Calls critical methods
    /// </SecurityNote>
    [SecurityCritical]
    PROPVARIANT GetProperty(IntPtr key);

    Guid GetCLSID(IntPtr key);

    FILETIME GetFileTime(IntPtr key);

    int GetInt32(IntPtr key);

    [return: MarshalAs(UnmanagedType.LPWStr)]
    string GetString(IntPtr key);

    uint GetUInt32(IntPtr key);

    ulong GetUInt64(IntPtr key);

    // Fixed: was declared void with a [return:] marshaling attribute, which
    // dropped the BOOL the native method returns through its retval out-param.
    [return: MarshalAs(UnmanagedType.Bool)]
    bool GetBool(IntPtr key);
}
/// <summary>
/// COM import of IShellLinkW (shell shortcut manipulation). Member order is
/// the native vtable order and must not be changed.
/// </summary>
[
    ComImport,
    InterfaceTypeAttribute(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ShellLink),
]
internal interface IShellLinkW
{
    // Caller supplies the buffers; cch* parameters give their capacities.
    void GetPath([Out, MarshalAs(UnmanagedType.LPWStr)] StringBuilder pszFile, int cchMaxPath, [In, Out] WIN32_FIND_DATAW pfd, SLGP fFlags);
    void GetIDList(out IntPtr ppidl);
    void SetIDList(IntPtr pidl);
    void GetDescription([Out, MarshalAs(UnmanagedType.LPWStr)] StringBuilder pszFile, int cchMaxName);
    void SetDescription([MarshalAs(UnmanagedType.LPWStr)] string pszName);
    void GetWorkingDirectory([Out, MarshalAs(UnmanagedType.LPWStr)] StringBuilder pszDir, int cchMaxPath);
    void SetWorkingDirectory([MarshalAs(UnmanagedType.LPWStr)] string pszDir);
    void GetArguments([Out, MarshalAs(UnmanagedType.LPWStr)] StringBuilder pszArgs, int cchMaxPath);
    void SetArguments([MarshalAs(UnmanagedType.LPWStr)] string pszArgs);
    short GetHotKey();
    void SetHotKey(short wHotKey);
    uint GetShowCmd();
    void SetShowCmd(uint iShowCmd);
    void GetIconLocation([Out, MarshalAs(UnmanagedType.LPWStr)] StringBuilder pszIconPath, int cchIconPath, out int piIcon);
    void SetIconLocation([MarshalAs(UnmanagedType.LPWStr)] string pszIconPath, int iIcon);
    void SetRelativePath([MarshalAs(UnmanagedType.LPWStr)] string pszPathRel, uint dwReserved);
    // Resolves a possibly-moved link target; fFlags are the native SLR_* flags.
    void Resolve(IntPtr hwnd, uint fFlags);
    void SetPath([MarshalAs(UnmanagedType.LPWStr)] string pszFile);
}
// Taskbar tab management. NOTE: member order is the COM vtable order — do not reorder.
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.TaskbarList),
]
internal interface ITaskbarList
{
    /// <summary>
    /// This function must be called first to validate use of other members.
    /// </summary>
    void HrInit();
    /// <summary>
    /// This function adds a tab for hwnd to the taskbar.
    /// </summary>
    /// <param name="hwnd">The HWND for which to add the tab.</param>
    void AddTab(IntPtr hwnd);
    /// <summary>
    /// This function deletes a tab for hwnd from the taskbar.
    /// </summary>
    /// <param name="hwnd">The HWND for which the tab is to be deleted.</param>
    void DeleteTab(IntPtr hwnd);
    /// <summary>
    /// This function activates the tab associated with hwnd on the taskbar.
    /// </summary>
    /// <param name="hwnd">The HWND for which the tab is to be activated.</param>
    void ActivateTab(IntPtr hwnd);
    /// <summary>
    /// This function marks hwnd in the taskbar as the active tab.
    /// </summary>
    /// <param name="hwnd">The HWND to activate.</param>
    void SetActiveAlt(IntPtr hwnd);
}
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.TaskbarList2),
]
internal interface ITaskbarList2 : ITaskbarList
{
    // COM interface inheritance requires redeclaring base members to preserve vtable order.
    #region ITaskbarList redeclaration
    new void HrInit();
    new void AddTab(IntPtr hwnd);
    new void DeleteTab(IntPtr hwnd);
    new void ActivateTab(IntPtr hwnd);
    new void SetActiveAlt(IntPtr hwnd);
    #endregion
    /// <summary>
    /// Marks a window as full-screen.
    /// </summary>
    /// <param name="hwnd">The handle of the window to be marked.</param>
    /// <param name="fFullscreen">A Boolean value marking the desired full-screen status of the window.</param>
    /// <remarks>
    /// When fFullscreen is set to true, the Shell treats this window as a full-screen window, and the taskbar
    /// is moved to the bottom of the z-order when this window is active. Setting the value of fFullscreen to false
    /// removes the full-screen marking, but <i>does not</i> cause the Shell to treat the window as though it were
    /// definitely not full-screen. With a false fFullscreen value, the Shell depends on its automatic detection facility
    /// to specify how the window should be treated, possibly still flagging the window as full-screen.
    /// </remarks>
    void MarkFullscreenWindow(IntPtr hwnd, [MarshalAs(UnmanagedType.Bool)] bool fFullscreen);
}
// Used to remove items from the automatic destination lists created when apps or the system call SHAddToRecentDocs to report usage of a document.
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ApplicationDestinations)
]
internal interface IApplicationDestinations
{
    // Set the App User Model ID for the application removing destinations from its list. If an AppID is not provided
    // via this method, the system will use a heuristically determined ID. This method must be called before
    // RemoveDestination or RemoveAllDestinations.
    void SetAppID([In, MarshalAs(UnmanagedType.LPWStr)] string pszAppID);
    // Remove an IShellItem or an IShellLink from the automatic destination list.
    void RemoveDestination([MarshalAs(UnmanagedType.IUnknown)] object punk);
    // Clear the frequent and recent destination lists for this application.
    void RemoveAllDestinations();
}
/// <summary>
/// Allows an application to retrieve the most recent and frequent documents opened in that app, as reported via SHAddToRecentDocs
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ApplicationDocumentLists)
]
internal interface IApplicationDocumentLists
{
    /// <summary>
    /// Set the App User Model ID for the application retrieving this list. If an AppID is not provided via this method,
    /// the system will use a heuristically determined ID. This method must be called before GetList.
    /// </summary>
    /// <param name="pszAppID">App Id.</param>
    void SetAppID([MarshalAs(UnmanagedType.LPWStr)] string pszAppID);
    /// <summary>
    /// Retrieve an IEnumObjects or IObjectArray for IShellItems and/or IShellLinks.
    /// Items may appear in both the frequent and recent lists.
    /// </summary>
    /// <param name="listtype">Which document list (frequent or recent) to retrieve.</param>
    /// <param name="cItemsDesired">Maximum number of items to retrieve.</param>
    /// <param name="riid">IID of the interface to return for the list object.</param>
    /// <returns>The requested list object; cast to the interface identified by <paramref name="riid"/>.</returns>
    [return: MarshalAs(UnmanagedType.IUnknown)]
    object GetList([In] APPDOCLISTTYPE listtype, [In] uint cItemsDesired, [In] ref Guid riid);
}
// Custom Destination List
// Exposes methods that allow an application to provide a custom Jump List of destinations and tasks.
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.CustomDestinationList)
]
internal interface ICustomDestinationList
{
    void SetAppID([In, MarshalAs(UnmanagedType.LPWStr)] string pszAppID);
    // Retrieve IObjectArray of IShellItems or IShellLinks that represent removed destinations
    [return: MarshalAs(UnmanagedType.Interface)]
    object BeginList(out uint pcMaxSlots, [In] ref Guid riid);
    // PreserveSig because this will return custom errors when attempting to add unregistered ShellItems.
    // Can't readily detect that case without just trying to append it.
    [PreserveSig]
    HRESULT AppendCategory([MarshalAs(UnmanagedType.LPWStr)] string pszCategory, IObjectArray poa);
    void AppendKnownCategory(KDC category);
    [PreserveSig]
    HRESULT AddUserTasks(IObjectArray poa);
    // Commits the list started by BeginList; must be called for changes to take effect.
    void CommitList();
    // Retrieve IObjectCollection of IShellItems
    [return: MarshalAs(UnmanagedType.Interface)]
    object GetRemovedDestinations([In] ref Guid riid);
    void DeleteList([MarshalAs(UnmanagedType.LPWStr)] string pszAppID);
    // Discards the list-building transaction started by BeginList without committing.
    void AbortList();
}
/// <summary>
/// Provides access to the App User Model ID on objects supporting this value.
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ObjectWithAppUserModelId)
]
internal interface IObjectWithAppUserModelId
{
    // Sets the Application User Model ID associated with the object.
    void SetAppID([MarshalAs(UnmanagedType.LPWStr)] string pszAppID);
    // Gets the Application User Model ID associated with the object.
    [return: MarshalAs(UnmanagedType.LPWStr)]
    string GetAppID();
};
/// <summary>
/// Provides access to the ProgID associated with an object
/// </summary>
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.ObjectWithProgId)
]
internal interface IObjectWithProgId
{
    // Sets the ProgID associated with the object.
    void SetProgID([MarshalAs(UnmanagedType.LPWStr)] string pszProgID);
    // Gets the ProgID associated with the object.
    [return: MarshalAs(UnmanagedType.LPWStr)]
    string GetProgID();
};
// Windows 7+ taskbar extensions: progress display, tabbed thumbnails, thumbnail toolbars, overlay icons.
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.TaskbarList3),
]
internal interface ITaskbarList3 : ITaskbarList2
{
    // COM interface inheritance requires redeclaring base members to preserve vtable order.
    #region ITaskbarList2 redeclaration
    #region ITaskbarList redeclaration
    new void HrInit();
    new void AddTab(IntPtr hwnd);
    new void DeleteTab(IntPtr hwnd);
    new void ActivateTab(IntPtr hwnd);
    new void SetActiveAlt(IntPtr hwnd);
    #endregion
    new void MarkFullscreenWindow(IntPtr hwnd, [MarshalAs(UnmanagedType.Bool)] bool fFullscreen);
    #endregion
    [PreserveSig]
    HRESULT SetProgressValue(IntPtr hwnd, ulong ullCompleted, ulong ullTotal);
    [PreserveSig]
    HRESULT SetProgressState(IntPtr hwnd, TBPF tbpFlags);
    [PreserveSig]
    HRESULT RegisterTab(IntPtr hwndTab, IntPtr hwndMDI);
    [PreserveSig]
    HRESULT UnregisterTab(IntPtr hwndTab);
    [PreserveSig]
    HRESULT SetTabOrder(IntPtr hwndTab, IntPtr hwndInsertBefore);
    [PreserveSig]
    HRESULT SetTabActive(IntPtr hwndTab, IntPtr hwndMDI, uint dwReserved);
    [PreserveSig]
    HRESULT ThumbBarAddButtons(IntPtr hwnd, uint cButtons, [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] THUMBBUTTON[] pButtons);
    [PreserveSig]
    HRESULT ThumbBarUpdateButtons(IntPtr hwnd, uint cButtons, [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] THUMBBUTTON[] pButtons);
    [PreserveSig]
    HRESULT ThumbBarSetImageList(IntPtr hwnd, [MarshalAs(UnmanagedType.IUnknown)] object himl);
    [PreserveSig]
    HRESULT SetOverlayIcon(IntPtr hwnd, IntPtr hIcon, [MarshalAs(UnmanagedType.LPWStr)] string pszDescription);
    [PreserveSig]
    HRESULT SetThumbnailTooltip(IntPtr hwnd, [MarshalAs(UnmanagedType.LPWStr)] string pszTip);
    // Using RefRECT to make passing NULL possible. Removes clipping from the HWND.
    [PreserveSig]
    HRESULT SetThumbnailClip(IntPtr hwnd, RefRECT prcClip);
}
// Windows 7+ taskbar extensions, revision 4: adds per-tab property control (SetTabProperties).
// BUGFIX: this interface was declared with Guid(IID.TaskbarList3); ITaskbarList4 has its own
// IID (c43dc798-95d1-4bea-9030-bb99e2983a1a). Querying with ITaskbarList3's IID yields the
// wrong vtable and SetTabProperties would call past its end.
[
    ComImport,
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
    Guid(IID.TaskbarList4),
]
internal interface ITaskbarList4 : ITaskbarList3
{
    // COM interface inheritance requires redeclaring base members to preserve vtable order.
    #region ITaskbarList3 redeclaration
    #region ITaskbarList2 redeclaration
    #region ITaskbarList redeclaration
    new void HrInit();
    new void AddTab(IntPtr hwnd);
    new void DeleteTab(IntPtr hwnd);
    new void ActivateTab(IntPtr hwnd);
    new void SetActiveAlt(IntPtr hwnd);
    #endregion
    new void MarkFullscreenWindow(IntPtr hwnd, [MarshalAs(UnmanagedType.Bool)] bool fFullscreen);
    #endregion
    [PreserveSig] new HRESULT SetProgressValue(IntPtr hwnd, ulong ullCompleted, ulong ullTotal);
    [PreserveSig] new HRESULT SetProgressState(IntPtr hwnd, TBPF tbpFlags);
    [PreserveSig] new HRESULT RegisterTab(IntPtr hwndTab, IntPtr hwndMDI);
    [PreserveSig] new HRESULT UnregisterTab(IntPtr hwndTab);
    [PreserveSig] new HRESULT SetTabOrder(IntPtr hwndTab, IntPtr hwndInsertBefore);
    [PreserveSig] new HRESULT SetTabActive(IntPtr hwndTab, IntPtr hwndMDI, uint dwReserved);
    [PreserveSig] new HRESULT ThumbBarAddButtons(IntPtr hwnd, uint cButtons, [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] THUMBBUTTON[] pButtons);
    [PreserveSig] new HRESULT ThumbBarUpdateButtons(IntPtr hwnd, uint cButtons, [MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] THUMBBUTTON[] pButtons);
    [PreserveSig] new HRESULT ThumbBarSetImageList(IntPtr hwnd, [MarshalAs(UnmanagedType.IUnknown)] object himl);
    [PreserveSig] new HRESULT SetOverlayIcon(IntPtr hwnd, IntPtr hIcon, [MarshalAs(UnmanagedType.LPWStr)] string pszDescription);
    [PreserveSig] new HRESULT SetThumbnailTooltip(IntPtr hwnd, [MarshalAs(UnmanagedType.LPWStr)] string pszTip);
    // Using RefRECT to make passing NULL possible. Removes clipping from the HWND.
    [PreserveSig] new HRESULT SetThumbnailClip(IntPtr hwnd, RefRECT prcClip);
    #endregion
    // Sets properties (STPF flags) for a taskbar tab previously registered via RegisterTab.
    void SetTabProperties(IntPtr hwndTab, STPF stpFlags);
}
#endregion
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Diagnostics;
using System.Reflection.Metadata.Ecma335;
namespace System.Reflection.Metadata
{
public struct MethodDefinition
{
    private readonly MetadataReader _reader;

    // Workaround: JIT doesn't generate good code for nested structures, so use RowId.
    private readonly uint _treatmentAndRowId;

    internal MethodDefinition(MetadataReader reader, uint treatmentAndRowId)
    {
        Debug.Assert(reader != null);
        Debug.Assert(treatmentAndRowId != 0);

        _reader = reader;
        _treatmentAndRowId = treatmentAndRowId;
    }

    // Low bits of the packed value: the row id within the MethodDef table.
    private uint RowId
    {
        get { return _treatmentAndRowId & TokenTypeIds.RIDMask; }
    }

    // High bits of the packed value: the treatment applied to this row (0 = none).
    private MethodDefTreatment Treatment
    {
        get { return (MethodDefTreatment)(_treatmentAndRowId >> TokenTypeIds.RowIdBitCount); }
    }

    private MethodDefinitionHandle Handle
    {
        get { return MethodDefinitionHandle.FromRowId(RowId); }
    }

    // Name of the method; projected when a treatment is in effect.
    public StringHandle Name
    {
        get
        {
            return Treatment == 0 ? _reader.MethodDefTable.GetName(Handle) : GetProjectedName();
        }
    }

    // Blob containing the method signature; projected when a treatment is in effect.
    public BlobHandle Signature
    {
        get
        {
            return Treatment == 0 ? _reader.MethodDefTable.GetSignature(Handle) : GetProjectedSignature();
        }
    }

    // RVA of the method body; projected methods report 0.
    public int RelativeVirtualAddress
    {
        get
        {
            return Treatment == 0 ? _reader.MethodDefTable.GetRva(Handle) : GetProjectedRelativeVirtualAddress();
        }
    }

    public MethodAttributes Attributes
    {
        get
        {
            return Treatment == 0 ? _reader.MethodDefTable.GetFlags(Handle) : GetProjectedFlags();
        }
    }

    public MethodImplAttributes ImplAttributes
    {
        get
        {
            return Treatment == 0 ? _reader.MethodDefTable.GetImplFlags(Handle) : GetProjectedImplFlags();
        }
    }

    public TypeDefinitionHandle GetDeclaringType()
    {
        return _reader.GetDeclaringType(Handle);
    }

    public ParameterHandleCollection GetParameters()
    {
        return new ParameterHandleCollection(_reader, Handle);
    }

    public GenericParameterHandleCollection GetGenericParameters()
    {
        return _reader.GenericParamTable.FindGenericParametersForMethod(Handle);
    }

    // P/Invoke information, or default(MethodImport) when the method has no ImplMap row.
    public MethodImport GetImport()
    {
        uint implMapRid = _reader.ImplMapTable.FindImplForMethod(Handle);
        return implMapRid == 0 ? default(MethodImport) : _reader.ImplMapTable[implMapRid];
    }

    public CustomAttributeHandleCollection GetCustomAttributes()
    {
        return new CustomAttributeHandleCollection(_reader, Handle);
    }

    public DeclarativeSecurityAttributeHandleCollection GetDeclarativeSecurityAttributes()
    {
        return new DeclarativeSecurityAttributeHandleCollection(_reader, Handle);
    }

    #region Projections

    private StringHandle GetProjectedName()
    {
        // Dispose-method treatment substitutes the virtual string "Dispose" for the real name.
        return (Treatment & MethodDefTreatment.KindMask) == MethodDefTreatment.DisposeMethod
            ? StringHandle.FromVirtualIndex(StringHandle.VirtualIndex.Dispose)
            : _reader.MethodDefTable.GetName(Handle);
    }

    private MethodAttributes GetProjectedFlags()
    {
        MethodAttributes result = _reader.MethodDefTable.GetFlags(Handle);
        MethodDefTreatment treatment = Treatment;

        // NOTE: the order of these adjustments matters — MarkPublicFlag overrides the
        // Private access set for hidden interface implementations.
        if ((treatment & MethodDefTreatment.KindMask) == MethodDefTreatment.HiddenInterfaceImplementation)
        {
            result = (result & ~MethodAttributes.MemberAccessMask) | MethodAttributes.Private;
        }

        if ((treatment & MethodDefTreatment.MarkAbstractFlag) != 0)
        {
            result |= MethodAttributes.Abstract;
        }

        if ((treatment & MethodDefTreatment.MarkPublicFlag) != 0)
        {
            result = (result & ~MethodAttributes.MemberAccessMask) | MethodAttributes.Public;
        }

        return result | MethodAttributes.HideBySig;
    }

    private MethodImplAttributes GetProjectedImplFlags()
    {
        MethodImplAttributes result = _reader.MethodDefTable.GetImplFlags(Handle);
        MethodDefTreatment kind = Treatment & MethodDefTreatment.KindMask;

        if (kind == MethodDefTreatment.DelegateMethod)
        {
            result |= MethodImplAttributes.Runtime;
        }
        else if (kind == MethodDefTreatment.DisposeMethod ||
                 kind == MethodDefTreatment.AttributeMethod ||
                 kind == MethodDefTreatment.InterfaceMethod ||
                 kind == MethodDefTreatment.HiddenInterfaceImplementation ||
                 kind == MethodDefTreatment.Other)
        {
            result |= MethodImplAttributes.Runtime | MethodImplAttributes.InternalCall;
        }

        return result;
    }

    private BlobHandle GetProjectedSignature()
    {
        return _reader.MethodDefTable.GetSignature(Handle);
    }

    private int GetProjectedRelativeVirtualAddress()
    {
        // Projected methods have no IL body.
        return 0;
    }

    #endregion
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System;
using System.Collections.Generic;
using System.Xml;
using System.Management.Automation.Internal;
namespace Microsoft.PowerShell.Commands.Internal.Format
{
/// <summary>
/// class to load the XML document into data structures.
/// It encapsulates the file format specific code
/// </summary>
internal sealed partial class TypeInfoDataBaseLoader : XmlLoaderBase
{
/// <summary>
/// Loads the definition of a table control from the given XML node.
/// Child node cardinality: HideTableHeaders 0..1, AutoSize 0..1, TableHeaders 0..1,
/// TableRowEntries exactly 1. Returns null on any fatal loading error (after reporting it).
/// </summary>
private ControlBase LoadTableControl(XmlNode controlNode)
{
    using (this.StackFrame(controlNode))
    {
        TableControlBody tableBody = new TableControlBody();
        bool headersNodeFound = false; // cardinality 0..1
        bool rowEntriesNodeFound = false; // cardinality 1
        bool hideHeadersNodeFound = false; // cardinality 0..1
        bool autosizeNodeFound = false; // cardinality 0..1
        foreach (XmlNode n in controlNode.ChildNodes)
        {
            if (MatchNodeName(n, XmlTags.HideTableHeadersNode))
            {
                if (hideHeadersNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                hideHeadersNodeFound = true;
                if (!this.ReadBooleanNode(n, out tableBody.header.hideHeader))
                {
                    return null; //fatal error
                }
            }
            else if (MatchNodeName(n, XmlTags.AutoSizeNode))
            {
                if (autosizeNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                autosizeNodeFound = true;
                bool tempVal;
                if (!this.ReadBooleanNode(n, out tempVal))
                {
                    return null; // fatal error
                }
                tableBody.autosize = tempVal;
            }
            else if (MatchNodeName(n, XmlTags.TableHeadersNode))
            {
                if (headersNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                headersNodeFound = true;
                // now read the columns header section
                LoadHeadersSection(tableBody, n);
                if (tableBody.header.columnHeaderDefinitionList == null)
                {
                    // a null list (as opposed to an empty one) means there was a failure
                    return null; // fatal error
                }
            }
            else if (MatchNodeName(n, XmlTags.TableRowEntriesNode))
            {
                if (rowEntriesNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                rowEntriesNodeFound = true;
                // now read the columns section
                LoadRowEntriesSection(tableBody, n);
                if (tableBody.defaultDefinition == null)
                {
                    return null; // fatal error
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        }
        if (!rowEntriesNodeFound)
        {
            this.ReportMissingNode(XmlTags.TableRowEntriesNode);
            return null; // fatal error
        }
        // CHECK: verify consistency of headers and row entries
        if (tableBody.header.columnHeaderDefinitionList.Count != 0)
        {
            // CHECK: if there are headers in the list, their number has to match
            // the default row definition item count
            if (tableBody.header.columnHeaderDefinitionList.Count !=
                tableBody.defaultDefinition.rowItemDefinitionList.Count)
            {
                //Error at XPath {0} in file {1}: Header item count = {2} does not match default row item count = {3}.
                this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.IncorrectHeaderItemCount, ComputeCurrentXPath(), FilePath,
                    tableBody.header.columnHeaderDefinitionList.Count,
                    tableBody.defaultDefinition.rowItemDefinitionList.Count));
                return null; // fatal error
            }
        }
        // CHECK: if there are alternative row definitions, they should have the same # of items
        if (tableBody.optionalDefinitionList.Count != 0)
        {
            int k = 0;
            foreach (TableRowDefinition trd in tableBody.optionalDefinitionList)
            {
                if (trd.rowItemDefinitionList.Count !=
                    tableBody.defaultDefinition.rowItemDefinitionList.Count)
                {
                    //Error at XPath {0} in file {1}: Row item count = {2} on alternative set #{3} does not match default row item count = {4}.
                    this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.IncorrectRowItemCount, ComputeCurrentXPath(), FilePath,
                        trd.rowItemDefinitionList.Count,
                        tableBody.defaultDefinition.rowItemDefinitionList.Count, k + 1));
                    return null; // fatal error
                }
                k++;
            }
        }
        return tableBody;
    }
}
/// <summary>
/// Loads all column header definitions under the TableHeaders node into
/// tableBody.header.columnHeaderDefinitionList. On any invalid header, the whole
/// list is set to null to signal a fatal error to the caller.
/// </summary>
private void LoadHeadersSection(TableControlBody tableBody, XmlNode headersNode)
{
    using (this.StackFrame(headersNode))
    {
        int columnIndex = 0;
        foreach (XmlNode n in headersNode.ChildNodes)
        {
            if (MatchNodeName(n, XmlTags.TableColumnHeaderNode))
            {
                TableColumnHeaderDefinition chd = LoadColumnHeaderDefinition(n, columnIndex++);
                if (chd != null)
                    tableBody.header.columnHeaderDefinitionList.Add(chd);
                else
                {
                    //Error at XPath {0} in file {1}: Column header definition is invalid; all headers are discarded.
                    this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.InvalidColumnHeader, ComputeCurrentXPath(), FilePath));
                    tableBody.header.columnHeaderDefinitionList = null;
                    return; // fatal error
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        }
        // NOTICE: the list can be empty if no entries were found
    }
}
/// <summary>
/// Loads a single TableColumnHeader node (Label 0..1, Width 0..1, Alignment 0..1).
/// Returns null (after reporting) on any duplicate or invalid child node.
/// </summary>
private TableColumnHeaderDefinition LoadColumnHeaderDefinition(XmlNode columnHeaderNode, int index)
{
    using (this.StackFrame(columnHeaderNode, index))
    {
        TableColumnHeaderDefinition chd = new TableColumnHeaderDefinition();
        bool labelNodeFound = false; // cardinality 0..1
        bool widthNodeFound = false; // cardinality 0..1
        bool alignmentNodeFound = false; // cardinality 0..1
        foreach (XmlNode n in columnHeaderNode.ChildNodes)
        {
            if (MatchNodeNameWithAttributes(n, XmlTags.LabelNode))
            {
                if (labelNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                labelNodeFound = true;
                chd.label = LoadLabel(n);
                if (chd.label == null)
                {
                    return null; // fatal error
                }
            }
            else if (MatchNodeName(n, XmlTags.WidthNode))
            {
                if (widthNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                widthNodeFound = true;
                int wVal;
                if (ReadPositiveIntegerValue(n, out wVal))
                {
                    chd.width = wVal;
                }
                else
                {
                    //Error at XPath {0} in file {1}: Invalid {2} value.
                    this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.InvalidNodeValue, ComputeCurrentXPath(), FilePath, XmlTags.WidthNode));
                    return null; //fatal error
                }
            }
            else if (MatchNodeName(n, XmlTags.AlignmentNode))
            {
                if (alignmentNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                alignmentNodeFound = true;
                if (!LoadAlignmentValue(n, out chd.alignment))
                {
                    return null; // fatal error
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        } // foreach
        return chd;
    } // using
}
/// <summary>
/// Reads the mandatory inner text of the node and parses it as a strictly positive
/// integer. Returns false (reporting an error) when the text is missing, not an
/// integer, or not greater than zero; val is -1 unless parsing succeeded.
/// </summary>
private bool ReadPositiveIntegerValue(XmlNode n, out int val)
{
    val = -1;
    string text = GetMandatoryInnerText(n);
    if (text == null)
    {
        return false;
    }

    if (!int.TryParse(text, out val) || val <= 0)
    {
        //Error at XPath {0} in file {1}: A positive integer is expected.
        this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.ExpectPositiveInteger, ComputeCurrentXPath(), FilePath));
        return false;
    }

    return true;
}
/// <summary>
/// Reads a text alignment value ("left"/"right"/"center", case-insensitive) from the
/// node into alignmentValue. Returns false (after reporting) for a missing or
/// unrecognized value.
/// </summary>
private bool LoadAlignmentValue(XmlNode n, out int alignmentValue)
{
    alignmentValue = TextAlignment.Undefined;
    string alignmentString = GetMandatoryInnerText(n);
    if (alignmentString == null)
    {
        return false; // fatal error
    }
    // BUGFIX: compare the string returned by GetMandatoryInnerText (the same value the
    // error message below reports) instead of re-reading n.InnerText, so the value
    // validated and the value reported can never disagree.
    if (string.Equals(alignmentString, XMLStringValues.AlignmentLeft, StringComparison.OrdinalIgnoreCase))
    {
        alignmentValue = TextAlignment.Left;
    }
    else if (string.Equals(alignmentString, XMLStringValues.AlignmentRight, StringComparison.OrdinalIgnoreCase))
    {
        alignmentValue = TextAlignment.Right;
    }
    else if (string.Equals(alignmentString, XMLStringValues.AlignmentCenter, StringComparison.OrdinalIgnoreCase))
    {
        alignmentValue = TextAlignment.Center;
    }
    else
    {
        //Error at XPath {0} in file {1}: "{2}" is not an valid alignment value.
        this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.InvalidAlignmentValue, ComputeCurrentXPath(), FilePath, alignmentString));
        return false; // fatal error
    }
    return true;
}
/// <summary>
/// Loads all TableRowEntry nodes. The entry with no EntrySelectedBy clause becomes the
/// (mandatory, unique) default definition; all others go to optionalDefinitionList.
/// On failure, tableBody.defaultDefinition is left null to signal a fatal error.
/// </summary>
private void LoadRowEntriesSection(TableControlBody tableBody, XmlNode rowEntriesNode)
{
    using (this.StackFrame(rowEntriesNode))
    {
        int rowEntryIndex = 0;
        foreach (XmlNode n in rowEntriesNode.ChildNodes)
        {
            if (MatchNodeName(n, XmlTags.TableRowEntryNode))
            {
                TableRowDefinition trd = LoadRowEntryDefinition(n, rowEntryIndex++);
                if (trd == null)
                {
                    //Error at XPath {0} in file {1}: {2} failed to load.
                    this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.LoadTagFailed, ComputeCurrentXPath(), FilePath, XmlTags.TableRowEntryNode));
                    tableBody.defaultDefinition = null;
                    return; // fatal error
                }
                // determine if we have a default entry and if it's already set
                if (trd.appliesTo == null)
                {
                    if (tableBody.defaultDefinition == null)
                    {
                        tableBody.defaultDefinition = trd;
                    }
                    else
                    {
                        //Error at XPath {0} in file {1}: There cannot be more than one default {2}.
                        this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.TooManyDefaultShapeEntry, ComputeCurrentXPath(), FilePath, XmlTags.TableRowEntryNode));
                        tableBody.defaultDefinition = null;
                        return; // fatal error
                    }
                }
                else
                {
                    tableBody.optionalDefinitionList.Add(trd);
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        }
        if (tableBody.defaultDefinition == null)
        {
            //Error at XPath {0} in file {1}: There must be at least one default {2}.
            this.ReportError(StringUtil.Format(FormatAndOutXmlLoadingStrings.NoDefaultShapeEntry, ComputeCurrentXPath(), FilePath, XmlTags.TableRowEntryNode));
        }
    }
}
/// <summary>
/// Loads a single TableRowEntry node (EntrySelectedBy 0..1, TableColumnItems 1,
/// MultiLine 0..1). Returns null (after reporting) on any duplicate or invalid child.
/// </summary>
private TableRowDefinition LoadRowEntryDefinition(XmlNode rowEntryNode, int index)
{
    using (this.StackFrame(rowEntryNode, index))
    {
        bool appliesToNodeFound = false; // cardinality 0..1
        bool columnEntriesNodeFound = false; // cardinality 1
        bool multiLineFound = false; // cardinality 0..1
        TableRowDefinition trd = new TableRowDefinition();
        foreach (XmlNode n in rowEntryNode.ChildNodes)
        {
            if (MatchNodeName(n, XmlTags.EntrySelectedByNode))
            {
                if (appliesToNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                appliesToNodeFound = true;
                // optional section
                trd.appliesTo = LoadAppliesToSection(n, true);
            }
            else if (MatchNodeName(n, XmlTags.TableColumnItemsNode))
            {
                if (columnEntriesNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; //fatal
                }
                // BUGFIX: this flag was never set, so a duplicate TableColumnItems node was
                // silently accepted instead of being reported (compare appliesToNodeFound
                // and multiLineFound, which are both set after their duplicate checks).
                columnEntriesNodeFound = true;
                LoadColumnEntries(n, trd);
                if (trd.rowItemDefinitionList == null)
                {
                    return null; // fatal error
                }
            }
            else if (MatchNodeName(n, XmlTags.MultiLineNode))
            {
                if (multiLineFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; //fatal
                }
                multiLineFound = true;
                if (!this.ReadBooleanNode(n, out trd.multiLine))
                {
                    return null; //fatal error
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        }
        return trd;
    }
}
/// <summary>
/// Loads all TableColumnItem nodes into trd.rowItemDefinitionList. On the first
/// failing entry the whole list is set to null so the caller sees a fatal error.
/// </summary>
private void LoadColumnEntries(XmlNode columnEntriesNode, TableRowDefinition trd)
{
    using (this.StackFrame(columnEntriesNode))
    {
        int columnEntryIndex = 0;
        foreach (XmlNode n in columnEntriesNode.ChildNodes)
        {
            if (MatchNodeName(n, XmlTags.TableColumnItemNode))
            {
                TableRowItemDefinition rid = LoadColumnEntry(n, columnEntryIndex++);
                if (rid != null)
                {
                    trd.rowItemDefinitionList.Add(rid);
                }
                else
                {
                    // we failed one entry: fatal error to percolate up
                    // remove all the entries
                    trd.rowItemDefinitionList = null;
                    return; // fatal error
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        }
    }
}
/// <summary>
/// Loads a single TableColumnItem node: the expression/format/text directives plus an
/// optional Alignment child (cardinality 0..1). Returns null on any fatal error.
/// </summary>
private TableRowItemDefinition LoadColumnEntry(XmlNode columnEntryNode, int index)
{
    using (this.StackFrame(columnEntryNode, index))
    {
        // process Mshexpression, format string and text token
        ViewEntryNodeMatch match = new ViewEntryNodeMatch(this);
        List<XmlNode> unprocessedNodes = new List<XmlNode>();
        if (!match.ProcessExpressionDirectives(columnEntryNode, unprocessedNodes))
        {
            return null; // fatal error
        }
        TableRowItemDefinition rid = new TableRowItemDefinition();
        // process the remaining nodes
        bool alignmentNodeFound = false; // cardinality 0..1
        foreach (XmlNode n in unprocessedNodes)
        {
            if (MatchNodeName(n, XmlTags.AlignmentNode))
            {
                if (alignmentNodeFound)
                {
                    this.ProcessDuplicateNode(n);
                    return null; // fatal error
                }
                alignmentNodeFound = true;
                if (!LoadAlignmentValue(n, out rid.alignment))
                {
                    return null; // fatal error
                }
            }
            else
            {
                this.ProcessUnknownNode(n);
            }
        }
        // finally build the item to return
        // add either the text token or the MshExpression with optional format string
        if (match.TextToken != null)
        {
            rid.formatTokenList.Add(match.TextToken);
        }
        else if (match.Expression != null)
        {
            FieldPropertyToken fpt = new FieldPropertyToken();
            fpt.expression = match.Expression;
            fpt.fieldFormattingDirective.formatString = match.FormatString;
            rid.formatTokenList.Add(fpt);
        }
        return rid;
    } // using
}
}
}
| |
using System;
using System.IO;
using System.Net;
using System.Text;
using System.Linq;
using System.Threading.Tasks;
using System.Collections.Generic;
using Xamarin.SSO.Client;
namespace XamarinStore
{
/// <summary>
/// Client for the Xamarin store backend: login via Xamarin SSO, product and country
/// lookups (cached in memory), image precaching and order placement.
/// Use the <see cref="Shared"/> instance.
/// </summary>
public class WebService
{
	public static readonly WebService Shared = new WebService ();

	public User CurrentUser { get; set; }
	public Order CurrentOrder { get; set; }

	XamarinSSOClient client = new XamarinSSOClient ("https://auth.xamarin.com", "0c833t3w37jq58dj249dt675a465k6b0rz090zl3jpoa9jw8vz7y6awpj5ox0qmb");

	public WebService ()
	{
		CurrentOrder = new Order ();
	}

	/// <summary>
	/// Authenticates against Xamarin SSO. On success populates <see cref="CurrentUser"/>
	/// and returns true; on failure or exception logs and returns false.
	/// </summary>
	public async Task<bool> Login (string username, string password)
	{
		AccountResponse response;
		try {
			// CreateToken is synchronous; run it off the calling thread.
			var request = Task.Run (() => response = client.CreateToken (username, password));
			response = await request;
			if (response.Success) {
				var user = response.User;
				CurrentUser = new User {
					LastName = user.LastName,
					FirstName = user.FirstName,
					Email = username,
					Token = response.Token
				};
				return true;
			} else {
				Console.WriteLine ("Login failed: {0}", response.Error);
			}
		} catch (Exception ex) {
			Console.WriteLine ("Login failed for some reason...: {0}", ex.Message);
		}
		return false;
	}

	List<Product> products;

	/// <summary>
	/// Returns the product list, fetching it from the backend on first use.
	/// Returns an empty list (not null) when the request fails.
	/// </summary>
	public async Task<List<Product>> GetProducts()
	{
		if (products == null) {
			products = await Task.Factory.StartNew (() => {
				try {
					string extraParams = "";
					//TODO: Get a Monkey!!!
					//extraParams = "?includeMonkeys=true";
					var request = CreateRequest ("products" + extraParams);
					string response = ReadResponseText (request);
					return Newtonsoft.Json.JsonConvert.DeserializeObject<List<Product>> (response);
				} catch (Exception ex) {
					Console.WriteLine (ex);
					return new List<Product> ();
				}
			});
		}
		return products;
	}

	bool hasPreloadedImages;

	/// <summary>
	/// Downloads product images (and kicks off country caching) once per session.
	/// </summary>
	public async Task PreloadImages(float imageWidth)
	{
		if (hasPreloadedImages)
			return;
		hasPreloadedImages = true;
		//Lets precache the countries too
		#pragma warning disable 4014
		GetCountries ();
		#pragma warning restore 4014
		// BUGFIX: await GetProducts() instead of reading the `products` field directly;
		// calling PreloadImages before GetProducts previously threw NullReferenceException.
		var productList = await GetProducts ();
		await Task.Factory.StartNew (() => {
			var imagUrls = productList.SelectMany (x => x.ImageUrls.Select (y => Product.ImageForSize (y, imageWidth))).ToList ();
			imagUrls.ForEach( async (x) => await FileCache.Download(x));
		});
	}

	List<Country> countries = new List<Country>();

	/// <summary>
	/// Returns the country list, cached after the first successful fetch.
	/// Returns an empty list (not null) when the request fails.
	/// </summary>
	public Task<List<Country>> GetCountries()
	{
		return Task.Factory.StartNew (() => {
			try {
				if(countries.Count > 0)
					return countries;
				var request = CreateRequest ("Countries");
				string response = ReadResponseText (request);
				countries = Newtonsoft.Json.JsonConvert.DeserializeObject<List<Country>> (response);
				return countries;
			} catch (Exception ex) {
				Console.WriteLine (ex);
				return new List<Country> ();
			}
		});
	}

	/// <summary>Maps a country display name to its code; null-safe via an empty Country fallback.</summary>
	public async Task<string> GetCountryCode(string country)
	{
		var c = (await GetCountries ()).FirstOrDefault (x => x.Name == country) ?? new Country();
		return c.Code;
	}

	/// <summary>Maps a country code to its display name; null-safe via an empty Country fallback.</summary>
	public async Task<string> GetCountryFromCode(string code)
	{
		var c = (await GetCountries ()).FirstOrDefault (x => x.Code == code) ?? new Country();
		return c.Name;
	}

	//No need to await anything, and no need to spawn a task to return a list.
	#pragma warning disable 1998
	/// <summary>
	/// Returns the hard-coded state list for the United States; empty for other countries.
	/// </summary>
	public async Task<List<string>> GetStates(string country)
	{
		if (country.ToLower () == "united states")
			return new List<string> {
				"Alabama",
				"Alaska",
				"Arizona",
				"Arkansas",
				"California",
				"Colorado",
				"Connecticut",
				"Delaware",
				"District of Columbia",
				"Florida",
				"Georgia",
				"Hawaii",
				"Idaho",
				"Illinois",
				"Indiana",
				"Iowa",
				"Kansas",
				"Kentucky",
				"Louisiana",
				"Maine",
				"Maryland",
				"Massachusetts",
				"Michigan",
				"Minnesota",
				"Mississippi",
				"Missouri",
				"Montana",
				"Nebraska",
				"Nevada",
				"New Hampshire",
				"New Jersey",
				"New Mexico",
				"New York",
				"North Carolina",
				"North Dakota",
				"Ohio",
				"Oklahoma",
				"Oregon",
				"Pennsylvania",
				"Rhode Island",
				"South Carolina",
				"South Dakota",
				"Tennessee",
				"Texas",
				"Utah",
				"Vermont",
				"Virginia",
				"Washington",
				"West Virginia",
				"Wisconsin",
				"Wyoming",
			};
		return new List<string> ();
	}
	#pragma warning restore 1998

	// Builds a GET request for the given API path with JSON content negotiation.
	static HttpWebRequest CreateRequest(string location)
	{
		var request = (HttpWebRequest)WebRequest.Create ("https://xamarin-store-app.xamarin.com/api/"+ location);
		request.Method = "GET";
		request.ContentType = "application/json";
		request.Accept = "application/json";
		return request;
	}

	/// <summary>
	/// POSTs the current order for the given user. When verify is true the order is
	/// only validated; otherwise a successful result also resets CurrentOrder.
	/// Network/parse failures are returned as an unsuccessful OrderResult, not thrown.
	/// </summary>
	public Task<OrderResult> PlaceOrder (User user, bool verify = false) {
		if (user == null) throw new ArgumentNullException("user");
		return Task.Factory.StartNew (() => {
			try {
				var content = Encoding.UTF8.GetBytes (CurrentOrder.GetJson (user));
				var request = CreateRequest ("order" + (verify ? "?verify=1" : ""));
				request.Method = "POST";
				request.ContentLength = content.Length;
				using (Stream s = request.GetRequestStream ()) {
					s.Write (content, 0, content.Length);
				}
				string response = ReadResponseText (request);
				var result = Newtonsoft.Json.JsonConvert.DeserializeObject<OrderResult> (response);
				if(!verify && result.Success)
					CurrentOrder = new Order();
				return result;
			} catch (Exception ex) {
				return new OrderResult {
					Success = false,
					Message = ex.Message,
				};
			}
		});
	}

	// Reads the full response body of the request as UTF-8 text.
	protected static string ReadResponseText (HttpWebRequest req) {
		using (WebResponse resp = req.GetResponse ()) {
			using (Stream s = (resp).GetResponseStream ()) {
				using (var r = new StreamReader (s, Encoding.UTF8)) {
					return r.ReadToEnd ();
				}
			}
		}
	}
}
}
| |
using System;
namespace Versioning
{
public class SalesRecord : Sage_Container, ILinkRecord
{
/* Autogenerated by sage_wrapper_generator.pl */
SageDataObject110.SalesRecord sr11;
SageDataObject120.SalesRecord sr12;
SageDataObject130.SalesRecord sr13;
SageDataObject140.SalesRecord sr14;
SageDataObject150.SalesRecord sr15;
SageDataObject160.SalesRecord sr16;
SageDataObject170.SalesRecord sr17;
/// <summary>
/// Wraps the version-specific Sage SalesRecord object and binds its Fields collection.
/// Throws InvalidOperationException for an unsupported version.
/// </summary>
public SalesRecord(object inner, int version)
    : base(version) {
    switch (m_version) {
        case 11:
            sr11 = (SageDataObject110.SalesRecord)inner;
            m_fields = new Fields(sr11.Fields,m_version);
            return;
        case 12:
            sr12 = (SageDataObject120.SalesRecord)inner;
            m_fields = new Fields(sr12.Fields,m_version);
            return;
        case 13:
            sr13 = (SageDataObject130.SalesRecord)inner;
            m_fields = new Fields(sr13.Fields,m_version);
            return;
        case 14:
            sr14 = (SageDataObject140.SalesRecord)inner;
            m_fields = new Fields(sr14.Fields,m_version);
            return;
        case 15:
            sr15 = (SageDataObject150.SalesRecord)inner;
            m_fields = new Fields(sr15.Fields,m_version);
            return;
        case 16:
            sr16 = (SageDataObject160.SalesRecord)inner;
            m_fields = new Fields(sr16.Fields,m_version);
            return;
        case 17:
            sr17 = (SageDataObject170.SalesRecord)inner;
            m_fields = new Fields(sr17.Fields,m_version);
            return;
        default: throw new InvalidOperationException("ver");
    }
}
/* Autogenerated with record_generator.pl */
// Field/record name constants.
// NOTE(review): neither constant is referenced in this part of the class — confirm use elsewhere before removing.
const string ACCOUNT_REF = "ACCOUNT_REF";
const string SALES_RECORD = "SalesRecord";
public bool AddNew() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.AddNew();
break;
}
case 12: {
ret = sr12.AddNew();
break;
}
case 13: {
ret = sr13.AddNew();
break;
}
case 14: {
ret = sr14.AddNew();
break;
}
case 15: {
ret = sr15.AddNew();
break;
}
case 16: {
ret = sr16.AddNew();
break;
}
case 17: {
ret = sr17.AddNew();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool Update() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.Update();
break;
}
case 12: {
ret = sr12.Update();
break;
}
case 13: {
ret = sr13.Update();
break;
}
case 14: {
ret = sr14.Update();
break;
}
case 15: {
ret = sr15.Update();
break;
}
case 16: {
ret = sr16.Update();
break;
}
case 17: {
ret = sr17.Update();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool Edit() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.Edit();
break;
}
case 12: {
ret = sr12.Edit();
break;
}
case 13: {
ret = sr13.Edit();
break;
}
case 14: {
ret = sr14.Edit();
break;
}
case 15: {
ret = sr15.Edit();
break;
}
case 16: {
ret = sr16.Edit();
break;
}
case 17: {
ret = sr17.Edit();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool Find(bool partial) {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.Find(partial);
break;
}
case 12: {
ret = sr12.Find(partial);
break;
}
case 13: {
ret = sr13.Find(partial);
break;
}
case 14: {
ret = sr14.Find(partial);
break;
}
case 15: {
ret = sr15.Find(partial);
break;
}
case 16: {
ret = sr16.Find(partial);
break;
}
case 17: {
ret = sr17.Find(partial);
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool MoveFirst() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.MoveFirst();
break;
}
case 12: {
ret = sr12.MoveFirst();
break;
}
case 13: {
ret = sr13.MoveFirst();
break;
}
case 14: {
ret = sr14.MoveFirst();
break;
}
case 15: {
ret = sr15.MoveFirst();
break;
}
case 16: {
ret = sr16.MoveFirst();
break;
}
case 17: {
ret = sr17.MoveFirst();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool MoveNext() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.MoveNext();
break;
}
case 12: {
ret = sr12.MoveNext();
break;
}
case 13: {
ret = sr13.MoveNext();
break;
}
case 14: {
ret = sr14.MoveNext();
break;
}
case 15: {
ret = sr15.MoveNext();
break;
}
case 16: {
ret = sr16.MoveNext();
break;
}
case 17: {
ret = sr17.MoveNext();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool MoveLast() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.MoveLast();
break;
}
case 12: {
ret = sr12.MoveLast();
break;
}
case 13: {
ret = sr13.MoveLast();
break;
}
case 14: {
ret = sr14.MoveLast();
break;
}
case 15: {
ret = sr15.MoveLast();
break;
}
case 16: {
ret = sr16.MoveLast();
break;
}
case 17: {
ret = sr17.MoveLast();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool MovePrev() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.MovePrev();
break;
}
case 12: {
ret = sr12.MovePrev();
break;
}
case 13: {
ret = sr13.MovePrev();
break;
}
case 14: {
ret = sr14.MovePrev();
break;
}
case 15: {
ret = sr15.MovePrev();
break;
}
case 16: {
ret = sr16.MovePrev();
break;
}
case 17: {
ret = sr17.MovePrev();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool CanRemove() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.CanRemove();
break;
}
case 12: {
ret = sr12.CanRemove();
break;
}
case 13: {
ret = sr13.CanRemove();
break;
}
case 14: {
ret = sr14.CanRemove();
break;
}
case 15: {
ret = sr15.CanRemove();
break;
}
case 16: {
ret = sr16.CanRemove();
break;
}
case 17: {
ret = sr17.CanRemove();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public bool Remove() {
bool ret;
switch (m_version) {
case 11: {
ret = sr11.Remove();
break;
}
case 12: {
ret = sr12.Remove();
break;
}
case 13: {
ret = sr13.Remove();
break;
}
case 14: {
ret = sr14.Remove();
break;
}
case 15: {
ret = sr15.Remove();
break;
}
case 16: {
ret = sr16.Remove();
break;
}
case 17: {
ret = sr17.Remove();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
public object Link {
get {
object ret;
switch (m_version) {
case 11: {
ret = sr11.Link;
break;
}
case 12: {
ret = sr12.Link;
break;
}
case 13: {
ret = sr13.Link;
break;
}
case 14: {
ret = sr14.Link;
break;
}
case 15: {
ret = sr15.Link;
break;
}
case 16: {
ret = sr16.Link;
break;
}
case 17: {
ret = sr17.Link;
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
set {
switch (m_version) {
case 11: {
sr11.Link = value;
break;
}
case 12: {
sr12.Link = value;
break;
}
case 13: {
sr13.Link = value;
break;
}
case 14: {
sr14.Link = value;
break;
}
case 15: {
sr15.Link = value;
break;
}
case 16: {
sr16.Link = value;
break;
}
case 17: {
sr17.Link = value;
break;
}
}
}
}
// Manually added
public void Move(int IRows) {
switch (m_version) {
case 11: {
sr11.Move(IRows);
break;
}
case 12: {
sr12.Move(IRows);
break;
}
case 13: {
sr13.Move(IRows);
break;
}
case 14: {
sr14.Move(IRows);
break;
}
case 15: {
sr15.Move(IRows);
break;
}
case 16: {
sr16.Move(IRows);
break;
}
case 17: {
sr17.Move(IRows);
break;
}
default: throw new InvalidOperationException("ver");
}
}
public int Count {
get {
int ret;
switch (m_version) {
case 11: {
ret = sr11.Count;
break;
}
case 12: {
ret = sr12.Count;
break;
}
case 13: {
ret = sr13.Count;
break;
}
case 14: {
ret = sr14.Count;
break;
}
case 15: {
ret = sr15.Count;
break;
}
case 16: {
ret = sr16.Count;
break;
}
case 17: {
ret = sr17.Count;
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
}
public int RecordNumber {
get {
int ret;
switch (m_version) {
case 11: {
ret = sr11.RecordNumber();
break;
}
case 12: {
ret = sr12.RecordNumber();
break;
}
case 13: {
ret = sr13.RecordNumber();
break;
}
case 14: {
ret = sr14.RecordNumber();
break;
}
case 15: {
ret = sr15.RecordNumber();
break;
}
case 16: {
ret = sr16.RecordNumber();
break;
}
case 17: {
ret = sr17.RecordNumber();
break;
}
default: throw new InvalidOperationException("ver");
}
return ret;
}
}
}
}
| |
using System;
using System.IO;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using RestSharp;
using IO.Swagger.Client;
using IO.Swagger.Model;
namespace IO.Swagger.Api
{
/// <summary>
/// Represents a collection of functions to interact with the API endpoints
/// </summary>
    /// <summary>
    /// Represents a collection of functions to interact with the API endpoints
    /// </summary>
    public interface IDeedApi
    {
        #region Synchronous Operations
        /// <summary>
        /// Deed
        /// </summary>
        /// <remarks>
        /// The Deed endpoint returns details of a specific deed based on the unique deed reference.
        /// The response includes the Title Number, Property information, Borrower(s) information and deed information.
        /// </remarks>
        /// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="deedReference">Unique reference of the deed.</param>
        /// <returns>OperativeDeed</returns>
        OperativeDeed DeedDeedReferenceGet (string deedReference);
        /// <summary>
        /// Deed
        /// </summary>
        /// <remarks>
        /// The Deed endpoint returns details of a specific deed based on the unique deed reference.
        /// The response includes the Title Number, Property information, Borrower(s) information and deed information.
        /// This overload also exposes the HTTP status code and response headers.
        /// </remarks>
        /// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="deedReference">Unique reference of the deed.</param>
        /// <returns>ApiResponse of OperativeDeed</returns>
        ApiResponse<OperativeDeed> DeedDeedReferenceGetWithHttpInfo (string deedReference);
        #endregion Synchronous Operations
        #region Asynchronous Operations
        /// <summary>
        /// Deed
        /// </summary>
        /// <remarks>
        /// The Deed endpoint returns details of a specific deed based on the unique deed reference.
        /// The response includes the Title Number, Property information, Borrower(s) information and deed information.
        /// </remarks>
        /// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="deedReference">Unique reference of the deed.</param>
        /// <returns>Task of OperativeDeed</returns>
        System.Threading.Tasks.Task<OperativeDeed> DeedDeedReferenceGetAsync (string deedReference);
        /// <summary>
        /// Deed
        /// </summary>
        /// <remarks>
        /// The Deed endpoint returns details of a specific deed based on the unique deed reference.
        /// The response includes the Title Number, Property information, Borrower(s) information and deed information.
        /// This overload also exposes the HTTP status code and response headers.
        /// </remarks>
        /// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="deedReference">Unique reference of the deed.</param>
        /// <returns>Task of ApiResponse (OperativeDeed)</returns>
        System.Threading.Tasks.Task<ApiResponse<OperativeDeed>> DeedDeedReferenceGetAsyncWithHttpInfo (string deedReference);
        #endregion Asynchronous Operations
    }
/// <summary>
/// Represents a collection of functions to interact with the API endpoints
/// </summary>
public class DeedApi : IDeedApi
{
/// <summary>
/// Initializes a new instance of the <see cref="DeedApi"/> class.
/// </summary>
/// <returns></returns>
public DeedApi(String basePath)
{
this.Configuration = new Configuration(new ApiClient(basePath));
// ensure API client has configuration ready
if (Configuration.ApiClient.Configuration == null)
{
this.Configuration.ApiClient.Configuration = this.Configuration;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="DeedApi"/> class
/// using Configuration object
/// </summary>
/// <param name="configuration">An instance of Configuration</param>
/// <returns></returns>
public DeedApi(Configuration configuration = null)
{
if (configuration == null) // use the default one in Configuration
this.Configuration = Configuration.Default;
else
this.Configuration = configuration;
// ensure API client has configuration ready
if (Configuration.ApiClient.Configuration == null)
{
this.Configuration.ApiClient.Configuration = this.Configuration;
}
}
/// <summary>
/// Gets the base path of the API client.
/// </summary>
/// <value>The base path</value>
public String GetBasePath()
{
return this.Configuration.ApiClient.RestClient.BaseUrl.ToString();
}
/// <summary>
/// Sets the base path of the API client.
/// </summary>
/// <value>The base path</value>
[Obsolete("SetBasePath is deprecated, please do 'Configuraiton.ApiClient = new ApiClient(\"http://new-path\")' instead.")]
public void SetBasePath(String basePath)
{
// do nothing
}
/// <summary>
/// Gets or sets the configuration object
/// </summary>
/// <value>An instance of the Configuration</value>
public Configuration Configuration {get; set;}
/// <summary>
/// Gets the default header.
/// </summary>
/// <returns>Dictionary of HTTP header</returns>
[Obsolete("DefaultHeader is deprecated, please use Configuration.DefaultHeader instead.")]
public Dictionary<String, String> DefaultHeader()
{
return this.Configuration.DefaultHeader;
}
/// <summary>
/// Add default header.
/// </summary>
/// <param name="key">Header field name.</param>
/// <param name="value">Header field value.</param>
/// <returns></returns>
[Obsolete("AddDefaultHeader is deprecated, please use Configuration.AddDefaultHeader instead.")]
public void AddDefaultHeader(string key, string value)
{
this.Configuration.AddDefaultHeader(key, value);
}
/// <summary>
/// Deed The Deed endpoint returns details of a specific deed based on the unique deed reference.\nThe response includes the Title Number, Property information, Borrower(s) information and deed information.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="deedReference">Unique reference of the deed.</param>
/// <returns>OperativeDeed</returns>
public OperativeDeed DeedDeedReferenceGet (string deedReference)
{
ApiResponse<OperativeDeed> localVarResponse = DeedDeedReferenceGetWithHttpInfo(deedReference);
return localVarResponse.Data;
}
/// <summary>
/// Deed The Deed endpoint returns details of a specific deed based on the unique deed reference.\nThe response includes the Title Number, Property information, Borrower(s) information and deed information.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="deedReference">Unique reference of the deed.</param>
/// <returns>ApiResponse of OperativeDeed</returns>
public ApiResponse< OperativeDeed > DeedDeedReferenceGetWithHttpInfo (string deedReference)
{
// verify the required parameter 'deedReference' is set
if (deedReference == null)
throw new ApiException(400, "Missing required parameter 'deedReference' when calling DeedApi->DeedDeedReferenceGet");
var localVarPath = "/deed/{deed_reference}";
var localVarPathParams = new Dictionary<String, String>();
var localVarQueryParams = new Dictionary<String, String>();
var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
var localVarFormParams = new Dictionary<String, String>();
var localVarFileParams = new Dictionary<String, FileParameter>();
Object localVarPostBody = null;
// to determine the Content-Type header
String[] localVarHttpContentTypes = new String[] {
};
String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);
// to determine the Accept header
String[] localVarHttpHeaderAccepts = new String[] {
"application/json"
};
String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
if (localVarHttpHeaderAccept != null)
localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);
// set "format" to json by default
// e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
localVarPathParams.Add("format", "json");
if (deedReference != null) localVarPathParams.Add("deed_reference", Configuration.ApiClient.ParameterToString(deedReference)); // path parameter
// make the HTTP request
IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarPath,
Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
localVarPathParams, localVarHttpContentType);
int localVarStatusCode = (int) localVarResponse.StatusCode;
if (localVarStatusCode >= 400)
throw new ApiException (localVarStatusCode, "Error calling DeedDeedReferenceGet: " + localVarResponse.Content, localVarResponse.Content);
else if (localVarStatusCode == 0)
throw new ApiException (localVarStatusCode, "Error calling DeedDeedReferenceGet: " + localVarResponse.ErrorMessage, localVarResponse.ErrorMessage);
return new ApiResponse<OperativeDeed>(localVarStatusCode,
localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
(OperativeDeed) Configuration.ApiClient.Deserialize(localVarResponse, typeof(OperativeDeed)));
}
/// <summary>
/// Deed The Deed endpoint returns details of a specific deed based on the unique deed reference.\nThe response includes the Title Number, Property information, Borrower(s) information and deed information.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="deedReference">Unique reference of the deed.</param>
/// <returns>Task of OperativeDeed</returns>
public async System.Threading.Tasks.Task<OperativeDeed> DeedDeedReferenceGetAsync (string deedReference)
{
ApiResponse<OperativeDeed> localVarResponse = await DeedDeedReferenceGetAsyncWithHttpInfo(deedReference);
return localVarResponse.Data;
}
/// <summary>
/// Deed The Deed endpoint returns details of a specific deed based on the unique deed reference.\nThe response includes the Title Number, Property information, Borrower(s) information and deed information.
/// </summary>
/// <exception cref="IO.Swagger.Client.ApiException">Thrown when fails to make API call</exception>
/// <param name="deedReference">Unique reference of the deed.</param>
/// <returns>Task of ApiResponse (OperativeDeed)</returns>
public async System.Threading.Tasks.Task<ApiResponse<OperativeDeed>> DeedDeedReferenceGetAsyncWithHttpInfo (string deedReference)
{
// verify the required parameter 'deedReference' is set
if (deedReference == null) throw new ApiException(400, "Missing required parameter 'deedReference' when calling DeedDeedReferenceGet");
var localVarPath = "/deed/{deed_reference}";
var localVarPathParams = new Dictionary<String, String>();
var localVarQueryParams = new Dictionary<String, String>();
var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
var localVarFormParams = new Dictionary<String, String>();
var localVarFileParams = new Dictionary<String, FileParameter>();
Object localVarPostBody = null;
// to determine the Content-Type header
String[] localVarHttpContentTypes = new String[] {
};
String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);
// to determine the Accept header
String[] localVarHttpHeaderAccepts = new String[] {
"application/json"
};
String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
if (localVarHttpHeaderAccept != null)
localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);
// set "format" to json by default
// e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
localVarPathParams.Add("format", "json");
if (deedReference != null) localVarPathParams.Add("deed_reference", Configuration.ApiClient.ParameterToString(deedReference)); // path parameter
// make the HTTP request
IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarPath,
Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
localVarPathParams, localVarHttpContentType);
int localVarStatusCode = (int) localVarResponse.StatusCode;
if (localVarStatusCode >= 400)
throw new ApiException (localVarStatusCode, "Error calling DeedDeedReferenceGet: " + localVarResponse.Content, localVarResponse.Content);
else if (localVarStatusCode == 0)
throw new ApiException (localVarStatusCode, "Error calling DeedDeedReferenceGet: " + localVarResponse.ErrorMessage, localVarResponse.ErrorMessage);
return new ApiResponse<OperativeDeed>(localVarStatusCode,
localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
(OperativeDeed) Configuration.ApiClient.Deserialize(localVarResponse, typeof(OperativeDeed)));
}
}
}
| |
#region License
//
// Author: Nate Kohari <nate@enkari.com>
// Copyright (c) 2007-2010, Enkari, Ltd.
//
// Dual-licensed under the Apache License, Version 2.0, and the Microsoft Public License (Ms-PL).
// See the file LICENSE.txt for details.
//
#endregion
#region Using Directives
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Telerik.JustMock.AutoMock.Ninject.Infrastructure;
using Telerik.JustMock.AutoMock.Ninject.Infrastructure.Disposal;
using Telerik.JustMock.AutoMock.Ninject.Infrastructure.Introspection;
using Telerik.JustMock.AutoMock.Ninject.Infrastructure.Language;
#endregion
namespace Telerik.JustMock.AutoMock.Ninject.Components
{
/// <summary>
/// An internal container that manages and resolves components that contribute to Ninject.
/// </summary>
    public class ComponentContainer : DisposableObject, IComponentContainer
    {
        // component interface type -> registered implementation types (ordered)
        private readonly Multimap<Type, Type> _mappings = new Multimap<Type, Type>();
        // implementation type -> singleton instance cache
        private readonly Dictionary<Type, INinjectComponent> _instances = new Dictionary<Type, INinjectComponent>();
        // (component, implementation) pairs registered as transient, i.e. never cached
        private readonly HashSet<KeyValuePair<Type, Type>> transients = new HashSet<KeyValuePair<Type, Type>>();
        /// <summary>
        /// Gets or sets the kernel that owns the component container.
        /// </summary>
        public IKernel Kernel { get; set; }
        /// <summary>
        /// Releases resources held by the object.
        /// </summary>
        public override void Dispose(bool disposing)
        {
            if (disposing && !IsDisposed)
            {
                // Dispose cached singletons; transient instances are the caller's problem.
                foreach (INinjectComponent instance in _instances.Values)
                    instance.Dispose();
                _mappings.Clear();
                _instances.Clear();
            }
            base.Dispose(disposing);
        }
        /// <summary>
        /// Registers a component in the container.
        /// </summary>
        /// <typeparam name="TComponent">The component type.</typeparam>
        /// <typeparam name="TImplementation">The component's implementation type.</typeparam>
        public void Add<TComponent, TImplementation>()
            where TComponent : INinjectComponent
            where TImplementation : TComponent, INinjectComponent
        {
            _mappings.Add(typeof(TComponent), typeof(TImplementation));
        }
        /// <summary>
        /// Registers a transient component in the container.
        /// </summary>
        /// <typeparam name="TComponent">The component type.</typeparam>
        /// <typeparam name="TImplementation">The component's implementation type.</typeparam>
        public void AddTransient<TComponent, TImplementation>()
            where TComponent : INinjectComponent
            where TImplementation : TComponent, INinjectComponent
        {
            // Same registration as Add, but remembered in 'transients' so
            // CreateNewInstance skips the singleton cache.
            this.Add<TComponent, TImplementation>();
            this.transients.Add(new KeyValuePair<Type, Type>(typeof(TComponent), typeof(TImplementation)));
        }
        /// <summary>
        /// Removes all registrations for the specified component.
        /// </summary>
        /// <typeparam name="T">The component type.</typeparam>
        public void RemoveAll<T>()
            where T : INinjectComponent
        {
            RemoveAll(typeof(T));
        }
        /// <summary>
        /// Removes all registrations for the specified component.
        /// </summary>
        /// <param name="component">The component type.</param>
        public void RemoveAll(Type component)
        {
            Ensure.ArgumentNotNull(component, "component");
            // Dispose and drop any cached instance for each implementation
            // before forgetting the mapping itself.
            foreach (Type implementation in _mappings[component])
            {
                if (_instances.ContainsKey(implementation))
                    _instances[implementation].Dispose();
                _instances.Remove(implementation);
            }
            _mappings.RemoveAll(component);
        }
        /// <summary>
        /// Gets one instance of the specified component.
        /// </summary>
        /// <typeparam name="T">The component type.</typeparam>
        /// <returns>The instance of the component.</returns>
        public T Get<T>()
            where T : INinjectComponent
        {
            return (T) Get(typeof(T));
        }
        /// <summary>
        /// Gets all available instances of the specified component.
        /// </summary>
        /// <typeparam name="T">The component type.</typeparam>
        /// <returns>A series of instances of the specified component.</returns>
        public IEnumerable<T> GetAll<T>()
            where T : INinjectComponent
        {
            return GetAll(typeof(T)).Cast<T>();
        }
        /// <summary>
        /// Gets one instance of the specified component.
        /// </summary>
        /// <param name="component">The component type.</param>
        /// <returns>The instance of the component.</returns>
        public object Get(Type component)
        {
            Ensure.ArgumentNotNull(component, "component");
            // The kernel itself is resolvable but never registered.
            if (component == typeof(IKernel))
                return Kernel;
            // Requests for IEnumerable<T> resolve to the full set of T registrations.
            if (component.IsGenericType)
            {
                Type gtd = component.GetGenericTypeDefinition();
                Type argument = component.GetGenericArguments()[0];
                #if WINDOWS_PHONE
                Type discreteGenericType =
                    typeof (IEnumerable<>).MakeGenericType(argument);
                if (gtd.IsInterface && discreteGenericType.IsAssignableFrom(component))
                    return GetAll(argument).CastSlow(argument);
                #else
                if (gtd.IsInterface && typeof (IEnumerable<>).IsAssignableFrom(gtd))
                    return GetAll(argument).CastSlow(argument);
                #endif
            }
            // First registration wins for single-instance requests.
            Type implementation = _mappings[component].FirstOrDefault();
            if (implementation == null)
                throw new InvalidOperationException(ExceptionFormatter.NoSuchComponentRegistered(component));
            return ResolveInstance(component, implementation);
        }
        /// <summary>
        /// Gets all available instances of the specified component.
        /// </summary>
        /// <param name="component">The component type.</param>
        /// <returns>A series of instances of the specified component.</returns>
        public IEnumerable<object> GetAll(Type component)
        {
            Ensure.ArgumentNotNull(component, "component");
            // Lazy: each implementation is resolved only when enumerated.
            return _mappings[component]
                .Select(implementation => ResolveInstance(component, implementation));
        }
        // Returns the cached singleton if present, otherwise builds one.
        // The lock covers both the lookup and the creation/caching path.
        private object ResolveInstance(Type component, Type implementation)
        {
            lock (_instances)
                return _instances.ContainsKey(implementation) ? _instances[implementation] : CreateNewInstance(component, implementation);
        }
        // Constructs the implementation, recursively resolving its constructor
        // arguments through Get, and caches it unless registered as transient.
        private object CreateNewInstance(Type component, Type implementation)
        {
            ConstructorInfo constructor = SelectConstructor(component, implementation);
            var arguments = constructor.GetParameters().Select(parameter => Get(parameter.ParameterType)).ToArray();
            try
            {
                var instance = constructor.Invoke(arguments) as INinjectComponent;
                instance.Settings = Kernel.Settings;
                if (!this.transients.Contains(new KeyValuePair<Type, Type>(component, implementation)))
                {
                    _instances.Add(implementation, instance);
                }
                return instance;
            }
            catch (TargetInvocationException ex)
            {
                // Surface the component's own exception instead of the reflection wrapper.
                ex.RethrowInnerException();
                return null;
            }
        }
        // Greedy selection: the public constructor with the most parameters.
        private static ConstructorInfo SelectConstructor(Type component, Type implementation)
        {
            var constructor = implementation.GetConstructors().OrderByDescending(c => c.GetParameters().Length).FirstOrDefault();
            if (constructor == null)
                throw new InvalidOperationException(ExceptionFormatter.NoConstructorsAvailableForComponent(component, implementation));
            return constructor;
        }
        #if SILVERLIGHT_30 || SILVERLIGHT_20 || WINDOWS_PHONE || NETCF_35
        // Minimal HashSet substitute for platforms whose BCL lacks one.
        private class HashSet<T>
        {
            private IDictionary<T, object> data = new Dictionary<T,object>();
            public void Add(T o)
            {
                this.data.Add(o, null);
            }
            public bool Contains(T o)
            {
                return this.data.ContainsKey(o);
            }
        }
        #endif
    }
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Text
{
public static partial class Extensions
{
/// <summary>
/// ITextSnapshot implementation of SourceText
/// </summary>
private class SnapshotSourceText : SourceText
{
/// <summary>
/// Use a separate class for closed files to simplify memory leak investigations
/// </summary>
            /// <summary>
            /// Use a separate class for closed files to simplify memory leak investigations
            /// </summary>
            internal sealed class ClosedSnapshotSourceText : SnapshotSourceText
            {
                // Behaviorally identical to the base type; the distinct runtime type
                // makes closed-file instances easy to identify in heap dumps.
                public ClosedSnapshotSourceText(ITextSnapshot roslynSnapshot, Encoding encodingOpt)
                    : base(roslynSnapshot, encodingOpt, containerOpt: null)
                {
                }
            }
private static readonly Func<int, int, string> s_textLog = (v1, v2) => string.Format("FullRange : from {0} to {1}", v1, v2);
/// <summary>
/// The ITextSnapshot backing the SourceText instance
/// </summary>
protected readonly ITextSnapshot RoslynSnapshot;
private readonly Encoding _encodingOpt;
private readonly TextBufferContainer _containerOpt;
private readonly int _reiteratedVersion;
private SnapshotSourceText(ITextSnapshot editorSnapshot, Encoding encodingOpt)
{
Contract.ThrowIfNull(editorSnapshot);
this.RoslynSnapshot = TextBufferMapper.ToRoslyn(editorSnapshot);
_containerOpt = TextBufferContainer.From(editorSnapshot.TextBuffer);
_reiteratedVersion = editorSnapshot.Version.ReiteratedVersionNumber;
_encodingOpt = encodingOpt;
}
public SnapshotSourceText(ITextSnapshot roslynSnapshot, Encoding encodingOpt, TextBufferContainer containerOpt)
{
Contract.ThrowIfNull(roslynSnapshot);
this.RoslynSnapshot = roslynSnapshot;
_encodingOpt = encodingOpt;
_containerOpt = containerOpt;
}
/// <summary>
/// A weak map of all Editor ITextSnapshots and their associated SourceText
/// </summary>
private static readonly ConditionalWeakTable<ITextSnapshot, SnapshotSourceText> s_textSnapshotMap = new ConditionalWeakTable<ITextSnapshot, SnapshotSourceText>();
private static readonly ConditionalWeakTable<ITextSnapshot, SnapshotSourceText>.CreateValueCallback s_createTextCallback = CreateText;
public static SourceText From(ITextSnapshot editorSnapshot)
{
if (editorSnapshot == null)
{
throw new ArgumentNullException(nameof(editorSnapshot));
}
return s_textSnapshotMap.GetValue(editorSnapshot, s_createTextCallback);
}
// Use this as a secondary cache to catch ITextSnapshots that have the same ReiteratedVersionNumber as a previously created SnapshotSourceText
private static readonly ConditionalWeakTable<ITextBuffer, StrongBox<SnapshotSourceText>> s_textBufferLatestSnapshotMap = new ConditionalWeakTable<ITextBuffer, StrongBox<SnapshotSourceText>>();
            // Creates (or reuses) the SnapshotSourceText for an editor snapshot.
            // Reuses the buffer's last SourceText when the snapshot's
            // ReiteratedVersionNumber matches — i.e. the text content is identical
            // to the previously seen snapshot — avoiding a duplicate instance.
            private static SnapshotSourceText CreateText(ITextSnapshot editorSnapshot)
            {
                var strongBox = s_textBufferLatestSnapshotMap.GetOrCreateValue(editorSnapshot.TextBuffer);
                var text = strongBox.Value;
                if (text != null && text._reiteratedVersion == editorSnapshot.Version.ReiteratedVersionNumber)
                {
                    return text;
                }
                text = new SnapshotSourceText(editorSnapshot, editorSnapshot.TextBuffer.GetEncodingOrUTF8());
                strongBox.Value = text;
                return text;
            }
public override Encoding Encoding
{
get { return _encodingOpt; }
}
public ITextSnapshot EditorSnapshot
{
get { return TextBufferMapper.ToEditor(this.RoslynSnapshot); }
}
protected static ITextBufferCloneService TextBufferFactory
{
get
{
// simplest way to get text factory
var ws = PrimaryWorkspace.Workspace;
if (ws != null)
{
return ws.Services.GetService<ITextBufferCloneService>();
}
return null;
}
}
public override SourceTextContainer Container
{
get
{
return _containerOpt ?? base.Container;
}
}
public override int Length
{
get
{
var res = this.RoslynSnapshot.Length;
return res;
}
}
public override char this[int position]
{
get { return this.RoslynSnapshot[position]; }
}
#region Lines
protected override TextLineCollection GetLinesCore()
{
return new LineInfo(this);
}
private class LineInfo : TextLineCollection
{
private readonly SnapshotSourceText _text;
public LineInfo(SnapshotSourceText text)
{
_text = text;
}
public override int Count
{
get { return _text.RoslynSnapshot.LineCount; }
}
public override TextLine this[int index]
{
get
{
var line = _text.RoslynSnapshot.GetLineFromLineNumber(index);
return TextLine.FromSpan(_text, TextSpan.FromBounds(line.Start, line.End));
}
}
public override int IndexOf(int position)
{
return _text.RoslynSnapshot.GetLineNumberFromPosition(position);
}
public override TextLine GetLineFromPosition(int position)
{
return this[this.IndexOf(position)];
}
public override LinePosition GetLinePosition(int position)
{
ITextSnapshotLine textLine = _text.RoslynSnapshot.GetLineFromPosition(position);
return new LinePosition(textLine.LineNumber, position - textLine.Start);
}
}
#endregion
public override string ToString()
{
return this.RoslynSnapshot.GetText();
}
public override string ToString(TextSpan textSpan)
{
var editorSpan = new Span(textSpan.Start, textSpan.Length);
var res = this.RoslynSnapshot.GetText(editorSpan);
return res;
}
            /// <summary>
            /// Produces a new SourceText with the given changes applied. When the
            /// text-buffer clone service is available, the result is itself snapshot
            /// backed (a ChangedSourceText), which keeps later GetChangeRanges calls fast.
            /// </summary>
            public override SourceText WithChanges(IEnumerable<TextChange> changes)
            {
                if (changes == null)
                {
                    throw new ArgumentNullException(nameof(changes));
                }
                if (!changes.Any())
                {
                    return this;
                }
                // check whether we can use text buffer factory
                var factory = TextBufferFactory;
                if (factory == null)
                {
                    // if we can't get the factory, use the default implementation
                    return base.WithChanges(changes);
                }
                // otherwise, create a new cloned snapshot
                var buffer = factory.Clone(RoslynSnapshot.GetFullSpan());
                var baseSnapshot = buffer.CurrentSnapshot;
                // apply the change to the buffer
                using (var edit = buffer.CreateEdit())
                {
                    // All replacements are queued against the same base snapshot,
                    // then committed atomically by Apply.
                    foreach (var change in changes)
                    {
                        edit.Replace(change.Span.ToSpan(), change.NewText);
                    }
                    edit.Apply();
                }
                return new ChangedSourceText(this, baseSnapshot, buffer.CurrentSnapshot);
            }
/// <summary>
/// Perf: Optimize calls to GetChangeRanges after WithChanges by using editor snapshots
/// </summary>
            private class ChangedSourceText : SnapshotSourceText
            {
                // The SourceText this one was derived from, and the snapshot it was
                // derived at — used to answer GetChangeRanges precisely.
                private readonly SnapshotSourceText _baseText;
                private readonly ITextSnapshot _baseSnapshot;
                public ChangedSourceText(SnapshotSourceText baseText, ITextSnapshot baseSnapshot, ITextSnapshot currentSnapshot)
                    : base(currentSnapshot, baseText.Encoding, containerOpt: null)
                {
                    _baseText = baseText;
                    _baseSnapshot = baseSnapshot;
                }
                public override IReadOnlyList<TextChangeRange> GetChangeRanges(SourceText oldText)
                {
                    if (oldText == null)
                    {
                        throw new ArgumentNullException(nameof(oldText));
                    }
                    // if they are the same text there is no change.
                    if (oldText == this)
                    {
                        return TextChangeRange.NoChanges;
                    }
                    // Comparisons are only precise against the exact base text;
                    // anything else is conservatively reported as "everything changed".
                    if (oldText != _baseText)
                    {
                        return new[] { new TextChangeRange(new TextSpan(0, oldText.Length), this.Length) };
                    }
                    return GetChangeRanges(_baseSnapshot, _baseSnapshot.Length, this.RoslynSnapshot);
                }
            }
// Copies a range of characters out of the snapshot into the caller's buffer.
public override void CopyTo(int sourceIndex, char[] destination, int destinationIndex, int count)
    => RoslynSnapshot.CopyTo(sourceIndex, destination, destinationIndex, count);
// Streams the given span of the snapshot to the writer.
// NOTE(review): the cancellation token is not observed here — the underlying
// snapshot Write overload takes no token.
public override void Write(TextWriter textWriter, TextSpan span, CancellationToken cancellationToken)
    => RoslynSnapshot.Write(textWriter, span.ToSpan());
#region GetChangeRangesImplementation
// Computes the change ranges from oldText to this text.
// Fast paths: identical text (no changes), or a live TextBufferContainer whose
// last change event exactly matches the (oldText, this) pair, in which case the
// already-computed changes are reused. Otherwise falls back to snapshot diffing.
public override IReadOnlyList<TextChangeRange> GetChangeRanges(SourceText oldText)
{
    if (oldText == null)
    {
        throw new ArgumentNullException(nameof(oldText));
    }
    // if they are the same text there is no change.
    if (oldText == this)
    {
        return TextChangeRange.NoChanges;
    }
    // first, check whether the text buffer is still alive.
    var container = this.Container as TextBufferContainer;
    if (container != null)
    {
        var lastEventArgs = container.LastEventArgs;
        if (lastEventArgs != null && lastEventArgs.OldText == oldText && lastEventArgs.NewText == this)
        {
            // The buffer's last change event describes exactly this transition.
            return lastEventArgs.Changes;
        }
    }
    // Fall back to diffing the corresponding editor snapshots (either may be null).
    var oldSnapshot = oldText.FindCorrespondingEditorTextSnapshot();
    var newSnapshot = this.FindCorrespondingEditorTextSnapshot();
    return GetChangeRanges(oldSnapshot, oldText.Length, newSnapshot);
}
// Diffs two editor snapshots. Snapshots that are missing or come from
// different buffers cannot be compared, so the whole document is reported
// as changed; snapshots with the same reiterated version are known to have
// identical content regardless of version number.
private IReadOnlyList<TextChangeRange> GetChangeRanges(ITextSnapshot oldSnapshot, int oldTextLength, ITextSnapshot newSnapshot)
{
    var snapshotsComparable =
        oldSnapshot != null &&
        newSnapshot != null &&
        oldSnapshot.TextBuffer == newSnapshot.TextBuffer;
    if (!snapshotsComparable)
    {
        // Claim its all changed
        Logger.Log(FunctionId.Workspace_SourceText_GetChangeRanges, "Invalid Snapshots");
        return ImmutableArray.Create<TextChangeRange>(new TextChangeRange(new TextSpan(0, oldTextLength), this.Length));
    }
    if (oldSnapshot.Version.ReiteratedVersionNumber == newSnapshot.Version.ReiteratedVersionNumber)
    {
        // content of two snapshot must be same even if versions are different
        return TextChangeRange.NoChanges;
    }
    var movingForward = oldSnapshot.Version.VersionNumber <= newSnapshot.Version.VersionNumber;
    return GetChangeRanges(oldSnapshot, newSnapshot, forward: movingForward);
}
// Cached delegates converting an editor ITextChange into a Roslyn
// TextChangeRange, avoiding a fresh lambda allocation on every diff.
private static readonly Func<ITextChange, TextChangeRange> s_forwardTextChangeRange = c => CreateTextChangeRange(c, forward: true);
private static readonly Func<ITextChange, TextChangeRange> s_backwardTextChangeRange = c => CreateTextChangeRange(c, forward: false);
// Computes change ranges between two snapshots of the same buffer by walking
// the version chain from the older toward the newer snapshot. When "forward"
// is false the inputs arrive newest-first, so the endpoints are swapped and
// each change is mirrored (new span -> old span) by the backward delegate.
private IReadOnlyList<TextChangeRange> GetChangeRanges(ITextSnapshot snapshot1, ITextSnapshot snapshot2, bool forward)
{
    var oldSnapshot = forward ? snapshot1 : snapshot2;
    var newSnapshot = forward ? snapshot2 : snapshot1;
    // Tracks the single non-empty change collection seen so far, if any.
    INormalizedTextChangeCollection changes = null;
    for (var oldVersion = oldSnapshot.Version;
        oldVersion != newSnapshot.Version;
        oldVersion = oldVersion.Next)
    {
        if (oldVersion.Changes.Count != 0)
        {
            if (changes != null)
            {
                // Oops - more than one "textual" change between these snapshots, bail and try to find smallest changes span
                Logger.Log(FunctionId.Workspace_SourceText_GetChangeRanges, s_textLog, snapshot1.Version.VersionNumber, snapshot2.Version.VersionNumber);
                return new[] { GetChangeRanges(oldSnapshot.Version, newSnapshot.Version, forward) };
            }
            else
            {
                changes = oldVersion.Changes;
            }
        }
    }
    if (changes == null)
    {
        // No version between the two snapshots carried any changes.
        return ImmutableArray.Create<TextChangeRange>();
    }
    else
    {
        // Exactly one version carried changes: convert each to a TextChangeRange.
        return ImmutableArray.CreateRange(changes.Select(forward ? s_forwardTextChangeRange : s_backwardTextChangeRange));
    }
}
// Collapses every change between two versions into a single TextChangeRange
// by accumulating the per-version change sets (iterated in reverse when
// walking backward so accumulation order matches the direction of the diff).
private TextChangeRange GetChangeRanges(ITextVersion oldVersion, ITextVersion newVersion, bool forward)
{
    TextChangeRange? range = null;
    var iterator = GetMultipleVersionTextChanges(oldVersion, newVersion, forward);
    foreach (var changes in forward ? iterator : iterator.Reverse())
    {
        range = range.Accumulate(changes);
    }
    // Callers reach this path only after observing at least one non-empty
    // change collection, so the accumulated range is expected to be non-null.
    Contract.Requires(range.HasValue);
    return range.Value;
}
// Lazily yields, for each version from oldVersion up to (but excluding)
// newVersion, that version's changes converted to TextChangeRanges.
private static IEnumerable<IEnumerable<TextChangeRange>> GetMultipleVersionTextChanges(ITextVersion oldVersion, ITextVersion newVersion, bool forward)
{
    var current = oldVersion;
    while (current != newVersion)
    {
        var converter = forward ? s_forwardTextChangeRange : s_backwardTextChangeRange;
        yield return current.Changes.Select(converter);
        current = current.Next;
    }
}
// Converts a single editor change to a Roslyn change range. Forward maps
// the old span to the new length; backward swaps the roles of the two spans.
private static TextChangeRange CreateTextChangeRange(ITextChange change, bool forward)
{
    return forward
        ? new TextChangeRange(new TextSpan(change.OldSpan.Start, change.OldSpan.Length), change.NewLength)
        : new TextChangeRange(new TextSpan(change.NewSpan.Start, change.NewSpan.Length), change.OldLength);
}
#endregion
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Linq.Expressions.Tests
{
public static class BinaryNullableOrTests
{
    #region Test methods

    // Each Check* method exercises the nullable bitwise OR over the full
    // cross product of a small set of interesting values (null, zero, one,
    // extremes) for one integral type.

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableByteOrTest(bool useInterpreter)
    {
        byte?[] values = { null, 0, 1, byte.MaxValue };
        foreach (byte? left in values)
        {
            foreach (byte? right in values)
            {
                VerifyNullableByteOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableSByteOrTest(bool useInterpreter)
    {
        sbyte?[] values = { null, 0, 1, -1, sbyte.MinValue, sbyte.MaxValue };
        foreach (sbyte? left in values)
        {
            foreach (sbyte? right in values)
            {
                VerifyNullableSByteOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableUShortOrTest(bool useInterpreter)
    {
        ushort?[] values = { null, 0, 1, ushort.MaxValue };
        foreach (ushort? left in values)
        {
            foreach (ushort? right in values)
            {
                VerifyNullableUShortOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableShortOrTest(bool useInterpreter)
    {
        short?[] values = { null, 0, 1, -1, short.MinValue, short.MaxValue };
        foreach (short? left in values)
        {
            foreach (short? right in values)
            {
                VerifyNullableShortOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableUIntOrTest(bool useInterpreter)
    {
        uint?[] values = { null, 0, 1, uint.MaxValue };
        foreach (uint? left in values)
        {
            foreach (uint? right in values)
            {
                VerifyNullableUIntOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableIntOrTest(bool useInterpreter)
    {
        int?[] values = { null, 0, 1, -1, int.MinValue, int.MaxValue };
        foreach (int? left in values)
        {
            foreach (int? right in values)
            {
                VerifyNullableIntOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableULongOrTest(bool useInterpreter)
    {
        ulong?[] values = { null, 0, 1, ulong.MaxValue };
        foreach (ulong? left in values)
        {
            foreach (ulong? right in values)
            {
                VerifyNullableULongOr(left, right, useInterpreter);
            }
        }
    }

    [Theory, ClassData(typeof(CompilationTypes))]
    public static void CheckNullableLongOrTest(bool useInterpreter)
    {
        long?[] values = { null, 0, 1, -1, long.MinValue, long.MaxValue };
        foreach (long? left in values)
        {
            foreach (long? right in values)
            {
                VerifyNullableLongOr(left, right, useInterpreter);
            }
        }
    }

    #endregion

    #region Test verifiers

    // Each Verify* helper builds a parameterless lambda wrapping
    // Expression.Or over two typed constants, compiles it with the requested
    // strategy, and checks the result against the language-level "a | b".

    private static void VerifyNullableByteOr(byte? a, byte? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(byte?)),
            Expression.Constant(b, typeof(byte?)));
        Expression<Func<byte?>> lambda =
            Expression.Lambda<Func<byte?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<byte?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((byte?)(a | b), compiled());
    }

    private static void VerifyNullableSByteOr(sbyte? a, sbyte? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(sbyte?)),
            Expression.Constant(b, typeof(sbyte?)));
        Expression<Func<sbyte?>> lambda =
            Expression.Lambda<Func<sbyte?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<sbyte?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((sbyte?)(a | b), compiled());
    }

    private static void VerifyNullableUShortOr(ushort? a, ushort? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(ushort?)),
            Expression.Constant(b, typeof(ushort?)));
        Expression<Func<ushort?>> lambda =
            Expression.Lambda<Func<ushort?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<ushort?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((ushort?)(a | b), compiled());
    }

    private static void VerifyNullableShortOr(short? a, short? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(short?)),
            Expression.Constant(b, typeof(short?)));
        Expression<Func<short?>> lambda =
            Expression.Lambda<Func<short?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<short?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal((short?)(a | b), compiled());
    }

    private static void VerifyNullableUIntOr(uint? a, uint? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(uint?)),
            Expression.Constant(b, typeof(uint?)));
        Expression<Func<uint?>> lambda =
            Expression.Lambda<Func<uint?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<uint?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(a | b, compiled());
    }

    private static void VerifyNullableIntOr(int? a, int? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(int?)),
            Expression.Constant(b, typeof(int?)));
        Expression<Func<int?>> lambda =
            Expression.Lambda<Func<int?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<int?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(a | b, compiled());
    }

    private static void VerifyNullableULongOr(ulong? a, ulong? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(ulong?)),
            Expression.Constant(b, typeof(ulong?)));
        Expression<Func<ulong?>> lambda =
            Expression.Lambda<Func<ulong?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<ulong?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(a | b, compiled());
    }

    private static void VerifyNullableLongOr(long? a, long? b, bool useInterpreter)
    {
        BinaryExpression or = Expression.Or(
            Expression.Constant(a, typeof(long?)),
            Expression.Constant(b, typeof(long?)));
        Expression<Func<long?>> lambda =
            Expression.Lambda<Func<long?>>(or, Enumerable.Empty<ParameterExpression>());
        Func<long?> compiled = lambda.Compile(useInterpreter);
        Assert.Equal(a | b, compiled());
    }

    #endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Sql.Fluent
{
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core;
using Microsoft.Azure.Management.Sql.Fluent.SqlServerDnsAliasOperations.Definition;
using Microsoft.Azure.Management.Sql.Fluent.SqlServerDnsAliasOperations.SqlServerDnsAliasOperationsDefinition;
using Microsoft.Azure.Management.Sql.Fluent.Models;
using System;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions;
/// <summary>
/// Implementation for SqlServerDnsAlias: a child resource of a SQL server
/// that exposes create/delete/refresh operations through the SQL manager.
/// </summary>
///GENTHASH:Y29tLm1pY3Jvc29mdC5henVyZS5tYW5hZ2VtZW50LnNxbC5pbXBsZW1lbnRhdGlvbi5TcWxTZXJ2ZXJEbnNBbGlhc0ltcGw=
internal partial class SqlServerDnsAliasImpl :
    ChildResource<
        Models.ServerDnsAliasInner,
        Microsoft.Azure.Management.Sql.Fluent.SqlServerImpl,
        Microsoft.Azure.Management.Sql.Fluent.ISqlServer>,
    ISqlServerDnsAlias,
    ISqlServerDnsAliasOperationsDefinition
{
    // Assigned only in constructors.
    private readonly ISqlManager sqlServerManager;
    private readonly string name;
    // May be (re)assigned by the WithExistingSqlServer* definition methods.
    private string resourceGroupName;
    private string sqlServerName;

    string ICreatable<ISqlServerDnsAlias>.Name => this.Name();

    /// <summary>
    /// Creates an instance of external child resource in-memory.
    /// </summary>
    /// <param name="name">The name of this external child resource.</param>
    /// <param name="parent">Reference to the parent of this external child resource.</param>
    /// <param name="innerObject">Reference to the inner object representing this external child resource.</param>
    /// <param name="sqlServerManager">Reference to the SQL server manager that accesses DNS alias operations.</param>
    ///GENMHASH:6EB3A0B36FAECA791EFBB40C3F76F7E2:FC8025F8B79091C30C130C9AAECEC55E
    internal SqlServerDnsAliasImpl(string name, SqlServerImpl parent, ServerDnsAliasInner innerObject, ISqlManager sqlServerManager)
        : base(innerObject, parent)
    {
        this.name = name;
        this.sqlServerManager = sqlServerManager;
        // Ancestry comes directly from the parent server.
        this.resourceGroupName = parent.ResourceGroupName;
        this.sqlServerName = parent.Name;
    }

    /// <summary>
    /// Creates an instance of external child resource in-memory.
    /// </summary>
    /// <param name="resourceGroupName">The resource group name.</param>
    /// <param name="sqlServerName">The parent SQL server name.</param>
    /// <param name="name">The name of this external child resource.</param>
    /// <param name="innerObject">Reference to the inner object representing this external child resource.</param>
    /// <param name="sqlServerManager">Reference to the SQL server manager that accesses DNS alias operations.</param>
    ///GENMHASH:F6E88B09AC6260EB63A570307518001D:38F5303BD24DCCFCF927664812C8523C
    internal SqlServerDnsAliasImpl(string resourceGroupName, string sqlServerName, string name, ServerDnsAliasInner innerObject, ISqlManager sqlServerManager)
        : base(innerObject, null)
    {
        this.name = name;
        this.sqlServerManager = sqlServerManager;
        this.resourceGroupName = resourceGroupName;
        this.sqlServerName = sqlServerName;
    }

    /// <summary>
    /// Creates an instance of external child resource in-memory.
    /// </summary>
    /// <param name="name">The name of this external child resource.</param>
    /// <param name="innerObject">Reference to the inner object representing this external child resource.</param>
    /// <param name="sqlServerManager">Reference to the SQL server manager that accesses DNS alias operations.</param>
    ///GENMHASH:288BB2F1178A7F50CBD9A307E67BDFB9:394B41296E95933E362FA2C6FB5E1428
    internal SqlServerDnsAliasImpl(string name, ServerDnsAliasInner innerObject, ISqlManager sqlServerManager)
        : base(innerObject, null)
    {
        this.name = name;
        this.sqlServerManager = sqlServerManager;
        // Derive the ancestry (resource group and server name) from the inner
        // object's resource id when one is available. The original code nested
        // a second "Id != null" check inside a condition that already
        // guaranteed it; the redundant check has been removed.
        if (innerObject != null && innerObject.Id != null)
        {
            ResourceId resourceId = ResourceId.FromString(innerObject.Id);
            this.resourceGroupName = resourceId.ResourceGroupName;
            this.sqlServerName = resourceId.Parent.Name;
        }
    }

    public override string Name()
    {
        return this.name;
    }

    ///GENMHASH:E9EDBD2E8DC2C547D1386A58778AA6B9:7EBD4102FEBFB0AD7091EA1ACBD84F8B
    public string ResourceGroupName()
    {
        return this.resourceGroupName;
    }

    // Sets the parent server from a full server resource id (definition stage).
    ///GENMHASH:F4658130CC69EFC4BCEC371A932CA322:93FFFDCDC68A9A454B56689F7777C24E
    public SqlServerDnsAliasImpl WithExistingSqlServerId(string sqlServerId)
    {
        ResourceId resourceId = ResourceId.FromString(sqlServerId);
        this.resourceGroupName = resourceId.ResourceGroupName;
        this.sqlServerName = resourceId.Name;
        return this;
    }

    ///GENMHASH:61F5809AB3B985C61AC40B98B1FBC47E:998832D58C98F6DCF3637916D2CC70B9
    public string SqlServerName()
    {
        return this.sqlServerName;
    }

    // Sets the parent server by resource group and server name (definition stage).
    ///GENMHASH:0BA2C0DAE27266D79653208FF06A9B80:99EDD7AD8CCAED9BC095807A7E85DE17
    public SqlServerDnsAliasImpl WithExistingSqlServer(string resourceGroupName, string sqlServerName)
    {
        this.resourceGroupName = resourceGroupName;
        this.sqlServerName = sqlServerName;
        return this;
    }

    // Sets the parent server from an existing ISqlServer instance (definition stage).
    ///GENMHASH:A0EEAA3D4BFB322B5036FE92D9F0F641:303BB606C8439FAB777DDCE1767E86E9
    public SqlServerDnsAliasImpl WithExistingSqlServer(ISqlServer sqlServer)
    {
        this.resourceGroupName = sqlServer.ResourceGroupName;
        this.sqlServerName = sqlServer.Name;
        return this;
    }

    ///GENMHASH:481C5BD52B28860819AE7BB31A646B75:9D385BED2B37376E10BD9B753B53CA59
    public string AzureDnsRecord()
    {
        return this.Inner.AzureDnsRecord;
    }

    ///GENMHASH:0FEDA307DAD2022B36843E8905D26EAD:95BA1017B6D636BB0934427C9B74AB8D
    public async Task DeleteAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        await this.DeleteResourceAsync(cancellationToken);
    }

    ///GENMHASH:65E6085BB9054A86F6A84772E3F5A9EC:DDDB0698C2ABADC0443C1F8E57C83AD8
    public void Delete()
    {
        Extensions.Synchronize(() => this.DeleteAsync());
    }

    // Fetches the current inner model for this alias from the service.
    ///GENMHASH:5AD91481A0966B059A478CD4E9DD9466:83CA15F39854FC167786AB594BA5E27A
    protected async Task<Models.ServerDnsAliasInner> GetInnerAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        return await this.sqlServerManager.Inner.ServerDnsAliases
            .GetAsync(this.resourceGroupName, this.sqlServerName, this.Name(), cancellationToken);
    }

    ///GENMHASH:7A0398C4BB6EBF42CC817EE638D40E9C:2DC6B3BEB4C8A428A0339820143BFEB3
    public string ParentId()
    {
        var resourceId = ResourceId.FromString(this.Id());
        return resourceId?.Parent?.Id;
    }

    ///GENMHASH:E24A9768E91CD60E963E43F00AA1FDFE:9E5F63F5A2A804B72CFD1E2E0B23C31A
    public async Task DeleteResourceAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        await this.sqlServerManager.Inner.ServerDnsAliases
            .DeleteAsync(this.resourceGroupName, this.sqlServerName, this.Name(), cancellationToken);
    }

    ///GENMHASH:ACA2D5620579D8158A29586CA1FF4BC6:899F2B088BBBD76CCBC31221756265BC
    public string Id()
    {
        return this.Inner.Id;
    }

    // Update is implemented as create-or-update on the service side.
    ///GENMHASH:507A92D4DCD93CE9595A78198DEBDFCF:16AD01F8BDD93611BB283CC787483C90
    public async Task<Microsoft.Azure.Management.Sql.Fluent.ISqlServerDnsAlias> UpdateResourceAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        return await this.CreateResourceAsync(cancellationToken);
    }

    ///GENMHASH:0202A00A1DCF248D2647DBDBEF2CA865:986A9A19092BED34F1F296126196EE63
    public async Task<Microsoft.Azure.Management.Sql.Fluent.ISqlServerDnsAlias> CreateResourceAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        var serverDnsAliasInner = await this.sqlServerManager.Inner.ServerDnsAliases
            .CreateOrUpdateAsync(this.resourceGroupName, this.sqlServerName, this.Name(), cancellationToken);
        this.SetInner(serverDnsAliasInner);
        return this;
    }

    public ISqlServerDnsAlias Refresh()
    {
        return Extensions.Synchronize(() => this.RefreshAsync());
    }

    public async Task<ISqlServerDnsAlias> RefreshAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        this.SetInner(await this.GetInnerAsync(cancellationToken));
        return this;
    }

    public ISqlServerDnsAlias Create()
    {
        return Extensions.Synchronize(() => this.CreateAsync());
    }

    public async Task<ISqlServerDnsAlias> CreateAsync(CancellationToken cancellationToken = default(CancellationToken), bool multiThreaded = true)
    {
        return await this.CreateResourceAsync(cancellationToken);
    }
}
}
| |
using System;
using System.Collections.Generic;
using Vevo.Domain;
using Vevo.Domain.Products;
using Vevo.Domain.Stores;
using Vevo.Shared.Utilities;
using Vevo.WebUI;
using Vevo.WebUI.Products;
/// <summary>
/// Search-result page: builds product, department, and content result lists
/// from query-string parameters supplied by the advanced/quick search forms.
/// </summary>
public partial class AdvancedSearchResult : Vevo.Deluxe.WebUI.Base.BaseLicenseLanguagePage
{
    // ----- Query-string accessors ---------------------------------------
    // Each property reads a raw value from the query string, falling back
    // to an empty string (or false / page 1) when the parameter is absent.

    private string CategoryIDs
    {
        get { return Request.QueryString["CategoryIDs"] ?? String.Empty; }
    }
    private string DepartmentIDs
    {
        get { return Request.QueryString["DepartmentIDs"] ?? String.Empty; }
    }
    private string ManufacturerID
    {
        get { return Request.QueryString["ManufacturerID"] ?? String.Empty; }
    }
    private string Keyword
    {
        get { return Request.QueryString["Keyword"] ?? String.Empty; }
    }
    private string SearchType
    {
        get { return Request.QueryString["Type"] ?? String.Empty; }
    }
    private bool IsQuickSearch
    {
        get
        {
            string quick = Request.QueryString["Quick"];
            return quick != null && ConvertUtilities.ToBoolean( quick );
        }
    }
    private string Price1
    {
        get { return Request.QueryString["Price1"] ?? String.Empty; }
    }
    private string Price2
    {
        get { return Request.QueryString["Price2"] ?? String.Empty; }
    }
    private string ProductSearchType
    {
        get { return Request.QueryString["SearchType"] ?? String.Empty; }
    }
    private bool IsNewSearch
    {
        get
        {
            string isNewSearch = Request.QueryString["IsNewSearch"];
            return isNewSearch != null && Convert.ToBoolean( isNewSearch );
        }
    }
    private int CurrentPage
    {
        get
        {
            // Defaults to page 1 when the parameter is missing or not a number.
            int result;
            string page = Request.QueryString["Page"];
            if (String.IsNullOrEmpty( page ) ||
                !int.TryParse( page, out result ))
                return 1;
            else
                return result;
        }
    }
    private string ContentMenuItemIDs
    {
        get { return Request.QueryString["ContentMenuItemIDs"] ?? String.Empty; }
    }

    // Number of pages needed to show totalItems at itemsPerPage per page.
    private int CalculateNumberOfPage( int itemsPerPage, int totalItems )
    {
        return (int) Math.Ceiling( (double) totalItems / itemsPerPage );
    }

    // Splits a delimited id/column list on ',', ':' or ';'.
    private string[] SplitColumn( string str )
    {
        char[] delimiter = new char[] { ',', ':', ';' };
        return str.Split( delimiter );
    }

    // Builds the product result list control and wires it to GetSearchResult.
    private void PopulateProductControl()
    {
        // LoadControl creates the instance; the previous code newed up a
        // BaseProductListControl only to discard it immediately.
        BaseProductListControl productListControl = LoadControl( String.Format(
            "{0}{1}",
            SystemConst.LayoutProductListPath,
            DataAccessContext.Configurations.GetValue( "DefaultProductListLayout" ) ) ) as BaseProductListControl;
        string[] productSearchType = DataAccessContext.Configurations.GetValueList( "ProductSearchBy" );
        if (!String.IsNullOrEmpty( ProductSearchType ))
        {
            productSearchType = SplitColumn( ProductSearchType );
        }
        productListControl.ID = "uxProductList";
        productListControl.DataRetriever = new DataAccessCallbacks.ProductListRetriever( GetSearchResult );
        productListControl.IsSearchResult = true;
        // Slot order here must match the indices used in GetSearchResult.
        productListControl.UserDefinedParameters = new object[] {
            CategoryIDs,
            DepartmentIDs,
            ManufacturerID,
            Keyword,
            Price1,
            Price2,
            productSearchType,
            SearchType,
            IsNewSearch};
        uxCatalogControlPanel.Controls.Add( productListControl );
    }

    // Builds the department result list control and wires it to GetSearchDepartmentResult.
    private void PopulateDepartmentProductControl()
    {
        // As above: let LoadControl create the instance instead of allocating
        // a throwaway BaseProductListControl first.
        BaseProductListControl productListControl = LoadControl( String.Format(
            "{0}{1}",
            SystemConst.LayoutProductListPath,
            DataAccessContext.Configurations.GetValue( "DefaultProductListLayout" ) ) ) as BaseProductListControl;
        productListControl.ID = "uxDepartmentProductList";
        productListControl.DataRetriever = new DataAccessCallbacks.ProductListRetriever( GetSearchDepartmentResult );
        productListControl.IsSearchResult = true;
        // Slot order here must match the indices used in GetSearchDepartmentResult.
        productListControl.UserDefinedParameters = new object[] {
            DepartmentIDs,
            Keyword,
            Price1,
            Price2,
            DataAccessContext.Configurations.GetValueList( "ProductSearchBy" ),
            SearchType };
        uxDepartmentPanel.Controls.Add( productListControl );
    }

    protected void Page_Load( object sender, EventArgs e )
    {
        if (IsNewSearch)
        {
            uxDefaultTitle.Text = "[$HeadProduct] for \"" + Keyword + "\"";
        }
        else
        {
            uxBackLink.Visible = false;
        }
        PopulateProductControl();
        // The department list is shown only for configured, non-new, non-quick searches.
        if (DataAccessContext.Configurations.GetBoolValue( "DepartmentListModuleDisplay", new StoreRetriever().GetStore() ) && !IsNewSearch)
        {
            if (!IsQuickSearch)
                PopulateDepartmentProductControl();
            else
                uxCheckDepartmentPanel.Visible = false;
        }
        else
        {
            uxCheckDepartmentPanel.Visible = false;
        }
        // Content results are shown only for non-new, non-quick searches.
        if (!IsNewSearch)
        {
            if (!IsQuickSearch)
            {
                uxContentList.DataRetriever = new DataAccessCallbacks.ContentListRetriever( GetSearchContentResult );
                uxContentList.UserDefinedParameters =
                    new object[] { ContentMenuItemIDs, Keyword, DataAccessContext.Configurations.GetValueList( "ContentSearchBy" ), SearchType };
            }
            else
            {
                uxAdvancedContentSearchResult.Visible = false;
            }
        }
        else
        {
            uxAdvancedContentSearchResult.Visible = false;
        }
    }

    // Product search callback.
    // userDefined slots: 0=category ids, 1=department ids, 2=manufacturer id,
    // 3=keyword, 4=price1, 5=price2, 6=search-by columns, 7=search type,
    // 8=IsNewSearch flag.
    private static IList<Product> GetSearchResult(
        Culture culture,
        string sortBy,
        int startIndex,
        int endIndex,
        object[] userDefined,
        out int howManyItems )
    {
        // A keyword (slot 3) is required; without one there is nothing to search.
        if (String.IsNullOrEmpty( userDefined[3].ToString() ))
        {
            howManyItems = 0;
            return null;
        }
        // NOTE: the original code re-tested slots 0-5 for emptiness here, but
        // the keyword guard above already guarantees slot 3 is non-empty,
        // which made that test always true and its else-branch dead code.
        if ((bool) userDefined[8])
        {
            return DataAccessContext.ProductRepository.AdvancedSearch(
                culture,
                (string) userDefined[0],
                (string) userDefined[1],
                (string) userDefined[2],
                sortBy,
                (string) userDefined[3],
                (string) userDefined[4],
                (string) userDefined[5],
                (string[]) userDefined[6],
                startIndex,
                endIndex,
                out howManyItems,
                new StoreRetriever().GetCurrentStoreID(),
                DataAccessContext.Configurations.GetValue( "RootCategory", new StoreRetriever().GetStore() ),
                DataAccessContext.Configurations.GetValue( "RootDepartment", new StoreRetriever().GetStore() ),
                (string) userDefined[7]
                );
        }
        else
        {
            return DataAccessContext.ProductRepository.AdvancedSearch(
                culture,
                (string) userDefined[0],
                sortBy,
                (string) userDefined[3],
                (string) userDefined[4],
                (string) userDefined[5],
                (string[]) userDefined[6],
                startIndex,
                endIndex,
                out howManyItems,
                new StoreRetriever().GetCurrentStoreID(),
                DataAccessContext.Configurations.GetValue( "RootCategory", new StoreRetriever().GetStore() ),
                (string) userDefined[7]
                );
        }
    }

    // Department search callback.
    // userDefined slots: 0=department ids, 1=keyword, 2=price1, 3=price2,
    // 4=search-by columns, 5=search type.
    private static IList<Product> GetSearchDepartmentResult(
        Culture culture,
        string sortBy,
        int startIndex,
        int endIndex,
        object[] userDefined,
        out int howManyItems )
    {
        if (!String.IsNullOrEmpty( userDefined[0].ToString() )
            || !String.IsNullOrEmpty( userDefined[1].ToString() )
            || !String.IsNullOrEmpty( userDefined[2].ToString() )
            || !String.IsNullOrEmpty( userDefined[3].ToString() ))
        {
            return DataAccessContext.ProductRepository.AdvancedSearchDepartment(
                culture,
                (string) userDefined[0],
                sortBy,
                (string) userDefined[1],
                (string) userDefined[2],
                (string) userDefined[3],
                (string[]) userDefined[4],
                startIndex,
                endIndex,
                out howManyItems,
                new StoreRetriever().GetCurrentStoreID(),
                DataAccessContext.Configurations.GetValue( "RootDepartment", new StoreRetriever().GetStore() ),
                (string) userDefined[5]
                );
        }
        else
        {
            howManyItems = 0;
            return null;
        }
    }

    // Content search callback.
    // userDefined slots: 0=content menu item ids, 1=keyword,
    // 2=search-by columns, 3=search type.
    private static IList<Vevo.Domain.Contents.Content> GetSearchContentResult(
        Culture cultureID,
        string sortBy,
        int startIndex,
        int endIndex,
        object[] userDefined,
        out int howManyItems )
    {
        if (!String.IsNullOrEmpty( userDefined[0].ToString() )
            || !String.IsNullOrEmpty( userDefined[1].ToString() ))
        {
            return DataAccessContext.ContentRepository.AdvancedSearch(
                cultureID,
                new StoreRetriever().GetCurrentStoreID(),
                (string) userDefined[0],
                sortBy,
                (string) userDefined[1],
                (string[]) userDefined[2],
                startIndex,
                endIndex,
                (string) userDefined[3],
                out howManyItems );
        }
        else
        {
            howManyItems = 0;
            return null;
        }
    }
}
| |
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Abp.Application.Editions;
using Abp.Application.Features;
using Abp.Authorization.Users;
using Abp.Collections.Extensions;
using Abp.Domain.Repositories;
using Abp.Domain.Services;
using Abp.Domain.Uow;
using Abp.Events.Bus.Entities;
using Abp.Events.Bus.Handlers;
using Abp.Localization;
using Abp.Runtime.Caching;
using Abp.UI;
using Abp.Zero;
namespace Abp.MultiTenancy
{
/// <summary>
/// Tenant manager.
/// Implements domain logic for <see cref="AbpTenant{TUser}"/>.
/// </summary>
/// <typeparam name="TTenant">Type of the application Tenant</typeparam>
/// <typeparam name="TUser">Type of the application User</typeparam>
public class AbpTenantManager<TTenant, TUser> : IDomainService,
IEventHandler<EntityChangedEventData<TTenant>>,
IEventHandler<EntityDeletedEventData<Edition>>
where TTenant : AbpTenant<TUser>
where TUser : AbpUserBase
{
// Collaborating services. EditionManager is assigned in the constructor;
// the other public setters are presumably filled in by property injection —
// TODO confirm against the framework's registration code.
public AbpEditionManager EditionManager { get; set; }
public ILocalizationManager LocalizationManager { get; set; }
// Localization source name; presumably used when resolving localized strings
// such as L("TenancyNameIsAlreadyTaken") below — verify against L's definition.
protected string LocalizationSourceName { get; set; }
public ICacheManager CacheManager { get; set; }
public IFeatureManager FeatureManager { get; set; }
public IUnitOfWorkManager UnitOfWorkManager { get; set; }
// Repositories for tenant entities and per-tenant feature settings.
protected IRepository<TTenant> TenantRepository { get; set; }
protected IRepository<TenantFeatureSetting, long> TenantFeatureRepository { get; set; }
// Read-through store for per-tenant feature values.
private readonly IAbpZeroFeatureValueStore _featureValueStore;
/// <summary>
/// Creates the tenant manager with its repositories, edition manager and
/// feature-value store; localization defaults to the null manager and the
/// Abp.Zero localization source name.
/// </summary>
public AbpTenantManager(
    IRepository<TTenant> tenantRepository,
    IRepository<TenantFeatureSetting, long> tenantFeatureRepository,
    AbpEditionManager editionManager,
    IAbpZeroFeatureValueStore featureValueStore)
{
    _featureValueStore = featureValueStore;
    TenantRepository = tenantRepository;
    TenantFeatureRepository = tenantFeatureRepository;
    EditionManager = editionManager;
    LocalizationManager = NullLocalizationManager.Instance;
    LocalizationSourceName = AbpZeroConsts.LocalizationSourceName;
}
// Queryable over all tenants, backed directly by the tenant repository.
public virtual IQueryable<TTenant> Tenants => TenantRepository.GetAll();
/// <summary>
/// Creates a new tenant after validating it and ensuring the tenancy name
/// is not already taken.
/// </summary>
public virtual async Task CreateAsync(TTenant tenant)
{
    await UnitOfWorkManager.WithUnitOfWorkAsync(async () =>
    {
        await ValidateTenantAsync(tenant);
        var existing = await TenantRepository.FirstOrDefaultAsync(t => t.TenancyName == tenant.TenancyName);
        if (existing != null)
        {
            throw new UserFriendlyException(string.Format(L("TenancyNameIsAlreadyTaken"), tenant.TenancyName));
        }
        await TenantRepository.InsertAsync(tenant);
    });
}
/// <summary>
/// Synchronous counterpart of CreateAsync: validates the tenant and inserts
/// it when the tenancy name is unique.
/// </summary>
public virtual void Create(TTenant tenant)
{
    UnitOfWorkManager.WithUnitOfWork(() =>
    {
        ValidateTenant(tenant);
        var existing = TenantRepository.FirstOrDefault(t => t.TenancyName == tenant.TenancyName);
        if (existing != null)
        {
            throw new UserFriendlyException(string.Format(L("TenancyNameIsAlreadyTaken"), tenant.TenancyName));
        }
        TenantRepository.Insert(tenant);
    });
}
/// <summary>
/// Updates a tenant, rejecting the change when another tenant already owns
/// the requested tenancy name.
/// </summary>
public virtual async Task UpdateAsync(TTenant tenant)
{
    await UnitOfWorkManager.WithUnitOfWorkAsync(async () =>
    {
        var duplicate = await TenantRepository.FirstOrDefaultAsync(
            t => t.TenancyName == tenant.TenancyName && t.Id != tenant.Id);
        if (duplicate != null)
        {
            throw new UserFriendlyException(string.Format(L("TenancyNameIsAlreadyTaken"), tenant.TenancyName));
        }
        await TenantRepository.UpdateAsync(tenant);
    });
}
/// <summary>
/// Synchronous counterpart of UpdateAsync.
/// </summary>
public virtual void Update(TTenant tenant)
{
    UnitOfWorkManager.WithUnitOfWork(() =>
    {
        var duplicate = TenantRepository.FirstOrDefault(
            t => t.TenancyName == tenant.TenancyName && t.Id != tenant.Id);
        if (duplicate != null)
        {
            throw new UserFriendlyException(string.Format(L("TenancyNameIsAlreadyTaken"), tenant.TenancyName));
        }
        TenantRepository.Update(tenant);
    });
}
/// <summary>
/// Finds a tenant by id, returning null when no such tenant exists.
/// </summary>
public virtual async Task<TTenant> FindByIdAsync(int id)
{
    return await UnitOfWorkManager.WithUnitOfWorkAsync(
        () => TenantRepository.FirstOrDefaultAsync(id));
}
/// <summary>
/// Synchronous counterpart of FindByIdAsync; null when not found.
/// </summary>
public virtual TTenant FindById(int id)
{
    return UnitOfWorkManager.WithUnitOfWork(
        () => TenantRepository.FirstOrDefault(id));
}
/// <summary>
/// Gets a tenant by id, throwing an AbpException when it does not exist.
/// </summary>
public virtual async Task<TTenant> GetByIdAsync(int id)
{
    var found = await FindByIdAsync(id);
    if (found == null)
    {
        throw new AbpException("There is no tenant with id: " + id);
    }
    return found;
}
/// <summary>
/// Synchronous counterpart of GetByIdAsync; throws when the tenant is missing.
/// </summary>
public virtual TTenant GetById(int id)
{
    var found = FindById(id);
    if (found == null)
    {
        throw new AbpException("There is no tenant with id: " + id);
    }
    return found;
}
/// <summary>
/// Finds a tenant by its tenancy name; null when no tenant matches.
/// </summary>
public virtual async Task<TTenant> FindByTenancyNameAsync(string tenancyName)
{
    return await UnitOfWorkManager.WithUnitOfWorkAsync(
        () => TenantRepository.FirstOrDefaultAsync(t => t.TenancyName == tenancyName));
}
/// <summary>
/// Synchronous counterpart of FindByTenancyNameAsync; null when no match.
/// </summary>
public virtual TTenant FindByTenancyName(string tenancyName)
{
    return UnitOfWorkManager.WithUnitOfWork(
        () => TenantRepository.FirstOrDefault(t => t.TenancyName == tenancyName));
}
/// <summary>
/// Deletes the given tenant inside a unit of work.
/// </summary>
public virtual async Task DeleteAsync(TTenant tenant)
{
    await UnitOfWorkManager.WithUnitOfWorkAsync(
        () => TenantRepository.DeleteAsync(tenant));
}
/// <summary>
/// Synchronous counterpart of DeleteAsync.
/// </summary>
public virtual void Delete(TTenant tenant)
{
    UnitOfWorkManager.WithUnitOfWork(
        () => TenantRepository.Delete(tenant));
}
// Reads a single feature value for the tenant from the feature-value store;
// null when the feature has no tenant-specific value.
public Task<string> GetFeatureValueOrNullAsync(int tenantId, string featureName)
    => _featureValueStore.GetValueOrNullAsync(tenantId, featureName);
public string GetFeatureValueOrNull(int tenantId, string featureName)
{
return _featureValueStore.GetValueOrNull(tenantId, featureName);
}
/// <summary>
/// Returns one name/value pair per defined feature for the given tenant,
/// falling back to the feature's default value when no tenant value is stored.
/// </summary>
public virtual async Task<IReadOnlyList<NameValue>> GetFeatureValuesAsync(int tenantId)
{
    var values = new List<NameValue>();
    foreach (var feature in FeatureManager.GetAll())
    {
        values.Add(new NameValue(feature.Name, await GetFeatureValueOrNullAsync(tenantId, feature.Name) ?? feature.DefaultValue));
    }
    return values;
}

/// <summary>Synchronous counterpart of <see cref="GetFeatureValuesAsync"/>.</summary>
public virtual IReadOnlyList<NameValue> GetFeatureValues(int tenantId)
{
    var values = new List<NameValue>();
    foreach (var feature in FeatureManager.GetAll())
    {
        values.Add(new NameValue(feature.Name, GetFeatureValueOrNull(tenantId, feature.Name) ?? feature.DefaultValue));
    }
    return values;
}
/// <summary>Sets several feature values for a tenant, one at a time; a null/empty array is a no-op.</summary>
public virtual async Task SetFeatureValuesAsync(int tenantId, params NameValue[] values)
{
    if (values.IsNullOrEmpty())
    {
        return;
    }
    foreach (var value in values)
    {
        await SetFeatureValueAsync(tenantId, value.Name, value.Value);
    }
}

/// <summary>Synchronous counterpart of <see cref="SetFeatureValuesAsync"/>.</summary>
public virtual void SetFeatureValues(int tenantId, params NameValue[] values)
{
    if (values.IsNullOrEmpty())
    {
        return;
    }
    foreach (var value in values)
    {
        SetFeatureValue(tenantId, value.Name, value.Value);
    }
}
/// <summary>Sets a feature value by tenant id; throws (via GetByIdAsync) when the tenant does not exist.</summary>
public virtual async Task SetFeatureValueAsync(int tenantId, string featureName, string value)
{
    await UnitOfWorkManager.WithUnitOfWorkAsync(async () =>
    {
        await SetFeatureValueAsync(await GetByIdAsync(tenantId), featureName, value);
    });
}

/// <summary>Synchronous counterpart of the tenant-id overload above.</summary>
public virtual void SetFeatureValue(int tenantId, string featureName, string value)
{
    UnitOfWorkManager.WithUnitOfWork(() =>
    {
        SetFeatureValue(GetById(tenantId), featureName, value);
    });
}
/// <summary>
/// Sets a feature value for a tenant. Stores a row only when the value differs from the
/// effective default (edition value, else feature default); otherwise any stored row is removed.
/// </summary>
/// <param name="tenant">Tenant whose feature is being changed.</param>
/// <param name="featureName">Name of the feature.</param>
/// <param name="value">New value to apply.</param>
public virtual async Task SetFeatureValueAsync(TTenant tenant, string featureName, string value)
{
    await UnitOfWorkManager.WithUnitOfWorkAsync(async () =>
    {
        //No need to change if it's already equals to the current value
        if (await GetFeatureValueOrNullAsync(tenant.Id, featureName) == value)
        {
            return;
        }
        //Get the current feature setting
        // NOTE: the read is done with the MayHaveTenant filter enabled and the unit of work
        // scoped to this tenant's id, so only this tenant's setting row is matched.
        TenantFeatureSetting currentSetting;
        using (UnitOfWorkManager.Current.EnableFilter(AbpDataFilters.MayHaveTenant))
        using (UnitOfWorkManager.Current.SetTenantId(tenant.Id))
        {
            currentSetting = await TenantFeatureRepository.FirstOrDefaultAsync(f => f.Name == featureName);
        }
        //Get the feature
        var feature = FeatureManager.GetOrNull(featureName);
        if (feature == null)
        {
            // Feature no longer defined: drop any orphaned stored value and stop.
            if (currentSetting != null)
            {
                await TenantFeatureRepository.DeleteAsync(currentSetting);
            }
            return;
        }
        //Determine default value
        // Edition-level value (if the tenant has an edition) takes precedence over the feature default.
        var defaultValue = tenant.EditionId.HasValue
            ? (await EditionManager.GetFeatureValueOrNullAsync(tenant.EditionId.Value, featureName) ?? feature.DefaultValue)
            : feature.DefaultValue;
        //No need to store value if it's default
        if (value == defaultValue)
        {
            if (currentSetting != null)
            {
                await TenantFeatureRepository.DeleteAsync(currentSetting);
            }
            return;
        }
        //Insert/update the feature value
        if (currentSetting == null)
        {
            await TenantFeatureRepository.InsertAsync(new TenantFeatureSetting(tenant.Id, featureName, value));
        }
        else
        {
            currentSetting.Value = value;
        }
    });
}
/// <summary>
/// Synchronous counterpart of <see cref="SetFeatureValueAsync(TTenant, string, string)"/>:
/// stores a row only when the value differs from the effective default, otherwise removes it.
/// </summary>
public virtual void SetFeatureValue(TTenant tenant, string featureName, string value)
{
    UnitOfWorkManager.WithUnitOfWork(() =>
    {
        //No need to change if it's already equals to the current value
        if (GetFeatureValueOrNull(tenant.Id, featureName) == value)
        {
            return;
        }
        //Get the current feature setting
        // Read under the MayHaveTenant filter with the unit of work scoped to this tenant.
        TenantFeatureSetting currentSetting;
        using (UnitOfWorkManager.Current.EnableFilter(AbpDataFilters.MayHaveTenant))
        using (UnitOfWorkManager.Current.SetTenantId(tenant.Id))
        {
            currentSetting = TenantFeatureRepository.FirstOrDefault(f => f.Name == featureName);
        }
        //Get the feature
        var feature = FeatureManager.GetOrNull(featureName);
        if (feature == null)
        {
            // Feature no longer defined: drop any orphaned stored value and stop.
            if (currentSetting != null)
            {
                TenantFeatureRepository.Delete(currentSetting);
            }
            return;
        }
        //Determine default value
        // Edition-level value (when the tenant has an edition) takes precedence over the feature default.
        var defaultValue = tenant.EditionId.HasValue
            ? (EditionManager.GetFeatureValueOrNull(tenant.EditionId.Value, featureName) ?? feature.DefaultValue)
            : feature.DefaultValue;
        //No need to store value if it's default
        if (value == defaultValue)
        {
            if (currentSetting != null)
            {
                TenantFeatureRepository.Delete(currentSetting);
            }
            return;
        }
        //Insert/update the feature value
        if (currentSetting == null)
        {
            TenantFeatureRepository.Insert(new TenantFeatureSetting(tenant.Id, featureName, value));
        }
        else
        {
            currentSetting.Value = value;
        }
    });
}
/// <summary>
/// Resets all custom feature settings for a tenant.
/// Tenant will have features according to it's edition.
/// </summary>
/// <param name="tenantId">Tenant Id</param>
public virtual async Task ResetAllFeaturesAsync(int tenantId)
{
    await UnitOfWorkManager.WithUnitOfWorkAsync(async () =>
    {
        // Deletion runs with the MayHaveTenant filter enabled and the unit of work
        // scoped to the target tenant so only that tenant's rows are removed.
        using (UnitOfWorkManager.Current.EnableFilter(AbpDataFilters.MayHaveTenant))
        using (UnitOfWorkManager.Current.SetTenantId(tenantId))
        {
            await TenantFeatureRepository.DeleteAsync(f => f.TenantId == tenantId);
        }
    });
}

/// <summary>
/// Resets all custom feature settings for a tenant.
/// Tenant will have features according to it's edition.
/// </summary>
/// <param name="tenantId">Tenant Id</param>
public virtual void ResetAllFeatures(int tenantId)
{
    UnitOfWorkManager.WithUnitOfWork(() =>
    {
        // Same filter/scope handling as the async overload above.
        using (UnitOfWorkManager.Current.EnableFilter(AbpDataFilters.MayHaveTenant))
        using (UnitOfWorkManager.Current.SetTenantId(tenantId))
        {
            TenantFeatureRepository.Delete(f => f.TenantId == tenantId);
        }
    });
}
/// <summary>Validates a tenant before create/update; currently only the tenancy name is checked.</summary>
protected virtual async Task ValidateTenantAsync(TTenant tenant)
{
    await ValidateTenancyNameAsync(tenant.TenancyName);
}

/// <summary>Synchronous counterpart of <see cref="ValidateTenantAsync"/>.</summary>
protected virtual void ValidateTenant(TTenant tenant)
{
    ValidateTenancyName(tenant.TenancyName);
}

/// <summary>Validates the tenancy name against the tenant-name regex.</summary>
/// <exception cref="UserFriendlyException">Thrown when the name does not match the pattern.</exception>
protected virtual Task ValidateTenancyNameAsync(string tenancyName)
{
    if (!Regex.IsMatch(tenancyName, AbpTenant<TUser>.TenancyNameRegex))
    {
        throw new UserFriendlyException(L("InvalidTenancyName"));
    }
    return Task.FromResult(0);
}

/// <summary>Synchronous counterpart of <see cref="ValidateTenancyNameAsync"/>.</summary>
/// <exception cref="UserFriendlyException">Thrown when the name does not match the pattern.</exception>
protected virtual void ValidateTenancyName(string tenancyName)
{
    if (!Regex.IsMatch(tenancyName, AbpTenant<TUser>.TenancyNameRegex))
    {
        throw new UserFriendlyException(L("InvalidTenancyName"));
    }
}
/// <summary>Gets a localized string from this manager's localization source.</summary>
protected virtual string L(string name)
{
    return LocalizationManager.GetString(LocalizationSourceName, name);
}

/// <summary>Gets a localized string for a specific culture.</summary>
protected virtual string L(string name, CultureInfo cultureInfo)
{
    return LocalizationManager.GetString(LocalizationSourceName, name, cultureInfo);
}
/// <summary>Invalidates the tenant feature cache entry when a (persisted) tenant changes.</summary>
public void HandleEvent(EntityChangedEventData<TTenant> eventData)
{
    // Transient entities were never cached, so there is nothing to evict.
    if (eventData.Entity.IsTransient())
    {
        return;
    }
    CacheManager.GetTenantFeatureCache().Remove(eventData.Entity.Id);
}

/// <summary>Clears the edition reference from all tenants when that edition is deleted.</summary>
public virtual void HandleEvent(EntityDeletedEventData<Edition> eventData)
{
    UnitOfWorkManager.WithUnitOfWork(() =>
    {
        var relatedTenants = TenantRepository.GetAllList(t => t.EditionId == eventData.Entity.Id);
        foreach (var relatedTenant in relatedTenants)
        {
            relatedTenant.EditionId = null;
        }
    });
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;
namespace System.IO
{
/// <summary>Provides an implementation of FileSystem for Unix systems.</summary>
internal sealed partial class UnixFileSystem : FileSystem
{
    // Maximum path lengths come straight from the native layer.
    public override int MaxPath { get { return Interop.Sys.MaxPath; } }
    public override int MaxDirectoryPath { get { return Interop.Sys.MaxPath; } }

    /// <summary>Opens a file stream backed by the Unix file-stream implementation.</summary>
    public override FileStreamBase Open(string fullPath, FileMode mode, FileAccess access, FileShare share, int bufferSize, FileOptions options, FileStream parent)
    {
        return new UnixFileStream(fullPath, mode, access, share, bufferSize, options, parent);
    }
/// <summary>
/// Copies a file. If <paramref name="destFullPath"/> is an existing directory, the source
/// file name is appended to it. With overwrite=false an existing destination fails (CreateNew).
/// </summary>
public override void CopyFile(string sourceFullPath, string destFullPath, bool overwrite)
{
    // The destination path may just be a directory into which the file should be copied.
    // If it is, append the filename from the source onto the destination directory
    if (DirectoryExists(destFullPath))
    {
        destFullPath = Path.Combine(destFullPath, Path.GetFileName(sourceFullPath));
    }
    // Copy the contents of the file from the source to the destination, creating the destination in the process
    using (var src = new FileStream(sourceFullPath, FileMode.Open, FileAccess.Read, FileShare.Read, FileStream.DefaultBufferSize, FileOptions.None))
    using (var dst = new FileStream(destFullPath, overwrite ? FileMode.Create : FileMode.CreateNew, FileAccess.ReadWrite, FileShare.None, FileStream.DefaultBufferSize, FileOptions.None))
    {
        Interop.CheckIo(Interop.Sys.CopyFile(src.SafeFileHandle, dst.SafeFileHandle));
    }
}
/// <summary>
/// Moves a file without overwriting an existing destination: link() then unlink() of the
/// source, falling back to a copy when the move crosses devices (EXDEV).
/// </summary>
public override void MoveFile(string sourceFullPath, string destFullPath)
{
    // The desired behavior for Move(source, dest) is to not overwrite the destination file
    // if it exists. Since rename(source, dest) will replace the file at 'dest' if it exists,
    // link/unlink are used instead. Note that the Unix FileSystemWatcher will treat a Move
    // as a Creation and Deletion instead of a Rename and thus differ from Windows.
    if (Interop.Sys.Link(sourceFullPath, destFullPath) < 0)
    {
        Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
        if (errorInfo.Error == Interop.Error.EXDEV) // rename fails across devices / mount points
        {
            CopyFile(sourceFullPath, destFullPath, overwrite: false);
        }
        else if (errorInfo.Error == Interop.Error.ENOENT && !Directory.Exists(Path.GetDirectoryName(destFullPath))) // The parent directory of destFile can't be found
        {
            // Windows distinguishes between whether the directory or the file isn't found,
            // and throws a different exception in these cases. We attempt to approximate that
            // here; there is a race condition here, where something could change between
            // when the error occurs and our checks, but it's the best we can do, and the
            // worst case in such a race condition (which could occur if the file system is
            // being manipulated concurrently with these checks) is that we throw a
            // FileNotFoundException instead of DirectoryNotFoundexception.
            throw Interop.GetExceptionForIoErrno(errorInfo, destFullPath, isDirectory: true);
        }
        else
        {
            throw Interop.GetExceptionForIoErrno(errorInfo);
        }
    }
    // Source is removed after the link/copy succeeded, completing the "move".
    DeleteFile(sourceFullPath);
}
/// <summary>Deletes a file via unlink(); a missing file (ENOENT) is treated as success.</summary>
public override void DeleteFile(string fullPath)
{
    if (Interop.Sys.Unlink(fullPath) < 0)
    {
        Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
        // ENOENT means it already doesn't exist; nop
        if (errorInfo.Error != Interop.Error.ENOENT)
        {
            // EISDIR is reported as access-denied to match the Windows behavior for File.Delete on a directory.
            if (errorInfo.Error == Interop.Error.EISDIR)
                errorInfo = Interop.Error.EACCES.Info();
            throw Interop.GetExceptionForIoErrno(errorInfo, fullPath);
        }
    }
}
/// <summary>
/// Creates a directory and any missing parents, mirroring the Win32 implementation's
/// algorithm: walk up the path collecting missing components, then mkdir them root-first.
/// </summary>
public override void CreateDirectory(string fullPath)
{
    // NOTE: This logic is primarily just carried forward from Win32FileSystem.CreateDirectory.
    int length = fullPath.Length;
    // We need to trim the trailing slash or the code will try to create 2 directories of the same name.
    if (length >= 2 && PathHelpers.EndsInDirectorySeparator(fullPath))
    {
        length--;
    }
    // For paths that are only // or ///
    if (length == 2 && PathInternal.IsDirectorySeparator(fullPath[1]))
    {
        throw new IOException(SR.Format(SR.IO_CannotCreateDirectory, fullPath));
    }
    // We can save a bunch of work if the directory we want to create already exists.
    if (DirectoryExists(fullPath))
    {
        return;
    }
    // Attempt to figure out which directories don't exist, and only create the ones we need.
    bool somepathexists = false;
    Stack<string> stackDir = new Stack<string>();
    int lengthRoot = PathInternal.GetRootLength(fullPath);
    if (length > lengthRoot)
    {
        // Walk from the leaf toward the root, pushing each missing component;
        // stop as soon as an existing ancestor is found.
        int i = length - 1;
        while (i >= lengthRoot && !somepathexists)
        {
            string dir = fullPath.Substring(0, i + 1);
            if (!DirectoryExists(dir)) // Create only the ones missing
            {
                stackDir.Push(dir);
            }
            else
            {
                somepathexists = true;
            }
            while (i > lengthRoot && !PathInternal.IsDirectorySeparator(fullPath[i]))
            {
                i--;
            }
            i--;
        }
    }
    int count = stackDir.Count;
    if (count == 0 && !somepathexists)
    {
        // Nothing to create and nothing exists: the root itself must be missing.
        string root = Directory.InternalGetDirectoryRoot(fullPath);
        if (!DirectoryExists(root))
        {
            throw Interop.GetExceptionForIoErrno(Interop.Error.ENOENT.Info(), fullPath, isDirectory: true);
        }
        return;
    }
    // Create all the directories
    int result = 0;
    Interop.ErrorInfo firstError = default(Interop.ErrorInfo);
    string errorString = fullPath;
    while (stackDir.Count > 0)
    {
        string name = stackDir.Pop();
        if (name.Length >= MaxDirectoryPath)
        {
            throw new PathTooLongException(SR.IO_PathTooLong);
        }
        // The mkdir command uses 0777 by default (it'll be AND'd with the process umask internally).
        // We do the same.
        result = Interop.Sys.MkDir(name, (int)Interop.Sys.Permissions.Mask);
        if (result < 0 && firstError.Error == 0)
        {
            Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
            // While we tried to avoid creating directories that don't
            // exist above, there are a few cases that can fail, e.g.
            // a race condition where another process or thread creates
            // the directory first, or there's a file at the location.
            if (errorInfo.Error != Interop.Error.EEXIST)
            {
                firstError = errorInfo;
            }
            else if (FileExists(name) || (!DirectoryExists(name, out errorInfo) && errorInfo.Error == Interop.Error.EACCES))
            {
                // If there's a file in this directory's place, or if we have ERROR_ACCESS_DENIED when checking if the directory already exists throw.
                firstError = errorInfo;
                errorString = name;
            }
        }
    }
    // Only throw an exception if creating the exact directory we wanted failed to work correctly.
    if (result < 0 && firstError.Error != 0)
    {
        throw Interop.GetExceptionForIoErrno(firstError, errorString, isDirectory: true);
    }
}
/// <summary>Moves a directory via rename(); EACCES is surfaced as IOException to match Win32.</summary>
public override void MoveDirectory(string sourceFullPath, string destFullPath)
{
    if (Interop.Sys.Rename(sourceFullPath, destFullPath) < 0)
    {
        Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
        switch (errorInfo.Error)
        {
            case Interop.Error.EACCES: // match Win32 exception
                throw new IOException(SR.Format(SR.UnauthorizedAccess_IODenied_Path, sourceFullPath), errorInfo.RawErrno);
            default:
                throw Interop.GetExceptionForIoErrno(errorInfo, sourceFullPath, isDirectory: true);
        }
    }
}
/// <summary>Removes a directory, optionally recursively; throws when it does not exist.</summary>
public override void RemoveDirectory(string fullPath, bool recursive)
{
    if (!DirectoryExists(fullPath))
    {
        throw Interop.GetExceptionForIoErrno(Interop.Error.ENOENT.Info(), fullPath, isDirectory: true);
    }
    RemoveDirectoryInternal(fullPath, recursive, throwOnTopLevelDirectoryNotFound: true);
}
/// <summary>
/// Deletes the directory at <paramref name="fullPath"/>, first deleting its files and
/// subdirectories when <paramref name="recursive"/> is true.
/// </summary>
/// <param name="fullPath">Directory to remove.</param>
/// <param name="recursive">Whether to delete the directory's contents first.</param>
/// <param name="throwOnTopLevelDirectoryNotFound">
/// When false, an ENOENT from rmdir is ignored (used for children that may vanish concurrently).
/// </param>
private void RemoveDirectoryInternal(string fullPath, bool recursive, bool throwOnTopLevelDirectoryNotFound)
{
    Exception firstException = null;
    if (recursive)
    {
        try
        {
            foreach (string item in EnumeratePaths(fullPath, "*", SearchOption.TopDirectoryOnly, SearchTarget.Both))
            {
                if (!ShouldIgnoreDirectory(Path.GetFileName(item)))
                {
                    try
                    {
                        if (DirectoryExists(item))
                        {
                            RemoveDirectoryInternal(item, recursive, throwOnTopLevelDirectoryNotFound: false);
                        }
                        else
                        {
                            DeleteFile(item);
                        }
                    }
                    catch (Exception exc)
                    {
                        // BUGFIX: previously this checked "firstException != null", which is
                        // never true before an assignment (firstException starts null), so
                        // every child-deletion failure was silently dropped. Record only the
                        // FIRST failure and keep deleting the remaining entries.
                        if (firstException == null)
                        {
                            firstException = exc;
                        }
                    }
                }
            }
        }
        catch (Exception exc)
        {
            // BUGFIX: same inverted null check as above; keep the first error seen.
            if (firstException == null)
            {
                firstException = exc;
            }
        }
        if (firstException != null)
        {
            throw firstException;
        }
    }
    if (Interop.Sys.RmDir(fullPath) < 0)
    {
        Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
        switch (errorInfo.Error)
        {
            case Interop.Error.EACCES:
            case Interop.Error.EPERM:
            case Interop.Error.EROFS:
            case Interop.Error.EISDIR:
                throw new IOException(SR.Format(SR.UnauthorizedAccess_IODenied_Path, fullPath)); // match Win32 exception
            case Interop.Error.ENOENT:
                if (!throwOnTopLevelDirectoryNotFound)
                {
                    return;
                }
                goto default;
            default:
                throw Interop.GetExceptionForIoErrno(errorInfo, fullPath, isDirectory: true);
        }
    }
}
/// <summary>Returns true when the path exists and is a directory.</summary>
public override bool DirectoryExists(string fullPath)
{
    Interop.ErrorInfo ignored;
    return DirectoryExists(fullPath, out ignored);
}

/// <summary>Directory check that also surfaces the stat() error info on failure.</summary>
private static bool DirectoryExists(string fullPath, out Interop.ErrorInfo errorInfo)
{
    return FileExists(fullPath, Interop.Sys.FileTypes.S_IFDIR, out errorInfo);
}

/// <summary>Returns true when the path exists and is a regular file.</summary>
public override bool FileExists(string fullPath)
{
    Interop.ErrorInfo ignored;
    return FileExists(fullPath, Interop.Sys.FileTypes.S_IFREG, out ignored);
}

/// <summary>
/// Stats the path and compares its file-type bits against <paramref name="fileType"/>;
/// on stat failure returns false and reports the error via <paramref name="errorInfo"/>.
/// </summary>
private static bool FileExists(string fullPath, int fileType, out Interop.ErrorInfo errorInfo)
{
    Interop.Sys.FileStatus fileinfo;
    errorInfo = default(Interop.ErrorInfo);
    int result = Interop.Sys.Stat(fullPath, out fileinfo);
    if (result < 0)
    {
        errorInfo = Interop.Sys.GetLastErrorInfo();
        return false;
    }
    return (fileinfo.Mode & Interop.Sys.FileTypes.S_IFMT) == fileType;
}
/// <summary>Enumerates matching entries as path strings.</summary>
public override IEnumerable<string> EnumeratePaths(string path, string searchPattern, SearchOption searchOption, SearchTarget searchTarget)
{
    return new FileSystemEnumerable<string>(path, searchPattern, searchOption, searchTarget, (p, _) => p);
}

/// <summary>
/// Enumerates matching entries as FileInfo/DirectoryInfo instances, choosing the
/// result type per entry when both files and directories are requested.
/// </summary>
public override IEnumerable<FileSystemInfo> EnumerateFileSystemInfos(string fullPath, string searchPattern, SearchOption searchOption, SearchTarget searchTarget)
{
    switch (searchTarget)
    {
        case SearchTarget.Files:
            return new FileSystemEnumerable<FileInfo>(fullPath, searchPattern, searchOption, searchTarget, (path, isDir) =>
                new FileInfo(path, null));
        case SearchTarget.Directories:
            return new FileSystemEnumerable<DirectoryInfo>(fullPath, searchPattern, searchOption, searchTarget, (path, isDir) =>
                new DirectoryInfo(path, null));
        default:
            return new FileSystemEnumerable<FileSystemInfo>(fullPath, searchPattern, searchOption, searchTarget, (path, isDir) => isDir ?
                (FileSystemInfo)new DirectoryInfo(path, null) :
                (FileSystemInfo)new FileInfo(path, null));
    }
}
/// <summary>Lazily enumerates a directory (optionally recursively), translating each match to T.</summary>
private sealed class FileSystemEnumerable<T> : IEnumerable<T>
{
    // User-supplied and fully-resolved form of the starting directory.
    private readonly PathPair _initialDirectory;
    private readonly string _searchPattern;
    private readonly SearchOption _searchOption;
    private readonly bool _includeFiles;
    private readonly bool _includeDirectories;
    // Converts (userPath, isDirectory) to the enumerator's result type.
    private readonly Func<string, bool, T> _translateResult;
    // Eagerly-created first enumerator; handed out once, then recreated per GetEnumerator call.
    private IEnumerator<T> _firstEnumerator;
/// <summary>
/// Validates the path and pattern and prepares the enumeration. A pattern that
/// normalizes to empty leaves the fields unset, producing an empty enumerable.
/// The first enumerator is created eagerly so path errors surface here, not on iteration.
/// </summary>
internal FileSystemEnumerable(
    string userPath, string searchPattern,
    SearchOption searchOption, SearchTarget searchTarget,
    Func<string, bool, T> translateResult)
{
    // Basic validation of the input path
    if (userPath == null)
    {
        throw new ArgumentNullException("path");
    }
    if (string.IsNullOrWhiteSpace(userPath))
    {
        throw new ArgumentException(SR.Argument_EmptyPath, "path");
    }
    // Validate and normalize the search pattern. If after doing so it's empty,
    // matching Win32 behavior we can skip all additional validation and effectively
    // return an empty enumerable.
    searchPattern = NormalizeSearchPattern(searchPattern);
    if (searchPattern.Length > 0)
    {
        PathHelpers.CheckSearchPattern(searchPattern);
        PathHelpers.ThrowIfEmptyOrRootedPath(searchPattern);
        // If the search pattern contains any paths, make sure we factor those into
        // the user path, and then trim them off.
        int lastSlash = searchPattern.LastIndexOf(Path.DirectorySeparatorChar);
        if (lastSlash >= 0)
        {
            if (lastSlash >= 1)
            {
                userPath = Path.Combine(userPath, searchPattern.Substring(0, lastSlash));
            }
            searchPattern = searchPattern.Substring(lastSlash + 1);
        }
        string fullPath = Path.GetFullPath(userPath);
        // Store everything for the enumerator
        _initialDirectory = new PathPair(userPath, fullPath);
        _searchPattern = searchPattern;
        _searchOption = searchOption;
        _includeFiles = (searchTarget & SearchTarget.Files) != 0;
        _includeDirectories = (searchTarget & SearchTarget.Directories) != 0;
        _translateResult = translateResult;
    }
    // Open the first enumerator so that any errors are propagated synchronously.
    _firstEnumerator = Enumerate();
}
/// <summary>Hands out the eagerly-created enumerator exactly once, then creates fresh ones.</summary>
public IEnumerator<T> GetEnumerator()
{
    return Interlocked.Exchange(ref _firstEnumerator, null) ?? Enumerate();
}

IEnumerator IEnumerable.GetEnumerator()
{
    return GetEnumerator();
}

/// <summary>Starts enumeration; a null FullPath (empty search pattern) yields nothing.</summary>
private IEnumerator<T> Enumerate()
{
    return Enumerate(
        _initialDirectory.FullPath != null ?
        OpenDirectory(_initialDirectory.FullPath) :
        null);
}
/// <summary>
/// Core iterator: reads entries from the open directory handle, classifies each as
/// file or directory, yields matches, and (for AllDirectories) queues subdirectories
/// on a stack to be opened after the current handle is disposed.
/// </summary>
private IEnumerator<T> Enumerate(Microsoft.Win32.SafeHandles.SafeDirectoryHandle dirHandle)
{
    if (dirHandle == null)
    {
        // Empty search
        yield break;
    }
    Debug.Assert(!dirHandle.IsInvalid);
    Debug.Assert(!dirHandle.IsClosed);
    // Maintain a stack of the directories to explore, in the case of SearchOption.AllDirectories
    // Lazily-initialized only if we find subdirectories that will be explored.
    Stack<PathPair> toExplore = null;
    PathPair dirPath = _initialDirectory;
    while (dirHandle != null)
    {
        try
        {
            // Read each entry from the enumerator
            Interop.Sys.DirectoryEntry dirent;
            while (Interop.Sys.ReadDir(dirHandle, out dirent) == 0)
            {
                // Get from the dir entry whether the entry is a file or directory.
                // We classify everything as a file unless we know it to be a directory.
                bool isDir;
                if (dirent.InodeType == Interop.Sys.NodeType.DT_DIR)
                {
                    // We know it's a directory.
                    isDir = true;
                }
                else if (dirent.InodeType == Interop.Sys.NodeType.DT_LNK || dirent.InodeType == Interop.Sys.NodeType.DT_UNKNOWN)
                {
                    // It's a symlink or unknown: stat to it to see if we can resolve it to a directory.
                    // If we can't (e.g.symlink to a file, broken symlink, etc.), we'll just treat it as a file.
                    Interop.ErrorInfo errnoIgnored;
                    isDir = DirectoryExists(Path.Combine(dirPath.FullPath, dirent.InodeName), out errnoIgnored);
                }
                else
                {
                    // Otherwise, treat it as a file. This includes regular files, FIFOs, etc.
                    isDir = false;
                }
                // Yield the result if the user has asked for it. In the case of directories,
                // always explore it by pushing it onto the stack, regardless of whether
                // we're returning directories.
                if (isDir)
                {
                    if (!ShouldIgnoreDirectory(dirent.InodeName))
                    {
                        string userPath = null;
                        if (_searchOption == SearchOption.AllDirectories)
                        {
                            if (toExplore == null)
                            {
                                toExplore = new Stack<PathPair>();
                            }
                            userPath = Path.Combine(dirPath.UserPath, dirent.InodeName);
                            toExplore.Push(new PathPair(userPath, Path.Combine(dirPath.FullPath, dirent.InodeName)));
                        }
                        if (_includeDirectories &&
                            Interop.Sys.FnMatch(_searchPattern, dirent.InodeName, Interop.Sys.FnMatchFlags.FNM_NONE) == 0)
                        {
                            yield return _translateResult(userPath ?? Path.Combine(dirPath.UserPath, dirent.InodeName), /*isDirectory*/true);
                        }
                    }
                }
                else if (_includeFiles &&
                         Interop.Sys.FnMatch(_searchPattern, dirent.InodeName, Interop.Sys.FnMatchFlags.FNM_NONE) == 0)
                {
                    yield return _translateResult(Path.Combine(dirPath.UserPath, dirent.InodeName), /*isDirectory*/false);
                }
            }
        }
        finally
        {
            // Close the directory enumerator
            dirHandle.Dispose();
            dirHandle = null;
        }
        if (toExplore != null && toExplore.Count > 0)
        {
            // Open the next directory.
            dirPath = toExplore.Pop();
            dirHandle = OpenDirectory(dirPath.FullPath);
        }
    }
}
/// <summary>
/// Canonicalizes a search pattern: "." and "*.*" become "*", and a pattern ending
/// in a directory separator gets "*" appended; anything else passes through untouched.
/// </summary>
private static string NormalizeSearchPattern(string searchPattern)
{
    if (searchPattern == "." || searchPattern == "*.*")
    {
        return "*";
    }
    if (PathHelpers.EndsInDirectorySeparator(searchPattern))
    {
        return searchPattern + "*";
    }
    return searchPattern;
}
/// <summary>Opens a directory handle for reading entries, translating a failure into an IO exception.</summary>
private static Microsoft.Win32.SafeHandles.SafeDirectoryHandle OpenDirectory(string fullPath)
{
    Microsoft.Win32.SafeHandles.SafeDirectoryHandle handle = Interop.Sys.OpenDir(fullPath);
    if (handle.IsInvalid)
    {
        throw Interop.GetExceptionForIoErrno(Interop.Sys.GetLastErrorInfo(), fullPath, isDirectory: true);
    }
    return handle;
}
}
/// <summary>Determines whether a directory entry is one of the special self/parent links.</summary>
/// <param name="name">The entry name to evaluate.</param>
/// <returns>true if the name is "." or ".."; otherwise, false.</returns>
private static bool ShouldIgnoreDirectory(string name)
{
    switch (name)
    {
        case ".":
        case "..":
            return true;
        default:
            return false;
    }
}
/// <summary>Returns the process working directory via getcwd.</summary>
public override string GetCurrentDirectory()
{
    return Interop.Sys.GetCwd();
}

/// <summary>Changes the process working directory via chdir, throwing on failure.</summary>
public override void SetCurrentDirectory(string fullPath)
{
    Interop.CheckIo(Interop.Sys.ChDir(fullPath), fullPath);
}
/// <summary>Gets file attributes by delegating to FileInfo.</summary>
public override FileAttributes GetAttributes(string fullPath)
{
    return new FileInfo(fullPath, null).Attributes;
}

/// <summary>Sets file attributes by delegating to FileInfo.</summary>
public override void SetAttributes(string fullPath, FileAttributes attributes)
{
    new FileInfo(fullPath, null).Attributes = attributes;
}

/// <summary>Gets the creation time by delegating to FileInfo.</summary>
public override DateTimeOffset GetCreationTime(string fullPath)
{
    return new FileInfo(fullPath, null).CreationTime;
}
/// <summary>Sets the creation time on a file or directory, as indicated by <paramref name="asDirectory"/>.</summary>
public override void SetCreationTime(string fullPath, DateTimeOffset time, bool asDirectory)
{
    IFileSystemObject info;
    if (asDirectory)
    {
        info = (IFileSystemObject)new DirectoryInfo(fullPath, null);
    }
    else
    {
        info = (IFileSystemObject)new FileInfo(fullPath, null);
    }
    info.CreationTime = time;
}
/// <summary>Gets the last-access time by delegating to FileInfo.</summary>
public override DateTimeOffset GetLastAccessTime(string fullPath)
{
    return new FileInfo(fullPath, null).LastAccessTime;
}

/// <summary>Sets the last-access time on a file or directory, as indicated by <paramref name="asDirectory"/>.</summary>
public override void SetLastAccessTime(string fullPath, DateTimeOffset time, bool asDirectory)
{
    IFileSystemObject info = asDirectory ?
        (IFileSystemObject)new DirectoryInfo(fullPath, null) :
        (IFileSystemObject)new FileInfo(fullPath, null);
    info.LastAccessTime = time;
}

/// <summary>Gets the last-write time by delegating to FileInfo.</summary>
public override DateTimeOffset GetLastWriteTime(string fullPath)
{
    return new FileInfo(fullPath, null).LastWriteTime;
}

/// <summary>Sets the last-write time on a file or directory, as indicated by <paramref name="asDirectory"/>.</summary>
public override void SetLastWriteTime(string fullPath, DateTimeOffset time, bool asDirectory)
{
    IFileSystemObject info = asDirectory ?
        (IFileSystemObject)new DirectoryInfo(fullPath, null) :
        (IFileSystemObject)new FileInfo(fullPath, null);
    info.LastWriteTime = time;
}
/// <summary>Creates the DirectoryInfo- or FileInfo-backed file system object for the path.</summary>
public override IFileSystemObject GetFileSystemInfo(string fullPath, bool asDirectory)
{
    if (asDirectory)
    {
        return (IFileSystemObject)new DirectoryInfo(fullPath, null);
    }
    return (IFileSystemObject)new FileInfo(fullPath, null);
}
}
}
| |
using Microsoft.AspNetCore.Mvc;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.Extensions.Options;
using R4Mvc.Tools.Extensions;
using System.Collections.Generic;
using System.Linq;
using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory;
namespace R4Mvc.Tools.Services
{
/// <summary>Generates the R4Mvc partial/derived controller classes from Roslyn controller symbols.</summary>
public class ControllerGeneratorService : IControllerGeneratorService
{
    // Locates view files so they can be exposed on the generated Views class.
    private readonly IViewLocatorService _viewLocator;
    private readonly Settings _settings;

    public ControllerGeneratorService(IViewLocatorService viewLocator, IOptions<Settings> settings)
    {
        _viewLocator = viewLocator;
        _settings = settings.Value;
    }
/// <summary>
/// Resolves the MVC area name for a controller from its [Area]-derived attribute.
/// Returns the attribute's constructor argument for a direct AreaAttribute, or, for a
/// direct subclass, the literal passed in its constructor initializer (base("...")).
/// Returns an empty string when no area can be determined.
/// </summary>
public string GetControllerArea(INamedTypeSymbol controllerSymbol)
{
    var areaAttribute = controllerSymbol.GetAttributes()
        .Where(a => a.AttributeClass.InheritsFrom<AreaAttribute>())
        .FirstOrDefault();
    if (areaAttribute == null)
        return string.Empty;
    if (areaAttribute.AttributeClass.ToDisplayString() == typeof(AreaAttribute).FullName)
        return areaAttribute.ConstructorArguments[0].Value?.ToString();
    // parse the constructor to get the area name from derived types
    if (areaAttribute.AttributeClass.BaseType.ToDisplayString() == typeof(AreaAttribute).FullName)
    {
        // direct descendant. Reading the area name from the constructor
        // NOTE(review): First() assumes the derived attribute's syntax contains a constructor
        // initializer; a derived attribute without one would throw here — confirm inputs.
        var constructorInit = areaAttribute.AttributeConstructor.DeclaringSyntaxReferences
            .SelectMany(s => s.SyntaxTree.GetRoot().DescendantNodesAndSelf().OfType<ClassDeclarationSyntax>().Where(c => c.Identifier.Text == areaAttribute.AttributeClass.Name))
            .SelectMany(s => s.DescendantNodesAndSelf().OfType<ConstructorInitializerSyntax>())
            .First();
        if (constructorInit.ArgumentList.Arguments.Count > 0)
        {
            var arg = constructorInit.ArgumentList.Arguments[0];
            if (arg.Expression is LiteralExpressionSyntax litExp)
            {
                return litExp.Token.ValueText;
            }
        }
    }
    return string.Empty;
}
/// <summary>
/// Builds the user-side partial class for a controller: constructors (default when the
/// controller declares none, plus a protected dummy), redirect helpers, parameterless
/// action stubs, and the Actions/Area/Name/ActionNames/Views members.
/// </summary>
public ClassDeclarationSyntax GeneratePartialController(INamedTypeSymbol controllerSymbol, string areaKey, string areaName, string controllerName, string projectRoot)
{
    // build controller partial class node
    // add a default constructor if there are some but none are zero length
    var genControllerClass = SyntaxNodeHelpers.CreateClass(
        controllerSymbol.Name,
        controllerSymbol.TypeParameters.Select(tp => TypeParameter(tp.Name)).ToArray(),
        SyntaxKind.PublicKeyword,
        SyntaxKind.PartialKeyword);
    // Only explicit (non-generated, non-implicit) public constructors count as "custom".
    var gotCustomConstructors = controllerSymbol.Constructors
        .Where(c => c.DeclaredAccessibility == Accessibility.Public)
        .Where(SyntaxNodeHelpers.IsNotR4MVCGenerated)
        .Where(c => !c.IsImplicitlyDeclared)
        .Any();
    if (!gotCustomConstructors)
    {
        genControllerClass = genControllerClass.WithDefaultConstructor(true, SyntaxKind.PublicKeyword);
    }
    genControllerClass = genControllerClass.WithDummyConstructor(true, SyntaxKind.ProtectedKeyword);
    genControllerClass = AddRedirectMethods(genControllerClass);
    // add all method stubs, TODO criteria for this: only public virtual actionresults?
    // add subclasses, fields, properties, constants for action names
    genControllerClass = AddParameterlessMethods(genControllerClass, controllerSymbol);
    // Actions property points at the helpers hierarchy, optionally nested under the area key.
    var actionsExpression = !string.IsNullOrEmpty(areaKey)
        ? SyntaxNodeHelpers.MemberAccess(_settings.HelpersPrefix + "." + areaKey, controllerName)
        : SyntaxNodeHelpers.MemberAccess(_settings.HelpersPrefix, controllerName);
    genControllerClass =
        genControllerClass
            .WithProperty("Actions", controllerSymbol.Name, actionsExpression, SyntaxKind.PublicKeyword)
            .WithStringField(
                "Area",
                areaName,
                true,
                SyntaxKind.PublicKeyword,
                SyntaxKind.ReadOnlyKeyword)
            .WithStringField(
                "Name",
                controllerName,
                true,
                SyntaxKind.PublicKeyword,
                SyntaxKind.ReadOnlyKeyword)
            .WithStringField(
                "NameConst",
                controllerName,
                true,
                SyntaxKind.PublicKeyword,
                SyntaxKind.ConstKeyword)
            .WithField("s_actions", "ActionNamesClass", SyntaxKind.StaticKeyword, SyntaxKind.ReadOnlyKeyword)
            .WithProperty("ActionNames", "ActionNamesClass", IdentifierName("s_actions"), SyntaxKind.PublicKeyword)
            .WithActionNameClass(controllerSymbol)
            .WithActionConstantsClass(controllerSymbol)
            .WithViewsClass(controllerName, areaName, _viewLocator.FindViews(projectRoot));
    return genControllerClass;
}
/// <summary>
/// Builds the generated R4MVC_[Controller] class that derives from the user's controller
/// and overrides its action methods.
/// </summary>
public ClassDeclarationSyntax GenerateR4Controller(INamedTypeSymbol controllerSymbol)
{
    // create R4MVC_[Controller] class inheriting from partial
    var r4ControllerClass =
        SyntaxNodeHelpers.CreateClass(
            GetR4MVCControllerClassName(controllerSymbol),
            null,
            SyntaxKind.PublicKeyword,
            SyntaxKind.PartialKeyword)
        .WithAttributes(SyntaxNodeHelpers.CreateGeneratedCodeAttribute(), SyntaxNodeHelpers.CreateDebugNonUserCodeAttribute())
        .WithBaseTypes(controllerSymbol.ToQualifiedName())
        .WithDefaultDummyBaseConstructor(false, SyntaxKind.PublicKeyword);
    r4ControllerClass = AddMethodOverrides(r4ControllerClass, controllerSymbol);
    return r4ControllerClass;
}
/// <summary>
/// Adds the protected RedirectToAction/RedirectToActionPermanent helper overloads that
/// translate an R4Mvc IActionResult (or Task of one) into a route-based redirect.
/// </summary>
private ClassDeclarationSyntax AddRedirectMethods(ClassDeclarationSyntax node)
{
    var methods = new[]
    {
        MethodDeclaration(IdentifierName("RedirectToRouteResult"), Identifier("RedirectToAction"))
            .WithModifiers(SyntaxKind.ProtectedKeyword)
            .WithAttributes(SyntaxNodeHelpers.CreateGeneratedCodeAttribute(), SyntaxNodeHelpers.CreateDebugNonUserCodeAttribute())
            .AddParameterListParameters(
                Parameter(Identifier("result")).WithType(IdentifierName("IActionResult")))
            .WithBody(
                Block(
                    // var callInfo = result.GetR4MvcResult();
                    LocalDeclarationStatement(
                        SyntaxNodeHelpers.VariableDeclaration("callInfo",
                            InvocationExpression(SyntaxNodeHelpers.MemberAccess("result", "GetR4MvcResult")))),
                    // return RedirectToRoute(callInfo.RouteValueDictionary);
                    ReturnStatement(
                        InvocationExpression(IdentifierName("RedirectToRoute"))
                            .WithArgumentList(
                                SyntaxNodeHelpers.MemberAccess("callInfo", "RouteValueDictionary"))))),
        MethodDeclaration(IdentifierName("RedirectToRouteResult"), Identifier("RedirectToAction"))
            .WithModifiers(SyntaxKind.ProtectedKeyword)
            .WithAttributes(SyntaxNodeHelpers.CreateGeneratedCodeAttribute(), SyntaxNodeHelpers.CreateDebugNonUserCodeAttribute())
            .AddParameterListParameters(
                Parameter(Identifier("taskResult")).WithGenericType("Task", "IActionResult"))
            .WithBody(
                Block(
                    // return RedirectToAction(taskResult.Result);
                    ReturnStatement(
                        InvocationExpression(IdentifierName("RedirectToAction"))
                            .WithArgumentList(
                                SyntaxNodeHelpers.MemberAccess("taskResult", "Result"))))),
        MethodDeclaration(IdentifierName("RedirectToRouteResult"), Identifier("RedirectToActionPermanent"))
            .WithModifiers(SyntaxKind.ProtectedKeyword)
            .WithAttributes(SyntaxNodeHelpers.CreateGeneratedCodeAttribute(), SyntaxNodeHelpers.CreateDebugNonUserCodeAttribute())
            .AddParameterListParameters(
                Parameter(Identifier("result")).WithType(IdentifierName("IActionResult")))
            .WithBody(
                Block(
                    // var callInfo = result.GetR4MvcResult();
                    LocalDeclarationStatement(
                        SyntaxNodeHelpers.VariableDeclaration("callInfo",
                            InvocationExpression(SyntaxNodeHelpers.MemberAccess("result", "GetR4MvcResult")))),
                    // return RedirectToRoutePermanent(callInfo.RouteValueDictionary);
                    ReturnStatement(
                        InvocationExpression(IdentifierName("RedirectToRoutePermanent"))
                            .WithArgumentList(
                                SyntaxNodeHelpers.MemberAccess("callInfo", "RouteValueDictionary"))))),
        MethodDeclaration(IdentifierName("RedirectToRouteResult"), Identifier("RedirectToActionPermanent"))
            .WithModifiers(SyntaxKind.ProtectedKeyword)
            .WithAttributes(SyntaxNodeHelpers.CreateGeneratedCodeAttribute(), SyntaxNodeHelpers.CreateDebugNonUserCodeAttribute())
            .AddParameterListParameters(
                Parameter(Identifier("taskResult")).WithGenericType("Task", "IActionResult"))
            .WithBody(
                Block(
                    // return RedirectToActionPermanent(taskResult.Result);
                    ReturnStatement(
                        InvocationExpression(IdentifierName("RedirectToActionPermanent"))
                            .WithArgumentList(
                                SyntaxNodeHelpers.MemberAccess("taskResult", "Result"))))),
    };
    return node.AddMembers(methods);
}
// For every public action name on the controller that has NO parameterless
// overload, emits a parameterless virtual stub returning a new R4Mvc
// ActionResult (Area, Name, ActionNames.{Action}). Stubs are marked
// [NonAction] so MVC does not expose them as real actions.
private ClassDeclarationSyntax AddParameterlessMethods(ClassDeclarationSyntax node, ITypeSymbol mvcSymbol)
{
    var methods = mvcSymbol.GetPublicNonGeneratedMethods()
        // One stub per action name, only where no parameterless overload exists.
        .GroupBy(m => m.Name)
        .Where(g => !g.Any(m => m.Parameters.Length == 0))
        .Select(g => MethodDeclaration(IdentifierName("IActionResult"), Identifier(g.Key))
            .WithModifiers(SyntaxKind.PublicKeyword, SyntaxKind.VirtualKeyword)
            .WithAttributes(SyntaxNodeHelpers.CreateNonActionAttribute())
            .WithAttributes(SyntaxNodeHelpers.CreateGeneratedCodeAttribute(), SyntaxNodeHelpers.CreateDebugNonUserCodeAttribute())
            .WithBody(
                Block(
                    // return new R4Mvc_Microsoft_AspNetCore_Mvc_ActionResult(Area, Name, ActionNames.{Action});
                    ReturnStatement(
                        ObjectCreationExpression(IdentifierName(Constants.ActionResultClass))
                            .WithArgumentList(
                                IdentifierName("Area"),
                                IdentifierName("Name"),
                                SyntaxNodeHelpers.MemberAccess("ActionNames", g.Key))))));
    return node.AddMembers(methods.ToArray());
}
// For each public action, emits a pair of methods:
//   1. a partial "{Action}Override(callInfo, ...params)" hook declaration, and
//   2. a public override of the action that builds the R4Mvc call info, copies
//      every parameter into the route value dictionary, invokes the partial
//      hook, and returns the call info (wrapped in Task.FromResult for async
//      actions).
private ClassDeclarationSyntax AddMethodOverrides(ClassDeclarationSyntax node, ITypeSymbol mvcSymbol)
{
    const string overrideMethodSuffix = "Override";
    var methods = mvcSymbol.GetPublicNonGeneratedMethods()
        .SelectMany(m =>
        {
            var statements = new List<StatementSyntax>
            {
                // var callInfo = new R4Mvc_Microsoft_AspNetCore_Mvc_ActionResult(Area, Name, ActionNames.{Action});
                LocalDeclarationStatement(
                    SyntaxNodeHelpers.VariableDeclaration("callInfo",
                        ObjectCreationExpression(IdentifierName(Constants.ActionResultClass))
                            .WithArgumentList(
                                IdentifierName("Area"),
                                IdentifierName("Name"),
                                SyntaxNodeHelpers.MemberAccess("ActionNames", m.Name)))),
            };
            // ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "{param}", {param});
            foreach (var param in m.Parameters)
                statements.Add(
                    ExpressionStatement(
                        InvocationExpression(
                            SyntaxNodeHelpers.MemberAccess("ModelUnbinderHelpers", "AddRouteValues"))
                            .WithArgumentList(
                                SyntaxNodeHelpers.MemberAccess("callInfo", "RouteValueDictionary"),
                                LiteralExpression(SyntaxKind.StringLiteralExpression, Literal(param.Name)),
                                IdentifierName(param.Name))));
            statements.Add(
                // {Action}Override(callInfo, {parameters});
                ExpressionStatement(
                    InvocationExpression(IdentifierName(m.Name + overrideMethodSuffix))
                        .WithArgumentList(
                            new[] { IdentifierName("callInfo") }
                                .Concat(m.Parameters.Select(p => IdentifierName(p.Name)))
                                .ToArray())));
            statements.Add(
                // return callInfo;  (Task.FromResult(callInfo as T) for Task<T> actions)
                // NOTE(review): async detection is string-based (ToString().Contains("Task<"));
                // presumably adequate for generated controllers, but verify against
                // nested generic return types.
                m.ReturnType.ToString().Contains("Task<")
                    ? ReturnStatement(
                        InvocationExpression(
                            SyntaxNodeHelpers.MemberAccess("Task", "FromResult"))
                            .WithArgumentList(
                                BinaryExpression(
                                    SyntaxKind.AsExpression,
                                    IdentifierName("callInfo"),
                                    IdentifierName(m.ReturnType.ToString().Substring(m.ReturnType.ToString().IndexOf('<') + 1).TrimEnd('>')))))
                    : ReturnStatement(IdentifierName("callInfo")));
            return new[]
            {
                // partial void {Action}Override(R4Mvc...ActionResult callInfo, ...);
                MethodDeclaration(PredefinedType(Token(SyntaxKind.VoidKeyword)), Identifier(m.Name + overrideMethodSuffix))
                    .WithModifiers(SyntaxKind.PartialKeyword)
                    .WithAttributes(SyntaxNodeHelpers.CreateNonActionAttribute())
                    .AddParameterListParameters(
                        Parameter(Identifier("callInfo")).WithType(IdentifierName(Constants.ActionResultClass)))
                    .AddParameterListParameters(m.Parameters
                        .Select(p => Parameter(Identifier(p.Name))
                            .WithType(IdentifierName(p.Type.ToString())))
                        .ToArray())
                    .WithSemicolonToken(Token(SyntaxKind.SemicolonToken)),
                // public override {ReturnType} {Action}(...) { ...statements... }
                MethodDeclaration(IdentifierName(m.ReturnType.ToString()), Identifier(m.Name))
                    .WithModifiers(SyntaxKind.PublicKeyword, SyntaxKind.OverrideKeyword)
                    .WithAttributes(SyntaxNodeHelpers.CreateNonActionAttribute())
                    .AddParameterListParameters(m.Parameters
                        .Select(p => Parameter(Identifier(p.Name))
                            .WithType(IdentifierName(p.Type.ToString())))
                        .ToArray())
                    .WithBody(
                        Block(statements.ToArray())),
            };
        });
    return node.AddMembers(methods.ToArray());
}
/// <summary>
/// Builds the name of the generated R4MVC partial class for a controller,
/// e.g. "HomeController" -> "R4MVC_HomeController".
/// </summary>
internal static string GetR4MVCControllerClassName(INamedTypeSymbol controllerClass)
{
    return "R4MVC_" + controllerClass.Name;
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Timers;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Data;
using OpenSim.Framework;
using OpenSim.Services.Interfaces;
namespace OpenSim.Groups
{
public class GroupsService : GroupsServiceBase
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

// Powers granted to the "Everyone" role of a newly created group (see CreateGroup).
public const GroupPowers DefaultEveryonePowers = GroupPowers.AllowSetHome |
        GroupPowers.Accountable |
        GroupPowers.JoinChat |
        GroupPowers.AllowVoiceChat |
        GroupPowers.ReceiveNotices |
        GroupPowers.StartProposal |
        GroupPowers.VoteOnProposal;

// Powers granted to the "Owners" role of a newly created group. Also used by
// GetGroupMembers to recognise the owner role by its stored Powers value.
public const GroupPowers OwnerPowers = GroupPowers.Accountable |
        GroupPowers.AllowEditLand |
        GroupPowers.AllowFly |
        GroupPowers.AllowLandmark |
        GroupPowers.AllowRez |
        GroupPowers.AllowSetHome |
        GroupPowers.AllowVoiceChat |
        GroupPowers.AssignMember |
        GroupPowers.AssignMemberLimited |
        GroupPowers.ChangeActions |
        GroupPowers.ChangeIdentity |
        GroupPowers.ChangeMedia |
        GroupPowers.ChangeOptions |
        GroupPowers.CreateRole |
        GroupPowers.DeedObject |
        GroupPowers.DeleteRole |
        GroupPowers.Eject |
        GroupPowers.FindPlaces |
        GroupPowers.HostEvent |
        GroupPowers.Invite |
        GroupPowers.JoinChat |
        GroupPowers.LandChangeIdentity |
        GroupPowers.LandDeed |
        GroupPowers.LandDivideJoin |
        GroupPowers.LandEdit |
        GroupPowers.LandEjectAndFreeze |
        GroupPowers.LandGardening |
        GroupPowers.LandManageAllowed |
        GroupPowers.LandManageBanned |
        GroupPowers.LandManagePasses |
        GroupPowers.LandOptions |
        GroupPowers.LandRelease |
        GroupPowers.LandSetSale |
        GroupPowers.ModerateChat |
        GroupPowers.ObjectManipulate |
        GroupPowers.ObjectSetForSale |
        GroupPowers.ReceiveNotices |
        GroupPowers.RemoveMember |
        GroupPowers.ReturnGroupOwned |
        GroupPowers.ReturnGroupSet |
        GroupPowers.ReturnNonGroup |
        GroupPowers.RoleProperties |
        GroupPowers.SendNotices |
        GroupPowers.SetLandingPoint |
        GroupPowers.StartProposal |
        GroupPowers.VoteOnProposal;

#region Daily Cleanup
// Fires once a day to purge old notices and invitations (see m_CleanupTimer_Elapsed).
private Timer m_CleanupTimer;
// Base constructor: wires up configuration only. Note: does NOT start the
// daily cleanup timer — only the single-argument constructor does that.
public GroupsService(IConfigSource config, string configName)
    : base(config, configName)
{
}
// Default constructor: delegates to the two-argument constructor and starts
// the daily cleanup timer that purges expired notices and invitations.
public GroupsService(IConfigSource config)
    : this(config, string.Empty)
{
    // Once a day
    m_CleanupTimer = new Timer(24 * 60 * 60 * 1000);
    m_CleanupTimer.AutoReset = true;
    m_CleanupTimer.Elapsed += new ElapsedEventHandler(m_CleanupTimer_Elapsed);
    m_CleanupTimer.Enabled = true;
    m_CleanupTimer.Start();
}
// Daily tick: remove notices and invitations past their retention window.
private void m_CleanupTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    m_Database.DeleteOldNotices();
    m_Database.DeleteOldInvites();
}
#endregion
/// <summary>
/// Creates a new group with an Everyone role (DefaultEveryonePowers) and an
/// Owners role (OwnerPowers), and adds the founder as owner. Returns the new
/// group ID, or UUID.Zero on failure (duplicate name or storage error, with
/// the duplicate case explained in <paramref name="reason"/>).
/// </summary>
public UUID CreateGroup(string RequestingAgentID, string name, string charter, bool showInList, UUID insigniaID, int membershipFee, bool openEnrollment,
    bool allowPublish, bool maturePublish, UUID founderID, out string reason)
{
    reason = string.Empty;

    // Check if the group already exists
    if (m_Database.RetrieveGroup(name) != null)
    {
        reason = "A group with that name already exists";
        return UUID.Zero;
    }

    // Create the group
    GroupData data = new GroupData();
    data.GroupID = UUID.Random();
    data.Data = new Dictionary<string, string>();
    data.Data["Name"] = name;
    data.Data["Charter"] = charter;
    data.Data["InsigniaID"] = insigniaID.ToString();
    data.Data["FounderID"] = founderID.ToString();
    data.Data["MembershipFee"] = membershipFee.ToString();
    data.Data["OpenEnrollment"] = openEnrollment ? "1" : "0";
    data.Data["ShowInList"] = showInList ? "1" : "0";
    data.Data["AllowPublish"] = allowPublish ? "1" : "0";
    data.Data["MaturePublish"] = maturePublish ? "1" : "0";
    // OwnerRoleID is generated up front so the Owners role can be stored with the group row.
    UUID roleID = UUID.Random();
    data.Data["OwnerRoleID"] = roleID.ToString();

    if (!m_Database.StoreGroup(data))
        return UUID.Zero;

    // Create Everyone role
    _AddOrUpdateGroupRole(RequestingAgentID, data.GroupID, UUID.Zero, "Everyone", "Everyone in the group", "Member of " + name, (ulong)DefaultEveryonePowers, true);

    // Create Owner role
    _AddOrUpdateGroupRole(RequestingAgentID, data.GroupID, roleID, "Owners", "Owners of the group", "Owner of " + name, (ulong)OwnerPowers, true);

    // Add founder to group
    _AddAgentToGroup(RequestingAgentID, founderID.ToString(), data.GroupID, roleID);

    return data.GroupID;
}
/// <summary>
/// Updates a group's editable properties. Silently ignores the request when
/// the group does not exist or the requester lacks the ChangeActions power.
/// </summary>
public void UpdateGroup(string RequestingAgentID, UUID groupID, string charter, bool showInList, UUID insigniaID, int membershipFee, bool openEnrollment, bool allowPublish, bool maturePublish)
{
    GroupData group = m_Database.RetrieveGroup(groupID);
    if (group == null)
        return;

    // Requester must hold the ChangeActions power in this group.
    if (!HasPower(RequestingAgentID, groupID, GroupPowers.ChangeActions))
    {
        m_log.DebugFormat("[Groups]: ({0}) Attempt at updating group {1} denied because of lack of permission", RequestingAgentID, groupID);
        return;
    }

    group.GroupID = groupID;
    group.Data["Charter"] = charter;
    group.Data["InsigniaID"] = insigniaID.ToString();
    group.Data["MembershipFee"] = membershipFee.ToString();
    group.Data["ShowInList"] = showInList ? "1" : "0";
    group.Data["OpenEnrollment"] = openEnrollment ? "1" : "0";
    group.Data["AllowPublish"] = allowPublish ? "1" : "0";
    group.Data["MaturePublish"] = maturePublish ? "1" : "0";

    m_Database.StoreGroup(group);
}
/// <summary>Looks up a group by ID; returns null when it does not exist.</summary>
public ExtendedGroupRecord GetGroupRecord(string RequestingAgentID, UUID GroupID)
{
    return _GroupDataToRecord(m_Database.RetrieveGroup(GroupID));
}
/// <summary>Looks up a group by exact name; returns null when it does not exist.</summary>
public ExtendedGroupRecord GetGroupRecord(string RequestingAgentID, string GroupName)
{
    return _GroupDataToRecord(m_Database.RetrieveGroup(GroupName));
}
/// <summary>
/// Searches groups by name fragment. Group proxies (rows carrying a non-empty
/// "Location" value) are excluded from the results.
/// </summary>
public List<DirGroupsReplyData> FindGroups(string RequestingAgentID, string search)
{
    List<DirGroupsReplyData> groups = new List<DirGroupsReplyData>();

    GroupData[] data = m_Database.RetrieveGroups(search);

    if (data != null && data.Length > 0)
    {
        foreach (GroupData d in data)
        {
            // Don't list group proxies
            // (TryGetValue avoids the ContainsKey + indexer double lookup)
            string location;
            if (d.Data.TryGetValue("Location", out location) && location != string.Empty)
                continue;

            DirGroupsReplyData g = new DirGroupsReplyData();
            g.groupID = d.GroupID;

            string groupName;
            if (d.Data.TryGetValue("Name", out groupName))
                g.groupName = groupName;
            else
                m_log.DebugFormat("[Groups]: Key Name not found");

            g.members = m_Database.MemberCount(d.GroupID);

            groups.Add(g);
        }
    }

    return groups;
}
/// <summary>
/// Returns the members of a group with their selected role's title/powers.
/// When RequestingAgentID is not UUID.Zero ("all") and is not a member, only
/// roles flagged MemberVisible are considered. Returns an empty list when the
/// group, its roles or its members cannot be retrieved.
/// </summary>
public List<ExtendedGroupMembersData> GetGroupMembers(string RequestingAgentID, UUID GroupID)
{
    List<ExtendedGroupMembersData> members = new List<ExtendedGroupMembersData>();

    GroupData group = m_Database.RetrieveGroup(GroupID);
    if (group == null)
        return members;

    // Unfortunately this doesn't quite work on legacy group data because of a bug
    // that's also being fixed here on CreateGroup. The OwnerRoleID sent to the DB was wrong.
    // See how to find the ownerRoleID a few lines below.
    UUID ownerRoleID = new UUID(group.Data["OwnerRoleID"]);

    RoleData[] roles = m_Database.RetrieveRoles(GroupID);
    if (roles == null)
        // something wrong with this group
        return members;
    List<RoleData> rolesList = new List<RoleData>(roles);

    // Let's find the "real" ownerRoleID by its stored powers.
    // FIX: powers are stored as (ulong) (see CreateGroup/_AddOrUpdateGroupRole),
    // so compare against the (ulong) string, not (long).
    RoleData ownerRole = rolesList.Find(r => r.Data["Powers"] == ((ulong)OwnerPowers).ToString());
    if (ownerRole != null)
        ownerRoleID = ownerRole.RoleID;

    // Check visibility?
    // When we don't want to check visibility, we pass it "all" as the requestingAgentID
    bool checkVisibility = !RequestingAgentID.Equals(UUID.Zero.ToString());
    if (checkVisibility)
    {
        // Is the requester a member of the group?
        bool isInGroup = false;
        if (m_Database.RetrieveMember(GroupID, RequestingAgentID) != null)
            isInGroup = true;

        if (!isInGroup) // reduce the roles to the visible ones
            rolesList = rolesList.FindAll(r => (UInt64.Parse(r.Data["Powers"]) & (ulong)GroupPowers.MemberVisible) != 0);
    }

    MembershipData[] datas = m_Database.RetrieveMembers(GroupID);
    if (datas == null || datas.Length == 0)
        return members;

    // OK, we have everything we need
    foreach (MembershipData d in datas)
    {
        RoleMembershipData[] rolememberships = m_Database.RetrieveMemberRoles(GroupID, d.PrincipalID);
        List<RoleMembershipData> rolemembershipsList = new List<RoleMembershipData>(rolememberships);

        ExtendedGroupMembersData m = new ExtendedGroupMembersData();

        // What's this person's current role in the group?
        UUID selectedRole = new UUID(d.Data["SelectedRoleID"]);
        RoleData selected = rolesList.Find(r => r.RoleID == selectedRole);

        if (selected != null)
        {
            m.Title = selected.Data["Title"];
            m.AgentPowers = UInt64.Parse(selected.Data["Powers"]);
        }

        m.AgentID = d.PrincipalID;
        m.AcceptNotices = d.Data["AcceptNotices"] == "1" ? true : false;
        m.Contribution = Int32.Parse(d.Data["Contribution"]);
        m.ListInProfile = d.Data["ListInProfile"] == "1" ? true : false;

        // Is this person an owner of the group?
        m.IsOwner = (rolemembershipsList.Find(r => r.RoleID == ownerRoleID) != null) ? true : false;

        members.Add(m);
    }

    return members;
}
/// <summary>
/// Creates a new role in a group. Requires the CreateRole power; on denial,
/// returns false with an explanation in <paramref name="reason"/>.
/// </summary>
public bool AddGroupRole(string RequestingAgentID, UUID groupID, UUID roleID, string name, string description, string title, ulong powers, out string reason)
{
    reason = string.Empty;

    // check that the requesting agent has permissions to add role
    if (HasPower(RequestingAgentID, groupID, GroupPowers.CreateRole))
        return _AddOrUpdateGroupRole(RequestingAgentID, groupID, roleID, name, description, title, powers, true);

    m_log.DebugFormat("[Groups]: ({0}) Attempt at creating role in group {1} denied because of lack of permission", RequestingAgentID, groupID);
    reason = "Insufficient permission to create role";
    return false;
}
/// <summary>
/// Updates an existing role in a group. Requires the ChangeActions power.
/// </summary>
public bool UpdateGroupRole(string RequestingAgentID, UUID groupID, UUID roleID, string name, string description, string title, ulong powers)
{
    // check perms
    if (HasPower(RequestingAgentID, groupID, GroupPowers.ChangeActions))
        return _AddOrUpdateGroupRole(RequestingAgentID, groupID, roleID, name, description, title, powers, false);

    m_log.DebugFormat("[Groups]: ({0}) Attempt at changing role in group {1} denied because of lack of permission", RequestingAgentID, groupID);
    return false;
}
/// <summary>
/// Deletes a role from a group. Requires the DeleteRole power. The Everyone
/// role (UUID.Zero) and the Owners role can never be deleted; failures are
/// logged and silently ignored.
/// </summary>
public void RemoveGroupRole(string RequestingAgentID, UUID groupID, UUID roleID)
{
    // check perms
    if (!HasPower(RequestingAgentID, groupID, GroupPowers.DeleteRole))
    {
        m_log.DebugFormat("[Groups]: ({0}) Attempt at deleting role from group {1} denied because of lack of permission", RequestingAgentID, groupID);
        return;
    }

    // Can't delete Everyone and Owners roles
    if (roleID == UUID.Zero)
    {
        m_log.DebugFormat("[Groups]: Attempt at deleting Everyone role from group {0} denied", groupID);
        return;
    }

    GroupData group = m_Database.RetrieveGroup(groupID);
    if (group == null)
    {
        m_log.DebugFormat("[Groups]: Attempt at deleting role from non-existing group {0}", groupID);
        return;
    }

    if (roleID == new UUID(group.Data["OwnerRoleID"]))
    {
        m_log.DebugFormat("[Groups]: Attempt at deleting Owners role from group {0} denied", groupID);
        return;
    }

    _RemoveGroupRole(groupID, roleID);
}
/// <summary>Returns all roles of a group (no visibility filtering).</summary>
public List<GroupRolesData> GetGroupRoles(string RequestingAgentID, UUID GroupID)
{
    // TODO: check perms
    return _GetGroupRoles(GroupID);
}
/// <summary>
/// Returns the role memberships of a group. Members see everything; outsiders
/// only get roles flagged MemberVisible (see _GetGroupRoleMembers).
/// </summary>
public List<ExtendedGroupRoleMembersData> GetGroupRoleMembers(string RequestingAgentID, UUID GroupID)
{
    // TODO: check perms

    // Is the requester a member of the group?
    bool isInGroup = m_Database.RetrieveMember(GroupID, RequestingAgentID) != null;

    return _GetGroupRoleMembers(GroupID, isInGroup);
}
/// <summary>
/// Adds an agent to a group with the given role and access token. No
/// permission check is performed here; always returns true.
/// </summary>
public bool AddAgentToGroup(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID, string token, out string reason)
{
    reason = string.Empty;

    _AddAgentToGroup(RequestingAgentID, AgentID, GroupID, RoleID, token);

    return true;
}
/// <summary>
/// Removes an agent from a group. Agents may always remove themselves;
/// removing someone else requires the Eject power.
/// </summary>
public bool RemoveAgentFromGroup(string RequestingAgentID, string AgentID, UUID GroupID)
{
    // check perms
    bool allowed = RequestingAgentID == AgentID || HasPower(RequestingAgentID, GroupID, GroupPowers.Eject);
    if (!allowed)
        return false;

    _RemoveAgentFromGroup(RequestingAgentID, AgentID, GroupID);

    return true;
}
/// <summary>
/// Stores an invitation for an agent to join a group with a given role.
/// Fails when the invitee is already a member or the requester lacks the
/// Invite power; any pending invitation for the same agent/group is replaced.
/// </summary>
public bool AddAgentToGroupInvite(string RequestingAgentID, UUID inviteID, UUID groupID, UUID roleID, string agentID)
{
    // Existing members don't need an invitation.
    if (m_Database.RetrieveMember(groupID, agentID) != null)
        return false;

    // Check permission to invite
    if (!HasPower(RequestingAgentID, groupID, GroupPowers.Invite))
    {
        m_log.DebugFormat("[Groups]: ({0}) Attempt at inviting to group {1} denied because of lack of permission", RequestingAgentID, groupID);
        return false;
    }

    // Replace any pending invitation for this agent/group pair.
    InvitationData pending = m_Database.RetrieveInvitation(groupID, agentID);
    if (pending != null)
        m_Database.DeleteInvite(pending.InviteID);

    InvitationData invite = new InvitationData();
    invite.InviteID = inviteID;
    invite.PrincipalID = agentID;
    invite.GroupID = groupID;
    invite.RoleID = roleID;
    invite.Data = new Dictionary<string, string>();

    return m_Database.StoreInvitation(invite);
}
/// <summary>
/// Retrieves a stored invitation by ID; returns null when it does not exist.
/// </summary>
public GroupInviteInfo GetAgentToGroupInvite(string RequestingAgentID, UUID inviteID)
{
    InvitationData data = m_Database.RetrieveInvitation(inviteID);
    if (data == null)
        return null;

    // Translate the storage row into the wire structure.
    GroupInviteInfo info = new GroupInviteInfo();
    info.InviteID = data.InviteID;
    info.GroupID = data.GroupID;
    info.RoleID = data.RoleID;
    info.AgentID = data.PrincipalID;
    return info;
}
/// <summary>Deletes a stored invitation. No permission check.</summary>
public void RemoveAgentToGroupInvite(string RequestingAgentID, UUID inviteID)
{
    m_Database.DeleteInvite(inviteID);
}
/// <summary>
/// Assigns an agent to a role. The requester needs either AssignMember (or
/// group ownership) for unrestricted assignment, or AssignMemberLimited to
/// assign only roles the requester personally holds.
/// </summary>
public bool AddAgentToGroupRole(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID)
{
    //if (!m_Database.CheckOwnerRole(RequestingAgentID, GroupID, RoleID))
    //    return;

    // check permissions
    bool limited = HasPower(RequestingAgentID, GroupID, GroupPowers.AssignMemberLimited);
    // || (short-circuit) instead of the original's bitwise |, so IsOwner is
    // only consulted when AssignMember is absent.
    bool unlimited = HasPower(RequestingAgentID, GroupID, GroupPowers.AssignMember) || IsOwner(RequestingAgentID, GroupID);

    // BUG FIX: the original tested (!limited || !unlimited), which denied the
    // request unless the requester held BOTH powers. Either power suffices.
    if (!limited && !unlimited)
    {
        m_log.DebugFormat("[Groups]: ({0}) Attempt at assigning {1} to role {2} denied because of lack of permission", RequestingAgentID, AgentID, RoleID);
        return false;
    }

    // AssignMemberLimited means that the person can assign another person to the same roles that she has in the group
    if (!unlimited && limited)
    {
        // check whether person's has this role
        RoleMembershipData rolemembership = m_Database.RetrieveRoleMember(GroupID, RoleID, RequestingAgentID);
        if (rolemembership == null)
        {
            m_log.DebugFormat("[Groups]: ({0}) Attempt at assigning {1} to role {2} denied because of limited permission", RequestingAgentID, AgentID, RoleID);
            return false;
        }
    }

    _AddAgentToGroupRole(RequestingAgentID, AgentID, GroupID, RoleID);

    return true;
}
/// <summary>
/// Removes an agent from a role (never the Everyone role). Requires the
/// AssignMember power or group ownership. If the removed role was the agent's
/// selected role, another of the agent's roles (or Everyone) becomes selected.
/// </summary>
public bool RemoveAgentFromGroupRole(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID)
{
    // Don't remove from Everyone role!
    if (RoleID == UUID.Zero)
        return false;

    // check permissions
    bool unlimited = HasPower(RequestingAgentID, GroupID, GroupPowers.AssignMember) || IsOwner(RequestingAgentID, GroupID);
    if (!unlimited)
    {
        m_log.DebugFormat("[Groups]: ({0}) Attempt at removing {1} from role {2} denied because of lack of permission", RequestingAgentID, AgentID, RoleID);
        return false;
    }

    RoleMembershipData rolemember = m_Database.RetrieveRoleMember(GroupID, RoleID, AgentID);
    if (rolemember == null)
        return false;

    m_Database.DeleteRoleMember(rolemember);

    // Find another role for this person
    UUID newRoleID = UUID.Zero; // Everyone
    RoleMembershipData[] rdata = m_Database.RetrieveMemberRoles(GroupID, AgentID);
    if (rdata != null)
        // Pick the first non-Everyone role the agent still holds.
        foreach (RoleMembershipData r in rdata)
        {
            if (r.RoleID != UUID.Zero)
            {
                newRoleID = r.RoleID;
                break;
            }
        }

    MembershipData member = m_Database.RetrieveMember(GroupID, AgentID);
    if (member != null)
    {
        member.Data["SelectedRoleID"] = newRoleID.ToString();
        m_Database.StoreMember(member);
    }

    return true;
}
/// <summary>
/// Returns the roles an agent holds in a group; dangling role memberships
/// whose role row has disappeared are skipped.
/// </summary>
public List<GroupRolesData> GetAgentGroupRoles(string RequestingAgentID, string AgentID, UUID GroupID)
{
    // TODO: check permissions
    List<GroupRolesData> roles = new List<GroupRolesData>();

    RoleMembershipData[] memberships = m_Database.RetrieveMemberRoles(GroupID, AgentID);
    if (memberships == null || memberships.Length == 0)
        return roles;

    foreach (RoleMembershipData membership in memberships)
    {
        RoleData rdata = m_Database.RetrieveRole(GroupID, membership.RoleID);
        if (rdata == null) // hippos
            continue;

        GroupRolesData r = new GroupRolesData();
        r.RoleID = rdata.RoleID;
        r.Name = rdata.Data["Name"];
        r.Title = rdata.Data["Title"];
        r.Powers = UInt64.Parse(rdata.Data["Powers"]);
        roles.Add(r);
    }

    return roles;
}
/// <summary>
/// Records the given group as the agent's active group and returns the
/// corresponding membership data.
/// </summary>
public ExtendedGroupMembershipData SetAgentActiveGroup(string RequestingAgentID, string AgentID, UUID GroupID)
{
    // TODO: check perms
    PrincipalData principal = new PrincipalData();
    principal.PrincipalID = AgentID;
    principal.ActiveGroupID = GroupID;
    m_Database.StorePrincipal(principal);

    return GetAgentGroupMembership(RequestingAgentID, AgentID, GroupID);
}
/// <summary>
/// Returns the membership data for the agent's currently active group, or
/// null when the agent has no principal record.
/// </summary>
public ExtendedGroupMembershipData GetAgentActiveMembership(string RequestingAgentID, string AgentID)
{
    // 1. get the principal data for the active group
    PrincipalData principal = m_Database.RetrievePrincipal(AgentID);
    return principal == null
        ? null
        : GetAgentGroupMembership(RequestingAgentID, AgentID, principal.ActiveGroupID);
}
/// <summary>
/// Returns an agent's membership data for a group, fetching the membership
/// row from storage (delegates to the private overload with membership=null).
/// </summary>
public ExtendedGroupMembershipData GetAgentGroupMembership(string RequestingAgentID, string AgentID, UUID GroupID)
{
    return GetAgentGroupMembership(RequestingAgentID, AgentID, GroupID, null);
}
/// <summary>
/// Assembles the full membership record for an agent in a group. The caller
/// may pass a pre-fetched membership row; when null it is retrieved here.
/// Returns null when group or membership cannot be found.
/// </summary>
private ExtendedGroupMembershipData GetAgentGroupMembership(string RequestingAgentID, string AgentID, UUID GroupID, MembershipData membership)
{
    // 2. get the active group
    GroupData group = m_Database.RetrieveGroup(GroupID);
    if (group == null)
        return null;

    // 3. get the membership info if we don't have it already
    if (membership == null)
    {
        membership = m_Database.RetrieveMember(group.GroupID, AgentID);
        if (membership == null)
            return null;
    }

    // 4. get the active role
    UUID selectedRoleID = new UUID(membership.Data["SelectedRoleID"]);
    RoleData role = m_Database.RetrieveRole(group.GroupID, selectedRoleID);

    ExtendedGroupMembershipData data = new ExtendedGroupMembershipData();
    data.AcceptNotices = membership.Data["AcceptNotices"] == "1";
    data.AccessToken = membership.Data["AccessToken"];
    data.Active = true;
    data.ActiveRole = selectedRoleID;
    data.AllowPublish = group.Data["AllowPublish"] == "1";
    data.Charter = group.Data["Charter"];
    data.Contribution = Int32.Parse(membership.Data["Contribution"]);
    data.FounderID = new UUID(group.Data["FounderID"]);
    data.GroupID = new UUID(group.GroupID);
    data.GroupName = group.Data["Name"];
    data.GroupPicture = new UUID(group.Data["InsigniaID"]);
    // The selected role row may have been deleted; leave powers/title unset then.
    if (role != null)
    {
        data.GroupPowers = UInt64.Parse(role.Data["Powers"]);
        data.GroupTitle = role.Data["Title"];
    }
    data.ListInProfile = membership.Data["ListInProfile"] == "1";
    data.MaturePublish = group.Data["MaturePublish"] == "1";
    data.MembershipFee = Int32.Parse(group.Data["MembershipFee"]);
    data.OpenEnrollment = group.Data["OpenEnrollment"] == "1";
    data.ShowInList = group.Data["ShowInList"] == "1";

    return data;
}
/// <summary>
/// Returns the full membership data for every group this agent belongs to.
/// </summary>
public List<GroupMembershipData> GetAgentGroupMemberships(string RequestingAgentID, string AgentID)
{
    List<GroupMembershipData> memberships = new List<GroupMembershipData>();

    // 1. Get all the groups that this person is a member of
    MembershipData[] mdata = m_Database.RetrieveMemberships(AgentID);
    if (mdata == null || mdata.Length == 0)
        return memberships;

    // Expand each membership row into the full wire structure.
    foreach (MembershipData d in mdata)
    {
        GroupMembershipData gmember = GetAgentGroupMembership(RequestingAgentID, AgentID, d.GroupID, d);
        if (gmember != null)
            memberships.Add(gmember);
    }

    return memberships;
}
/// <summary>
/// Persists the agent's newly selected role within a group; silently ignores
/// agents who are not members.
/// </summary>
public void SetAgentActiveGroupRole(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID)
{
    MembershipData membership = m_Database.RetrieveMember(GroupID, AgentID);
    if (membership == null)
        return;

    membership.Data["SelectedRoleID"] = RoleID.ToString();
    m_Database.StoreMember(membership);
}
/// <summary>
/// Updates an agent's per-group preferences (notices, profile listing);
/// silently ignores agents who are not members.
/// </summary>
public void UpdateMembership(string RequestingAgentID, string AgentID, UUID GroupID, bool AcceptNotices, bool ListInProfile)
{
    // TODO: check perms
    MembershipData member = m_Database.RetrieveMember(GroupID, AgentID);
    if (member == null)
        return;

    member.Data["ListInProfile"] = ListInProfile ? "1" : "0";
    member.Data["AcceptNotices"] = AcceptNotices ? "1" : "0";
    m_Database.StoreMember(member);
}
/// <summary>
/// Stores a group notice (optionally with an attachment). Requires the
/// SendNotices power.
/// </summary>
public bool AddGroupNotice(string RequestingAgentID, UUID groupID, UUID noticeID, string fromName, string subject, string message,
    bool hasAttachment, byte attType, string attName, UUID attItemID, string attOwnerID)
{
    // Check perms
    if (HasPower(RequestingAgentID, groupID, GroupPowers.SendNotices))
        return _AddNotice(groupID, noticeID, fromName, subject, message, hasAttachment, attType, attName, attItemID, attOwnerID);

    m_log.DebugFormat("[Groups]: ({0}) Attempt at sending notice to group {1} denied because of lack of permission", RequestingAgentID, groupID);
    return false;
}
/// <summary>Retrieves one notice by ID; null when it does not exist.</summary>
public GroupNoticeInfo GetGroupNotice(string RequestingAgentID, UUID noticeID)
{
    NoticeData data = m_Database.RetrieveNotice(noticeID);
    return data == null ? null : _NoticeDataToInfo(data);
}
/// <summary>Returns summaries of all notices stored for a group.</summary>
public List<ExtendedGroupNoticeData> GetGroupNotices(string RequestingAgentID, UUID groupID)
{
    List<ExtendedGroupNoticeData> infos = new List<ExtendedGroupNoticeData>();

    NoticeData[] data = m_Database.RetrieveNotices(groupID);
    if (data == null || data.Length == 0)
        return infos;

    foreach (NoticeData d in data)
        infos.Add(_NoticeDataToData(d));

    return infos;
}
// Group chat session tracking is not implemented by this service; the five
// methods below are no-op stubs satisfying the groups service interface.

public void ResetAgentGroupChatSessions(string agentID)
{
}

// Always reports "not invited": chat session state is not tracked here.
public bool hasAgentBeenInvitedToGroupChatSession(string agentID, UUID groupID)
{
    return false;
}

// Always reports "not dropped": chat session state is not tracked here.
public bool hasAgentDroppedGroupChatSession(string agentID, UUID groupID)
{
    return false;
}

public void AgentDroppedFromGroupChatSession(string agentID, UUID groupID)
{
}

public void AgentInvitedToGroupChatSession(string agentID, UUID groupID)
{
}
#region Actions without permission checks
// Convenience overload: add a member without an access token.
protected void _AddAgentToGroup(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID)
{
    _AddAgentToGroup(RequestingAgentID, AgentID, GroupID, RoleID, string.Empty);
}
// Removes the membership row and all role memberships for an agent; if this
// was the agent's active group, the active group is cleared. No permission
// checks are performed here.
protected void _RemoveAgentFromGroup(string RequestingAgentID, string AgentID, UUID GroupID)
{
    // 1. Delete membership
    m_Database.DeleteMember(GroupID, AgentID);

    // 2. Remove from rolememberships
    m_Database.DeleteMemberAllRoles(GroupID, AgentID);

    // 3. if it was active group, inactivate it
    PrincipalData principal = m_Database.RetrievePrincipal(AgentID);
    bool wasActiveGroup = principal != null && principal.ActiveGroupID == GroupID;
    if (wasActiveGroup)
    {
        principal.ActiveGroupID = UUID.Zero;
        m_Database.StorePrincipal(principal);
    }
}
// Stores a new membership (idempotent), assigns the Everyone role plus the
// requested role, and makes this the agent's active group. No permission
// checks are performed here.
protected void _AddAgentToGroup(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID, string accessToken)
{
    // Idempotent: do nothing when the agent is already a member.
    if (m_Database.RetrieveMember(GroupID, AgentID) != null)
        return;

    // Store the membership row with default flags.
    MembershipData member = new MembershipData();
    member.PrincipalID = AgentID;
    member.GroupID = GroupID;
    member.Data = new Dictionary<string, string>
    {
        { "SelectedRoleID", RoleID.ToString() },
        { "Contribution", "0" },
        { "ListInProfile", "1" },
        { "AcceptNotices", "1" },
        { "AccessToken", accessToken }
    };
    m_Database.StoreMember(member);

    // Everyone always joins the Everyone role (UUID.Zero)...
    _AddAgentToGroupRole(RequestingAgentID, AgentID, GroupID, UUID.Zero);

    // ...plus the requested role when it differs.
    if (RoleID != UUID.Zero)
        _AddAgentToGroupRole(RequestingAgentID, AgentID, GroupID, RoleID);

    // Make this the agent's active group.
    PrincipalData principal = new PrincipalData();
    principal.PrincipalID = AgentID;
    principal.ActiveGroupID = GroupID;
    m_Database.StorePrincipal(principal);
}
/// <summary>
/// Creates (add == true) or updates (add == false) a role in a group.
/// Returns false when asked to create a role that already exists, or update
/// one that does not.
/// </summary>
protected bool _AddOrUpdateGroupRole(string RequestingAgentID, UUID groupID, UUID roleID, string name, string description, string title, ulong powers, bool add)
{
    RoleData data = m_Database.RetrieveRole(groupID, roleID);

    if (add && data != null) // it already exists, can't create
    {
        // FIX: the original message said "Group {0} already exists", but this
        // guard is about the role, not the group.
        m_log.DebugFormat("[Groups]: Role {0} of group {1} already exists. Can't create it again", roleID, groupID);
        return false;
    }

    if (!add && data == null) // it doesn't exist, can't update
    {
        m_log.DebugFormat("[Groups]: Role {0} of group {1} doesn't exist. Can't update it", roleID, groupID);
        return false;
    }

    if (add)
        data = new RoleData();

    data.GroupID = groupID;
    data.RoleID = roleID;
    data.Data = new Dictionary<string, string>();
    data.Data["Name"] = name;
    data.Data["Description"] = description;
    data.Data["Title"] = title;
    data.Data["Powers"] = powers.ToString();

    return m_Database.StoreRole(data);
}
// Deletes a role row; callers (RemoveGroupRole) are responsible for the
// permission and Everyone/Owners guards.
protected void _RemoveGroupRole(UUID groupID, UUID roleID)
{
    m_Database.DeleteRole(groupID, roleID);
}
/// <summary>
/// Adds a role membership (idempotent) and makes that role the agent's
/// selected role. No permission checks are performed here.
/// </summary>
protected void _AddAgentToGroupRole(string RequestingAgentID, string AgentID, UUID GroupID, UUID RoleID)
{
    RoleMembershipData data = m_Database.RetrieveRoleMember(GroupID, RoleID, AgentID);
    if (data != null)
        return;

    data = new RoleMembershipData();
    data.GroupID = GroupID;
    data.PrincipalID = AgentID;
    data.RoleID = RoleID;
    m_Database.StoreRoleMember(data);

    // Make it the SelectedRoleID
    MembershipData membership = m_Database.RetrieveMember(GroupID, AgentID);
    if (membership == null)
    {
        // FIX: the original format string repeated {0} ("({0}) No such member {0}
        // in group {1}") with only (AgentID, GroupID) as arguments, so the agent
        // ID was printed twice and the requesting agent never appeared. Log all
        // three values, matching the "({requester})" convention used elsewhere.
        m_log.DebugFormat("[Groups]: ({0}) No such member {1} in group {2}", RequestingAgentID, AgentID, GroupID);
        return;
    }

    membership.Data["SelectedRoleID"] = RoleID.ToString();
    m_Database.StoreMember(membership);
}
// Returns all roles stored for a group, including each role's member count.
protected List<GroupRolesData> _GetGroupRoles(UUID groupID)
{
    List<GroupRolesData> roles = new List<GroupRolesData>();

    RoleData[] data = m_Database.RetrieveRoles(groupID);
    if (data == null || data.Length == 0)
        return roles;

    foreach (RoleData d in data)
    {
        GroupRolesData r = new GroupRolesData();
        r.RoleID = d.RoleID;
        r.Name = d.Data["Name"];
        r.Title = d.Data["Title"];
        r.Description = d.Data["Description"];
        r.Powers = UInt64.Parse(d.Data["Powers"]);
        r.Members = m_Database.RoleMemberCount(groupID, d.RoleID);
        roles.Add(r);
    }

    return roles;
}
/// <summary>
/// Returns the (member, role) pairs of a group. When the caller is not in the
/// group (isInGroup == false) only memberships of roles whose powers include
/// GroupPowers.MemberVisible are returned.
/// </summary>
protected List<ExtendedGroupRoleMembersData> _GetGroupRoleMembers(UUID GroupID, bool isInGroup)
{
    List<ExtendedGroupRoleMembersData> rmembers = new List<ExtendedGroupRoleMembersData>();

    // Visible-role list; only needed for filtering when the caller is an outsider.
    List<RoleData> rlist = new List<RoleData>();
    if (!isInGroup)
    {
        RoleData[] rdata = m_Database.RetrieveRoles(GroupID);
        // Redundant `rdata != null &&` removed (short-circuit already guarantees it).
        if (rdata == null || rdata.Length == 0)
            return rmembers;
        rlist = new List<RoleData>(rdata);
        rlist = rlist.FindAll(r => (UInt64.Parse(r.Data["Powers"]) & (ulong)GroupPowers.MemberVisible) != 0);
    }

    RoleMembershipData[] data = m_Database.RetrieveRolesMembers(GroupID);
    if (data == null || data.Length == 0)
        return rmembers;

    foreach (RoleMembershipData d in data)
    {
        if (!isInGroup)
        {
            RoleData rd = rlist.Find(_r => _r.RoleID == d.RoleID); // visible role
            if (rd == null)
                continue;
        }

        ExtendedGroupRoleMembersData r = new ExtendedGroupRoleMembersData();
        r.MemberID = d.PrincipalID;
        r.RoleID = d.RoleID;
        rmembers.Add(r);
    }

    return rmembers;
}
/// <summary>
/// Creates and stores a group notice record, including optional attachment
/// metadata and a Unix-epoch creation timestamp.
/// </summary>
protected bool _AddNotice(UUID groupID, UUID noticeID, string fromName, string subject, string message,
    bool hasAttachment, byte attType, string attName, UUID attItemID, string attOwnerID)
{
    // Assemble the field dictionary first, then attach it to the record.
    Dictionary<string, string> fields = new Dictionary<string, string>();
    fields["FromName"] = fromName;
    fields["Subject"] = subject;
    fields["Message"] = message;
    fields["HasAttachment"] = hasAttachment ? "1" : "0";
    if (hasAttachment)
    {
        fields["AttachmentType"] = attType.ToString();
        fields["AttachmentName"] = attName;
        fields["AttachmentItemID"] = attItemID.ToString();
        fields["AttachmentOwnerID"] = attOwnerID;
    }
    fields["TMStamp"] = ((uint)Util.UnixTimeSinceEpoch()).ToString();

    NoticeData data = new NoticeData();
    data.GroupID = groupID;
    data.NoticeID = noticeID;
    data.Data = fields;
    return m_Database.StoreNotice(data);
}
#endregion
#region structure translations
/// <summary>
/// Translates a raw GroupData store record into an ExtendedGroupRecord and
/// fills in the member/role counts. Returns null for null input.
/// </summary>
ExtendedGroupRecord _GroupDataToRecord(GroupData data)
{
    if (data == null)
        return null;

    ExtendedGroupRecord rec = new ExtendedGroupRecord();
    // Idiom fix: `x == "1" ? true : false` simplified to `x == "1"` throughout.
    rec.AllowPublish = data.Data["AllowPublish"] == "1";
    rec.Charter = data.Data["Charter"];
    rec.FounderID = new UUID(data.Data["FounderID"]);
    rec.GroupID = data.GroupID;
    rec.GroupName = data.Data["Name"];
    rec.GroupPicture = new UUID(data.Data["InsigniaID"]);
    rec.MaturePublish = data.Data["MaturePublish"] == "1";
    rec.MembershipFee = Int32.Parse(data.Data["MembershipFee"]);
    rec.OpenEnrollment = data.Data["OpenEnrollment"] == "1";
    rec.OwnerRoleID = new UUID(data.Data["OwnerRoleID"]);
    rec.ShowInList = data.Data["ShowInList"] == "1";
    rec.ServiceLocation = data.Data["Location"];
    rec.MemberCount = m_Database.MemberCount(data.GroupID);
    rec.RoleCount = m_Database.RoleCount(data.GroupID);
    return rec;
}
// Wraps a NoticeData record as a GroupNoticeInfo: the full message body plus
// the summary produced by _NoticeDataToData.
GroupNoticeInfo _NoticeDataToInfo(NoticeData data)
{
    GroupNoticeInfo info = new GroupNoticeInfo();
    info.noticeData = _NoticeDataToData(data);
    info.GroupID = data.GroupID;
    info.Message = data.Data["Message"];
    return info;
}
/// <summary>
/// Translates a NoticeData store record into an ExtendedGroupNoticeData
/// summary (no message body); attachment fields are set only when present.
/// </summary>
ExtendedGroupNoticeData _NoticeDataToData(NoticeData data)
{
    ExtendedGroupNoticeData notice = new ExtendedGroupNoticeData();
    notice.FromName = data.Data["FromName"];
    notice.NoticeID = data.NoticeID;
    notice.Subject = data.Data["Subject"];
    // Data values are already strings (see direct string assignments above), so
    // the original's `(string)` cast and `.ToString()` calls were redundant.
    notice.Timestamp = uint.Parse(data.Data["TMStamp"]);
    notice.HasAttachment = data.Data["HasAttachment"] == "1";
    if (notice.HasAttachment)
    {
        notice.AttachmentName = data.Data["AttachmentName"];
        notice.AttachmentItemID = new UUID(data.Data["AttachmentItemID"]);
        notice.AttachmentType = byte.Parse(data.Data["AttachmentType"]);
        notice.AttachmentOwnerID = data.Data["AttachmentOwnerID"];
    }
    return notice;
}
#endregion
#region permissions
/// <summary>
/// True when the agent holds at least one role in the group whose powers
/// include the requested power bit.
/// </summary>
private bool HasPower(string agentID, UUID groupID, GroupPowers power)
{
    RoleMembershipData[] rmembership = m_Database.RetrieveMemberRoles(groupID, agentID);
    if (rmembership == null || rmembership.Length == 0)
        return false;

    foreach (RoleMembershipData rdata in rmembership)
    {
        RoleData role = m_Database.RetrieveRole(groupID, rdata.RoleID);
        // Robustness: skip dangling memberships whose role row no longer exists
        // (the original dereferenced `role` unconditionally and could NRE).
        if (role == null)
            continue;
        if ((UInt64.Parse(role.Data["Powers"]) & (ulong)power) != 0)
            return true;
    }

    return false;
}
// An agent "owns" a group when it is a member of the group's OwnerRoleID role.
private bool IsOwner(string agentID, UUID groupID)
{
    GroupData group = m_Database.RetrieveGroup(groupID);
    if (group == null)
        return false;

    UUID ownerRole = new UUID(group.Data["OwnerRoleID"]);
    return m_Database.RetrieveRoleMember(groupID, ownerRole, agentID) != null;
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Data;
using System.Threading;
using System.Xml;
using System.Linq;
using Umbraco.Core;
using umbraco.cms.businesslogic.language;
using umbraco.DataLayer;
using umbraco.BusinessLogic;
using System.Runtime.CompilerServices;
namespace umbraco.cms.businesslogic
{
/// <summary>
/// The Dictionary is used for storing and retrieving language translated textpieces in Umbraco. It uses
/// umbraco.cms.businesslogic.language.Item class as storage and can be used from the public website of umbraco
/// all text are cached in memory.
/// </summary>
public class Dictionary
{
private static volatile bool _cacheIsEnsured = false;
private static readonly ReaderWriterLockSlim Locker = new ReaderWriterLockSlim();
private static readonly ConcurrentDictionary<string, DictionaryItem> DictionaryItems = new ConcurrentDictionary<string, DictionaryItem>();
private static readonly Guid TopLevelParent = new Guid("41c7638d-f529-4bff-853e-59a0c2fb1bde");
// Shared SQL helper taken from the running Application; all dictionary
// persistence in this class goes through it.
protected static ISqlHelper SqlHelper
{
get { return Application.SqlHelper; }
}
/// <summary>
/// Reads all items from the database and stores in local cache
/// </summary>
private static void EnsureCache()
{
// Double-checked under an upgradeable read lock: cheap read once populated,
// a single writer while the cache is being filled from the database.
using (var lck = new UpgradeableReadLock(Locker))
{
if (_cacheIsEnsured) return;
lck.UpgradeToWriteLock();
using (var dr = SqlHelper.ExecuteReader("Select pk, id, [key], parent from cmsDictionary"))
{
while (dr.Read())
{
//create new dictionaryitem object and put in cache
var item = new DictionaryItem(dr.GetInt("pk"),
dr.GetString("key"),
dr.GetGuid("id"),
dr.GetGuid("parent"));
// NOTE(review): TryAdd silently drops rows with duplicate keys — only the
// first row per key lands in the cache. Confirm keys are unique in the DB.
DictionaryItems.TryAdd(item.key, item);
}
}
_cacheIsEnsured = true;
}
}
/// <summary>
/// Used by the cache refreshers to clear the cache on distributed servers
/// </summary>
internal static void ClearCache()
{
// Write lock so a concurrent EnsureCache cannot interleave with the clear.
using (new WriteLock(Locker))
{
DictionaryItems.Clear();
//ensure the flag is reset so that EnsureCache will re-cache everything
_cacheIsEnsured = false;
}
}
/// <summary>
/// Retrieve a list of toplevel DictionaryItems
/// </summary>
public static DictionaryItem[] getTopMostItems
{
    get
    {
        EnsureCache();
        // Items parented to the synthetic top-level id, alphabetical by key.
        var query = from item in DictionaryItems.Values
                    where item.ParentId == TopLevelParent
                    orderby item.key
                    select item;
        return query.ToArray();
    }
}
/// <summary>
/// A DictionaryItem is basically a key/value pair (key/language key/value) which holds the data
/// associated to a key in various language translations
/// </summary>
public class DictionaryItem
{
private string _key;
internal Guid UniqueId { get; private set; }
internal Guid ParentId { get; private set; }
/// <summary>
/// Used internally to construct a new item object and store in cache
/// </summary>
/// <param name="id"></param>
/// <param name="key"></param>
/// <param name="uniqueKey"></param>
/// <param name="parentId"></param>
internal DictionaryItem(int id, string key, Guid uniqueKey, Guid parentId)
{
// Plain field/property assignment only — no cache registration or DB access here.
this.id = id;
this._key = key;
this.UniqueId = uniqueKey;
this.ParentId = parentId;
}
/// <summary>
/// Resolves an existing dictionary item by key; throws when the key is unknown.
/// </summary>
public DictionaryItem(string key)
{
    EnsureCache();

    var cached = DictionaryItems.Values.SingleOrDefault(x => x.key == key);
    if (cached == null)
        throw new ArgumentException("No key " + key + " exists in dictionary");

    this.id = cached.id;
    this._key = cached.key;
    this.ParentId = cached.ParentId;
    this.UniqueId = cached.UniqueId;
}
/// <summary>
/// Resolves an existing dictionary item by its unique (GUID) id; throws when unknown.
/// </summary>
public DictionaryItem(Guid id)
{
    EnsureCache();

    var cached = DictionaryItems.Values.SingleOrDefault(x => x.UniqueId == id);
    if (cached == null)
        throw new ArgumentException("No unique id " + id.ToString() + " exists in dictionary");

    this.id = cached.id;
    this._key = cached.key;
    this.ParentId = cached.ParentId;
    this.UniqueId = cached.UniqueId;
}
/// <summary>
/// Resolves an existing dictionary item by its primary-key id; throws when unknown.
/// </summary>
public DictionaryItem(int id)
{
    EnsureCache();

    var cached = DictionaryItems.Values.SingleOrDefault(x => x.id == id);
    if (cached == null)
        throw new ArgumentException("No id " + id + " exists in dictionary");

    this.id = cached.id;
    this._key = cached.key;
    this.ParentId = cached.ParentId;
    this.UniqueId = cached.UniqueId;
}
private DictionaryItem _parent;
/// <summary>
/// Returns if the dictionaryItem is the root item.
/// </summary>
public bool IsTopMostItem()
{
EnsureCache();
// Re-queries the cache by primary key rather than using this.ParentId directly;
// consequently an item that is absent from the cache reports false.
return DictionaryItems.Values
.Where(x => x.id == id)
.Select(x => x.ParentId)
.SingleOrDefault() == TopLevelParent;
}
/// <summary>
/// Returns the parent.
/// </summary>
public DictionaryItem Parent
{
    get
    {
        EnsureCache();

        // Resolve and memoize the parent on first access.
        if (_parent == null)
        {
            var match = DictionaryItems.Values.SingleOrDefault(x => x.UniqueId == this.ParentId);
            if (match == null)
                throw new ArgumentException("Top most dictionary items doesn't have a parent");
            _parent = match;
        }

        return _parent;
    }
}
/// <summary>
/// The primary key in the database
/// </summary>
public int id
{
// Populated from the cmsDictionary.pk column (see the ctors/createKey readers).
get;
private set;
}
// Direct children of this item (cache entries whose ParentId equals our
// UniqueId), ordered alphabetically by key.
public DictionaryItem[] Children
{
    get
    {
        EnsureCache();
        var query = from child in DictionaryItems.Values
                    where child.ParentId == this.UniqueId
                    orderby child.key
                    select child;
        return query.ToArray();
    }
}
// True when a dictionary item with this exact key exists. Pure cache lookup —
// no DB round-trip once the cache is warm.
public static bool hasKey(string key)
{
EnsureCache();
return DictionaryItems.ContainsKey(key);
}
// True when at least one cached item has this item as its parent.
public bool hasChildren
{
get
{
EnsureCache();
return DictionaryItems.Values.Any(x => x.ParentId == UniqueId);
}
}
/// <summary>
/// Returns or sets the key.
/// </summary>
public string key
{
get { return _key; }
set
{
if (!hasKey(value))
{
// NOTE(review): Monitor-locking the ReaderWriterLockSlim instance does not
// coordinate with the Read/Write locks taken on it elsewhere in this class —
// confirm whether this should be a WriteLock instead.
lock (Locker)
{
SqlHelper.ExecuteNonQuery("Update cmsDictionary set [key] = @key WHERE pk = @Id", SqlHelper.CreateParameter("@key", value),
SqlHelper.CreateParameter("@Id", id));
// Read the row back to verify the update took effect.
using (IRecordsReader dr =
SqlHelper.ExecuteReader("Select pk, id, [key], parent from cmsDictionary where id=@id",
SqlHelper.CreateParameter("@id", this.UniqueId)))
{
if (dr.Read())
{
// NOTE(review): `item` is constructed but never used, and the static cache
// still holds this entry under the OLD key after the rename — looks stale;
// verify against the cache refresher behavior.
//create new dictionaryitem object and put in cache
var item = new DictionaryItem(dr.GetInt("pk"),
dr.GetString("key"),
dr.GetGuid("id"),
dr.GetGuid("parent"));
}
else
{
throw new DataException("Could not load updated created dictionary item with id " + id);
}
}
//finally update this objects value
this._key = value;
}
}
else
throw new ArgumentException("New value of key already exists (is key)");
}
}
/// <summary>
/// Returns the translation for the given language id. Language id 0 falls back
/// to the default value; a missing translation yields "".
/// </summary>
public string Value(int languageId)
{
    if (languageId == 0)
        return Value();

    return Item.hasText(UniqueId, languageId)
        ? Item.Text(UniqueId, languageId)
        : "";
}
/// <summary>
/// Creates or updates the translation for the given language, then fires Saving.
/// </summary>
public void setValue(int languageId, string value)
{
    bool exists = Item.hasText(UniqueId, languageId);
    if (exists)
        Item.setText(languageId, UniqueId, value);
    else
        Item.addText(languageId, UniqueId, value);

    // Calling Save method triggers the Saving event
    Save();
}
public string Value()
{
// Language id 1 is hard-coded here as the "default" language slot.
// NOTE(review): confirm id 1 is guaranteed to be the default language.
if (Item.hasText(UniqueId, 1))
{
return Item.Text(UniqueId, 1);
}
return string.Empty;
}
/// <summary>
/// This sets the value for the placeholder language (id = 0), not for a language with an ID
/// </summary>
/// <param name="value"></param>
public void setValue(string value)
{
    // Language id 0 is the placeholder (language-neutral) slot.
    bool exists = Item.hasText(UniqueId, 0);
    if (exists)
        Item.setText(0, UniqueId, value);
    else
        Item.addText(0, UniqueId, value);

    // Calling Save method triggers the Saving event
    Save();
}
// Creates a new key beneath the given parent key and returns its primary-key id.
public static int addKey(string key, string defaultValue, string parentKey)
{
    EnsureCache();

    if (!hasKey(parentKey))
        throw new ArgumentException("Parentkey doesnt exist");

    return createKey(key, new DictionaryItem(parentKey).UniqueId, defaultValue);
}
// Creates a new top-level key and returns its primary-key id.
public static int addKey(string key, string defaultValue)
{
    EnsureCache();
    return createKey(key, TopLevelParent, defaultValue);
}
public void delete()
{
OnDeleting(EventArgs.Empty);
// delete recursive
foreach (DictionaryItem dd in Children)
dd.delete();
// remove all language values from key
Item.removeText(UniqueId);
// remove key from database
// NOTE(review): deletes by [key] rather than by pk/id — any row sharing this
// key would be removed too; the static cache is also not updated here.
SqlHelper.ExecuteNonQuery("delete from cmsDictionary where [key] = @key", SqlHelper.CreateParameter("@key", key));
OnDeleted(EventArgs.Empty);
}
/// <summary>
/// ensures events fire after setting proeprties
/// </summary>
public void Save()
{
// Only raises the Saving event; actual persistence happens in the setters.
OnSaving(EventArgs.Empty);
}
/// <summary>
/// Serializes this item — key, one CDATA "Value" element per language, and all
/// descendants recursively — as a "DictionaryItem" element of the given document.
/// </summary>
public System.Xml.XmlNode ToXml(XmlDocument xd)
{
XmlNode dictionaryItem = xd.CreateElement("DictionaryItem");
dictionaryItem.Attributes.Append(xmlHelper.addAttribute(xd, "Key", this.key));
foreach (Language lang in Language.GetAllAsList())
{
XmlNode itemValue = xmlHelper.addCDataNode(xd, "Value", this.Value(lang.id));
itemValue.Attributes.Append(xmlHelper.addAttribute(xd, "LanguageId", lang.id.ToString()));
itemValue.Attributes.Append(xmlHelper.addAttribute(xd, "LanguageCultureAlias", lang.CultureAlias));
dictionaryItem.AppendChild(itemValue);
}
// Children become nested DictionaryItem elements, mirroring the tree.
if (this.hasChildren)
{
foreach (DictionaryItem di in this.Children)
{
dictionaryItem.AppendChild(di.ToXml(xd));
}
}
return dictionaryItem;
}
// Convenience overload: imports a DictionaryItem XML fragment at the top level.
public static DictionaryItem Import(XmlNode xmlData)
{
return Import(xmlData, null);
}
/// <summary>
/// Imports a "DictionaryItem" XML fragment (as produced by ToXml): creates the
/// key if missing, applies language values, and recurses into child items.
/// Returns the item only when it was newly created at the top level (parent ==
/// null); returns null for child invocations or pre-existing keys.
/// </summary>
public static DictionaryItem Import(XmlNode xmlData, DictionaryItem parent)
{
string key = xmlData.Attributes["Key"].Value;
XmlNodeList values = xmlData.SelectNodes("./Value");
XmlNodeList childItems = xmlData.SelectNodes("./DictionaryItem");
DictionaryItem newItem;
bool retVal = false;
if (!hasKey(key))
{
if (parent != null)
addKey(key, " ", parent.key);
else
addKey(key, " ");
if (values.Count > 0)
{
//Set language values on the dictionary item
newItem = new DictionaryItem(key);
foreach (XmlNode xn in values)
{
string cA = xn.Attributes["LanguageCultureAlias"].Value;
string keyValue = xmlHelper.GetNodeValue(xn);
Language valueLang = Language.GetByCultureCode(cA);
// Unknown culture aliases are skipped silently.
if (valueLang != null)
{
newItem.setValue(valueLang.id, keyValue);
}
}
}
if (parent == null)
retVal = true;
}
// NOTE(review): when the key already exists its values are NOT updated — only
// child items are imported. Confirm that is the intended merge behavior.
newItem = new DictionaryItem(key);
foreach (XmlNode childItem in childItems)
{
Import(childItem, newItem);
}
if (retVal)
return newItem;
else
return null;
}
// Inserts a new cmsDictionary row, assigns its default value, raises New, and
// returns the generated primary key. Synchronized: one creator at a time.
[MethodImpl(MethodImplOptions.Synchronized)]
private static int createKey(string key, Guid parentId, string defaultValue)
{
if (!hasKey(key))
{
Guid newId = Guid.NewGuid();
SqlHelper.ExecuteNonQuery("Insert into cmsDictionary (id,parent,[key]) values (@id, @parentId, @dictionaryKey)",
SqlHelper.CreateParameter("@id", newId),
SqlHelper.CreateParameter("@parentId", parentId),
SqlHelper.CreateParameter("@dictionaryKey", key));
// Read the row back to obtain the identity pk for the new item.
using (IRecordsReader dr =
SqlHelper.ExecuteReader("Select pk, id, [key], parent from cmsDictionary where id=@id",
SqlHelper.CreateParameter("@id", newId)))
{
if (dr.Read())
{
//create new dictionaryitem object and put in cache
// NOTE(review): despite the comment above, the item is NOT added to
// DictionaryItems here; the cache stays stale until ClearCache/EnsureCache
// run again — verify intended.
var item = new DictionaryItem(dr.GetInt("pk"),
dr.GetString("key"),
dr.GetGuid("id"),
dr.GetGuid("parent"));
item.setValue(defaultValue);
item.OnNew(EventArgs.Empty);
return item.id;
}
else
{
throw new DataException("Could not load newly created dictionary item with id " + newId.ToString());
}
}
}
else
{
throw new ArgumentException("Key being added already exists!");
}
}
#region Events
public delegate void SaveEventHandler(DictionaryItem sender, EventArgs e);
public delegate void NewEventHandler(DictionaryItem sender, EventArgs e);
public delegate void DeleteEventHandler(DictionaryItem sender, EventArgs e);
public static event SaveEventHandler Saving;
// Raises Saving. Snapshots the delegate first so a concurrent unsubscribe
// between the null check and the invocation cannot throw NullReferenceException.
protected virtual void OnSaving(EventArgs e)
{
    SaveEventHandler handler = Saving;
    if (handler != null)
        handler(this, e);
}
public static event NewEventHandler New;
// Raises New. Snapshots the delegate first to avoid a race with unsubscription.
protected virtual void OnNew(EventArgs e)
{
    NewEventHandler handler = New;
    if (handler != null)
        handler(this, e);
}
public static event DeleteEventHandler Deleting;
// Raises Deleting. Snapshots the delegate first to avoid a race with unsubscription.
protected virtual void OnDeleting(EventArgs e)
{
    DeleteEventHandler handler = Deleting;
    if (handler != null)
        handler(this, e);
}
public static event DeleteEventHandler Deleted;
// Raises Deleted. Snapshots the delegate first to avoid a race with unsubscription.
protected virtual void OnDeleted(EventArgs e)
{
    DeleteEventHandler handler = Deleted;
    if (handler != null)
        handler(this, e);
}
#endregion
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Xml.Tests
{
public static class PreviousSiblingTests
{
[Fact]
public static void OnElementNode()
{
    // Middle/last children report the preceding element; the first has none.
    var doc = new XmlDocument();
    doc.LoadXml("<root><elem1/><elem2/><elem3/></root>");

    var children = doc.DocumentElement.ChildNodes;
    Assert.Null(children[0].PreviousSibling);
    Assert.Same(children[0], children[1].PreviousSibling);
    Assert.Same(children[1], children[2].PreviousSibling);
}
[Fact]
public static void OnTextNode()
{
    // A text node's PreviousSibling is the element before it.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/>some text</root>");

    var children = doc.DocumentElement.ChildNodes;
    Assert.Equal(XmlNodeType.Text, children[1].NodeType);
    Assert.Equal(children[1].PreviousSibling, children[0]);
}
[Fact]
public static void OnTextNodeSplit()
{
    // Splitting a lone text node leaves the original first and the tail second.
    var doc = new XmlDocument();
    doc.LoadXml("<root>some text</root>");
    var original = (XmlText)doc.DocumentElement.FirstChild;

    Assert.Equal(1, doc.DocumentElement.ChildNodes.Count);
    Assert.Null(original.PreviousSibling);

    var tail = original.SplitText(4);

    Assert.Equal(2, doc.DocumentElement.ChildNodes.Count);
    Assert.Equal(original, tail.PreviousSibling);
    Assert.Null(original.PreviousSibling);
}
[Fact]
public static void OnCommentNode()
{
    // A comment's PreviousSibling is the element before it.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/><!--some text--></root>");

    var children = doc.DocumentElement.ChildNodes;
    Assert.Equal(XmlNodeType.Comment, children[1].NodeType);
    Assert.Equal(children[0], children[1].PreviousSibling);
}
[Fact]
public static void SiblingOfLastChild()
{
    // LastChild.PreviousSibling is the second-to-last child.
    var doc = new XmlDocument();
    doc.LoadXml("<root>some text<child1/><child2/></root>");

    var secondToLast = doc.DocumentElement.ChildNodes[1];
    Assert.Same(secondToLast, doc.DocumentElement.LastChild.PreviousSibling);
}
[Fact]
public static void OnCDataNode()
{
    // A CDATA section's PreviousSibling is the element before it.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/><![CDATA[ <opentag> without an </endtag> and & <! are all ok here ]]></root>");

    var children = doc.DocumentElement.ChildNodes;
    Assert.Equal(XmlNodeType.CDATA, children[1].NodeType);
    Assert.Equal(children[0], children[1].PreviousSibling);
}
[Fact]
public static void OnDocumentFragment()
{
    // A document fragment itself never has a previous sibling.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/>some text<child2/><child3/></root>");

    var fragment = doc.CreateDocumentFragment();
    fragment.AppendChild(doc.DocumentElement);
    Assert.Null(fragment.PreviousSibling);
}
[Fact]
public static void OnDocumentElement()
{
    // The document element's PreviousSibling is the PI that precedes it.
    var doc = new XmlDocument();
    doc.LoadXml("<?PI pi info?><root/>");

    var pi = doc.ChildNodes[0];
    Assert.Equal(XmlNodeType.ProcessingInstruction, pi.NodeType);
    Assert.Equal(pi, doc.DocumentElement.PreviousSibling);
}
[Fact]
public static void OnAttributeNode()
{
    // Attributes are not siblings of each other: PreviousSibling is always null.
    var doc = new XmlDocument();
    doc.LoadXml("<root attr1='test' attr2='test2' />");

    var first = doc.DocumentElement.Attributes[0];
    var second = doc.DocumentElement.Attributes[1];
    Assert.Equal("attr1", first.Name);
    Assert.Equal("attr2", second.Name);
    Assert.Null(first.PreviousSibling);
    Assert.Null(second.PreviousSibling);
}
[Fact]
public static void OnAttributeNodeWithChildren()
{
    // Attributes on the parent do not affect the element children's sibling chain.
    var doc = new XmlDocument();
    doc.LoadXml("<root attr1='test' attr2='test2'><child1/><child2/><child3/></root>");

    var children = doc.DocumentElement.ChildNodes;
    Assert.Null(children[0].PreviousSibling);
    Assert.Same(children[0], children[1].PreviousSibling);
    Assert.Same(children[1], children[2].PreviousSibling);
}
[Fact]
public static void ElementOneChild()
{
    // An only child has no previous sibling.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child/></root>");
    Assert.Null(doc.DocumentElement.ChildNodes[0].PreviousSibling);
}
[Fact]
public static void OnAllSiblings()
{
    // Walk the sibling chain backwards across every node type, verifying each link.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/><child2/><child3 attr='1'/>Some Text<child4/><!-- comment --><?PI processing info?></root>");

    var children = doc.DocumentElement.ChildNodes;
    var follower = children[children.Count - 1];
    for (var i = children.Count - 2; i >= 0; i--)
    {
        Assert.Equal(children[i], follower.PreviousSibling);
        follower = children[i];
    }
    Assert.Null(follower.PreviousSibling);
}
[Fact]
public static void RemoveChildCheckSibling()
{
    // Removing a node detaches it from the sibling chain.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/><child2/></root>");
    var first = doc.DocumentElement.ChildNodes[0];
    var second = doc.DocumentElement.ChildNodes[1];

    Assert.Equal("child1", first.Name);
    Assert.Equal("child2", second.Name);
    Assert.Equal(first, second.PreviousSibling);
    Assert.Null(first.PreviousSibling);

    doc.DocumentElement.RemoveChild(second);

    Assert.Null(second.PreviousSibling);
    Assert.Null(first.PreviousSibling);
}
[Fact]
public static void ReplaceChild()
{
    // The replacement takes over the replaced node's position in the chain.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/><child2/><child3/></root>");
    var first = doc.DocumentElement.ChildNodes[0];
    var second = doc.DocumentElement.ChildNodes[1];
    var third = doc.DocumentElement.ChildNodes[2];

    Assert.Null(first.PreviousSibling);
    Assert.Same(first, second.PreviousSibling);
    Assert.Same(second, third.PreviousSibling);

    var replacement = doc.CreateElement("child4");
    doc.DocumentElement.ReplaceChild(replacement, second);

    Assert.Null(first.PreviousSibling);
    Assert.Same(first, replacement.PreviousSibling);
    Assert.Same(replacement, third.PreviousSibling);
    Assert.Null(second.PreviousSibling);
}
[Fact]
public static void InsertChildAfter()
{
    // InsertAfter links the new node behind the reference node.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/></root>");
    var anchor = doc.DocumentElement.ChildNodes[0];
    var inserted = doc.CreateElement("child2");

    Assert.Null(anchor.PreviousSibling);
    doc.DocumentElement.InsertAfter(inserted, anchor);
    Assert.Null(anchor.PreviousSibling);
    Assert.Same(anchor, inserted.PreviousSibling);
}
[Fact]
public static void InsertChildBefore()
{
    // InsertBefore makes the new node the reference node's previous sibling.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/></root>");
    var anchor = doc.DocumentElement.ChildNodes[0];
    var inserted = doc.CreateElement("child2");

    Assert.Null(anchor.PreviousSibling);
    doc.DocumentElement.InsertBefore(inserted, anchor);
    Assert.Same(inserted, anchor.PreviousSibling);
    Assert.Null(inserted.PreviousSibling);
}
[Fact]
public static void AppendChild()
{
    // An appended node's previous sibling is the former last child.
    var doc = new XmlDocument();
    doc.LoadXml("<root><child1/></root>");
    var existing = doc.DocumentElement.ChildNodes[0];
    var appended = doc.CreateElement("child2");

    Assert.Null(existing.PreviousSibling);
    doc.DocumentElement.AppendChild(appended);
    Assert.Same(existing, appended.PreviousSibling);
    Assert.Null(existing.PreviousSibling);
}
[Fact]
public static void NewlyCreatedElement()
{
    // A detached, freshly created element has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateElement("element").PreviousSibling);
}
[Fact]
public static void NewlyCreatedAttribute()
{
    // A detached, freshly created attribute has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateAttribute("attribute").PreviousSibling);
}
[Fact]
public static void NewlyCreatedTextNode()
{
    // A detached, freshly created text node has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateTextNode("textnode").PreviousSibling);
}
[Fact]
public static void NewlyCreatedCDataNode()
{
    // A detached, freshly created CDATA section has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateCDataSection("cdata section").PreviousSibling);
}
[Fact]
public static void NewlyCreatedProcessingInstruction()
{
    // A detached, freshly created processing instruction has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateProcessingInstruction("PI", "data").PreviousSibling);
}
[Fact]
public static void NewlyCreatedComment()
{
    // A detached, freshly created comment has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateComment("comment").PreviousSibling);
}
[Fact]
public static void NewlyCreatedDocumentFragment()
{
    // A detached, freshly created document fragment has no siblings.
    var doc = new XmlDocument();
    Assert.Null(doc.CreateDocumentFragment().PreviousSibling);
}
[Fact]
// NOTE(review): despite the name this asserts FirstChild.PreviousSibling
// (likely copied from the NextSibling suite); name kept to preserve the test id.
public static void FirstChildNextSibling()
{
var xmlDocument = new XmlDocument();
xmlDocument.LoadXml("<root><child1/><child2/><child3/></root>");
Assert.Null(xmlDocument.DocumentElement.FirstChild.PreviousSibling);
}
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;
using System.Reflection;
using DiscUtils.Streams;
namespace DiscUtils.Common
{
public abstract class ProgramBase
{
private CommandLineParser _parser;
private CommandLineSwitch _outFormatSwitch;
private CommandLineEnumSwitch<GenericDiskAdapterType> _adapterTypeSwitch;
private CommandLineSwitch _userNameSwitch;
private CommandLineSwitch _passwordSwitch;
private CommandLineSwitch _partitionSwitch;
private CommandLineSwitch _volumeIdSwitch;
private CommandLineSwitch _diskSizeSwitch;
private CommandLineSwitch _filenameEncodingSwitch;
private CommandLineSwitch _helpSwitch;
private CommandLineSwitch _quietSwitch;
private CommandLineSwitch _verboseSwitch;
private CommandLineSwitch _timeSwitch;
private string _userName;
private string _password;
private string _outputDiskType;
private string _outputDiskVariant;
private GenericDiskAdapterType _adapterType;
private int _partition = -1;
private string _volumeId;
private long _diskSize;
// All configuration happens in Run(); nothing to initialize here.
protected ProgramBase()
{
}
// iSCSI user name supplied via the -u switch; null when not given.
protected string UserName
{
get { return _userName; }
}
// iSCSI password from -pw, or the interactively prompted value; null when no
// user name was supplied.
protected string Password
{
get { return _password; }
}
// Disk type half of the output-format switch value (text before the first '-').
protected string OutputDiskType
{
get { return _outputDiskType; }
}
// Variant half of the output-format switch value (text after the first '-'; ""
// when no variant was specified).
protected string OutputDiskVariant
{
get { return _outputDiskVariant; }
}
// Adapter type from -a/--adaptortype; defaults to Ide when not supplied.
protected GenericDiskAdapterType AdapterType
{
get { return _adapterType; }
}
// True when -q/--quiet was passed (this switch is always registered by Run).
protected bool Quiet
{
get { return _quietSwitch.IsPresent; }
}
// True when the verbose switch was passed. The switch object only exists when
// DefineCommandLine requested StandardSwitches.Verbose, so guard against null
// (mirrors the guard in FileSystemParameters) instead of throwing
// NullReferenceException for programs that did not request it.
protected bool Verbose
{
    get { return _verboseSwitch != null && _verboseSwitch.IsPresent; }
}
// Partition number from -p; -1 when not specified.
protected int Partition
{
get { return _partition; }
}
// Volume id from -v/--volume; null when not specified.
protected string VolumeId
{
get { return _volumeId; }
}
// Output disk size in bytes, parsed from -sz/--size by Run.
protected long DiskSize
{
get { return _diskSize; }
}
// Disk-creation parameters assembled from the parsed adapter-type and size switches.
protected VirtualDiskParameters DiskParameters
{
get
{
return new VirtualDiskParameters()
{
AdapterType = AdapterType,
Capacity = DiskSize,
};
}
}
// File-system creation parameters; the file-name encoding is only set when the
// corresponding switch was both registered and supplied on the command line.
protected FileSystemParameters FileSystemParameters
{
get
{
return new FileSystemParameters()
{
FileNameEncoding = (_filenameEncodingSwitch != null && _filenameEncodingSwitch.IsPresent) ? Encoding.GetEncoding(_filenameEncodingSwitch.Value) : null,
};
}
}
protected abstract StandardSwitches DefineCommandLine(CommandLineParser parser);
protected virtual string[] HelpRemarks { get { return new string[] { }; } }
protected abstract void DoRun();
/// <summary>
/// Parses the command line according to the standard switch groups requested by
/// DefineCommandLine, fills in the common option fields, then invokes the
/// program body via DoRun (optionally timed).
/// </summary>
protected void Run(string[] args)
{
_parser = new CommandLineParser(ExeName);
// The derived class declares its own parameters and tells us which of the
// standard switch groups to register.
StandardSwitches stdSwitches = DefineCommandLine(_parser);
if ((stdSwitches & StandardSwitches.OutputFormatAndAdapterType) != 0)
{
_outFormatSwitch = OutputFormatSwitch();
_adapterTypeSwitch = new CommandLineEnumSwitch<GenericDiskAdapterType>("a", "adaptortype", "type", GenericDiskAdapterType.Ide, "Some disk formats encode the disk type (IDE or SCSI) into the disk image, this parameter specifies the type of adaptor to encode.");
_parser.AddSwitch(_outFormatSwitch);
_parser.AddSwitch(_adapterTypeSwitch);
}
if ((stdSwitches & StandardSwitches.DiskSize) != 0)
{
_diskSizeSwitch = new CommandLineSwitch("sz", "size", "size", "The size of the output disk. Use B, KB, MB, GB to specify units (units default to bytes if not specified).");
_parser.AddSwitch(_diskSizeSwitch);
}
if ((stdSwitches & StandardSwitches.FileNameEncoding) != 0)
{
_filenameEncodingSwitch = new CommandLineSwitch(new string[]{"ne"}, "nameencoding", "encoding", "The encoding used for filenames in the file system (aka the codepage), e.g. UTF-8 or IBM437. This is ignored for file systems have fixed/defined encodings.");
_parser.AddSwitch(_filenameEncodingSwitch);
}
if ((stdSwitches & StandardSwitches.PartitionOrVolume) != 0)
{
_partitionSwitch = new CommandLineSwitch("p", "partition", "num", "The number of the partition to inspect, in the range 0-n. If not specified, 0 (the first partition) is the default.");
_volumeIdSwitch = new CommandLineSwitch("v", "volume", "id", "The volume id of the volume to access, use the VolInfo tool to discover this id. If specified, the partition parameter is ignored.");
_parser.AddSwitch(_partitionSwitch);
_parser.AddSwitch(_volumeIdSwitch);
}
if ((stdSwitches & StandardSwitches.UserAndPassword) != 0)
{
_userNameSwitch = new CommandLineSwitch("u", "user", "user_name", "If using an iSCSI source or target, optionally use this parameter to specify the user name to authenticate with. If this parameter is specified without a password, you will be prompted to supply the password.");
_parser.AddSwitch(_userNameSwitch);
_passwordSwitch = new CommandLineSwitch("pw", "password", "secret", "If using an iSCSI source or target, optionally use this parameter to specify the password to authenticate with.");
_parser.AddSwitch(_passwordSwitch);
}
if ((stdSwitches & StandardSwitches.Verbose) != 0)
{
// NOTE(review): short name "v" collides with the volume switch above when a
// program requests both PartitionOrVolume and Verbose — confirm the parser
// handles duplicate short names.
_verboseSwitch = new CommandLineSwitch("v", "verbose", null, "Show detailed information.");
_parser.AddSwitch(_verboseSwitch);
}
// These three switches are always available.
_helpSwitch = new CommandLineSwitch(new string[] { "h", "?" }, "help", null, "Show this help.");
_parser.AddSwitch(_helpSwitch);
_quietSwitch = new CommandLineSwitch("q", "quiet", null, "Run quietly.");
_parser.AddSwitch(_quietSwitch);
_timeSwitch = new CommandLineSwitch("time", null, "Times how long this program takes to execute.");
_parser.AddSwitch(_timeSwitch);
bool parseResult = _parser.Parse(args);
if (!_quietSwitch.IsPresent)
{
DisplayHeader();
}
if (_helpSwitch.IsPresent || !parseResult)
{
DisplayHelp();
return;
}
// Post-parse: copy switch values into the fields exposed by the protected properties.
if ((stdSwitches & StandardSwitches.OutputFormatAndAdapterType) != 0)
{
if (_outFormatSwitch.IsPresent)
{
// "type-variant" — the variant part is optional.
string[] typeAndVariant = _outFormatSwitch.Value.Split(new char[] { '-' }, 2);
_outputDiskType = typeAndVariant[0];
_outputDiskVariant = (typeAndVariant.Length > 1) ? typeAndVariant[1] : "";
}
else
{
DisplayHelp();
return;
}
if (_adapterTypeSwitch.IsPresent)
{
_adapterType = _adapterTypeSwitch.EnumValue;
}
else
{
_adapterType = GenericDiskAdapterType.Ide;
}
}
if ((stdSwitches & StandardSwitches.DiskSize) != 0)
{
if (_diskSizeSwitch.IsPresent && !Utilities.TryParseDiskSize(_diskSizeSwitch.Value, out _diskSize))
{
DisplayHelp();
return;
}
}
if ((stdSwitches & StandardSwitches.PartitionOrVolume) != 0)
{
_partition = -1;
if (_partitionSwitch.IsPresent && !int.TryParse(_partitionSwitch.Value, out _partition))
{
DisplayHelp();
return;
}
_volumeId = _volumeIdSwitch.IsPresent ? _volumeIdSwitch.Value : null;
}
if ((stdSwitches & StandardSwitches.UserAndPassword) != 0)
{
_userName = null;
if (_userNameSwitch.IsPresent)
{
_userName = _userNameSwitch.Value;
// Prompt for the password when a user was given without one.
if (_passwordSwitch.IsPresent)
{
_password = _passwordSwitch.Value;
}
else
{
_password = Utilities.PromptForPassword();
}
}
}
if (_timeSwitch.IsPresent)
{
Stopwatch stopWatch = new Stopwatch();
stopWatch.Start();
DoRun();
stopWatch.Stop();
Console.WriteLine();
Console.WriteLine("Time taken: {0}", stopWatch.Elapsed);
}
else
{
DoRun();
}
}
// Prints the usage/switch description built by the parser, followed by the
// tool-specific remarks supplied via HelpRemarks.
protected void DisplayHelp()
{
    _parser.DisplayHelp(HelpRemarks);
}
// Writes the startup banner (tool name, version, origin and license note).
// Suppressed by the -quiet switch; virtual so tools can customize it.
protected virtual void DisplayHeader()
{
    Console.WriteLine("{0} v{1}, available from http://discutils.codeplex.com", ExeName, Version);
    Console.WriteLine("Copyright (c) Kenneth Bell, 2008-2013");
    Console.WriteLine("Free software issued under the MIT License, see LICENSE.TXT for details.");
    Console.WriteLine();
}
// Creates a single-value command-line parameter whose help text explains all
// of the disk-locator syntaxes accepted (plain path, iSCSI, NFS or ODS URL).
protected CommandLineParameter FileOrUriParameter(string paramName, string intro, bool optional)
{
    // Assemble the full description once; the boilerplate matches
    // FileOrUriMultiParameter so both parameter styles read identically.
    string description =
        intro + " " +
        "This can be a file path or an iSCSI, NFS or ODS URL. " +
        "URLs for iSCSI LUNs are of the form: iscsi://192.168.1.2/iqn.2002-2004.example.com:port1?LUN=2. " +
        "Use the iSCSIBrowse utility to discover iSCSI URLs. " +
        "NFS URLs are of the form: nfs://host/a/path.vhd. " +
        "ODS URLs are of the form: ods://domain/host/volumename.";
    return new CommandLineParameter(paramName, description, optional);
}
// Creates a multi-value command-line parameter whose help text explains all
// of the disk-locator syntaxes accepted (plain path, iSCSI, NFS or ODS URL).
protected CommandLineMultiParameter FileOrUriMultiParameter(string paramName, string intro, bool optional)
{
    // Assemble the full description once; the boilerplate matches
    // FileOrUriParameter so both parameter styles read identically.
    string description =
        intro + " " +
        "This can be a file path or an iSCSI, NFS or ODS URL. " +
        "URLs for iSCSI LUNs are of the form: iscsi://192.168.1.2/iqn.2002-2004.example.com:port1?LUN=2. " +
        "Use the iSCSIBrowse utility to discover iSCSI URLs. " +
        "NFS URLs are of the form: nfs://host/a/path.vhd. " +
        "ODS URLs are of the form: ods://domain/host/volumename.";
    return new CommandLineMultiParameter(paramName, description, optional);
}
// Renders a one-line, carriage-return-refreshed progress bar with percentage
// and an estimated time remaining extrapolated from the throughput so far.
protected static void ShowProgress(string label, long totalBytes, DateTime startTime, object sourceObject, PumpProgressEventArgs e)
{
    // Budget of 55 columns shared between the label and the bar; clamp so a
    // long label cannot drive the pad width negative (string(char,int) throws).
    int progressLen = Math.Max(0, 55 - label.Length);
    // Clamp the filled portion as well: if BytesRead ever exceeds totalBytes
    // the space-padding count would otherwise go negative.
    int numProgressChars = (int)Math.Min(progressLen, (e.BytesRead * progressLen) / totalBytes);
    string progressBar = new string('=', numProgressChars) + new string(' ', progressLen - numProgressChars);
    DateTime now = DateTime.Now;
    TimeSpan timeSoFar = now - startTime;
    // Guard the very first callback: extrapolating from zero bytes read yields
    // NaN/Infinity, which TimeSpan.FromMilliseconds rejects with an exception.
    TimeSpan remaining = e.BytesRead <= 0
        ? TimeSpan.Zero
        : TimeSpan.FromMilliseconds((timeSoFar.TotalMilliseconds / (double)e.BytesRead) * (totalBytes - e.BytesRead));
    // Only {0}-{3} are referenced; the original passed four extra arguments
    // (hours/minutes/seconds/milliseconds) that composite formatting ignored.
    Console.Write("\r{0} ({1,3}%) |{2}| {3:hh\\:mm\\:ss\\.f}", label, (e.BytesRead * 100) / totalBytes, progressBar, remaining);
}
// Builds the mandatory "-of/--outputFormat" switch, enumerating every
// supported disk type (optionally suffixed with "-variant") in sorted order
// inside the help text.
private CommandLineSwitch OutputFormatSwitch()
{
    List<string> outputTypes = new List<string>();
    foreach (var type in VirtualDiskManager.SupportedDiskTypes)
    {
        List<string> variants = new List<string>(VirtualDisk.GetSupportedDiskVariants(type));
        if (variants.Count == 0)
        {
            outputTypes.Add(type.ToUpperInvariant());
        }
        else
        {
            // One entry per variant, e.g. "VHD-dynamic".
            foreach (var variant in variants)
            {
                outputTypes.Add(type.ToUpperInvariant() + "-" + variant.ToLowerInvariant());
            }
        }
    }
    string[] ots = outputTypes.ToArray();
    Array.Sort(ots);
    // Render "A, B or C" grammatically for any list length. The original
    // produced "one of  or X" for a single entry and threw for an empty list.
    string choices;
    if (ots.Length == 0)
    {
        choices = "";
    }
    else if (ots.Length == 1)
    {
        choices = ots[0];
    }
    else
    {
        choices = string.Join(", ", ots, 0, ots.Length - 1) + " or " + ots[ots.Length - 1];
    }
    return new CommandLineSwitch(
        "of",
        "outputFormat",
        "format",
        "Mandatory - the type of disk to output, one of " + choices + ".");
}
// Short name of the assembly containing the concrete tool, used in the
// banner. NET40 lacks Type.GetTypeInfo(), hence the conditional compilation.
private string ExeName
{
    get
    {
#if NET40
        return GetType().Assembly.GetName().Name;
#else
        return GetType().GetTypeInfo().Assembly.GetName().Name;
#endif
    }
}
// Three-component (major.minor.build) assembly version shown in the banner.
// NET40 lacks Type.GetTypeInfo(), hence the conditional compilation.
private string Version
{
    get
    {
#if NET40
        return GetType().Assembly.GetName().Version.ToString(3);
#else
        return GetType().GetTypeInfo().Assembly.GetName().Version.ToString(3);
#endif
    }
}
/// <summary>
/// Flags selecting which of the shared command-line switches a derived tool
/// registers; checked against the requested set when arguments are parsed.
/// </summary>
[Flags]
protected internal enum StandardSwitches
{
    Default = 0,
    UserAndPassword = 1,
    OutputFormatAndAdapterType = 2,
    Verbose = 4,
    PartitionOrVolume = 8,
    DiskSize = 16,
    FileNameEncoding = 32
}
}
}
| |
//
// Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using JetBrains.Annotations;
using NLog.Config;
using NLog.Internal;
namespace NLog.MessageTemplates
{
/// <summary>
/// Convert, Render or serialize a value, with optionally backwards-compatible with <see cref="string.Format(System.IFormatProvider,string,object[])"/>
/// </summary>
internal class ValueFormatter : IValueFormatter
{
    // Reference-identity comparer used for cycle detection, so two distinct
    // but value-equal collections are not mistaken for a reference loop.
    private static readonly IEqualityComparer<object> _referenceEqualsComparer = SingleItemOptimizedHashSet<object>.ReferenceEqualityComparer.Default;
    // Caches Enum -> ToString() results (bounded MRU cache of 2000 entries).
    private readonly MruCache<Enum, string> _enumCache = new MruCache<Enum, string>(2000);
    private readonly IServiceProvider _serviceProvider;
    // Resolved lazily so the converter is only requested from the service
    // provider the first time JSON serialization is actually needed.
    private IJsonConverter JsonConverter => _jsonConverter ?? (_jsonConverter = _serviceProvider.GetService<IJsonConverter>());
    private IJsonConverter _jsonConverter;
    public ValueFormatter([NotNull] IServiceProvider serviceProvider)
    {
        _serviceProvider = serviceProvider;
    }
    // Collections nested deeper than this are not expanded further.
    private const int MaxRecursionDepth = 2;
    // Collection serialization stops once the builder grows past this size.
    private const int MaxValueLength = 512 * 1024;
    // Format-string "l" renders strings/chars without surrounding quotes.
    private const string LiteralFormatSymbol = "l";
    public const string FormatAsJson = "@";
    public const string FormatAsString = "$";
    /// <summary>
    /// Serialization of an object, e.g. JSON and append to <paramref name="builder"/>
    /// </summary>
    /// <param name="value">The object to serialize to string.</param>
    /// <param name="format">Parameter Format</param>
    /// <param name="captureType">Parameter CaptureType</param>
    /// <param name="formatProvider">An object that supplies culture-specific formatting information.</param>
    /// <param name="builder">Output destination.</param>
    /// <returns>Serialize succeeded (true/false)</returns>
    public bool FormatValue(object value, string format, CaptureType captureType, IFormatProvider formatProvider, StringBuilder builder)
    {
        switch (captureType)
        {
            case CaptureType.Serialize:
            {
                // "@" capture: full JSON serialization.
                return JsonConverter.SerializeObject(value, builder);
            }
            case CaptureType.Stringify:
            {
                // "$" capture: quoted ToString-style rendering.
                builder.Append('"');
                FormatToString(value, null, formatProvider, builder);
                builder.Append('"');
                return true;
            }
            default:
            {
                return FormatObject(value, format, formatProvider, builder);
            }
        }
    }
    /// <summary>
    /// Format an object to a readable string, or if it's an object, serialize
    /// </summary>
    /// <param name="value">The value to convert</param>
    /// <param name="format"></param>
    /// <param name="formatProvider"></param>
    /// <param name="builder"></param>
    /// <returns></returns>
    public bool FormatObject(object value, string format, IFormatProvider formatProvider, StringBuilder builder)
    {
        // Scalars and IConvertible/IFormattable values are handled directly;
        // note convertToString:false so unknown objects fall through below.
        if (SerializeSimpleObject(value, format, formatProvider, builder, false))
        {
            return true;
        }
        IEnumerable collection = value as IEnumerable;
        if (collection != null)
        {
            // Expand collections element-by-element with cycle protection.
            return SerializeWithoutCyclicLoop(collection, format, formatProvider, builder, default(SingleItemOptimizedHashSet<object>), 0);
        }
        SerializeConvertToString(value, formatProvider, builder);
        return true;
    }
    /// <summary>
    /// Try serializing a scalar (string, int, NULL) or simple type (IFormattable)
    /// </summary>
    private bool SerializeSimpleObject(object value, string format, IFormatProvider formatProvider, StringBuilder builder, bool convertToString = true)
    {
        if (value is string stringValue)
        {
            SerializeStringObject(stringValue, format, builder);
            return true;
        }
        if (value is null)
        {
            builder.Append("NULL");
            return true;
        }
        // Optimize for types that are pretty much invariant in all cultures when no format-string
        if (value is IConvertible convertibleValue)
        {
            SerializeConvertibleObject(convertibleValue, format, formatProvider, builder);
            return true;
        }
        else
        {
            if (!string.IsNullOrEmpty(format) && value is IFormattable formattable)
            {
                builder.Append(formattable.ToString(format, formatProvider));
                return true;
            }
            // convertToString=false lets the caller try collection handling
            // before falling back to Convert.ToString.
            if (convertToString)
            {
                SerializeConvertToString(value, formatProvider, builder);
                return true;
            }
            return false;
        }
    }
    // Renders an IConvertible value: strings quoted, booleans as true/false,
    // chars quoted, integral types (and enums) invariant, everything else via
    // Convert.ToString with the supplied provider.
    private void SerializeConvertibleObject(IConvertible value, string format, IFormatProvider formatProvider, StringBuilder builder)
    {
        TypeCode convertibleTypeCode = value.GetTypeCode();
        if (convertibleTypeCode == TypeCode.String)
        {
            SerializeStringObject(value.ToString(), format, builder);
            return;
        }
        // An explicit format-string takes precedence over the per-TypeCode handling.
        if (!string.IsNullOrEmpty(format) && value is IFormattable formattable)
        {
            builder.Append(formattable.ToString(format, formatProvider));
            return;
        }
        switch (convertibleTypeCode)
        {
            case TypeCode.Boolean:
            {
                builder.Append(value.ToBoolean(CultureInfo.InvariantCulture) ? "true" : "false");
                break;
            }
            case TypeCode.Char:
            {
                // "l" format suppresses the surrounding quotes.
                bool includeQuotes = format != LiteralFormatSymbol;
                if (includeQuotes) builder.Append('"');
                builder.Append(value.ToChar(CultureInfo.InvariantCulture));
                if (includeQuotes) builder.Append('"');
                break;
            }
            case TypeCode.Byte:
            case TypeCode.SByte:
            case TypeCode.Int16:
            case TypeCode.Int32:
            case TypeCode.Int64:
            case TypeCode.UInt16:
            case TypeCode.UInt32:
            case TypeCode.UInt64:
            {
                if (value is Enum enumValue)
                {
                    AppendEnumAsString(builder, enumValue);
                }
                else
                {
                    builder.AppendNumericInvariant(value, convertibleTypeCode);
                }
                break;
            }
            case TypeCode.Object: // Guid, TimeSpan, DateTimeOffset
            default: // Single, Double, Decimal, etc.
                SerializeConvertToString(value, formatProvider, builder);
                break;
        }
    }
    // Last-resort rendering: Convert.ToString with the caller's format provider.
    private static void SerializeConvertToString(object value, IFormatProvider formatProvider, StringBuilder builder)
    {
        builder.Append(Convert.ToString(value, formatProvider));
    }
    // Appends a string value, quoted unless the "l" (literal) format is given.
    private static void SerializeStringObject(string stringValue, string format, StringBuilder builder)
    {
        bool includeQuotes = format != LiteralFormatSymbol;
        if (includeQuotes) builder.Append('"');
        builder.Append(stringValue);
        if (includeQuotes) builder.Append('"');
    }
    // Appends the enum's name (or numeric fallback from Enum.ToString), using
    // the MRU cache to avoid repeated reflection-based formatting.
    private void AppendEnumAsString(StringBuilder sb, Enum value)
    {
        if (!_enumCache.TryGetValue(value, out var textValue))
        {
            textValue = value.ToString();
            _enumCache.TryAddValue(value, textValue);
        }
        sb.Append(textValue);
    }
    // Expands a collection while tracking every collection already on the
    // current path (by reference) so self-referencing object graphs terminate.
    private bool SerializeWithoutCyclicLoop(IEnumerable collection, string format, IFormatProvider formatProvider, StringBuilder builder,
            SingleItemOptimizedHashSet<object> objectsInPath, int depth)
    {
        if (objectsInPath.Contains(collection))
        {
            return false; // detected reference loop, skip serialization
        }
        if (depth > MaxRecursionDepth)
        {
            return false; // reached maximum recursion level, no further serialization
        }
        IDictionary dictionary = collection as IDictionary;
        if (dictionary != null)
        {
            // Scoped insert: the dictionary is only considered "in path" while
            // its own entries are being serialized.
            using (new SingleItemOptimizedHashSet<object>.SingleItemScopedInsert(dictionary, ref objectsInPath, true, _referenceEqualsComparer))
            {
                return SerializeDictionaryObject(dictionary, format, formatProvider, builder, objectsInPath, depth);
            }
        }
        using (new SingleItemOptimizedHashSet<object>.SingleItemScopedInsert(collection, ref objectsInPath, true, _referenceEqualsComparer))
        {
            return SerializeCollectionObject(collection, format, formatProvider, builder, objectsInPath, depth);
        }
    }
    /// <summary>
    /// Serialize Dictionary as JSON like structure, without { and }
    /// </summary>
    /// <example>
    /// "FirstOrder"=true, "Previous login"=20-12-2017 14:55:32, "number of tries"=1
    /// </example>
    /// <param name="dictionary"></param>
    /// <param name="format">formatstring of an item</param>
    /// <param name="formatProvider"></param>
    /// <param name="builder"></param>
    /// <param name="objectsInPath"></param>
    /// <param name="depth"></param>
    /// <returns></returns>
    private bool SerializeDictionaryObject(IDictionary dictionary, string format, IFormatProvider formatProvider, StringBuilder builder, SingleItemOptimizedHashSet<object> objectsInPath, int depth)
    {
        bool separator = false;
        foreach (var item in new DictionaryEntryEnumerable(dictionary))
        {
            // Abort (returning false) once the output exceeds the size cap.
            if (builder.Length > MaxValueLength)
                return false;
            if (separator) builder.Append(", ");
            SerializeCollectionItem(item.Key, format, formatProvider, builder, ref objectsInPath, depth);
            builder.Append("=");
            SerializeCollectionItem(item.Value, format, formatProvider, builder, ref objectsInPath, depth);
            separator = true;
        }
        return true;
    }
    // Serializes a plain collection as "item1, item2, ..." honoring the same
    // size cap as the dictionary path.
    private bool SerializeCollectionObject(IEnumerable collection, string format, IFormatProvider formatProvider, StringBuilder builder, SingleItemOptimizedHashSet<object> objectsInPath, int depth)
    {
        bool separator = false;
        foreach (var item in collection)
        {
            if (builder.Length > MaxValueLength)
                return false;
            if (separator) builder.Append(", ");
            SerializeCollectionItem(item, format, formatProvider, builder, ref objectsInPath, depth);
            separator = true;
        }
        return true;
    }
    // Serializes one element: scalars inline, nested collections recursively
    // (one level deeper), anything else via the simple-object path.
    private void SerializeCollectionItem(object item, string format, IFormatProvider formatProvider, StringBuilder builder, ref SingleItemOptimizedHashSet<object> objectsInPath, int depth)
    {
        if (item is IConvertible convertible)
            SerializeConvertibleObject(convertible, format, formatProvider, builder);
        else if (item is IEnumerable enumerable)
            SerializeWithoutCyclicLoop(enumerable, format, formatProvider, builder, objectsInPath, depth + 1);
        else
            SerializeSimpleObject(item, format, formatProvider, builder);
    }
    /// <summary>
    /// Convert a value to a string with format and append to <paramref name="builder"/>.
    /// </summary>
    /// <param name="value">The value to convert.</param>
    /// <param name="format">Format sting for the value.</param>
    /// <param name="formatProvider">Format provider for the value.</param>
    /// <param name="builder">Append to this</param>
    public static void FormatToString(object value, string format, IFormatProvider formatProvider, StringBuilder builder)
    {
        var stringValue = value as string;
        if (stringValue != null)
        {
            builder.Append(stringValue);
        }
        else
        {
            var formattable = value as IFormattable;
            if (formattable != null)
            {
                builder.Append(formattable.ToString(format, formatProvider));
            }
            else
            {
                builder.Append(Convert.ToString(value, formatProvider));
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    // Auto-generated driver for the Sse41.Insert(Vector128<byte>, byte, 129)
    // test: on supported hardware it runs every scenario variant; otherwise it
    // verifies that PlatformNotSupportedException is thrown.
    private static void InsertByte129()
    {
        var test = new SimpleUnaryOpTest__InsertByte129();
        try
        {
            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();
                if (Sse2.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();
                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }
                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();
                if (Sse2.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();
                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }
                // Validates passing a static member works
                test.RunClsVarScenario();
                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();
                if (Sse2.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();
                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }
                // Validates passing the field of a local works
                test.RunLclFldScenario();
                // Validates passing an instance member works
                test.RunFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }
        }
        catch (PlatformNotSupportedException)
        {
            test.Succeeded = true;
        }
        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
// Auto-generated test harness: inserts the byte value 2 at immediate index 129
// into an all-zero Vector128<byte> through several code paths (direct call,
// reflection, loads, fields) and validates the result. The validation expects
// element 1 to hold 2 and all other elements to stay 0.
public sealed unsafe class SimpleUnaryOpTest__InsertByte129
{
    private const int VectorSize = 16;
    private const int Op1ElementCount = VectorSize / sizeof(Byte);
    private const int RetElementCount = VectorSize / sizeof(Byte);
    private static Byte[] _data = new Byte[Op1ElementCount];
    private static Vector128<Byte> _clsVar;
    private Vector128<Byte> _fld;
    private SimpleUnaryOpTest__DataTable<Byte, Byte> _dataTable;
    static SimpleUnaryOpTest__InsertByte129()
    {
        // Zero-fill the static input vector used by RunClsVarScenario.
        var random = new Random();
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (byte)0; }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Byte>, byte>(ref _clsVar), ref Unsafe.As<Byte, byte>(ref _data[0]), VectorSize);
    }
    public SimpleUnaryOpTest__InsertByte129()
    {
        Succeeded = true;
        // Zero-fill the instance field and the unmanaged data table inputs.
        var random = new Random();
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (byte)0; }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Byte>, byte>(ref _fld), ref Unsafe.As<Byte, byte>(ref _data[0]), VectorSize);
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (byte)0; }
        _dataTable = new SimpleUnaryOpTest__DataTable<Byte, Byte>(_data, new Byte[RetElementCount], VectorSize);
    }
    public bool IsSupported => Sse41.IsSupported;
    public bool Succeeded { get; set; }
    public void RunBasicScenario_UnsafeRead()
    {
        var result = Sse41.Insert(
            Unsafe.Read<Vector128<Byte>>(_dataTable.inArrayPtr),
            (byte)2,
            129
        );
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }
    public void RunBasicScenario_Load()
    {
        var result = Sse41.Insert(
            Sse2.LoadVector128((Byte*)(_dataTable.inArrayPtr)),
            (byte)2,
            129
        );
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }
    public void RunBasicScenario_LoadAligned()
    {
        var result = Sse41.Insert(
            Sse2.LoadAlignedVector128((Byte*)(_dataTable.inArrayPtr)),
            (byte)2,
            129
        );
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }
    public void RunReflectionScenario_UnsafeRead()
    {
        var result = typeof(Sse41).GetMethod(nameof(Sse41.Insert), new Type[] { typeof(Vector128<Byte>), typeof(Byte), typeof(byte) })
                                .Invoke(null, new object[] {
                                    Unsafe.Read<Vector128<Byte>>(_dataTable.inArrayPtr),
                                    (byte)2,
                                    (byte)129
                                });
        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Byte>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }
    public void RunReflectionScenario_Load()
    {
        var result = typeof(Sse41).GetMethod(nameof(Sse41.Insert), new Type[] { typeof(Vector128<Byte>), typeof(Byte), typeof(byte) })
                                .Invoke(null, new object[] {
                                    Sse2.LoadVector128((Byte*)(_dataTable.inArrayPtr)),
                                    (byte)2,
                                    (byte)129
                                });
        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Byte>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }
    public void RunReflectionScenario_LoadAligned()
    {
        var result = typeof(Sse41).GetMethod(nameof(Sse41.Insert), new Type[] { typeof(Vector128<Byte>), typeof(Byte), typeof(byte) })
                                .Invoke(null, new object[] {
                                    Sse2.LoadAlignedVector128((Byte*)(_dataTable.inArrayPtr)),
                                    (byte)2,
                                    (byte)129
                                });
        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Byte>)(result));
        ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
    }
    public void RunClsVarScenario()
    {
        var result = Sse41.Insert(
            _clsVar,
            (byte)2,
            129
        );
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_clsVar, _dataTable.outArrayPtr);
    }
    public void RunLclVarScenario_UnsafeRead()
    {
        var firstOp = Unsafe.Read<Vector128<Byte>>(_dataTable.inArrayPtr);
        var result = Sse41.Insert(firstOp, (byte)2, 129);
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }
    public void RunLclVarScenario_Load()
    {
        var firstOp = Sse2.LoadVector128((Byte*)(_dataTable.inArrayPtr));
        var result = Sse41.Insert(firstOp, (byte)2, 129);
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }
    public void RunLclVarScenario_LoadAligned()
    {
        var firstOp = Sse2.LoadAlignedVector128((Byte*)(_dataTable.inArrayPtr));
        var result = Sse41.Insert(firstOp, (byte)2, 129);
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(firstOp, _dataTable.outArrayPtr);
    }
    public void RunLclFldScenario()
    {
        var test = new SimpleUnaryOpTest__InsertByte129();
        var result = Sse41.Insert(test._fld, (byte)2, 129);
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld, _dataTable.outArrayPtr);
    }
    public void RunFldScenario()
    {
        var result = Sse41.Insert(_fld, (byte)2, 129);
        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld, _dataTable.outArrayPtr);
    }
    public void RunUnsupportedScenario()
    {
        // Succeeds only if the intrinsic throws on unsupported hardware.
        Succeeded = false;
        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            Succeeded = true;
        }
    }
    private void ValidateResult(Vector128<Byte> firstOp, void* result, [CallerMemberName] string method = "")
    {
        Byte[] inArray = new Byte[Op1ElementCount];
        Byte[] outArray = new Byte[RetElementCount];
        Unsafe.Write(Unsafe.AsPointer(ref inArray[0]), firstOp);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
        ValidateResult(inArray, outArray, method);
    }
    private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
    {
        Byte[] inArray = new Byte[Op1ElementCount];
        Byte[] outArray = new Byte[RetElementCount];
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
        ValidateResult(inArray, outArray, method);
    }
    private void ValidateResult(Byte[] firstOp, Byte[] result, [CallerMemberName] string method = "")
    {
        // Expected: element 1 becomes 2, every other element remains 0.
        for (var i = 0; i < RetElementCount; i++)
        {
            if ((i == 1 ? result[i] != 2 : result[i] != 0))
            {
                Succeeded = false;
                break;
            }
        }
        if (!Succeeded)
        {
            Console.WriteLine($"{nameof(Sse41)}.{nameof(Sse41.Insert)}<Byte>(Vector128<Byte><9>): {method} failed:");
            Console.WriteLine($"  firstOp: ({string.Join(", ", firstOp)})");
            Console.WriteLine($"   result: ({string.Join(", ", result)})");
            Console.WriteLine();
        }
    }
}
}
| |
// Copyright (C) 2006-2010 Jim Tilander. See COPYING for and README for more details.
using System;
using EnvDTE;
using System.Threading;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
// How to call p4v from the command line: http://blog.perforce.com/blog/?p=1928
//
//
namespace Aurora
{
namespace NiftyPerforce
{
// Simplification wrapper around running perforce commands.
class P4Operations
{
// Availability flags, checked before the corresponding executable is run
// (the NotifyUser messages below report when one is missing).
private static bool g_p4installed = false;      // p4.exe command-line client
private static bool g_p4wininstalled = false;   // P4Win GUI
private static bool g_p4vinstalled = false;     // P4V visual client
private static bool g_p4customdiff = false;     // user has a custom p4 diff tool configured
// Tokens ("operation fullpath") of operations currently queued or running;
// used by LockOp/UnlockOp to prevent double-scheduling the same operation.
private static Dictionary<string, bool> g_opsInFlight = new Dictionary<string, bool>();
// Registers an operation token so the same file/operation pair cannot be
// scheduled twice concurrently. Returns false (and reports the clash) when
// the token is already in flight. Relies on Dictionary.Add throwing
// ArgumentException for a duplicate key; note a null token also lands in the
// catch, since ArgumentNullException derives from ArgumentException.
private static bool LockOp(string token)
{
    try
    {
        lock (g_opsInFlight)
        {
            g_opsInFlight.Add(token, true);
        }
        Log.Debug("## Locked \"" + token + "\"" );
        return true;
    }
    catch(ArgumentException)
    {
        //Log.Debug("!! Failed to lock \"" + token + "\"");
        Log.Error(token + " already in progress");
        return false;
    }
}
// Releases an operation token when its async process completes. Matches the
// AsyncProcess.OnDone callback shape: 'ok' is the completion status (unused
// here - the token is released either way) and token_ is the boxed token
// string passed to Schedule/Run. A null token is silently ignored.
private static void UnlockOp(bool ok, object token_)
{
    string token = (string)token_;
    try
    {
        lock (g_opsInFlight)
        {
            if (g_opsInFlight.Remove(token))
            {
                Log.Debug("## Unlocked \"" + token + "\"");
            }
            else
            {
                // Token was never locked (or already removed) - log only.
                Log.Debug("!! Failed to unlock \"" + token + "\"");
            }
        }
    }
    catch (ArgumentNullException)
    {
    }
}
// Builds the in-flight lock token for a file operation. The path is
// canonicalized and lower-cased so the same file cannot be queued twice
// under different relative paths or casings.
private static string FormatToken(string operation, string filename)
{
    return operation + " " + Path.GetFullPath(filename).ToLower();
}
// Signature shared by the per-file checkout strategies that
// Internal_CheckEditFile invokes (deferred vs. immediate edit).
public delegate bool CheckoutCallback(OutputWindowPane output, string filename);
// Schedules "p4 integrate oldName filename" (used for renames/moves).
// Returns false for an empty filename, when p4 is unavailable, or when the
// same integrate is already in flight.
public static bool IntegrateFile(OutputWindowPane output, string filename, string oldName)
{
    if (filename.Length == 0)
        return false;
    if (!g_p4installed)
        return NotifyUser("could not find p4 exe installed in perforce directory");
    string token = FormatToken("integrate", filename);
    if (!LockOp(token))
        return false;
    string commandline = GetUserInfoString() + "integrate \"" + oldName + "\" \"" + filename + "\"";
    return AsyncProcess.Schedule(output, "p4.exe", commandline, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
}
// Schedules "p4 delete filename". Returns false for an empty filename, when
// p4 is unavailable, or when the same delete is already in flight.
public static bool DeleteFile(OutputWindowPane output, string filename)
{
    if (filename.Length == 0)
        return false;
    if (!g_p4installed)
        return NotifyUser("could not find p4 exe installed in perforce directory");
    string token = FormatToken("delete", filename);
    if (!LockOp(token))
        return false;
    string commandline = GetUserInfoString() + "delete \"" + filename + "\"";
    return AsyncProcess.Schedule(output, "p4.exe", commandline, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
}
// Schedules "p4 add filename". Returns false for an empty filename, when
// p4 is unavailable, or when the same add is already in flight.
public static bool AddFile(OutputWindowPane output, string filename)
{
    if (filename.Length == 0)
        return false;
    if (!g_p4installed)
        return NotifyUser("could not find p4 exe installed in perforce directory");
    string token = FormatToken("add", filename);
    if (!LockOp(token))
        return false;
    string commandline = GetUserInfoString() + "add \"" + filename + "\"";
    return AsyncProcess.Schedule(output, "p4.exe", commandline, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
}
// Checks out the file (and its companion files) via the scheduled,
// asynchronous edit path.
public static bool EditFile(OutputWindowPane output, string filename)
{
    CheckoutCallback callback = new CheckoutCallback(Internal_EditFile);
    return Internal_CheckEditFile(callback, output, filename);
}
// Checks out the file (and its companion files) via the immediate,
// blocking edit path.
public static bool EditFileImmediate(OutputWindowPane output, string filename)
{
    CheckoutCallback callback = new CheckoutCallback(Internal_EditFileImmediate);
    return Internal_CheckEditFile(callback, output, filename);
}
// Invokes the checkout callback on the file and then on the companion files
// Visual Studio edits alongside it (.filters for .vcxproj, Designer.cs for
// .settings/.resx, Designer.cs + .resx for .cs). The return value reflects
// the primary file only.
private static bool Internal_CheckEditFile(CheckoutCallback callback, OutputWindowPane output, string filename)
{
    bool result = callback(output, filename);
    switch (Path.GetExtension(filename).ToLower())
    {
        case ".vcxproj":
            callback(output, filename + ".filters");
            break;
        case ".settings":
        case ".resx":
            callback(output, Path.ChangeExtension(filename, ".Designer.cs"));
            break;
        case ".cs":
            callback(output, Path.ChangeExtension(filename, ".Designer.cs"));
            callback(output, Path.ChangeExtension(filename, ".resx"));
            break;
    }
    return result;
}
// Schedules "p4 edit filename". Skips files that are empty-named, do not
// exist on disk, or are already writable (i.e. not read-only, so presumably
// already checked out).
private static bool Internal_EditFile(OutputWindowPane output, string filename)
{
    if (filename.Length == 0)
        return false;
    if (!System.IO.File.Exists(filename))
        return false;
    FileAttributes attributes = System.IO.File.GetAttributes(filename);
    if ((attributes & FileAttributes.ReadOnly) == 0)
        return false;
    if (!g_p4installed)
        return NotifyUser("could not find p4 exe installed in perforce directory");
    Log.Debug("EditFile : " + filename);
    string token = FormatToken("edit", filename);
    if (!LockOp(token))
        return false;
    string commandline = GetUserInfoString() + "edit \"" + filename + "\"";
    return AsyncProcess.Schedule(output, "p4.exe", commandline, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
}
// Runs "p4 edit filename" synchronously (AsyncProcess.Run rather than
// Schedule). Same preconditions as Internal_EditFile: non-empty name,
// file exists, and is still read-only.
private static bool Internal_EditFileImmediate(OutputWindowPane output, string filename)
{
    if (filename.Length == 0)
        return false;
    if (!System.IO.File.Exists(filename))
        return false;
    FileAttributes attributes = System.IO.File.GetAttributes(filename);
    if ((attributes & FileAttributes.ReadOnly) == 0)
        return false;
    if (!g_p4installed)
        return NotifyUser("could not find p4 exe installed in perforce directory");
    Log.Debug("EditFileImmediate : " + filename);
    string token = FormatToken("edit", filename);
    if (!LockOp(token))
        return false;
    string commandline = GetUserInfoString() + "edit \"" + filename + "\"";
    return AsyncProcess.Run(output, "p4.exe", commandline, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
}
// Schedules "p4 revert filename". When onlyUnchanged is set the "-a" flag is
// added so only files opened but unmodified are reverted.
public static bool RevertFile(OutputWindowPane output, string filename, bool onlyUnchanged)
{
    if (filename.Length == 0)
        return false;
    if (!g_p4installed)
        return NotifyUser("could not find p4 exe installed in perforce directory");
    string token = FormatToken("revert", filename);
    if (!LockOp(token))
        return false;
    string revertArguments = onlyUnchanged ? "-a " : string.Empty;
    string commandline = GetUserInfoString() + "revert " + revertArguments + "\"" + filename + "\"";
    return AsyncProcess.Schedule(output, "p4.exe", commandline, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
}
// Diffs the file against its #have revision. Prefers P4Win; otherwise falls
// back to p4.exe, using either the user's custom diff tool or a unified diff
// printed to the output pane.
public static bool DiffFile(OutputWindowPane output, string filename)
{
    if(filename.Length == 0)
        return false;
    string token = FormatToken("diff", filename);
    if (!LockOp(token))
        return false;
    if (g_p4wininstalled /*&& !Singleton<Config>.Instance.preferVisualClient*/)
    {
        return AsyncProcess.Schedule(output, "p4win.exe", GetUserInfoString() + " -D \"" + filename + "#have\"", Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token, 0);
    }
    // NOTE: this doesn't work since it leaves zombie p4v processes around!
    /*
    if (g_p4vinstalled)
    {
        string arguments = " -win 0 ";
        arguments += GetUserInfoStringFull(true, Path.GetDirectoryName(filename));
        arguments += " -cmd \"prevdiff \"" + filename + "\"";
        return AsyncProcess.Schedule(output, "p4v.exe", arguments, Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
    }*/
    if(g_p4installed)
    {
        // Let's figure out if the user has some custom diff tool installed. Then we just send whatever we have without any fancy options.
        if(g_p4customdiff)
        {
            return AsyncProcess.Schedule(output, "p4.exe", GetUserInfoString() + " diff \"" + filename + "#have\"", Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
        }
        else
        {
            // Otherwise let's show a unified diff in the outputpane.
            return AsyncProcess.Schedule(output, "p4.exe", GetUserInfoString() + " diff -du \"" + filename + "#have\"", Path.GetDirectoryName(filename), new AsyncProcess.OnDone(UnlockOp), token);
        }
    }
    return NotifyUser("could not find p4win.exe/p4.exe installed in perforce directory");
}
// Opens the revision history for the file, preferring P4Win unless the user
// configured a preference for the visual client (P4V).
public static bool RevisionHistoryFile(OutputWindowPane output, string dirname, string filename)
{
    if (filename.Length == 0)
        return false;
    string token = FormatToken("history", filename);
    if (!LockOp(token))
        return false;
    if (g_p4wininstalled && !Singleton<Config>.Instance.preferVisualClient)
    {
        string arguments = GetUserInfoStringFull(true, dirname) + " -H \"" + filename + "\"";
        return AsyncProcess.Schedule(output, "p4win.exe", arguments, dirname, new AsyncProcess.OnDone(UnlockOp), token, 0);
    }
    if (g_p4vinstalled)
    {
        string arguments = " -win 0 " + GetUserInfoStringFull(true, dirname) + " -cmd \"history " + filename + "\"";
        return AsyncProcess.Schedule(output, "p4v.exe", arguments, dirname, new AsyncProcess.OnDone(UnlockOp), token, 0);
    }
    return NotifyUser("could not find p4win.exe/p4v.exe installed in perforce directory");
}
// Reveals the file in P4Win (or P4V when P4Win is missing or the visual
// client is preferred). No in-flight lock: this is a view-only operation.
public static bool P4WinShowFile(OutputWindowPane output, string filename)
{
    if (filename.Length == 0)
        return false;
    string directory = Path.GetDirectoryName(filename);
    if (g_p4wininstalled && !Singleton<Config>.Instance.preferVisualClient)
    {
        string arguments = GetUserInfoStringFull(true, directory) + " -q -s \"" + filename + "\"";
        return AsyncProcess.Schedule(output, "p4win.exe", arguments, directory, null, null, 0);
    }
    if (g_p4vinstalled)
    {
        string arguments = " -win 0 " + GetUserInfoStringFull(true, directory) + " -cmd \"open " + filename + "\"";
        return AsyncProcess.Schedule(output, "p4v.exe", arguments, directory, null, null, 0);
    }
    return NotifyUser("could not find p4win.exe or p4v.exe installed in perforce directory");
}
// Builds the common perforce connection argument string without doing a
// per-directory lookup (see GetUserInfoStringFull for the lookup variant).
private static string GetUserInfoString()
{
    return GetUserInfoStringFull(false, "");
}
// Builds the " -p <port> -u <user> -c <client> " connection argument string.
// When the user has opted to use the system environment (P4CONFIG etc.):
//   - with lookup=true the settings are discovered by running "p4 info" in
//     the given directory and scraping its output;
//   - otherwise an empty string is returned so p4 resolves everything from
//     the environment itself.
// Otherwise the values configured in the plugin's Config singleton are used.
private static string GetUserInfoStringFull(bool lookup, string dir)
{
    // NOTE: This to allow the user to have a P4CONFIG variable and connect to multiple perforce servers seamlessly.
    if(Singleton<Config>.Instance.useSystemEnv)
    {
        if(lookup)
        {
            try
            {
                // Run "p4 info" in the target directory; P4CONFIG makes the
                // answer directory-dependent.
                string output = Process.Execute("p4", dir, "-s -L \"{0}\" info", dir);
                // Scrape the connection settings out of the "p4 info" output.
                Regex userpattern = new Regex(@"User name: (?<user>.*)$", RegexOptions.Compiled | RegexOptions.Multiline);
                Regex portpattern = new Regex(@"Server address: (?<port>.*)$", RegexOptions.Compiled | RegexOptions.Multiline);
                Regex clientpattern = new Regex(@"Client name: (?<client>.*)$", RegexOptions.Compiled | RegexOptions.Multiline);
                Match usermatch = userpattern.Match(output);
                Match portmatch = portpattern.Match(output);
                Match clientmatch = clientpattern.Match(output);
                string port = portmatch.Groups["port"].Value.Trim();
                string username = usermatch.Groups["user"].Value.Trim();
                string client = clientmatch.Groups["client"].Value.Trim();
                string ret = string.Format(" -p {0} -u {1} -c {2} ", port, username, client);
                Log.Debug("GetUserInfoStringFull : " + ret);
                return ret;
            }
            catch(Process.Error e)
            {
                // Discovery is best-effort; fall through to the empty string
                // so p4 picks the settings up from the environment.
                Log.Error("Failed to execute info string discovery: {0}", e.info);
            }
        }
        return "";
    }
    // Explicit settings from the plugin configuration.
    string arguments = "";
    arguments += " -p " + Singleton<Config>.Instance.port;
    arguments += " -u " + Singleton<Config>.Instance.username;
    arguments += " -c " + Singleton<Config>.Instance.client;
    arguments += " ";
    Log.Debug("GetUserInfoStringFull : " + arguments);
    return arguments;
}
// Opens the p4v time-lapse (annotate) view for a file.
// Returns false when p4v is missing or the operation is already locked.
public static bool TimeLapseView(OutputWindowPane output, string dirname, string filename)
{
    if (!g_p4vinstalled)
    {
        return NotifyUser("could not find p4v exe installed in perforce directory");
    }
    // NOTE: Uses p4v's undocumented "-cmd annotate" feature. The username,
    // client and port need to be given in a certain order to work (straight
    // from perforce).
    string args = " -win 0 " + GetUserInfoStringFull(true, dirname) + " -cmd \"annotate -i " + filename + "\"";
    string opToken = FormatToken("timelapse", filename);
    if (!LockOp(opToken))
    {
        return false;
    }
    return AsyncProcess.Schedule(output, "p4v.exe", args, dirname, new AsyncProcess.OnDone(UnlockOp), opToken, 0);
}
// Opens the p4v revision graph for a file.
// Returns false when p4v is missing or the operation is already locked.
public static bool RevisionGraph(OutputWindowPane output, string dirname, string filename)
{
    if (!g_p4vinstalled)
        return NotifyUser("could not find p4v exe installed in perforce directory");
    // NOTE: Brings up the revision graph through p4v's undocumented "-cmd tree"
    // feature. The username, client and port need to be given in a certain
    // order to work (straight from perforce).
    string arguments = " -win 0 ";
    arguments += GetUserInfoStringFull(true, dirname);
    arguments += " -cmd \"tree -i " + filename + "\"";
    string token = FormatToken("revisiongraph", filename);
    if (!LockOp(token))
        return false;
    return AsyncProcess.Schedule(output, "p4v.exe", arguments, dirname, new AsyncProcess.OnDone(UnlockOp), token, 0);
}
// Resolves the on-disk casing of a file name by scanning its directory for a
// case-insensitive match. Returns the path with the file system's casing, or
// the input path unchanged when no sibling matches (should never happen for
// an existing file).
static public string ResolveFileNameWithCase(string fullpath)
{
    string dirname = Path.GetDirectoryName(fullpath);
    string basename = Path.GetFileName(fullpath);
    DirectoryInfo info = new DirectoryInfo(dirname);
    foreach (FileInfo file in info.GetFiles())
    {
        // Use an ordinal case-insensitive comparison instead of ToLower():
        // culture-sensitive lowercasing can mismatch file names under some
        // locales (e.g. the Turkish dotless 'i').
        if (string.Equals(file.Name, basename, StringComparison.OrdinalIgnoreCase))
        {
            return file.FullName;
        }
    }
    // Should never happen...
    return fullpath;
}
// Reads a string value from the registry, under HKEY_LOCAL_MACHINE when
// global is true, otherwise under HKEY_CURRENT_USER.
// Returns null (and logs) when the key or the value does not exist.
public static string GetRegistryValue(string key, string value, bool global)
{
    Microsoft.Win32.RegistryKey root = global ? Microsoft.Win32.Registry.LocalMachine : Microsoft.Win32.Registry.CurrentUser;
    Microsoft.Win32.RegistryKey subkey = root.OpenSubKey(key);
    if (null == subkey)
    {
        Log.Debug("Could not find registry key " + key);
        return null;
    }
    // Dispose the opened subkey so the registry handle is not leaked.
    // (The root hive keys are shared singletons and are not opened here.)
    using (subkey)
    {
        Object regValue = subkey.GetValue(value);
        if (null == regValue)
        {
            Log.Debug("Could not find registry value " + value + " in " + key);
            return null;
        }
        return (string)regValue;
    }
}
// Detects which perforce executables (p4.exe, p4win.exe, p4v.exe) are
// available — first via the default install location and the registry, then
// via the PATH — and whether a custom diff tool (P4DIFF) is configured.
// Results are cached in the g_p4installed/g_p4wininstalled/g_p4vinstalled/
// g_p4customdiff flags.
public static void CheckInstalledFiles()
{
    Log.Debug("Looking for installed files...");
    g_p4installed = false;
    g_p4wininstalled = false;
    g_p4vinstalled = false;
    g_p4customdiff = false;
    string p4diff = null;
    // Let's try the default 64 bit installation. Since we are in a 32 bit exe this is tricky
    // to ask the registry...
    string installRoot = null;
    string candidate = @"C:\Program Files\Perforce";
    if (System.IO.Directory.Exists(candidate))
    {
        installRoot = candidate;
    }
    if (null == installRoot)
    {
        installRoot = GetRegistryValue("SOFTWARE\\Perforce\\Environment", "P4INSTROOT", true);
        if (null == installRoot)
        {
            // Perhaps it's an older installation?
            // http://code.google.com/p/niftyplugins/issues/detail?id=47&can=1&q=path
            installRoot = GetRegistryValue("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths", "p4.exe", true);
        }
    }
    if (null != installRoot)
    {
        Log.Info("Found perforce installation at {0}", installRoot);
        if (System.IO.File.Exists(Path.Combine(installRoot, "p4.exe")))
        {
            g_p4installed = true;
            Log.Info("Found p4.exe");
        }
        if (System.IO.File.Exists(Path.Combine(installRoot, "p4win.exe")))
        {
            g_p4wininstalled = true;
            Log.Info("Found p4win.exe");
        }
        if (System.IO.File.Exists(Path.Combine(installRoot, "p4v.exe")))
        {
            g_p4vinstalled = true;
            Log.Info("Found p4v.exe");
        }
        // A custom diff tool may be registered machine-wide or per-user;
        // either one counts.
        p4diff = GetRegistryValue("SOFTWARE\\Perforce\\Environment", "P4DIFF", true);
        if (null != p4diff && p4diff.Length > 0)
        {
            Log.Info("Found p4 custom diff");
            g_p4customdiff = true;
        }
        p4diff = GetRegistryValue("SOFTWARE\\Perforce\\Environment", "P4DIFF", false);
        if (null != p4diff && p4diff.Length > 0)
        {
            Log.Info("Found p4 custom diff");
            g_p4customdiff = true;
        }
    }
    else
    {
        // Let's try to find the executables through the path variable instead.
        if (null != Help.FindFileInPath("p4.exe"))
        {
            g_p4installed = true;
            Log.Info("Found p4 in path");
        }
        if (null != Help.FindFileInPath("p4win.exe"))
        {
            g_p4wininstalled = true;
            Log.Info("Found p4win in path");
        }
        if (null != Help.FindFileInPath("p4v.exe"))
        {
            g_p4vinstalled = true;
            Log.Info("Found p4v in path");
        }
        Log.Warning("Could not find any perforce installation in the registry!!!");
        p4diff = System.Environment.GetEnvironmentVariable("P4DIFF");
        if (null != p4diff)
        {
            Log.Info("Found p4 custom diff");
            g_p4customdiff = true;
        }
    }
}
// Shows a modal notice to the user and always returns false, so callers can
// write "return NotifyUser(...)" directly from a failing operation.
private static bool NotifyUser(string message)
{
    System.Windows.Forms.MessageBox.Show(message, "NiftyPerforce Notice!", System.Windows.Forms.MessageBoxButtons.OK);
    return false;
}
// Maps a branched file to its mainline counterpart by running
// "p4 integrated" on the file and returning the first integration target
// whose depot path falls under the given mainline spec.
// Returns the original filename when no target matches.
// Throws when the mainline spec is empty.
public static string RemapToMain(string filename, string mainline)
{
    Log.Debug("RemapToMain : {0} {1}", filename, mainline);
    if (mainline.Length == 0)
    {
        Log.Error( "Tried to find the mainline version of {0}, but the mainline path spec is empty", filename);
        throw new Exception( string.Format("Tried to find the mainline version of {0}, but the mainline path spec is empty", filename) );
    }
    string result = Process.Execute("p4.exe", Path.GetDirectoryName(filename), GetUserInfoString() + "integrated \"" + filename + "\"");
    // Each output line looks like "//branch/file#N - <how> //target/file#M".
    Regex pattern = new Regex(@"//(.*)#\d+ - .*//([^#]+)#\d+", RegexOptions.Compiled);
    // NOTE(review): the comparison is done on lower-cased strings and the
    // lower-cased candidate is what gets returned — presumably depot paths
    // are treated case-insensitively here; confirm before changing.
    string mainline_ = mainline.ToLower();
    foreach( Match m in pattern.Matches(result) )
    {
        string candidate = "//" + m.Groups[2].ToString().ToLower();
        if( candidate.StartsWith(mainline_) )
            return candidate;
    }
    return filename;
}
}
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
#if !NETCF_1_0
using System.Collections;
#endif
using Ctrip.Log4.Core;
namespace Ctrip.Log4.Util
{
/// <summary>
/// Stack implementation used by the <see cref="Ctrip.ThreadContext"/>
/// </summary>
/// <remarks>
/// <para>
/// Holds nested diagnostic context messages for a thread. Frames are added
/// with <see cref="Push"/> and removed either explicitly via <see cref="Pop"/>
/// or automatically by disposing the object returned from <see cref="Push"/>.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
public sealed class ThreadContextStack : IFixingRequired
{
    #region Private Instance Fields

    /// <summary>
    /// The underlying frame storage.
    /// </summary>
    private Stack m_stack = new Stack();

    #endregion Private Instance Fields

    #region Public Instance Constructors

    /// <summary>
    /// Internal constructor
    /// </summary>
    /// <remarks>
    /// <para>
    /// Instances of <see cref="ThreadContextStack" /> are created by the
    /// framework; client code obtains them through the thread context.
    /// </para>
    /// </remarks>
    internal ThreadContextStack()
    {
    }

    #endregion Public Instance Constructors

    #region Public Properties

    /// <summary>
    /// The number of messages in the stack
    /// </summary>
    /// <value>
    /// The current number of messages in the stack
    /// </value>
    /// <remarks>
    /// <para>
    /// That is, the number of times <see cref="Push"/> has been called minus
    /// the number of times <see cref="Pop"/> has been called.
    /// </para>
    /// </remarks>
    public int Count
    {
        get { return m_stack.Count; }
    }

    #endregion // Public Properties

    #region Public Methods

    /// <summary>
    /// Clears all the contextual information held in this stack.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Only call this if you think the thread is being reused after a previous
    /// execution that did not unwind its context correctly. It is unnecessary
    /// when the <see cref="IDisposable"/> returned from <see cref="Push"/> is
    /// always disposed, e.g. via the <c>using</c> pattern.
    /// </para>
    /// </remarks>
    public void Clear()
    {
        m_stack.Clear();
    }

    /// <summary>
    /// Removes the top context from this stack.
    /// </summary>
    /// <returns>The message that was on top of this stack.</returns>
    /// <remarks>
    /// <para>
    /// If the stack is empty an empty string (never <see langword="null"/>)
    /// is returned.
    /// </para>
    /// </remarks>
    public string Pop()
    {
        Stack frames = m_stack;
        return (frames.Count > 0) ? ((StackFrame)frames.Pop()).Message : "";
    }

    /// <summary>
    /// Pushes a new context message into this stack.
    /// </summary>
    /// <param name="message">The new context message.</param>
    /// <returns>
    /// An <see cref="IDisposable"/> that pops the stack back to its pre-push
    /// depth when disposed.
    /// </returns>
    /// <remarks>
    /// <para>
    /// Combine with the <c>using</c> keyword to scope the context.
    /// </para>
    /// </remarks>
    /// <example>Simple example of using the <c>Push</c> method with the <c>using</c> keyword.
    /// <code lang="C#">
    /// using(Ctrip.ThreadContext.Stacks["NDC"].Push("Stack_Message"))
    /// {
    ///     log.Warn("This should have an ThreadContext Stack message");
    /// }
    /// </code>
    /// </example>
    public IDisposable Push(string message)
    {
        Stack frames = m_stack;
        StackFrame parent = (frames.Count > 0) ? (StackFrame)frames.Peek() : null;
        frames.Push(new StackFrame(message, parent));
        // Count - 1 is the depth before the push; disposing trims back to it.
        return new AutoPopStackFrame(frames, frames.Count - 1);
    }

    #endregion Public Methods

    #region Internal Methods

    /// <summary>
    /// Gets the current context information for this stack.
    /// </summary>
    /// <returns>
    /// The concatenated messages of all frames, or <see langword="null"/>
    /// when the stack is empty.
    /// </returns>
    internal string GetFullMessage()
    {
        Stack frames = m_stack;
        return (frames.Count > 0) ? ((StackFrame)frames.Peek()).FullMessage : null;
    }

    /// <summary>
    /// Gets and sets the internal stack used by this <see cref="ThreadContextStack"/>
    /// </summary>
    /// <value>The internal storage stack</value>
    /// <remarks>
    /// <para>
    /// Provided only for backward compatibility with the <see cref="NDC"/>;
    /// typically the internal stack should not be modified.
    /// </para>
    /// </remarks>
    internal Stack InternalStack
    {
        get { return m_stack; }
        set { m_stack = value; }
    }

    #endregion Internal Methods

    /// <summary>
    /// Gets the current context information for this stack.
    /// </summary>
    /// <returns>Gets the current context information</returns>
    public override string ToString()
    {
        return GetFullMessage();
    }

    /// <summary>
    /// Get a portable version of this object
    /// </summary>
    /// <returns>the portable instance of this object</returns>
    /// <remarks>
    /// <para>
    /// Returns a cross-thread portable (fixed) version of this object.
    /// </para>
    /// </remarks>
    object IFixingRequired.GetFixedObject()
    {
        return GetFullMessage();
    }

    /// <summary>
    /// Inner class used to represent a single context frame in the stack.
    /// </summary>
    private sealed class StackFrame
    {
        #region Private Instance Fields

        private readonly string m_message;
        private readonly StackFrame m_parent;

        // Lazily built concatenation of every message from the root down to
        // this frame; root frames compute it eagerly in the constructor.
        private string m_fullMessage = null;

        #endregion

        #region Internal Instance Constructors

        /// <summary>
        /// Initializes a frame with the specified message and parent context
        /// (<see langword="null"/> for the root frame).
        /// </summary>
        /// <param name="message">The message for this context.</param>
        /// <param name="parent">The parent context in the chain.</param>
        internal StackFrame(string message, StackFrame parent)
        {
            m_message = message;
            m_parent = parent;
            if (parent == null)
            {
                m_fullMessage = message;
            }
        }

        #endregion Internal Instance Constructors

        #region Internal Instance Properties

        /// <summary>
        /// Get the message.
        /// </summary>
        /// <value>The message.</value>
        internal string Message
        {
            get { return m_message; }
        }

        /// <summary>
        /// Gets the full text of the context down to the root level,
        /// space-separated.
        /// </summary>
        internal string FullMessage
        {
            get
            {
                if (m_fullMessage == null && m_parent != null)
                {
                    m_fullMessage = string.Concat(m_parent.FullMessage, " ", m_message);
                }
                return m_fullMessage;
            }
        }

        #endregion Internal Instance Properties
    }

    /// <summary>
    /// Struct returned from the <see cref="ThreadContextStack.Push"/> method.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Implements <see cref="IDisposable"/> so the <see langword="using"/>
    /// pattern removes the pushed frame at the end of the scope.
    /// </para>
    /// </remarks>
    private struct AutoPopStackFrame : IDisposable
    {
        #region Private Instance Fields

        /// <summary>
        /// The stack shared with the owning ThreadContextStack.
        /// </summary>
        private Stack m_frameStack;

        /// <summary>
        /// The depth to trim the stack to when this instance is disposed
        /// </summary>
        private int m_frameDepth;

        #endregion Private Instance Fields

        #region Internal Instance Constructors

        /// <summary>
        /// Captures the stack and the depth it should be returned to on dispose.
        /// </summary>
        /// <param name="frameStack">The internal stack used by the ThreadContextStack.</param>
        /// <param name="frameDepth">The depth to return the stack to when this object is disposed.</param>
        internal AutoPopStackFrame(Stack frameStack, int frameDepth)
        {
            m_frameStack = frameStack;
            m_frameDepth = frameDepth;
        }

        #endregion Internal Instance Constructors

        #region Implementation of IDisposable

        /// <summary>
        /// Pops frames until the stack is back at the captured depth.
        /// </summary>
        public void Dispose()
        {
            if (m_frameDepth >= 0 && m_frameStack != null)
            {
                while (m_frameStack.Count > m_frameDepth)
                {
                    m_frameStack.Pop();
                }
            }
        }

        #endregion Implementation of IDisposable
    }

#if NETCF_1_0
    /// <summary>
    /// Subclass of <see cref="System.Collections.Stack"/> to
    /// provide missing methods.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The Compact Framework version of <see cref="System.Collections.Stack"/>
    /// lacks the <c>Clear</c> and <c>Clone</c> methods; this subclass adds them.
    /// </para>
    /// </remarks>
    public class Stack : System.Collections.Stack
    {
        /// <summary>
        /// Clears the stack of all elements.
        /// </summary>
        public void Clear()
        {
            while (Count > 0)
            {
                Pop();
            }
        }

        /// <summary>
        /// Makes a shallow copy of the stack's elements.
        /// </summary>
        /// <returns>A new stack that has a shallow copy of the stack's elements.</returns>
        public Stack Clone()
        {
            Stack copy = new Stack();
            object[] items = ToArray();
            foreach (object item in items)
            {
                copy.Push(item);
            }
            return copy;
        }
    }
#endif
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System.Reflection;
using System.Collections.Generic;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.Data.Null
{
/// <summary>
/// Simulation data service backed entirely by the in-memory
/// <see cref="NullDataStore"/>; every call is delegated to it and nothing is
/// persisted.
/// </summary>
public class NullDataService : ISimulationDataService
{
    // Created once in the constructor and never replaced, hence readonly.
    private readonly NullDataStore m_store;

    public NullDataService()
    {
        m_store = new NullDataStore();
    }

    public void StoreObject(SceneObjectGroup obj, UUID regionUUID)
    {
        m_store.StoreObject(obj, regionUUID);
    }

    public void RemoveObject(UUID uuid, UUID regionUUID)
    {
        m_store.RemoveObject(uuid, regionUUID);
    }

    public void StorePrimInventory(UUID primID, ICollection<TaskInventoryItem> items)
    {
        m_store.StorePrimInventory(primID, items);
    }

    public List<SceneObjectGroup> LoadObjects(UUID regionUUID)
    {
        return m_store.LoadObjects(regionUUID);
    }

    public void StoreTerrain(double[,] terrain, UUID regionID)
    {
        m_store.StoreTerrain(terrain, regionID);
    }

    public void StoreTerrain(TerrainData terrain, UUID regionID)
    {
        m_store.StoreTerrain(terrain, regionID);
    }

    public double[,] LoadTerrain(UUID regionID)
    {
        return m_store.LoadTerrain(regionID);
    }

    public TerrainData LoadTerrain(UUID regionID, int pSizeX, int pSizeY, int pSizeZ)
    {
        return m_store.LoadTerrain(regionID, pSizeX, pSizeY, pSizeZ);
    }

    public void StoreLandObject(ILandObject Parcel)
    {
        m_store.StoreLandObject(Parcel);
    }

    public void RemoveLandObject(UUID globalID)
    {
        m_store.RemoveLandObject(globalID);
    }

    public List<LandData> LoadLandObjects(UUID regionUUID)
    {
        return m_store.LoadLandObjects(regionUUID);
    }

    public void StoreRegionSettings(RegionSettings rs)
    {
        m_store.StoreRegionSettings(rs);
    }

    public RegionSettings LoadRegionSettings(UUID regionUUID)
    {
        return m_store.LoadRegionSettings(regionUUID);
    }

    public RegionLightShareData LoadRegionWindlightSettings(UUID regionUUID)
    {
        return m_store.LoadRegionWindlightSettings(regionUUID);
    }

    // Intentionally a no-op: the store keeps no windlight settings to remove.
    public void RemoveRegionWindlightSettings(UUID regionID)
    {
    }

    public void StoreRegionWindlightSettings(RegionLightShareData wl)
    {
        m_store.StoreRegionWindlightSettings(wl);
    }

    public string LoadRegionEnvironmentSettings(UUID regionUUID)
    {
        return m_store.LoadRegionEnvironmentSettings(regionUUID);
    }

    public void StoreRegionEnvironmentSettings(UUID regionUUID, string settings)
    {
        m_store.StoreRegionEnvironmentSettings(regionUUID, settings);
    }

    public void RemoveRegionEnvironmentSettings(UUID regionUUID)
    {
        m_store.RemoveRegionEnvironmentSettings(regionUUID);
    }

    // Extra region data is not supported by this service; SaveExtra and
    // RemoveExtra are no-ops and GetExtra reports no data (null).
    public void SaveExtra(UUID regionID, string name, string value)
    {
    }

    public void RemoveExtra(UUID regionID, string name)
    {
    }

    public Dictionary<string, string> GetExtra(UUID regionID)
    {
        return null;
    }
}
/// <summary>
/// Mock region data plugin. This obeys the api contract for persistence but stores everything in memory, so that
/// tests can check correct persistence.
/// </summary>
public class NullDataStore : ISimulationDataStore
{
//        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    protected Dictionary<UUID, RegionSettings> m_regionSettings = new Dictionary<UUID, RegionSettings>();
    protected Dictionary<UUID, SceneObjectPart> m_sceneObjectParts = new Dictionary<UUID, SceneObjectPart>();
    protected Dictionary<UUID, ICollection<TaskInventoryItem>> m_primItems
        = new Dictionary<UUID, ICollection<TaskInventoryItem>>();
    protected Dictionary<UUID, TerrainData> m_terrains = new Dictionary<UUID, TerrainData>();
    protected Dictionary<UUID, LandData> m_landData = new Dictionary<UUID, LandData>();

    // Nothing to open: all state lives in the dictionaries above.
    public void Initialise(string dbfile)
    {
        return;
    }

    public void Dispose()
    {
    }

    public void StoreRegionSettings(RegionSettings rs)
    {
        m_regionSettings[rs.RegionUUID] = rs;
    }

    public RegionLightShareData LoadRegionWindlightSettings(UUID regionUUID)
    {
        //This connector doesn't support the windlight module yet
        //Return default LL windlight settings
        return new RegionLightShareData();
    }

    public void RemoveRegionWindlightSettings(UUID regionID)
    {
    }

    public void StoreRegionWindlightSettings(RegionLightShareData wl)
    {
        //This connector doesn't support the windlight module yet
    }

    #region Environment Settings

    public string LoadRegionEnvironmentSettings(UUID regionUUID)
    {
        //This connector doesn't support the Environment module yet
        return string.Empty;
    }

    public void StoreRegionEnvironmentSettings(UUID regionUUID, string settings)
    {
        //This connector doesn't support the Environment module yet
    }

    public void RemoveRegionEnvironmentSettings(UUID regionUUID)
    {
        //This connector doesn't support the Environment module yet
    }

    #endregion

    // Returns stored settings for the region, or fresh defaults when the
    // region has none yet.
    public RegionSettings LoadRegionSettings(UUID regionUUID)
    {
        RegionSettings rs = null;
        m_regionSettings.TryGetValue(regionUUID, out rs);
        if (rs == null)
            rs = new RegionSettings();
        return rs;
    }

    public void StoreObject(SceneObjectGroup obj, UUID regionUUID)
    {
        // We can't simply store groups here because on delinking, OpenSim will not update the original group
        // directly.  Rather, the newly delinked parts will be updated to be in their own scene object group
        // Therefore, we need to store parts rather than groups.
        foreach (SceneObjectPart prim in obj.Parts)
        {
//                m_log.DebugFormat(
//                    "[MOCK REGION DATA PLUGIN]: Storing part {0} {1} in object {2} {3} in region {4}",
//                    prim.Name, prim.UUID, obj.Name, obj.UUID, regionUUID);

            m_sceneObjectParts[prim.UUID] = prim;
        }
    }

    public void RemoveObject(UUID obj, UUID regionUUID)
    {
        // All parts belonging to the object with the uuid are removed.
        List<SceneObjectPart> parts = new List<SceneObjectPart>(m_sceneObjectParts.Values);
        foreach (SceneObjectPart part in parts)
        {
            if (part.ParentGroup.UUID == obj)
            {
//                    m_log.DebugFormat(
//                        "[MOCK REGION DATA PLUGIN]: Removing part {0} {1} as part of object {2} from {3}",
//                        part.Name, part.UUID, obj, regionUUID);

                m_sceneObjectParts.Remove(part.UUID);
            }
        }
    }

    public void StorePrimInventory(UUID primID, ICollection<TaskInventoryItem> items)
    {
        m_primItems[primID] = items;
    }

    public List<SceneObjectGroup> LoadObjects(UUID regionUUID)
    {
        Dictionary<UUID, SceneObjectGroup> objects = new Dictionary<UUID, SceneObjectGroup>();

        // Create all of the SOGs from the root prims first
        foreach (SceneObjectPart prim in m_sceneObjectParts.Values)
        {
            if (prim.IsRoot)
            {
//                    m_log.DebugFormat(
//                        "[MOCK REGION DATA PLUGIN]: Loading root part {0} {1} in {2}", prim.Name, prim.UUID, regionUUID);

                objects[prim.UUID] = new SceneObjectGroup(prim);
            }
        }

        // Add all of the children objects to the SOGs
        foreach (SceneObjectPart prim in m_sceneObjectParts.Values)
        {
            SceneObjectGroup sog;
            if (prim.UUID != prim.ParentUUID)
            {
                if (objects.TryGetValue(prim.ParentUUID, out sog))
                {
                    int originalLinkNum = prim.LinkNum;

                    sog.AddPart(prim);

                    // SceneObjectGroup.AddPart() tries to be smart and automatically set the LinkNum.
                    // We override that here
                    if (originalLinkNum != 0)
                        prim.LinkNum = originalLinkNum;
                }
                else
                {
//                        m_log.WarnFormat(
//                            "[MOCK REGION DATA PLUGIN]: Database contains an orphan child prim {0} {1} in region {2} pointing to missing parent {3}.  This prim will not be loaded.",
//                            prim.Name, prim.UUID, regionUUID, prim.ParentUUID);
                }
            }
        }

        // TODO: Load items.  This is assymetric - we store items as a separate method but don't retrieve them that
        // way!
        return new List<SceneObjectGroup>(objects.Values);
    }

    public void StoreTerrain(TerrainData ter, UUID regionID)
    {
        m_terrains[regionID] = ter;
    }

    public void StoreTerrain(double[,] ter, UUID regionID)
    {
        m_terrains[regionID] = new HeightmapTerrainData(ter);
    }

    public TerrainData LoadTerrain(UUID regionID, int pSizeX, int pSizeY, int pSizeZ)
    {
        // Single dictionary lookup instead of ContainsKey followed by the
        // indexer (which probes the table twice).
        TerrainData terrain;
        if (m_terrains.TryGetValue(regionID, out terrain))
            return terrain;
        return null;
    }

    public double[,] LoadTerrain(UUID regionID)
    {
        TerrainData terrain;
        if (m_terrains.TryGetValue(regionID, out terrain))
            return terrain.GetDoubles();
        return null;
    }

    public void RemoveLandObject(UUID globalID)
    {
        // Dictionary.Remove is already a no-op for a missing key, so there is
        // no need to probe with ContainsKey first.
        m_landData.Remove(globalID);
    }

    public void StoreLandObject(ILandObject land)
    {
        m_landData[land.LandData.GlobalID] = land.LandData;
    }

    public List<LandData> LoadLandObjects(UUID regionUUID)
    {
        return new List<LandData>(m_landData.Values);
    }

    public void Shutdown()
    {
    }

    // Extra region data is not supported by this store.
    public void SaveExtra(UUID regionID, string name, string value)
    {
    }

    public void RemoveExtra(UUID regionID, string name)
    {
    }

    public Dictionary<string, string> GetExtra(UUID regionID)
    {
        return null;
    }
}
}
| |
using System;
using System.Globalization;
using System.Linq;
using System.Runtime.Versioning;
using NuGet.Resources;
namespace NuGet
{
public class PackageManager : IPackageManager
{
// Backing field for Logger; may be null, in which case NullLogger.Instance
// is substituted by the property getter.
private ILogger _logger;

// Raised around file expansion/removal. The "-ing" events fire before the
// operation and let subscribers cancel it via PackageOperationEventArgs.Cancel;
// the "-ed" events fire after the local repository has been updated.
public event EventHandler<PackageOperationEventArgs> PackageInstalling;
public event EventHandler<PackageOperationEventArgs> PackageInstalled;
public event EventHandler<PackageOperationEventArgs> PackageUninstalling;
public event EventHandler<PackageOperationEventArgs> PackageUninstalled;
/// <summary>
/// Creates a package manager that installs from <paramref name="sourceRepository"/>
/// into the physical directory <paramref name="path"/>, using default path
/// resolution.
/// </summary>
public PackageManager(IPackageRepository sourceRepository, string path)
    : this(sourceRepository, new DefaultPackagePathResolver(path), new PhysicalFileSystem(path))
{
}

/// <summary>
/// Creates a package manager with an explicit path resolver and file system;
/// the local repository defaults to a <see cref="SharedPackageRepository"/>
/// over the same file system.
/// </summary>
public PackageManager(IPackageRepository sourceRepository, IPackagePathResolver pathResolver, IFileSystem fileSystem) :
    this(sourceRepository, pathResolver, fileSystem, new SharedPackageRepository(pathResolver, fileSystem, fileSystem))
{
}

/// <summary>
/// Core constructor; all other overloads chain to this one.
/// </summary>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public PackageManager(IPackageRepository sourceRepository, IPackagePathResolver pathResolver, IFileSystem fileSystem, ISharedPackageRepository localRepository)
{
    if (sourceRepository == null)
    {
        throw new ArgumentNullException("sourceRepository");
    }
    if (pathResolver == null)
    {
        throw new ArgumentNullException("pathResolver");
    }
    if (fileSystem == null)
    {
        throw new ArgumentNullException("fileSystem");
    }
    if (localRepository == null)
    {
        throw new ArgumentNullException("localRepository");
    }

    SourceRepository = sourceRepository;
    PathResolver = pathResolver;
    FileSystem = fileSystem;
    LocalRepository = localRepository;
    // Defaults: resolve the lowest dependency versions, check for downgrades.
    DependencyVersion = DependencyVersion.Lowest;
    CheckDowngrade = true;
}
/// <summary>
/// The file system packages are expanded into. Settable so callers can swap
/// in e.g. a source-control-aware file system.
/// </summary>
public IFileSystem FileSystem
{
    get;
    set;
}

/// <summary>
/// The repository packages are installed from.
/// </summary>
public IPackageRepository SourceRepository
{
    get;
    private set;
}

/// <summary>
/// The repository tracking which packages are installed locally.
/// </summary>
public ISharedPackageRepository LocalRepository
{
    get;
    private set;
}

/// <summary>
/// Maps packages to their install directories/paths.
/// </summary>
public IPackagePathResolver PathResolver
{
    get;
    private set;
}

/// <summary>
/// Logger for progress/diagnostic messages. Never returns null: when unset
/// (or set to null) the no-op <see cref="NullLogger.Instance"/> is used.
/// </summary>
public ILogger Logger
{
    get
    {
        return _logger ?? NullLogger.Instance;
    }
    set
    {
        _logger = value;
    }
}

/// <summary>
/// Strategy used when picking a dependency version; initialized to
/// <see cref="DependencyVersion.Lowest"/> by the constructor.
/// </summary>
public DependencyVersion DependencyVersion
{
    get;
    set;
}
/// <summary>
/// Runs a single resolved install/uninstall operation against the local
/// repository. Installing an already-present package only logs; uninstalling
/// an absent package is a silent no-op.
/// </summary>
public void Execute(PackageOperation operation)
{
    IPackage package = operation.Package;
    bool installed = LocalRepository.Exists(package);

    if (operation.Action == PackageAction.Install)
    {
        if (installed)
        {
            // Already present locally; nothing to do.
            Logger.Log(MessageLevel.Info, NuGetResources.Log_PackageAlreadyInstalled, package.GetFullName());
        }
        else
        {
            ExecuteInstall(package);
        }
    }
    else if (installed)
    {
        ExecuteUninstall(package);
    }
}
/// <summary>
/// Installs a single package: raises the installing event (which may cancel),
/// expands the files, registers the package locally, then raises installed.
/// </summary>
protected void ExecuteInstall(IPackage package)
{
    string fullName = package.GetFullName();
    Logger.Log(MessageLevel.Info, NuGetResources.Log_BeginInstallPackage, fullName);

    PackageOperationEventArgs operation = CreateOperation(package);
    OnInstalling(operation);
    if (operation.Cancel)
    {
        // A subscriber vetoed the install.
        return;
    }

    OnExpandFiles(operation);
    LocalRepository.AddPackage(package);
    Logger.Log(MessageLevel.Info, NuGetResources.Log_PackageInstalledSuccessfully, fullName);
    OnInstalled(operation);
}
// Copies the package's files into its install directory. If the file system
// supports batch processing (e.g. source-controlled file systems), the whole
// file set is announced first and EndProcessing is guaranteed via finally.
// Satellite packages additionally copy their localized files into the
// corresponding runtime package's folder.
private void ExpandFiles(IPackage package)
{
    var batchProcessor = FileSystem as IBatchProcessor<string>;
    try
    {
        var files = package.GetFiles().ToList();
        if (batchProcessor != null)
        {
            // Notify the batch processor that the files are being added. This is to allow source controlled file systems
            // to manage previously uninstalled files.
            batchProcessor.BeginProcessing(files.Select(p => p.Path), PackageAction.Install);
        }

        string packageDirectory = PathResolver.GetPackageDirectory(package);

        // Add files
        FileSystem.AddFiles(files, packageDirectory);

        // If this is a Satellite Package, then copy the satellite files into the related runtime package folder too
        IPackage runtimePackage;
        if (PackageHelper.IsSatellitePackage(package, LocalRepository, targetFramework: null, runtimePackage: out runtimePackage))
        {
            var satelliteFiles = package.GetSatelliteFiles();
            var runtimePath = PathResolver.GetPackageDirectory(runtimePackage);
            FileSystem.AddFiles(satelliteFiles, runtimePath);
        }
    }
    finally
    {
        // Always close the batch, even if AddFiles throws part-way through.
        if (batchProcessor != null)
        {
            batchProcessor.EndProcessing();
        }
    }
}
/// <summary>
/// Uninstalls a single package: raises the uninstalling event (which may
/// cancel), removes the files, unregisters the package locally, then raises
/// uninstalled.
/// </summary>
protected virtual void ExecuteUninstall(IPackage package)
{
    string fullName = package.GetFullName();
    Logger.Log(MessageLevel.Info, NuGetResources.Log_BeginUninstallPackage, fullName);

    PackageOperationEventArgs operation = CreateOperation(package);
    OnUninstalling(operation);
    if (operation.Cancel)
    {
        // A subscriber vetoed the uninstall.
        return;
    }

    OnRemoveFiles(operation);
    LocalRepository.RemovePackage(package);
    Logger.Log(MessageLevel.Info, NuGetResources.Log_SuccessfullyUninstalledPackage, fullName);
    OnUninstalled(operation);
}
// Deletes the package's files from disk. For satellite packages the localized
// files are removed from the runtime package's folder first; the ordering
// below is deliberate and must be preserved.
private void RemoveFiles(IPackage package)
{
    string packageDirectory = PathResolver.GetPackageDirectory(package);

    // If this is a Satellite Package, then remove the files from the related runtime package folder too
    IPackage runtimePackage;
    if (PackageHelper.IsSatellitePackage(package, LocalRepository, targetFramework: null, runtimePackage: out runtimePackage))
    {
        var satelliteFiles = package.GetSatelliteFiles();
        var runtimePath = PathResolver.GetPackageDirectory(runtimePackage);
        FileSystem.DeleteFiles(satelliteFiles, runtimePath);
    }

    // Remove package files
    // IMPORTANT: This has to be done AFTER removing satellite files from runtime package,
    // because starting from 2.1, we read satellite files directly from package files, instead of .nupkg
    FileSystem.DeleteFiles(package.GetFiles(), packageDirectory);
}
/// <summary>
/// Raises the PackageInstalling event. Subscribers may set <c>e.Cancel</c> to abort the install.
/// </summary>
protected virtual void OnInstalling(PackageOperationEventArgs e)
{
    // Snapshot the delegate before invoking; behaviorally equivalent to the
    // null-check-then-invoke form.
    var handler = PackageInstalling;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Extensibility point invoked between the installing and installed events;
/// by default expands the package's files into the file system.
/// </summary>
protected virtual void OnExpandFiles(PackageOperationEventArgs e)
{
ExpandFiles(e.Package);
}
/// <summary>
/// Raises the PackageInstalled event after a package has been added to the local repository.
/// </summary>
protected virtual void OnInstalled(PackageOperationEventArgs e)
{
    // Snapshot the delegate before invoking; behaviorally equivalent to the
    // null-check-then-invoke form.
    var handler = PackageInstalled;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Raises the PackageUninstalling event. Subscribers may set <c>e.Cancel</c> to abort the uninstall.
/// </summary>
protected virtual void OnUninstalling(PackageOperationEventArgs e)
{
    // Snapshot the delegate before invoking; behaviorally equivalent to the
    // null-check-then-invoke form.
    var handler = PackageUninstalling;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Extensibility point invoked between the uninstalling and uninstalled events;
/// by default deletes the package's files from the file system.
/// </summary>
protected virtual void OnRemoveFiles(PackageOperationEventArgs e)
{
RemoveFiles(e.Package);
}
/// <summary>
/// Raises the PackageUninstalled event after a package has been removed from the local repository.
/// </summary>
protected virtual void OnUninstalled(PackageOperationEventArgs e)
{
    // Snapshot the delegate before invoking; behaviorally equivalent to the
    // null-check-then-invoke form.
    var handler = PackageUninstalled;
    if (handler != null)
    {
        handler(this, e);
    }
}
/// <summary>
/// Builds the event-argument payload describing an install/uninstall operation
/// for the given package, resolving its install path via the path resolver.
/// </summary>
public PackageOperationEventArgs CreateOperation(IPackage package)
{
    string installPath = PathResolver.GetInstallPath(package);
    return new PackageOperationEventArgs(package, FileSystem, installPath);
}
public bool CheckDowngrade { get; set; }
/// <summary>
/// Check to see if this package applies to a project based on 3 criteria:
/// 1. The package has project content (i.e. content that can be applied to a project lib or content files)
/// 2. The package has at least one dependency
/// 3. The package is referenced by any other project
///
/// This logic will probably fail in one edge case. If there is a meta package that applies to a project
/// that ended up not being installed in any of the projects and it only exists at solution level.
/// If this happens, then we think that the following operation applies to the solution instead of showing an error.
/// To solve that edge case we'd have to walk the graph to find out what the package applies to.
///
/// Technically, the dependency condition (2) is not totally accurate because a solution-level package can depend on another
/// solution-level package. However, doing that check here is expensive and we haven't seen such a package.
/// This condition here is more geared towards guarding against metadata packages, i.e. we shouldn't treat metadata packages
/// as solution-level ones.
/// </summary>
public bool IsProjectLevel(IPackage package)
{
// Conditions short-circuit left to right.
return package.HasProjectContent() ||
package.DependencySets.SelectMany(p => p.Dependencies).Any() ||
LocalRepository.IsReferenced(package.Id, package.Version);
}
// Backing field; binding redirects are on unless a caller explicitly disables them.
private bool _bindingRedirectEnabled = true;
/// <summary>
/// Gets or sets whether assembly binding redirects are added for installed packages.
/// Defaults to true.
/// </summary>
public bool BindingRedirectEnabled
{
get { return _bindingRedirectEnabled; }
set { _bindingRedirectEnabled = value; }
}
/// <summary>
/// Adds assembly binding redirects for the given project. This base implementation
/// is intentionally a no-op; derived package managers override it.
/// </summary>
public virtual void AddBindingRedirects(IProjectManager projectManager)
{
// no-op
}
/// <summary>
/// Locates the single locally-installed package with the given <paramref name="id"/> to uninstall.
/// The <paramref name="projectManager"/> and <paramref name="version"/> parameters are not used by
/// this base implementation; overrides may use them to disambiguate between versions.
/// </summary>
/// <returns>The installed package with the given id.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when no package with the given id is installed (and, via SingleOrDefault,
/// when more than one matching package exists).
/// </exception>
public virtual IPackage LocatePackageToUninstall(IProjectManager projectManager, string id, SemanticVersion version)
{
    var package = LocalRepository.FindPackagesById(id).SingleOrDefault();
    if (package == null)
    {
        // Previously threw a bare InvalidOperationException with no message;
        // include the package id so the failure is actionable.
        throw new InvalidOperationException("Unable to find a package with id '" + id + "' to uninstall.");
    }
    return package;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Xml;
using System.Xml.Xsl.Qil;
using System.Xml.Xsl.XPath;
namespace System.Xml.Xsl.Xslt
{
using XPathParser = XPathParser<QilNode>;
using XPathNodeType = System.Xml.XPath.XPathNodeType;
// Recursive-descent parser for XSLT match patterns (the restricted XPath subset used in
// xsl:template/@match). Consumes tokens from an XPathScanner and emits QilNode trees via
// an IPatternBuilder; predicates inside patterns are delegated to a full XPathParser.
internal class XPathPatternParser
{
// A pattern builder is an XPath builder that can also hand out a builder for
// predicate expressions evaluated against a given context node.
public interface IPatternBuilder : IXPathBuilder<QilNode>
{
IXPathBuilder<QilNode> GetPredicateBuilder(QilNode context);
}
// Parser state is held for the duration of a single Parse() call.
private XPathScanner _scanner;
private IPatternBuilder _ptrnBuilder;
// Reused for all '[...]' predicates encountered while parsing a pattern.
private XPathParser _predicateParser = new XPathParser();
// Parses one complete pattern. The builder's StartBuild/EndBuild bracket the parse;
// EndBuild runs in finally so the builder is always closed, even on a syntax error.
public QilNode Parse(XPathScanner scanner, IPatternBuilder ptrnBuilder)
{
Debug.Assert(_scanner == null && _ptrnBuilder == null);
Debug.Assert(scanner != null && ptrnBuilder != null);
QilNode result = null;
ptrnBuilder.StartBuild();
try
{
_scanner = scanner;
_ptrnBuilder = ptrnBuilder;
result = this.ParsePattern();
// The whole input must have been consumed.
_scanner.CheckToken(LexKind.Eof);
}
finally
{
result = ptrnBuilder.EndBuild(result);
#if DEBUG
// Reset state in debug builds so the asserts above catch reentrant use.
_ptrnBuilder = null;
_scanner = null;
#endif
}
return result;
}
/*
* Pattern ::= LocationPathPattern ('|' LocationPathPattern)*
*/
private QilNode ParsePattern()
{
QilNode opnd = ParseLocationPathPattern();
// Fold alternatives left-associatively into Union nodes.
while (_scanner.Kind == LexKind.Union)
{
_scanner.NextLex();
opnd = _ptrnBuilder.Operator(XPathOperator.Union, opnd, ParseLocationPathPattern());
}
return opnd;
}
/*
* LocationPathPattern ::= '/' RelativePathPattern? | '//'? RelativePathPattern | IdKeyPattern (('/' | '//') RelativePathPattern)?
*/
private QilNode ParseLocationPathPattern()
{
QilNode opnd;
switch (_scanner.Kind)
{
case LexKind.Slash:
// Absolute pattern: root, optionally followed by a relative path.
_scanner.NextLex();
opnd = _ptrnBuilder.Axis(XPathAxis.Root, XPathNodeType.All, null, null);
if (XPathParser.IsStep(_scanner.Kind))
{
opnd = _ptrnBuilder.JoinStep(opnd, ParseRelativePathPattern());
}
return opnd;
case LexKind.SlashSlash:
// '//p' is shorthand for '/descendant-or-self::node()/p'.
_scanner.NextLex();
return _ptrnBuilder.JoinStep(
_ptrnBuilder.Axis(XPathAxis.Root, XPathNodeType.All, null, null),
_ptrnBuilder.JoinStep(
_ptrnBuilder.Axis(XPathAxis.DescendantOrSelf, XPathNodeType.All, null, null),
ParseRelativePathPattern()
)
);
case LexKind.Name:
// Only the unprefixed function names 'id' and 'key' are allowed to start a pattern.
if (_scanner.CanBeFunction && _scanner.Prefix.Length == 0 && (_scanner.Name == "id" || _scanner.Name == "key"))
{
opnd = ParseIdKeyPattern();
switch (_scanner.Kind)
{
case LexKind.Slash:
_scanner.NextLex();
opnd = _ptrnBuilder.JoinStep(opnd, ParseRelativePathPattern());
break;
case LexKind.SlashSlash:
_scanner.NextLex();
opnd = _ptrnBuilder.JoinStep(opnd,
_ptrnBuilder.JoinStep(
_ptrnBuilder.Axis(XPathAxis.DescendantOrSelf, XPathNodeType.All, null, null),
ParseRelativePathPattern()
)
);
break;
}
return opnd;
}
break;
}
// Fall-through: a plain relative path pattern.
opnd = ParseRelativePathPattern();
return opnd;
}
/*
* IdKeyPattern ::= 'id' '(' Literal ')' | 'key' '(' Literal ',' Literal ')'
*/
private QilNode ParseIdKeyPattern()
{
Debug.Assert(_scanner.CanBeFunction);
Debug.Assert(_scanner.Prefix.Length == 0);
Debug.Assert(_scanner.Name == "id" || _scanner.Name == "key");
List<QilNode> args = new List<QilNode>(2);
if (_scanner.Name == "id")
{
// id(Literal) — exactly one string-literal argument.
_scanner.NextLex();
_scanner.PassToken(LexKind.LParens);
_scanner.CheckToken(LexKind.String);
args.Add(_ptrnBuilder.String(_scanner.StringValue));
_scanner.NextLex();
_scanner.PassToken(LexKind.RParens);
return _ptrnBuilder.Function("", "id", args);
}
else
{
// key(Literal, Literal) — exactly two string-literal arguments.
_scanner.NextLex();
_scanner.PassToken(LexKind.LParens);
_scanner.CheckToken(LexKind.String);
args.Add(_ptrnBuilder.String(_scanner.StringValue));
_scanner.NextLex();
_scanner.PassToken(LexKind.Comma);
_scanner.CheckToken(LexKind.String);
args.Add(_ptrnBuilder.String(_scanner.StringValue));
_scanner.NextLex();
_scanner.PassToken(LexKind.RParens);
return _ptrnBuilder.Function("", "key", args);
}
}
/*
* RelativePathPattern ::= StepPattern (('/' | '//') StepPattern)*
*/
//Max depth to avoid StackOverflow
private const int MaxParseRelativePathDepth = 1024;
private int _parseRelativePath = 0;
// Right-recursive: each '/'-separated step recurses, hence the depth guard above.
private QilNode ParseRelativePathPattern()
{
if (++_parseRelativePath > MaxParseRelativePathDepth)
{
// Only enforced when the app-context switch is set; otherwise deep input may
// still exhaust the stack (preserves legacy behavior).
if (LocalAppContextSwitches.LimitXPathComplexity)
{
throw _scanner.CreateException(SR.Xslt_InputTooComplex);
}
}
QilNode opnd = ParseStepPattern();
if (_scanner.Kind == LexKind.Slash)
{
_scanner.NextLex();
opnd = _ptrnBuilder.JoinStep(opnd, ParseRelativePathPattern());
}
else if (_scanner.Kind == LexKind.SlashSlash)
{
// 'a//b' == 'a/descendant-or-self::node()/b'
_scanner.NextLex();
opnd = _ptrnBuilder.JoinStep(opnd,
_ptrnBuilder.JoinStep(
_ptrnBuilder.Axis(XPathAxis.DescendantOrSelf, XPathNodeType.All, null, null),
ParseRelativePathPattern()
)
);
}
--_parseRelativePath;
return opnd;
}
/*
* StepPattern ::= ChildOrAttributeAxisSpecifier NodeTest Predicate*
* ChildOrAttributeAxisSpecifier ::= @ ? | ('child' | 'attribute') '::'
*/
private QilNode ParseStepPattern()
{
QilNode opnd;
XPathAxis axis;
switch (_scanner.Kind)
{
case LexKind.Dot:
case LexKind.DotDot:
// '.' and '..' are valid XPath steps but not valid in match patterns.
throw _scanner.CreateException(SR.XPath_InvalidAxisInPattern);
case LexKind.At:
axis = XPathAxis.Attribute;
_scanner.NextLex();
break;
case LexKind.Axis:
// Only child:: and attribute:: are permitted in patterns.
axis = _scanner.Axis;
if (axis != XPathAxis.Child && axis != XPathAxis.Attribute)
{
throw _scanner.CreateException(SR.XPath_InvalidAxisInPattern);
}
_scanner.NextLex(); // Skip '::'
_scanner.NextLex();
break;
case LexKind.Name:
case LexKind.Star:
// NodeTest must start with Name or '*'
axis = XPathAxis.Child;
break;
default:
throw _scanner.CreateException(SR.XPath_UnexpectedToken, _scanner.RawValue);
}
XPathNodeType nodeType;
string nodePrefix, nodeName;
XPathParser.InternalParseNodeTest(_scanner, axis, out nodeType, out nodePrefix, out nodeName);
opnd = _ptrnBuilder.Axis(axis, nodeType, nodePrefix, nodeName);
XPathPatternBuilder xpathPatternBuilder = _ptrnBuilder as XPathPatternBuilder;
if (xpathPatternBuilder != null)
{
//for XPathPatternBuilder, get all predicates and then build them
List<QilNode> predicates = new List<QilNode>();
while (_scanner.Kind == LexKind.LBracket)
{
predicates.Add(ParsePredicate(opnd));
}
if (predicates.Count > 0)
opnd = xpathPatternBuilder.BuildPredicates(opnd, predicates);
}
else
{
// Generic builders attach predicates one at a time, in source order.
while (_scanner.Kind == LexKind.LBracket)
{
opnd = _ptrnBuilder.Predicate(opnd, ParsePredicate(opnd), /*reverseStep:*/false);
}
}
return opnd;
}
/*
* Predicate ::= '[' Expr ']'
*/
// Delegates the bracketed expression to the full XPath parser, using a predicate
// builder scoped to the given context node.
private QilNode ParsePredicate(QilNode context)
{
Debug.Assert(_scanner.Kind == LexKind.LBracket);
_scanner.NextLex();
QilNode result = _predicateParser.Parse(_scanner, _ptrnBuilder.GetPredicateBuilder(context), LexKind.RBracket);
Debug.Assert(_scanner.Kind == LexKind.RBracket);
_scanner.NextLex();
return result;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace GeneratorBase.MVC.Models
{
/// <summary>
/// This class will generate the samples for the help page.
/// Resolution order: explicitly registered action samples first, then samples serialized
/// by the supported formatters from registered (or generated) sample objects.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
foreach (var actionSample in GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection))
{
// NOTE(review): Dictionary.Add throws on duplicate keys — assumes at most one
// registered action sample per media type; confirm registration rules.
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
// Action-level samples (added above) take precedence over generated ones.
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters, or null when no sample is registered.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try get sample provided for a specific mediaType, controllerName, actionName and parameterNames.
// If not found, try get the sample provided for a specific mediaType, controllerName and actionName regardless of the parameterNames
// If still not found, try get the sample provided for a specific type and mediaType
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// Try create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
sampleObject = objectGenerator.GenerateObject(type);
}
return sampleObject;
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
// An explicitly registered type overrides what the action signature declares.
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
// Only the [FromBody] parameter (if any) contributes a request body type.
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ActionDescriptor.ReturnType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
// Sync-over-async is acceptable here: help page generation is not a hot path.
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
// Pretty-print known text formats; anything else is returned as-is.
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
// Deliberate broad catch: a formatter failure becomes an InvalidSample on the page,
// never an exception to the caller.
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
e.Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
// Pretty-prints a JSON string; returns the input unchanged when it cannot be parsed.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
// Pretty-prints an XML string; returns the input unchanged when it cannot be parsed.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
// A formatter "supports" a direction when it can read (request) or write (response) the type.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
// Lazily yields every registered action sample matching the controller/action/direction;
// a sample registered with parameter names { "*" } matches any parameter list.
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
// Plain strings are wrapped in TextSample so the view layer treats them uniformly.
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Documents.FixedPage.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Documents
{
sealed public partial class FixedPage : System.Windows.FrameworkElement, System.Windows.Markup.IAddChild, IFixedNavigate, System.Windows.Markup.IUriContext
{
// This is an auto-generated CodeContracts reference stub (see the file header): every
// member body simply returns default(T) or does nothing. Only the declared contracts
// (Contract.Ensures, below on Children) carry meaning; do not expect runtime behavior.
#region Methods and constructors
protected override System.Windows.Size ArrangeOverride(System.Windows.Size arrangeSize)
{
return default(System.Windows.Size);
}
public FixedPage()
{
}
public static double GetBottom(System.Windows.UIElement element)
{
return default(double);
}
public static double GetLeft(System.Windows.UIElement element)
{
return default(double);
}
public static Uri GetNavigateUri(System.Windows.UIElement element)
{
return default(Uri);
}
public static double GetRight(System.Windows.UIElement element)
{
return default(double);
}
public static double GetTop(System.Windows.UIElement element)
{
return default(double);
}
protected override System.Windows.Media.Visual GetVisualChild(int index)
{
return default(System.Windows.Media.Visual);
}
protected override System.Windows.Size MeasureOverride(System.Windows.Size constraint)
{
return default(System.Windows.Size);
}
protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer()
{
return default(System.Windows.Automation.Peers.AutomationPeer);
}
protected override void OnPreviewMouseWheel(System.Windows.Input.MouseWheelEventArgs e)
{
}
protected override void OnRender(System.Windows.Media.DrawingContext dc)
{
}
protected override void OnVisualParentChanged(System.Windows.DependencyObject oldParent)
{
}
public static void SetBottom(System.Windows.UIElement element, double length)
{
}
public static void SetLeft(System.Windows.UIElement element, double length)
{
}
public static void SetNavigateUri(System.Windows.UIElement element, Uri uri)
{
}
public static void SetRight(System.Windows.UIElement element, double length)
{
}
public static void SetTop(System.Windows.UIElement element, double length)
{
}
// Explicit interface implementations are stubbed like everything else.
System.Windows.UIElement System.Windows.Documents.IFixedNavigate.FindElementByID(string elementID, out System.Windows.Documents.FixedPage rootFixedPage)
{
rootFixedPage = default(System.Windows.Documents.FixedPage);
return default(System.Windows.UIElement);
}
void System.Windows.Documents.IFixedNavigate.NavigateAsync(string elementID)
{
}
void System.Windows.Markup.IAddChild.AddChild(Object value)
{
}
void System.Windows.Markup.IAddChild.AddText(string text)
{
}
#endregion
#region Properties and indexers
public System.Windows.Media.Brush Background
{
get
{
return default(System.Windows.Media.Brush);
}
set
{
}
}
public System.Windows.Rect BleedBox
{
get
{
return default(System.Windows.Rect);
}
set
{
}
}
public System.Windows.Controls.UIElementCollection Children
{
get
{
// The one real contract in this stub: Children is guaranteed non-null.
Contract.Ensures(Contract.Result<System.Windows.Controls.UIElementCollection>() != null);
return default(System.Windows.Controls.UIElementCollection);
}
}
public System.Windows.Rect ContentBox
{
get
{
return default(System.Windows.Rect);
}
set
{
}
}
internal protected override System.Collections.IEnumerator LogicalChildren
{
get
{
return default(System.Collections.IEnumerator);
}
}
public Object PrintTicket
{
get
{
return default(Object);
}
set
{
}
}
Uri System.Windows.Markup.IUriContext.BaseUri
{
get
{
return default(Uri);
}
set
{
}
}
protected override int VisualChildrenCount
{
get
{
return default(int);
}
}
#endregion
#region Fields
// Dependency-property identifiers; never assigned in this reference stub.
public readonly static System.Windows.DependencyProperty BackgroundProperty;
public readonly static System.Windows.DependencyProperty BleedBoxProperty;
public readonly static System.Windows.DependencyProperty BottomProperty;
public readonly static System.Windows.DependencyProperty ContentBoxProperty;
public readonly static System.Windows.DependencyProperty LeftProperty;
public readonly static System.Windows.DependencyProperty NavigateUriProperty;
public readonly static System.Windows.DependencyProperty PrintTicketProperty;
public readonly static System.Windows.DependencyProperty RightProperty;
public readonly static System.Windows.DependencyProperty TopProperty;
#endregion
}
}
| |
using Signum.Entities.Basics;
using System.Globalization;
using Signum.Entities.Help;
using System.Text.Json.Serialization;
namespace Signum.Engine.Help;
/// <summary>
/// Base class for help artifacts (namespaces, types, properties, ...) that carry
/// an authorization check.
/// </summary>
public abstract class BaseHelp
{
    /// <summary>
    /// Returns null when the current user may see this help item; otherwise a
    /// human-readable reason why access is denied.
    /// </summary>
    public abstract string? IsAllowed();

    /// <summary>
    /// Throws <see cref="UnauthorizedAccessException"/> when <see cref="IsAllowed"/>
    /// reports an error; no-op otherwise.
    /// </summary>
    public void AssertAllowed()
    {
        if (IsAllowed() is string error)
            throw new UnauthorizedAccessException(
                EngineMessage.UnauthorizedAccessTo0Because1.NiceToString().FormatWith(this.GetType(), error));
    }

    // Force derived classes to provide a meaningful description.
    public abstract override string ToString();
}
/// <summary>
/// Help metadata for a namespace: a display title, optional description and the types
/// it contains. Values can be overridden by a <see cref="NamespaceHelpEntity"/> stored
/// in the database for the given culture.
/// </summary>
public class NamespaceHelp : BaseHelp
{
    public readonly string Namespace;
    public readonly string? Before;   // parent segment of the (cleaned) namespace, if any
    public readonly string Title;
    public readonly string? Description;

    [JsonIgnore]
    public readonly CultureInfo Culture;

    [JsonIgnore]
    public readonly NamespaceHelpEntity? DBEntity;

    [JsonIgnore]
    public readonly Type[] Types;

    public NamespaceHelp(string @namespace, CultureInfo culture, NamespaceHelpEntity? entity, Type[] types)
    {
        Culture = culture;
        Namespace = @namespace;
        Types = types;

        // ".Entities" is an assembly-layout detail; hide it from displayed titles.
        var clean = @namespace.Replace(".Entities", "");

        // Prefer a non-empty title from the database entity, then the last namespace
        // segment, then the whole cleaned namespace.
        Title = entity?.Let(a => a.Title.DefaultToNull()) ?? clean.TryAfterLast('.') ?? clean;
        Before = clean.TryBeforeLast('.');
        Description = entity?.Description;
        DBEntity = entity;
    }

    /// <summary>
    /// Materializes this help as a <see cref="NamespaceHelpEntity"/>, carrying over
    /// id/ticks from the existing database entity (when present) so it saves as an update.
    /// </summary>
    public NamespaceHelpEntity Entity
    {
        get
        {
            var result = new NamespaceHelpEntity
            {
                Culture = this.Culture.ToCultureInfoEntity(),
                Name = this.Namespace,
            };

            if (DBEntity != null)
            {
                result.Title = DBEntity.Title;
                result.Description = DBEntity.Description;
                result.SetId(DBEntity.Id);
                result.SetIsNew(DBEntity.IsNew);
                result.Ticks = DBEntity.Ticks;
            }

            return result;
        }
    }

    /// <summary>
    /// The subset of <see cref="Types"/> the current user is allowed to see in the UI.
    /// </summary>
    public EntityItem[] AllowedTypes
    {
        get
        {
            Schema s = Schema.Current;
            return Types.Where(t => s.IsAllowed(t, inUserInterface: true) == null).Select(t => new EntityItem(t)).ToArray();
        }
    }

    public override string? IsAllowed()
    {
        if (AllowedTypes.Any())
            return null;

        // Fixed the original message's typo ("nemespace") and grammar.
        return "none of the types in the namespace are allowed";
    }

    public override string ToString()
    {
        return "Namespace " + Namespace;
    }
}
// Lightweight projection of an entity type for namespace help listings.
public class EntityItem
{
// The type's clean (stable, schema-registered) name.
public string CleanName;
// Whether a database help entity exists for the type (per HelpLogic.GetTypeHelp).
public bool HasDescription;
public EntityItem(Type t)
{
CleanName = TypeLogic.GetCleanName(t);
HasDescription = HelpLogic.GetTypeHelp(t).HasEntity;
}
}
/// <summary>
/// Help metadata for an entity type: generated info plus per-property, per-operation and
/// per-query help, optionally enriched by a <see cref="TypeHelpEntity"/> from the database.
/// </summary>
public class TypeHelp : BaseHelp
{
    public readonly Type Type;
    public readonly CultureInfo Culture;
    public readonly bool HasEntity;   // true when a database TypeHelpEntity was supplied
    public TypeHelpEntity? DBEntity;
    public readonly string Info;

    public readonly Dictionary<PropertyRoute, PropertyHelp> Properties;
    public readonly Dictionary<OperationSymbol, OperationHelp> Operations;
    public readonly Dictionary<object, QueryHelp> Queries;

    public TypeHelp(Type type, CultureInfo culture, TypeHelpEntity? entity)
    {
        Type = type;
        Culture = culture;

        // FIX: DBEntity was previously assigned at the END of the constructor, after the
        // props/opers lookups below had already read it via DBEntity?.Properties — so the
        // database descriptions were silently always null. Assign it first.
        DBEntity = entity;

        // FIX: HasEntity is readonly and was never assigned anywhere in the class (so it
        // was always false, yet EntityItem consumes it). Derive it from the presence of
        // the database entity — TODO confirm intended semantics.
        HasEntity = entity != null;

        Info = HelpGenerator.GetEntityHelp(type);

        // NOTE(review): maps a.Info into PropertyHelp's userDescription parameter —
        // verify this shouldn't be a.Description (GetEntity reads ph.UserDescription
        // and ph.Info as distinct values).
        var props = DBEntity?.Properties.ToDictionaryEx(a => a.Property.ToPropertyRoute(), a => a.Info);
        var opers = DBEntity?.Operations.ToDictionaryEx(a => a.Operation, a => a.Info);

        Properties = PropertyRoute.GenerateRoutes(type)
            .ToDictionary(pp => pp, pp => new PropertyHelp(pp, props?.TryGetC(pp)));

        Operations = OperationLogic.TypeOperations(type)
            .ToDictionary(op => op.OperationSymbol, op => new OperationHelp(op.OperationSymbol, type, opers?.TryGetC(op.OperationSymbol)));

        var allQueries = HelpLogic.CachedQueriesHelp();
        Queries = HelpLogic.TypeToQuery.Value.TryGetC(this.Type).EmptyIfNull().Select(a => allQueries.GetOrThrow(a)).ToDictionary(qh => qh.QueryName);
    }

    /// <summary>
    /// Materializes this help as a <see cref="TypeHelpEntity"/>, including only the
    /// properties/operations/queries the current user is allowed to see, and carrying
    /// over id/ticks from the existing database entity so it saves as an update.
    /// </summary>
    public TypeHelpEntity GetEntity()
    {
        var result = new TypeHelpEntity
        {
            Culture = this.Culture.ToCultureInfoEntity(),
            Type = this.Type.ToTypeEntity(),
            Description = DBEntity?.Description,
            Info = Info
        };

        result.Properties.AddRange(
            from pre in PropertyRouteLogic.RetrieveOrGenerateProperties(this.Type.ToTypeEntity())
            let pr = pre.ToPropertyRoute()
            // Skip expression-expanded, setter-less properties.
            where !(pr.PropertyInfo != null && pr.PropertyInfo.SetMethod == null && ExpressionCleaner.HasExpansions(pr.PropertyInfo.DeclaringType!, pr.PropertyInfo))
            let ph = Properties.GetOrThrow(pre.ToPropertyRoute())
            where ph.IsAllowed() == null
            select new PropertyRouteHelpEmbedded
            {
                Property = pre,
                Info = ph.Info,
                Description = ph.UserDescription,
            });

        result.Operations.AddRange(
            from oh in Operations.Values
            where oh.IsAllowed() == null
            select new OperationHelpEmbedded
            {
                Operation = oh.OperationSymbol,
                Info = oh.Info,
                Description = oh.UserDescription,
            });

        result.Queries.AddRange(
            from qn in QueryLogic.Queries.GetTypeQueries(this.Type).Keys
            let qh = HelpLogic.GetQueryHelp(qn)
            where qh.IsAllowed() == null
            select qh.GetEntity());

        if (DBEntity != null)
        {
            result.SetId(DBEntity.Id);
            result.SetIsNew(DBEntity.IsNew);
            result.Ticks = DBEntity.Ticks;
        }

        return result;
    }

    public override string? IsAllowed()
    {
        return Schema.Current.IsAllowed(Type, inUserInterface: true);
    }

    public override string ToString()
    {
        return "Type " + TypeLogic.GetCleanName(Type);
    }
}
/// <summary>
/// Help model for a single property route: generated info text plus the
/// optional user-authored description loaded from the database.
/// </summary>
public class PropertyHelp : BaseHelp
{
    public readonly string Info;
    public readonly PropertyRoute PropertyRoute;
    public readonly string? UserDescription;

    public PropertyHelp(PropertyRoute propertyRoute, string? userDescription)
    {
        // Only concrete field/property routes carry help text.
        if (propertyRoute.PropertyRouteType != PropertyRouteType.FieldOrProperty)
            throw new ArgumentException("propertyRoute should be of type Property");

        this.PropertyRoute = propertyRoute;
        this.Info = HelpGenerator.GetPropertyHelp(propertyRoute);
        this.UserDescription = userDescription;
    }

    public PropertyInfo PropertyInfo => PropertyRoute.PropertyInfo!;

    /// <summary>Delegates visibility to the route's own authorization.</summary>
    public override string? IsAllowed() => PropertyRoute.IsAllowed();

    public override string ToString() => "Property " + this.PropertyRoute.ToString();
}
/// <summary>
/// Help model for an operation on a type: generated info text plus the
/// optional user-authored description.
/// </summary>
public class OperationHelp : BaseHelp
{
    public readonly OperationSymbol OperationSymbol;
    public readonly Type Type;
    public readonly string Info;
    public readonly string? UserDescription;

    public OperationHelp(OperationSymbol operationSymbol, Type type, string? userDescription)
    {
        OperationSymbol = operationSymbol;
        Type = type;
        Info = HelpGenerator.GetOperationHelp(type, operationSymbol);
        UserDescription = userDescription;
    }

    /// <summary>Null when the operation is authorized in the UI; otherwise a denial message.</summary>
    public override string? IsAllowed()
    {
        var allowed = OperationLogic.OperationAllowed(OperationSymbol, this.Type, inUserInterface: true);

        return allowed
            ? null
            : OperationMessage.Operation01IsNotAuthorized.NiceToString(this.OperationSymbol.NiceToString(), this.OperationSymbol.Key);
    }

    public override string ToString() => "Operation " + this.OperationSymbol.Key;
}
/// <summary>
/// Help model for a query: generated info text plus per-column help, with
/// user-authored descriptions merged in from the stored entity when present.
/// </summary>
public class QueryHelp : BaseHelp
{
    public readonly object QueryName;
    public readonly CultureInfo Culture;
    public readonly string Info;
    public readonly Dictionary<string, QueryColumnHelp> Columns;
    public readonly QueryHelpEntity? DBEntity;
    public readonly string? UserDescription;

    public QueryHelp(object queryName, CultureInfo ci, QueryHelpEntity? entity)
    {
        QueryName = queryName;
        Culture = ci;
        Info = HelpGenerator.GetQueryHelp(QueryLogic.Queries.GetQuery(queryName).Core.Value);

        // Stored per-column descriptions, keyed by column name (null when no entity).
        var descriptions = entity?.Columns.ToDictionary(a => a.ColumnName, a => a.Description);

        Columns = QueryLogic.Queries.GetQuery(queryName).Core.Value.StaticColumns.ToDictionary(
            c => c.Name,
            c => new QueryColumnHelp(c, c.DisplayName(), HelpGenerator.GetQueryColumnHelp(c), descriptions?.TryGetCN(c.Name)));

        DBEntity = entity;
        UserDescription = entity?.Description;
    }

    /// <summary>
    /// Builds the persistable <see cref="QueryHelpEntity"/>, including only the
    /// columns the current user is allowed to see.
    /// </summary>
    public QueryHelpEntity GetEntity()
    {
        var columnDescriptions = DBEntity?.Columns.ToDictionary(a => a.ColumnName, a => a.Description);

        var result = new QueryHelpEntity
        {
            Culture = this.Culture.ToCultureInfoEntity(),
            Query = QueryLogic.GetQueryEntity(this.QueryName),
            Description = DBEntity?.Description,
            Info = Info,
            Columns = this.Columns.Values
                .Where(ch => ch.Column.IsAllowed() == null)
                .Select(ch => new QueryColumnHelpEmbedded
                {
                    ColumnName = ch.Column.Name,
                    Description = columnDescriptions?.TryGetCN(ch.Column.Name)!,
                    NiceName = ch.NiceName,
                    Info = ch.Info,
                }).ToMList()
        };

        // Preserve identity of the stored row so saving updates instead of inserting.
        if (DBEntity != null)
        {
            result.SetId(DBEntity.Id);
            result.SetIsNew(DBEntity.IsNew);
            result.Ticks = DBEntity.Ticks;
        }

        return result;
    }

    public override string ToString() => "Query " + QueryUtils.GetKey(this.QueryName);

    /// <summary>Null when the query is authorized; otherwise a denial message.</summary>
    public override string? IsAllowed() =>
        QueryLogic.Queries.QueryAllowed(this.QueryName, false)
            ? null
            : "Access to query {0} not allowed".FormatWith(QueryUtils.GetKey(this.QueryName));
}
/// <summary>
/// Help model for one query column: its display name, generated info text,
/// and an optional user-authored description.
/// </summary>
public class QueryColumnHelp : BaseHelp
{
    public ColumnDescriptionFactory Column;
    public string NiceName;
    public string Info;
    public string? UserDescription;

    public QueryColumnHelp(ColumnDescriptionFactory column, string niceName, string info, string? userDescription)
    {
        Column = column;
        NiceName = niceName;
        Info = info;
        UserDescription = userDescription;
    }

    /// <summary>Delegates visibility to the column's own authorization.</summary>
    public override string? IsAllowed() => Column.IsAllowed();

    public override string ToString() => "Column " + Column.Name;
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Runtime.InteropServices;
using System.Text;
// P/Invoke surface for the native WinHTTP library (winhttp.dll).
// Every signature here IS the interop contract: parameter types, order, and marshaling
// attributes mirror the native exports and must not be changed. CharSet.Unicode selects
// the wide-character (W) variants of the APIs.
internal partial class Interop
{
    internal partial class WinHttp
    {
        // Opens a WinHTTP session. Note userAgent is passed as a raw IntPtr, not a
        // marshaled string — the caller is responsible for any manual marshaling.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern SafeWinHttpHandle WinHttpOpen(
            IntPtr userAgent,
            uint accessType,
            string proxyName,
            string proxyBypass, int flags);

        // Closes any WinHTTP handle; takes a raw IntPtr so it can be called from
        // SafeHandle release paths.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpCloseHandle(
            IntPtr handle);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern SafeWinHttpHandle WinHttpConnect(
            SafeWinHttpHandle sessionHandle,
            string serverName,
            ushort serverPort,
            uint reserved);

        // NOTE: except for the return type, this refers to the same function as WinHttpConnect.
        [DllImport(Interop.Libraries.WinHttp, EntryPoint = "WinHttpConnect", CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern SafeWinHttpHandleWithCallback WinHttpConnectWithCallback(
            SafeWinHttpHandle sessionHandle,
            string serverName,
            ushort serverPort,
            uint reserved);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern SafeWinHttpHandle WinHttpOpenRequest(
            SafeWinHttpHandle connectHandle,
            string verb,
            string objectName,
            string version,
            string referrer,
            string acceptTypes,
            uint flags);

        // NOTE: except for the return type, this refers to the same function as WinHttpOpenRequest.
        [DllImport(Interop.Libraries.WinHttp, EntryPoint = "WinHttpOpenRequest", CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern SafeWinHttpHandleWithCallback WinHttpOpenRequestWithCallback(
            SafeWinHttpHandle connectHandle,
            string verb,
            string objectName,
            string version,
            string referrer,
            string acceptTypes,
            uint flags);

        // Two overloads for header strings: StringBuilder and string map to the same export.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpAddRequestHeaders(
            SafeWinHttpHandle requestHandle,
            StringBuilder headers,
            uint headersLength,
            uint modifiers);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpAddRequestHeaders(
            SafeWinHttpHandle requestHandle,
            string headers,
            uint headersLength,
            uint modifiers);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpSendRequest(
            SafeWinHttpHandle requestHandle,
            StringBuilder headers,
            uint headersLength,
            IntPtr optional,
            uint optionalLength,
            uint totalLength,
            IntPtr context);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpReceiveResponse(
            SafeWinHttpHandle requestHandle,
            IntPtr reserved);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryDataAvailable(
            SafeWinHttpHandle requestHandle,
            out uint bytesAvailable);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpReadData(
            SafeWinHttpHandle requestHandle,
            IntPtr buffer,
            uint bufferSize,
            out uint bytesRead);

        // Two overloads: string-valued headers (StringBuilder buffer) vs numeric headers
        // (ref uint). Same native export, different marshaling of the output buffer.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryHeaders(
            SafeWinHttpHandle requestHandle,
            uint infoLevel,
            string name,
            StringBuilder buffer,
            ref uint bufferLength,
            IntPtr index);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryHeaders(
            SafeWinHttpHandle requestHandle,
            uint infoLevel,
            string name,
            ref uint number,
            ref uint bufferLength,
            IntPtr index);

        // Three overloads for different option value shapes (string buffer, pointer-sized
        // value, raw buffer pointer).
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryOption(
            SafeWinHttpHandle handle,
            uint option,
            StringBuilder buffer,
            ref uint bufferSize);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryOption(
            SafeWinHttpHandle handle,
            uint option,
            ref IntPtr buffer,
            ref uint bufferSize);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryOption(
            SafeWinHttpHandle handle,
            uint option,
            IntPtr buffer,
            ref uint bufferSize);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpWriteData(
            SafeWinHttpHandle requestHandle,
            IntPtr buffer,
            uint bufferSize,
            out uint bytesWritten);

        // The uint overload defaults optionLength to sizeof(uint) for convenience.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpSetOption(
            SafeWinHttpHandle handle,
            uint option,
            ref uint optionData,
            uint optionLength = sizeof(uint));

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpSetOption(
            SafeWinHttpHandle handle,
            uint option,
            string optionData,
            uint optionLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpSetOption(
            SafeWinHttpHandle handle,
            uint option,
            IntPtr optionData,
            uint optionLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpSetCredentials(
            SafeWinHttpHandle requestHandle,
            uint authTargets,
            uint authScheme,
            string userName,
            string password,
            IntPtr reserved);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpQueryAuthSchemes(
            SafeWinHttpHandle requestHandle,
            out uint supportedSchemes,
            out uint firstScheme,
            out uint authTarget);

        // Timeouts are in milliseconds per the native API naming; 0/negative semantics
        // are defined by WinHTTP — see the WinHttpSetTimeouts documentation.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpSetTimeouts(
            SafeWinHttpHandle handle,
            int resolveTimeout,
            int connectTimeout,
            int sendTimeout,
            int receiveTimeout);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpGetIEProxyConfigForCurrentUser(
            out WINHTTP_CURRENT_USER_IE_PROXY_CONFIG proxyConfig);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool WinHttpGetProxyForUrl(
            SafeWinHttpHandle sessionHandle, string url,
            ref WINHTTP_AUTOPROXY_OPTIONS autoProxyOptions,
            out WINHTTP_PROXY_INFO proxyInfo);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern IntPtr WinHttpSetStatusCallback(
            SafeWinHttpHandle handle,
            WINHTTP_STATUS_CALLBACK callback,
            uint notificationFlags,
            IntPtr reserved);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = true)]
        public static extern SafeWinHttpHandleWithCallback WinHttpWebSocketCompleteUpgrade(
            SafeWinHttpHandle requestHandle,
            IntPtr context);

        // WebSocket functions report errors via their uint return value, hence
        // SetLastError = false on this group.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketSend(
            SafeWinHttpHandle webSocketHandle,
            WINHTTP_WEB_SOCKET_BUFFER_TYPE bufferType,
            IntPtr buffer,
            uint bufferLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketReceive(
            SafeWinHttpHandle webSocketHandle,
            IntPtr buffer,
            uint bufferLength,
            out uint bytesRead,
            out WINHTTP_WEB_SOCKET_BUFFER_TYPE bufferType);

        // Shutdown/Close each have byte[] and IntPtr overloads for the close reason buffer.
        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketShutdown(
            SafeWinHttpHandle webSocketHandle,
            ushort status,
            byte[] reason,
            uint reasonLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketShutdown(
            SafeWinHttpHandle webSocketHandle,
            ushort status,
            IntPtr reason,
            uint reasonLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketClose(
            SafeWinHttpHandle webSocketHandle,
            ushort status,
            byte[] reason,
            uint reasonLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketClose(
            SafeWinHttpHandle webSocketHandle,
            ushort status,
            IntPtr reason,
            uint reasonLength);

        [DllImport(Interop.Libraries.WinHttp, CharSet = CharSet.Unicode, SetLastError = false)]
        public static extern uint WinHttpWebSocketQueryCloseStatus(
            SafeWinHttpHandle webSocketHandle,
            out ushort status,
            byte[] reason,
            uint reasonLength,
            out uint reasonLengthConsumed);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Impl.Binary
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Common;
using Apache.Ignite.Core.Impl.Binary.IO;
using Apache.Ignite.Core.Impl.Binary.Metadata;
/// <summary>
/// Binary builder implementation.
/// </summary>
internal class BinaryObjectBuilder : IBinaryObjectBuilder
{
/** Cached dictionary with no values, shared by mutations that replace no fields. */
private static readonly IDictionary<int, BinaryBuilderField> EmptyVals =
    new Dictionary<int, BinaryBuilderField>();

/** Binary facade (provides the marshaller used by Build/Mutate). */
private readonly Binary _binary;

/** Parent builder; for a root builder this points at the builder itself (see constructor). */
private readonly BinaryObjectBuilder _parent;

/** Initial binary object being mutated. */
private readonly BinaryObject _obj;

/** Type descriptor of the object's binary type. */
private readonly IBinaryTypeDescriptor _desc;

/** Values set through SetField/RemoveField, keyed by field name. */
private IDictionary<string, BinaryBuilderField> _vals;

/** Contextual fields cached by stream position; stored on the parent so child builders share it. */
private IDictionary<int, BinaryBuilderField> _cache;

/** Hash code to write into the mutated object. */
private int _hashCode;

/** Current write context; created in Build() and shared through the parent. */
private Context _ctx;

/** Write array action. */
private static readonly Action<BinaryWriter, object> WriteArrayAction =
    (w, o) => w.WriteArrayInternal((Array) o);

/** Write collection action. */
private static readonly Action<BinaryWriter, object> WriteCollectionAction =
    (w, o) => w.WriteCollection((ICollection) o);

/** Write timestamp action. */
private static readonly Action<BinaryWriter, object> WriteTimestampAction =
    (w, o) => w.WriteTimestamp((DateTime?) o);

/** Write timestamp array action. */
private static readonly Action<BinaryWriter, object> WriteTimestampArrayAction =
    (w, o) => w.WriteTimestampArray((DateTime?[])o);
/// <summary>
/// Initializes a new builder over an existing binary object.
/// </summary>
/// <param name="binary">Binary facade (must not be null).</param>
/// <param name="parent">Parent builder; null makes this builder its own root.</param>
/// <param name="obj">Initial binary object (must not be null).</param>
/// <param name="desc">Type descriptor (must not be null).</param>
public BinaryObjectBuilder(Binary binary, BinaryObjectBuilder parent,
    BinaryObject obj, IBinaryTypeDescriptor desc)
{
    Debug.Assert(binary != null);
    Debug.Assert(obj != null);
    Debug.Assert(desc != null);

    _binary = binary;
    _obj = obj;
    _desc = desc;

    // A root builder acts as its own parent so that shared state (_ctx, _cache)
    // always lives on _parent.
    _parent = parent ?? this;

    _hashCode = obj.GetHashCode();
}
/** <inheritDoc /> */
public IBinaryObjectBuilder SetHashCode(int hashCode)
{
    // Remember the hash code to be written during Build(); fluent return.
    _hashCode = hashCode;

    return this;
}
/** <inheritDoc /> */
public T GetField<T>(string name)
{
    // 1) A value explicitly set (or removed) on this builder wins.
    BinaryBuilderField builderField;
    if (_vals != null && _vals.TryGetValue(name, out builderField))
        return builderField == BinaryBuilderField.RmvMarker ? default(T) : (T) builderField.Value;

    // 2) Otherwise fall back to the original object's data.
    int fieldPos;
    if (!_obj.TryGetFieldPosition(name, out fieldPos))
        return default(T);

    T cached;
    if (TryGetCachedField(fieldPos, out cached))
        return cached;

    // 3) Deserialize once, cache by position, and record by name so Build() reuses it.
    var value = _obj.GetField<T>(fieldPos, this);

    SetField0(name, CacheField(fieldPos, value));

    return value;
}
// --- Typed field setters -------------------------------------------------------------
// Each setter records a BinaryBuilderField carrying the CLR type, the value, the binary
// type id, and (where the default writer would differ) an explicit write action; the
// actual write happens later in Build()/Mutate. RemoveField records a removal marker.

/** <inheritDoc /> */
public IBinaryObjectBuilder SetField<T>(string fieldName, T val)
{
    // Generic setter: the type id is resolved from the CLR type at call time.
    return SetField0(fieldName,
        new BinaryBuilderField(typeof (T), val, BinarySystemHandlers.GetTypeId(typeof (T))));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetArrayField<T>(string fieldName, T[] val)
{
    return SetField0(fieldName,
        new BinaryBuilderField(typeof (T[]), val, BinaryUtils.TypeArray, WriteArrayAction));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetBooleanField(string fieldName, bool val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (bool), val, BinaryUtils.TypeBool,
        (w, o) => w.WriteBooleanField((bool) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetBooleanArrayField(string fieldName, bool[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (bool[]), val, BinaryUtils.TypeArrayBool,
        (w, o) => w.WriteBooleanArray((bool[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetByteField(string fieldName, byte val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (byte), val, BinaryUtils.TypeByte,
        (w, o) => w.WriteByteField((byte) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetByteArrayField(string fieldName, byte[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (byte[]), val, BinaryUtils.TypeArrayByte,
        (w, o) => w.WriteByteArray((byte[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetCharField(string fieldName, char val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (char), val, BinaryUtils.TypeChar,
        (w, o) => w.WriteCharField((char) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetCharArrayField(string fieldName, char[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (char[]), val, BinaryUtils.TypeArrayChar,
        (w, o) => w.WriteCharArray((char[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetCollectionField(string fieldName, ICollection val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (ICollection), val, BinaryUtils.TypeCollection,
        WriteCollectionAction));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetDecimalField(string fieldName, decimal? val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (decimal?), val, BinaryUtils.TypeDecimal,
        (w, o) => w.WriteDecimal((decimal?) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetDecimalArrayField(string fieldName, decimal?[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (decimal?[]), val, BinaryUtils.TypeArrayDecimal,
        (w, o) => w.WriteDecimalArray((decimal?[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetDictionaryField(string fieldName, IDictionary val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (IDictionary), val, BinaryUtils.TypeDictionary,
        (w, o) => w.WriteDictionary((IDictionary) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetDoubleField(string fieldName, double val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (double), val, BinaryUtils.TypeDouble,
        (w, o) => w.WriteDoubleField((double) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetDoubleArrayField(string fieldName, double[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (double[]), val, BinaryUtils.TypeArrayDouble,
        (w, o) => w.WriteDoubleArray((double[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetEnumField<T>(string fieldName, T val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (T), val, BinaryUtils.TypeEnum,
        (w, o) => w.WriteEnum((T) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetEnumArrayField<T>(string fieldName, T[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (T[]), val, BinaryUtils.TypeArrayEnum,
        (w, o) => w.WriteEnumArray((T[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetFloatField(string fieldName, float val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (float), val, BinaryUtils.TypeFloat,
        (w, o) => w.WriteFloatField((float) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetFloatArrayField(string fieldName, float[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (float[]), val, BinaryUtils.TypeArrayFloat,
        (w, o) => w.WriteFloatArray((float[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetGuidField(string fieldName, Guid? val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (Guid?), val, BinaryUtils.TypeGuid,
        (w, o) => w.WriteGuid((Guid?) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetGuidArrayField(string fieldName, Guid?[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (Guid?[]), val, BinaryUtils.TypeArrayGuid,
        (w, o) => w.WriteGuidArray((Guid?[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetIntField(string fieldName, int val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (int), val, BinaryUtils.TypeInt,
        (w, o) => w.WriteIntField((int) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetIntArrayField(string fieldName, int[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (int[]), val, BinaryUtils.TypeArrayInt,
        (w, o) => w.WriteIntArray((int[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetLongField(string fieldName, long val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (long), val, BinaryUtils.TypeLong,
        (w, o) => w.WriteLongField((long) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetLongArrayField(string fieldName, long[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (long[]), val, BinaryUtils.TypeArrayLong,
        (w, o) => w.WriteLongArray((long[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetShortField(string fieldName, short val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (short), val, BinaryUtils.TypeShort,
        (w, o) => w.WriteShortField((short) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetShortArrayField(string fieldName, short[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (short[]), val, BinaryUtils.TypeArrayShort,
        (w, o) => w.WriteShortArray((short[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetStringField(string fieldName, string val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (string), val, BinaryUtils.TypeString,
        (w, o) => w.WriteString((string) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetStringArrayField(string fieldName, string[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (string[]), val, BinaryUtils.TypeArrayString,
        (w, o) => w.WriteStringArray((string[]) o)));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetTimestampField(string fieldName, DateTime? val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (DateTime?), val, BinaryUtils.TypeTimestamp,
        WriteTimestampAction));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder SetTimestampArrayField(string fieldName, DateTime?[] val)
{
    return SetField0(fieldName, new BinaryBuilderField(typeof (DateTime?[]), val, BinaryUtils.TypeArrayTimestamp,
        WriteTimestampArrayAction));
}

/** <inheritDoc /> */
public IBinaryObjectBuilder RemoveField(string name)
{
    // The removal marker causes the field to be skipped when the object is rewritten.
    return SetField0(name, BinaryBuilderField.RmvMarker);
}
/** <inheritDoc /> */
public IBinaryObject Build()
{
    // Assume that resulting length will be no less than header + [fields_cnt] * 12;
    int estimatedCapacity = BinaryObjectHeader.Size + (_vals == null ? 0 : _vals.Count*12);

    using (var outStream = new BinaryHeapStream(estimatedCapacity))
    {
        BinaryWriter writer = _binary.Marshaller.StartMarshal(outStream);

        writer.SetBuilder(this);

        // All related builders will work in this context with this writer.
        // Note the context lives on _parent so nested builders share it.
        _parent._ctx = new Context(writer);

        try
        {
            // Write.
            writer.Write(this);

            // Process metadata.
            _binary.Marshaller.FinishMarshal(writer);

            // Create binary object once metadata is processed.
            return new BinaryObject(_binary.Marshaller, outStream.InternalArray, 0,
                BinaryObjectHeader.Read(outStream, 0));
        }
        finally
        {
            // Cleanup: mark the shared context closed even if writing threw.
            _parent._ctx.Closed = true;
        }
    }
}
/// <summary>
/// Creates a builder for a nested binary object, resolving its descriptor by type id.
/// </summary>
/// <param name="obj">binary object.</param>
/// <returns>Child builder (created with a null parent; the shared context is
/// established separately).</returns>
public BinaryObjectBuilder Child(BinaryObject obj)
{
    return new BinaryObjectBuilder(_binary, null, obj,
        _binary.Marshaller.GetDescriptor(true, obj.TypeId));
}
/// <summary>
/// Looks up a previously deserialized field by its stream position in the
/// cache shared through the parent builder.
/// </summary>
/// <param name="pos">Position.</param>
/// <param name="val">Value (default when not cached).</param>
/// <returns><c>true</c> if value is found in cache.</returns>
public bool TryGetCachedField<T>(int pos, out T val)
{
    var cache = _parent._cache;

    BinaryBuilderField cached;
    if (cache != null && cache.TryGetValue(pos, out cached))
    {
        val = cached == null ? default(T) : (T) cached.Value;

        return true;
    }

    val = default(T);

    return false;
}
/// <summary>
/// Records a deserialized field in the shared cache, keyed by its stream position,
/// pairing it with a write action matching the field's binary header byte.
/// </summary>
/// <param name="pos">Position.</param>
/// <param name="val">Value.</param>
public BinaryBuilderField CacheField<T>(int pos, T val)
{
    // Lazily create the cache on the parent so all related builders share it.
    var cache = _parent._cache ?? (_parent._cache = new Dictionary<int, BinaryBuilderField>(2));

    var hdr = _obj.Data[pos];
    var entry = new BinaryBuilderField(typeof(T), val, hdr, GetWriteAction(hdr, pos));

    cache[pos] = entry;

    return entry;
}
/// <summary>
/// Gets the write action by header, for the cases where the default
/// <c>SetField(X)</c> path would serialize differently from the typed
/// <c>SetSpecialField(X)</c> path: arrays, collections, dates.
/// </summary>
/// <param name="header">The header byte read at the field's position.</param>
/// <param name="pos">Position of the field in the object's data.</param>
/// <returns>Write action, or null when the default write is correct.</returns>
private Action<BinaryWriter, object> GetWriteAction(byte header, int pos)
{
    switch (header)
    {
        case BinaryUtils.TypeArray:
            return WriteArrayAction;

        case BinaryUtils.TypeCollection:
            return WriteCollectionAction;

        case BinaryUtils.TypeTimestamp:
            return WriteTimestampAction;

        case BinaryUtils.TypeArrayTimestamp:
            return WriteTimestampArrayAction;

        case BinaryUtils.TypeArrayEnum:
            using (var stream = new BinaryHeapStream(_obj.Data))
            {
                // BUG FIX: the element type id sits one byte past the field header, so we
                // must seek to (pos + 1) from the stream START. The original passed
                // "SeekOrigin.Begin + 1", which enum arithmetic turns into
                // SeekOrigin.Current — seeking relative to the current position instead.
                stream.Seek(pos + 1, SeekOrigin.Begin);

                var elementTypeId = stream.ReadInt();

                return (w, o) => w.WriteEnumArrayInternal((Array) o, elementTypeId);
            }

        default:
            return null;
    }
}
/// <summary>
/// Mutate binary object: rewrites the object from the input stream into the output
/// stream, substituting the supplied field values and updating binary metadata.
/// </summary>
/// <param name="inStream">Input stream with initial object.</param>
/// <param name="outStream">Output stream.</param>
/// <param name="desc">Type descriptor.</param>
/// <param name="hashCode">Hash code.</param>
/// <param name="vals">Values, keyed by field name.</param>
private void Mutate(
    BinaryHeapStream inStream,
    BinaryHeapStream outStream,
    IBinaryTypeDescriptor desc,
    int hashCode,
    IDictionary<string, BinaryBuilderField> vals)
{
    // Set correct builder to writer frame; the previous builder is restored in finally.
    BinaryObjectBuilder oldBuilder = _parent._ctx.Writer.SetBuilder(_parent);

    int streamPos = inStream.Position;

    try
    {
        // Prepare fields: map each field name to its numeric field id.
        IBinaryTypeHandler metaHnd = _binary.Marshaller.GetBinaryTypeHandler(desc);

        IDictionary<int, BinaryBuilderField> vals0;

        if (vals == null || vals.Count == 0)
            vals0 = EmptyVals;
        else
        {
            vals0 = new Dictionary<int, BinaryBuilderField>(vals.Count);

            foreach (KeyValuePair<string, BinaryBuilderField> valEntry in vals)
            {
                int fieldId = BinaryUtils.FieldId(desc.TypeId, valEntry.Key, desc.NameMapper, desc.IdMapper);

                // Two distinct names hashing to the same id would silently overwrite
                // each other, so fail loudly instead.
                if (vals0.ContainsKey(fieldId))
                    throw new IgniteException("Collision in field ID detected (change field name or " +
                        "define custom ID mapper) [fieldName=" + valEntry.Key + ", fieldId=" + fieldId + ']');

                vals0[fieldId] = valEntry.Value;

                // Write metadata if: 1) it is enabled for type; 2) type is not null (i.e. it is neither
                // remove marker, nor a field read through "GetField" method.
                if (metaHnd != null && valEntry.Value.Type != null)
                    metaHnd.OnFieldWrite(fieldId, valEntry.Key, valEntry.Value.TypeId);
            }
        }

        // Actual processing.
        Mutate0(_parent._ctx, inStream, outStream, true, hashCode, vals0);

        // 3. Handle metadata.
        if (metaHnd != null)
        {
            IDictionary<string, int> meta = metaHnd.OnObjectWriteFinished();

            if (meta != null)
                _parent._ctx.Writer.SaveMetadata(desc, meta);
        }
    }
    finally
    {
        // Restore builder frame and rewind the input stream to where we started.
        _parent._ctx.Writer.SetBuilder(oldBuilder);

        inStream.Seek(streamPos, SeekOrigin.Begin);
    }
}
/// <summary>
/// Internal mutation routine.
/// </summary>
/// <param name="inStream">Input stream.</param>
/// <param name="outStream">Output stream.</param>
/// <param name="ctx">Context.</param>
/// <param name="changeHash">Whether hash should be changed.</param>
/// <param name="hash">New hash.</param>
/// <param name="vals">Values to be replaced.</param>
/// <returns>Mutated object.</returns>
private void Mutate0(Context ctx, BinaryHeapStream inStream, IBinaryStream outStream,
bool changeHash, int hash, IDictionary<int, BinaryBuilderField> vals)
{
int inStartPos = inStream.Position;
int outStartPos = outStream.Position;
byte inHdr = inStream.ReadByte();
if (inHdr == BinaryUtils.HdrNull)
outStream.WriteByte(BinaryUtils.HdrNull);
else if (inHdr == BinaryUtils.HdrHnd)
{
int inHnd = inStream.ReadInt();
int oldPos = inStartPos - inHnd;
int newPos;
if (ctx.OldToNew(oldPos, out newPos))
{
// Handle is still valid.
outStream.WriteByte(BinaryUtils.HdrHnd);
outStream.WriteInt(outStartPos - newPos);
}
else
{
// Handle is invalid, write full object.
int inRetPos = inStream.Position;
inStream.Seek(oldPos, SeekOrigin.Begin);
Mutate0(ctx, inStream, outStream, false, 0, EmptyVals);
inStream.Seek(inRetPos, SeekOrigin.Begin);
}
}
else if (inHdr == BinaryUtils.HdrFull)
{
var inHeader = BinaryObjectHeader.Read(inStream, inStartPos);
BinaryUtils.ValidateProtocolVersion(inHeader.Version);
int hndPos;
if (ctx.AddOldToNew(inStartPos, outStartPos, out hndPos))
{
// Object could be cached in parent builder.
BinaryBuilderField cachedVal;
if (_parent._cache != null && _parent._cache.TryGetValue(inStartPos, out cachedVal))
{
WriteField(ctx, cachedVal);
}
else
{
// New object, write in full form.
var inSchema = BinaryObjectSchemaSerializer.ReadSchema(inStream, inStartPos, inHeader,
_desc.Schema, _binary.Marshaller);
var outSchema = BinaryObjectSchemaHolder.Current;
var schemaIdx = outSchema.PushSchema();
try
{
// Skip header as it is not known at this point.
outStream.Seek(BinaryObjectHeader.Size, SeekOrigin.Current);
if (inSchema != null)
{
foreach (var inField in inSchema)
{
BinaryBuilderField fieldVal;
var fieldFound = vals.TryGetValue(inField.Id, out fieldVal);
if (fieldFound && fieldVal == BinaryBuilderField.RmvMarker)
continue;
outSchema.PushField(inField.Id, outStream.Position - outStartPos);
if (!fieldFound)
fieldFound = _parent._cache != null &&
_parent._cache.TryGetValue(inField.Offset + inStartPos,
out fieldVal);
if (fieldFound)
{
WriteField(ctx, fieldVal);
vals.Remove(inField.Id);
}
else
{
// Field is not tracked, re-write as is.
inStream.Seek(inField.Offset + inStartPos, SeekOrigin.Begin);
Mutate0(ctx, inStream, outStream, false, 0, EmptyVals);
}
}
}
// Write remaining new fields.
foreach (var valEntry in vals)
{
if (valEntry.Value == BinaryBuilderField.RmvMarker)
continue;
outSchema.PushField(valEntry.Key, outStream.Position - outStartPos);
WriteField(ctx, valEntry.Value);
}
var flags = inHeader.IsUserType
? BinaryObjectHeader.Flag.UserType
: BinaryObjectHeader.Flag.None;
// Write raw data.
int outRawOff = outStream.Position - outStartPos;
if (inHeader.HasRaw)
{
var inRawOff = inHeader.GetRawOffset(inStream, inStartPos);
var inRawLen = inHeader.SchemaOffset - inRawOff;
flags |= BinaryObjectHeader.Flag.HasRaw;
outStream.Write(inStream.InternalArray, inStartPos + inRawOff, inRawLen);
}
// Write schema
int outSchemaOff = outRawOff;
var schemaPos = outStream.Position;
int outSchemaId;
if (inHeader.IsCompactFooter)
flags |= BinaryObjectHeader.Flag.CompactFooter;
var hasSchema = outSchema.WriteSchema(outStream, schemaIdx, out outSchemaId, ref flags);
if (hasSchema)
{
outSchemaOff = schemaPos - outStartPos;
flags |= BinaryObjectHeader.Flag.HasSchema;
if (inHeader.HasRaw)
outStream.WriteInt(outRawOff);
if (_desc.Schema.Get(outSchemaId) == null)
_desc.Schema.Add(outSchemaId, outSchema.GetSchema(schemaIdx));
}
var outLen = outStream.Position - outStartPos;
var outHash = changeHash ? hash : inHeader.HashCode;
var outHeader = new BinaryObjectHeader(inHeader.TypeId, outHash, outLen,
outSchemaId, outSchemaOff, flags);
BinaryObjectHeader.Write(outHeader, outStream, outStartPos);
outStream.Seek(outStartPos + outLen, SeekOrigin.Begin); // seek to the end of the object
}
finally
{
outSchema.PopSchema(schemaIdx);
}
}
}
else
{
// Object has already been written, write as handle.
outStream.WriteByte(BinaryUtils.HdrHnd);
outStream.WriteInt(outStartPos - hndPos);
}
// Synchronize input stream position.
inStream.Seek(inStartPos + inHeader.Length, SeekOrigin.Begin);
}
else
{
// Try writing as well-known type with fixed size.
outStream.WriteByte(inHdr);
if (!WriteAsPredefined(inHdr, inStream, outStream, ctx))
throw new IgniteException("Unexpected header [position=" + (inStream.Position - 1) +
", header=" + inHdr + ']');
}
}
/// <summary>
/// Writes the specified field: uses the field's custom write action when one is set,
/// otherwise falls back to the generic writer.
/// </summary>
private static void WriteField(Context ctx, BinaryBuilderField field)
{
    var writeAction = field.WriteAction;

    if (writeAction == null)
        ctx.Writer.Write(field.Value);
    else
        writeAction(ctx.Writer, field.Value);
}
/// <summary>
/// Process binary object inverting handles if needed.
/// </summary>
/// <param name="outStream">Output stream.</param>
/// <param name="port">Binary object.</param>
internal void ProcessBinary(IBinaryStream outStream, BinaryObject port)
{
    // Special case: re-serialize an existing binary object so that handles
    // remain valid relative to the new output position.
    using (var srcStream = new BinaryHeapStream(port.Data))
    {
        srcStream.Seek(port.Offset, SeekOrigin.Begin);

        // A fresh context guarantees position mappings are built from scratch.
        Mutate0(new Context(), srcStream, outStream, false, 0, EmptyVals);
    }
}
/// <summary>
/// Process child builder.
/// </summary>
/// <param name="outStream">Output stream.</param>
/// <param name="builder">Builder.</param>
internal void ProcessBuilder(IBinaryStream outStream, BinaryObjectBuilder builder)
{
    using (var srcStream = new BinaryHeapStream(builder._obj.Data))
    {
        srcStream.Seek(builder._obj.Offset, SeekOrigin.Begin);

        // Builder parent context might be null only in one case: if we never met this group of
        // builders before. In this case we set context to their parent and track it. Context
        // cleanup will be performed at the very end of build process.
        var childCtx = builder._parent._ctx;

        if (childCtx == null || childCtx.Closed)
            builder._parent._ctx = new Context(_parent._ctx);

        builder.Mutate(srcStream, (BinaryHeapStream) outStream, builder._desc,
            builder._hashCode, builder._vals);
    }
}
/// <summary>
/// Write object as a predefined type if possible.
/// </summary>
/// <param name="hdr">Header.</param>
/// <param name="inStream">Input stream.</param>
/// <param name="outStream">Output stream.</param>
/// <param name="ctx">Context.</param>
/// <returns><c>True</c> if was written.</returns>
private bool WriteAsPredefined(byte hdr, BinaryHeapStream inStream, IBinaryStream outStream,
    Context ctx)
{
    switch (hdr)
    {
        case BinaryUtils.TypeByte:
            TransferBytes(inStream, outStream, 1);

            break;

        case BinaryUtils.TypeShort:
            TransferBytes(inStream, outStream, 2);

            break;

        case BinaryUtils.TypeInt:
            TransferBytes(inStream, outStream, 4);

            break;

        case BinaryUtils.TypeLong:
            TransferBytes(inStream, outStream, 8);

            break;

        case BinaryUtils.TypeFloat:
            TransferBytes(inStream, outStream, 4);

            break;

        case BinaryUtils.TypeDouble:
            TransferBytes(inStream, outStream, 8);

            break;

        case BinaryUtils.TypeChar:
            TransferBytes(inStream, outStream, 2);

            break;

        case BinaryUtils.TypeBool:
            TransferBytes(inStream, outStream, 1);

            break;

        case BinaryUtils.TypeDecimal:
            TransferBytes(inStream, outStream, 4); // Transfer scale

            int magLen = inStream.ReadInt(); // Transfer magnitude length.

            outStream.WriteInt(magLen);

            TransferBytes(inStream, outStream, magLen); // Transfer magnitude.

            break;

        case BinaryUtils.TypeString:
            BinaryUtils.WriteString(BinaryUtils.ReadString(inStream), outStream);

            break;

        case BinaryUtils.TypeGuid:
            TransferBytes(inStream, outStream, 16);

            break;

        case BinaryUtils.TypeTimestamp:
            TransferBytes(inStream, outStream, 12);

            break;

        case BinaryUtils.TypeArrayByte:
            TransferArray(inStream, outStream, 1);

            break;

        case BinaryUtils.TypeArrayShort:
            TransferArray(inStream, outStream, 2);

            break;

        case BinaryUtils.TypeArrayInt:
            TransferArray(inStream, outStream, 4);

            break;

        case BinaryUtils.TypeArrayLong:
            TransferArray(inStream, outStream, 8);

            break;

        case BinaryUtils.TypeArrayFloat:
            TransferArray(inStream, outStream, 4);

            break;

        case BinaryUtils.TypeArrayDouble:
            TransferArray(inStream, outStream, 8);

            break;

        case BinaryUtils.TypeArrayChar:
            TransferArray(inStream, outStream, 2);

            break;

        case BinaryUtils.TypeArrayBool:
            TransferArray(inStream, outStream, 1);

            break;

        case BinaryUtils.TypeArrayDecimal:
        case BinaryUtils.TypeArrayString:
        case BinaryUtils.TypeArrayGuid:
        case BinaryUtils.TypeArrayTimestamp:
        case BinaryUtils.TypeArrayEnum:
        case BinaryUtils.TypeArray:
            int arrLen = inStream.ReadInt();

            outStream.WriteInt(arrLen);

            // FIX: pass EmptyVals instead of null. Mutate0 dereferences vals
            // unconditionally (vals.TryGetValue) when an element is a full object,
            // so a null here caused a NullReferenceException for object arrays.
            for (int i = 0; i < arrLen; i++)
                Mutate0(ctx, inStream, outStream, false, 0, EmptyVals);

            break;

        case BinaryUtils.TypeCollection:
            int colLen = inStream.ReadInt();

            outStream.WriteInt(colLen);

            outStream.WriteByte(inStream.ReadByte()); // Collection type byte.

            for (int i = 0; i < colLen; i++)
                Mutate0(ctx, inStream, outStream, false, 0, EmptyVals);

            break;

        case BinaryUtils.TypeDictionary:
            int dictLen = inStream.ReadInt();

            outStream.WriteInt(dictLen);

            outStream.WriteByte(inStream.ReadByte()); // Dictionary type byte.

            for (int i = 0; i < dictLen; i++)
            {
                // Key followed by value.
                Mutate0(ctx, inStream, outStream, false, 0, EmptyVals);
                Mutate0(ctx, inStream, outStream, false, 0, EmptyVals);
            }

            break;

        case BinaryUtils.TypeBinary:
            TransferArray(inStream, outStream, 1); // Data array.
            TransferBytes(inStream, outStream, 4); // Offset in array.

            break;

        case BinaryUtils.TypeEnum:
            TransferBytes(inStream, outStream, 4); // Integer ordinal.

            break;

        default:
            return false;
    }

    return true;
}
/// <summary>
/// Transfer bytes from one stream to another.
/// </summary>
/// <param name="inStream">Input stream.</param>
/// <param name="outStream">Output stream.</param>
/// <param name="cnt">Bytes count.</param>
private static void TransferBytes(BinaryHeapStream inStream, IBinaryStream outStream, int cnt)
{
    // Copy directly out of the heap stream's backing array, then advance the source.
    int srcPos = inStream.Position;

    outStream.Write(inStream.InternalArray, srcPos, cnt);

    inStream.Seek(cnt, SeekOrigin.Current);
}
/// <summary>
/// Transfer array of fixed-size elements from one stream to another.
/// </summary>
/// <param name="inStream">Input stream.</param>
/// <param name="outStream">Output stream.</param>
/// <param name="elemSize">Element size.</param>
private static void TransferArray(BinaryHeapStream inStream, IBinaryStream outStream,
    int elemSize)
{
    // Length prefix first, then the raw payload of len * elemSize bytes.
    int elemCount = inStream.ReadInt();

    outStream.WriteInt(elemCount);

    TransferBytes(inStream, outStream, elemCount * elemSize);
}
/// <summary>
/// Mutation context. Tracks old-to-new position mappings for a single build pass and
/// links child builder contexts to their parent so they can be closed together.
/// </summary>
private class Context
{
    /** Map from object position in old binary to position in new binary. */
    private IDictionary<int, int> _oldToNew;

    /** Parent context. */
    private readonly Context _parent;

    /** Binary writer. */
    private readonly BinaryWriter _writer;

    /** Children contexts. */
    private ICollection<Context> _children;

    /** Closed flag; if context is closed, it can no longer be used. */
    private bool _closed;

    /// <summary>
    /// Constructor for parent context where writer invocation is not expected.
    /// </summary>
    public Context()
    {
        // No-op.
    }

    /// <summary>
    /// Constructor for parent context.
    /// </summary>
    /// <param name="writer">Writer</param>
    public Context(BinaryWriter writer)
    {
        _writer = writer;
    }

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="parent">Parent context.</param>
    public Context(Context parent)
    {
        _parent = parent;

        // Share the parent's writer and register this context as a child so that
        // closing the parent chain propagates to it.
        _writer = parent._writer;

        if (parent._children == null)
            parent._children = new List<Context>();

        parent._children.Add(this);
    }

    /// <summary>
    /// Add another old-to-new position mapping.
    /// </summary>
    /// <param name="oldPos">Old position.</param>
    /// <param name="newPos">New position.</param>
    /// <param name="hndPos">Handle position.</param>
    /// <returns><c>True</c> if mapping was added, <c>false</c> if mapping already existed and handle
    /// position in the new object is returned.</returns>
    public bool AddOldToNew(int oldPos, int newPos, out int hndPos)
    {
        if (_oldToNew == null)
            _oldToNew = new Dictionary<int, int>();

        if (_oldToNew.TryGetValue(oldPos, out hndPos))
            return false;

        _oldToNew[oldPos] = newPos;

        return true;
    }

    /// <summary>
    /// Get mapping of old position to the new one.
    /// </summary>
    /// <param name="oldPos">Old position.</param>
    /// <param name="newPos">New position.</param>
    /// <returns><c>True</c> if mapping exists.</returns>
    public bool OldToNew(int oldPos, out int newPos)
    {
        // NOTE(review): assumes AddOldToNew ran at least once before (a handle in the
        // binary always follows a previously written full object), otherwise _oldToNew
        // is null here — confirm this invariant holds for all callers.
        return _oldToNew.TryGetValue(oldPos, out newPos);
    }

    /// <summary>
    /// Writer.
    /// </summary>
    public BinaryWriter Writer
    {
        get { return _writer; }
    }

    /// <summary>
    /// Closed flag. Setting it propagates the value up the parent chain.
    /// </summary>
    public bool Closed
    {
        get
        {
            return _closed;
        }
        set
        {
            Context ctx = this;

            while (ctx != null)
            {
                ctx._closed = value;

                // NOTE(review): this reads _children of *this* instance, not ctx._children,
                // on every iteration of the walk up the parent chain — confirm that is
                // intentional (it also prevents unbounded re-entrant propagation, since a
                // child's own recursive call sees its own, typically null, _children).
                if (_children != null) {
                    foreach (Context child in _children)
                        child.Closed = value;
                }

                ctx = ctx._parent;
            }
        }
    }
}
}
}
| |
using System;
using System.Diagnostics;
using System.Linq.Expressions;
using FluentAssertions.Common;
using FluentAssertions.Execution;
namespace FluentAssertions.Primitives
{
#pragma warning disable CS0659 // Ignore not overriding Object.GetHashCode()
#pragma warning disable CA1065 // Ignore throwing NotSupportedException from Equals
/// <summary>
/// Contains a number of methods to assert that a reference type object is in the expected state.
/// </summary>
[DebuggerNonUserCode]
public abstract class ReferenceTypeAssertions<TSubject, TAssertions>
    where TAssertions : ReferenceTypeAssertions<TSubject, TAssertions>
{
    protected ReferenceTypeAssertions(TSubject subject)
    {
        Subject = subject;
    }

    /// <summary>
    /// Gets the object which value is being asserted.
    /// </summary>
    public TSubject Subject { get; }

    /// <summary>
    /// Asserts that the current object has not been initialized yet.
    /// </summary>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    public AndConstraint<TAssertions> BeNull(string because = "", params object[] becauseArgs)
    {
        Execute.Assertion
            .ForCondition(Subject is null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier(Identifier)
            .FailWith("Expected {context} to be <null>{reason}, but found {0}.", Subject);

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the current object has been initialized.
    /// </summary>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    public AndConstraint<TAssertions> NotBeNull(string because = "", params object[] becauseArgs)
    {
        Execute.Assertion
            .ForCondition(Subject is not null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier(Identifier)
            .FailWith("Expected {context} not to be <null>{reason}.");

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that an object reference refers to the exact same object as another object reference.
    /// </summary>
    /// <param name="expected">The expected object</param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because"/>.
    /// </param>
    public AndConstraint<TAssertions> BeSameAs(TSubject expected, string because = "", params object[] becauseArgs)
    {
        Execute.Assertion
            .UsingLineBreaks
            .ForCondition(ReferenceEquals(Subject, expected))
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier(Identifier)
            .FailWith("Expected {context} to refer to {0}{reason}, but found {1}.", expected, Subject);

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that an object reference refers to a different object than another object reference refers to.
    /// </summary>
    /// <param name="unexpected">The unexpected object</param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because"/>.
    /// </param>
    public AndConstraint<TAssertions> NotBeSameAs(TSubject unexpected, string because = "", params object[] becauseArgs)
    {
        Execute.Assertion
            .UsingLineBreaks
            .ForCondition(!ReferenceEquals(Subject, unexpected))
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier(Identifier)
            .FailWith("Did not expect {context} to refer to {0}{reason}.", unexpected);

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the object is of the specified type <typeparamref name="T"/>.
    /// </summary>
    /// <typeparam name="T">The expected type of the object.</typeparam>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    public AndWhichConstraint<TAssertions, T> BeOfType<T>(string because = "", params object[] becauseArgs)
    {
        BeOfType(typeof(T), because, becauseArgs);

        // typedSubject falls back to default when the cast fails; this only happens when
        // the assertion above already failed inside an assertion scope that defers failures.
        T typedSubject = (Subject is T type)
            ? type
            : default;

        return new AndWhichConstraint<TAssertions, T>((TAssertions)this, typedSubject);
    }

    /// <summary>
    /// Asserts that the object is of the <paramref name="expectedType"/>.
    /// </summary>
    /// <param name="expectedType">
    /// The type that the subject is supposed to be.
    /// </param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    /// <exception cref="ArgumentNullException"><paramref name="expectedType"/> is <c>null</c>.</exception>
    public AndConstraint<TAssertions> BeOfType(Type expectedType, string because = "", params object[] becauseArgs)
    {
        Guard.ThrowIfArgumentIsNull(expectedType, nameof(expectedType));

        // Null subject is reported first; the type comparison only runs on success.
        bool success = Execute.Assertion
            .ForCondition(Subject is not null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier("type")
            .FailWith("Expected {context} to be {0}{reason}, but found <null>.", expectedType);

        if (success)
        {
            Type subjectType = Subject.GetType();

            // An open generic expectation (e.g. typeof(List<>)) is compared against the
            // subject's generic type definition rather than its closed type.
            if (expectedType.IsGenericTypeDefinition && subjectType.IsGenericType)
            {
                subjectType.GetGenericTypeDefinition().Should().Be(expectedType, because, becauseArgs);
            }
            else
            {
                subjectType.Should().Be(expectedType, because, becauseArgs);
            }
        }

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the object is not of the specified type <typeparamref name="T"/>.
    /// </summary>
    /// <typeparam name="T">The type that the subject is not supposed to be.</typeparam>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    public AndConstraint<TAssertions> NotBeOfType<T>(string because = "", params object[] becauseArgs)
    {
        NotBeOfType(typeof(T), because, becauseArgs);

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the object is not the <paramref name="unexpectedType"/>.
    /// </summary>
    /// <param name="unexpectedType">
    /// The type that the subject is not supposed to be.
    /// </param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    /// <exception cref="ArgumentNullException"><paramref name="unexpectedType"/> is <c>null</c>.</exception>
    public AndConstraint<TAssertions> NotBeOfType(Type unexpectedType, string because = "", params object[] becauseArgs)
    {
        Guard.ThrowIfArgumentIsNull(unexpectedType, nameof(unexpectedType));

        bool success = Execute.Assertion
            .ForCondition(Subject is not null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier("type")
            .FailWith("Expected {context} not to be {0}{reason}, but found <null>.", unexpectedType);

        if (success)
        {
            Type subjectType = Subject.GetType();

            // Mirror of BeOfType: open generic expectations compare against the
            // subject's generic type definition.
            if (unexpectedType.IsGenericTypeDefinition && subjectType.IsGenericType)
            {
                subjectType.GetGenericTypeDefinition().Should().NotBe(unexpectedType, because, becauseArgs);
            }
            else
            {
                subjectType.Should().NotBe(unexpectedType, because, becauseArgs);
            }
        }

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the object is assignable to a variable of type <typeparamref name="T"/>.
    /// </summary>
    /// <typeparam name="T">The type to which the object should be assignable to.</typeparam>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    /// <returns>An <see cref="AndWhichConstraint{TAssertions, T}"/> which can be used to chain assertions.</returns>
    public AndWhichConstraint<TAssertions, T> BeAssignableTo<T>(string because = "", params object[] becauseArgs)
    {
        bool success = Execute.Assertion
            .ForCondition(Subject is not null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier("type")
            .FailWith("Expected {context} to be assignable to {0}{reason}, but found <null>.", typeof(T));

        if (success)
        {
            Execute.Assertion
                .ForCondition(Subject is T)
                .BecauseOf(because, becauseArgs)
                .WithDefaultIdentifier(Identifier)
                .FailWith("Expected {context} to be assignable to {0}{reason}, but {1} is not.", typeof(T), Subject.GetType());
        }

        // default when not assignable: only reachable inside a deferred assertion scope.
        T typedSubject = (Subject is T type)
            ? type
            : default;

        return new AndWhichConstraint<TAssertions, T>((TAssertions)this, typedSubject);
    }

    /// <summary>
    /// Asserts that the object is assignable to a variable of given <paramref name="type"/>.
    /// </summary>
    /// <param name="type">The type to which the object should be assignable to.</param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because"/>.
    /// </param>
    /// <returns>An <see cref="AndConstraint{TAssertions}"/> which can be used to chain assertions.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="type"/> is <c>null</c>.</exception>
    public AndConstraint<TAssertions> BeAssignableTo(Type type, string because = "", params object[] becauseArgs)
    {
        Guard.ThrowIfArgumentIsNull(type, nameof(type));

        bool success = Execute.Assertion
            .ForCondition(Subject is not null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier("type")
            .FailWith("Expected {context} to be assignable to {0}{reason}, but found <null>.", type);

        if (success)
        {
            // Open generic types need the dedicated open-generic check; a plain
            // IsAssignableFrom would always fail for them.
            bool isAssignable = type.IsGenericTypeDefinition
                ? Subject.GetType().IsAssignableToOpenGeneric(type)
                : type.IsAssignableFrom(Subject.GetType());

            Execute.Assertion
                .ForCondition(isAssignable)
                .BecauseOf(because, becauseArgs)
                .WithDefaultIdentifier(Identifier)
                .FailWith("Expected {context} to be assignable to {0}{reason}, but {1} is not.",
                    type,
                    Subject.GetType());
        }

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the object is not assignable to a variable of type <typeparamref name="T"/>.
    /// </summary>
    /// <typeparam name="T">The type to which the object should not be assignable to.</typeparam>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    /// <returns>An <see cref="AndConstraint{TAssertions}"/> which can be used to chain assertions.</returns>
    public AndConstraint<TAssertions> NotBeAssignableTo<T>(string because = "", params object[] becauseArgs)
    {
        return NotBeAssignableTo(typeof(T), because, becauseArgs);
    }

    /// <summary>
    /// Asserts that the object is not assignable to a variable of given <paramref name="type"/>.
    /// </summary>
    /// <param name="type">The type to which the object should not be assignable to.</param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because"/>.
    /// </param>
    /// <returns>An <see cref="AndConstraint{TAssertions}"/> which can be used to chain assertions.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="type"/> is <c>null</c>.</exception>
    public AndConstraint<TAssertions> NotBeAssignableTo(Type type, string because = "", params object[] becauseArgs)
    {
        Guard.ThrowIfArgumentIsNull(type, nameof(type));

        bool success = Execute.Assertion
            .ForCondition(Subject is not null)
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier("type")
            .FailWith("Expected {context} to not be assignable to {0}{reason}, but found <null>.", type);

        if (success)
        {
            bool isAssignable = type.IsGenericTypeDefinition
                ? Subject.GetType().IsAssignableToOpenGeneric(type)
                : type.IsAssignableFrom(Subject.GetType());

            Execute.Assertion
                .ForCondition(!isAssignable)
                .BecauseOf(because, becauseArgs)
                .WithDefaultIdentifier(Identifier)
                .FailWith("Expected {context} to not be assignable to {0}{reason}, but {1} is.", type, Subject.GetType());
        }

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Asserts that the <paramref name="predicate" /> is satisfied.
    /// </summary>
    /// <param name="predicate">The predicate which must be satisfied by the <typeparamref name="TSubject" />.</param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    /// <returns>An <see cref="AndConstraint{T}" /> which can be used to chain assertions.</returns>
    public AndConstraint<TAssertions> Match(Expression<Func<TSubject, bool>> predicate,
        string because = "",
        params object[] becauseArgs)
    {
        return Match<TSubject>(predicate, because, becauseArgs);
    }

    /// <summary>
    /// Asserts that the <paramref name="predicate" /> is satisfied.
    /// </summary>
    /// <param name="predicate">The predicate which must be satisfied by the <typeparamref name="TSubject" />.</param>
    /// <param name="because">
    /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
    /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
    /// </param>
    /// <param name="becauseArgs">
    /// Zero or more objects to format using the placeholders in <paramref name="because" />.
    /// </param>
    /// <returns>An <see cref="AndConstraint{T}" /> which can be used to chain assertions.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="predicate"/> is <c>null</c>.</exception>
    public AndConstraint<TAssertions> Match<T>(Expression<Func<T, bool>> predicate,
        string because = "",
        params object[] becauseArgs)
        where T : TSubject
    {
        Guard.ThrowIfArgumentIsNull(predicate, nameof(predicate), "Cannot match an object against a <null> predicate.");

        // The expression (not just a delegate) is taken so the failure message can
        // render the predicate's source text ({1} below).
        Execute.Assertion
            .ForCondition(predicate.Compile()((T)Subject))
            .BecauseOf(because, becauseArgs)
            .WithDefaultIdentifier(Identifier)
            .FailWith("Expected {context:object} to match {1}{reason}, but found {0}.", Subject, predicate);

        return new AndConstraint<TAssertions>((TAssertions)this);
    }

    /// <summary>
    /// Returns the type of the subject the assertion applies on.
    /// It should be a user-friendly name as it is included in the failure message.
    /// </summary>
    protected abstract string Identifier { get; }

    /// <inheritdoc/>
    // Deliberately unusable: comparing assertion objects is almost certainly a mistake
    // (the caller meant to compare subjects); the file-level pragmas suppress CS0659/CA1065.
    public override bool Equals(object obj) =>
        throw new NotSupportedException("Calling Equals on Assertion classes is not supported.");
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Security;
using Xunit;
[assembly: System.Reflection.CustomAttributesTests.Data.Attr(77, name = "AttrSimple")]
[assembly: System.Reflection.CustomAttributesTests.Data.Int32Attr(77, name = "Int32AttrSimple"),
System.Reflection.CustomAttributesTests.Data.Int64Attr((long)77, name = "Int64AttrSimple"),
System.Reflection.CustomAttributesTests.Data.StringAttr("hello", name = "StringAttrSimple"),
System.Reflection.CustomAttributesTests.Data.EnumAttr(System.Reflection.CustomAttributesTests.Data.MyColorEnum.RED, name = "EnumAttrSimple"),
System.Reflection.CustomAttributesTests.Data.TypeAttr(typeof(object), name = "TypeAttrSimple")]
[assembly: System.Runtime.CompilerServices.CompilationRelaxationsAttribute((int)8)]
[assembly: System.Diagnostics.Debuggable((System.Diagnostics.DebuggableAttribute.DebuggingModes)263)]
[assembly: System.CLSCompliant(false)]
namespace System.Reflection.Tests
{
public class AssemblyTests : FileCleanupTestBase
{
// Path of the pre-built TestAssembly.dll sitting next to the currently executing tests.
private string SourceTestAssemblyPath { get; } = Path.Combine(Environment.CurrentDirectory, "TestAssembly.dll");

// Copy of TestAssembly.dll inside the per-run test directory (assigned in the constructor).
private string DestTestAssemblyPath { get; }

// Copy of System.Runtime.Tests.dll used by load-from tests (assigned in the constructor).
private string LoadFromTestPath { get; }
/// <summary>
/// Copies the test assemblies into the base class's test directory so the tests can
/// load them without touching the originals.
/// </summary>
public AssemblyTests()
{
    // Assembly.Location not supported (properly) on uapaot.
    DestTestAssemblyPath = Path.Combine(base.TestDirectory, "TestAssembly.dll");
    LoadFromTestPath = Path.Combine(base.TestDirectory, "System.Runtime.Tests.dll");

    // There is no dll to copy in ILC runs
    if (!PlatformDetection.IsNetNative)
    {
        File.Copy(SourceTestAssemblyPath, DestTestAssemblyPath);

        // Overwrite is allowed for this copy (true) since the file may persist between runs.
        string currAssemblyPath = Path.Combine(Environment.CurrentDirectory, "System.Runtime.Tests.dll");
        File.Copy(currAssemblyPath, LoadFromTestPath, true);
    }
}
public static IEnumerable<object[]> Equality_TestData()
{
    // Loads a fresh Assembly reference by round-tripping the assembly name of the given type.
    Assembly LoadByName(Type type) =>
        Assembly.Load(new AssemblyName(type.GetTypeInfo().Assembly.FullName));

    yield return new object[] { LoadByName(typeof(int)), LoadByName(typeof(int)), true };
    yield return new object[] { LoadByName(typeof(List<int>)), LoadByName(typeof(List<int>)), true };
    yield return new object[] { LoadByName(typeof(List<int>)), typeof(AssemblyTests).Assembly, false };
}
[Theory]
[MemberData(nameof(Equality_TestData))]
public void Equality(Assembly assembly1, Assembly assembly2, bool expected)
{
    // The == and != operators must be exact mirror images of each other.
    bool areEqual = assembly1 == assembly2;
    bool areNotEqual = assembly1 != assembly2;

    Assert.Equal(expected, areEqual);
    Assert.NotEqual(expected, areNotEqual);
}
[Fact]
public void GetAssembly_Nullery()
{
AssertExtensions.Throws<ArgumentNullException>("type", () => Assembly.GetAssembly(null));
}
public static IEnumerable<object[]> GetAssembly_TestData()
{
yield return new object[] { Assembly.Load(new AssemblyName(typeof(HashSet<int>).GetTypeInfo().Assembly.FullName)), Assembly.GetAssembly(typeof(HashSet<int>)), true };
yield return new object[] { Assembly.Load(new AssemblyName(typeof(int).GetTypeInfo().Assembly.FullName)), Assembly.GetAssembly(typeof(int)), true };
yield return new object[] { typeof(AssemblyTests).Assembly, Assembly.GetAssembly(typeof(AssemblyTests)), true };
}
[Theory]
[MemberData(nameof(GetAssembly_TestData))]
public void GetAssembly(Assembly assembly1, Assembly assembly2, bool expected)
{
Assert.Equal(expected, assembly1.Equals(assembly2));
}
public static IEnumerable<object[]> GetCallingAssembly_TestData()
{
yield return new object[] { typeof(AssemblyTests).Assembly, GetGetCallingAssembly(), true };
yield return new object[] { Assembly.GetCallingAssembly(), GetGetCallingAssembly(), false };
}
[Theory]
[MemberData(nameof(GetCallingAssembly_TestData))]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "GetCallingAssembly() is not supported on UapAot")]
public void GetCallingAssembly(Assembly assembly1, Assembly assembly2, bool expected)
{
Assert.Equal(expected, assembly1.Equals(assembly2));
}
[Fact]
public void GetExecutingAssembly()
{
Assert.True(typeof(AssemblyTests).Assembly.Equals(Assembly.GetExecutingAssembly()));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.GetSatelliteAssembly() not supported on UapAot")]
public void GetSatelliteAssemblyNeg()
{
Assert.Throws<ArgumentNullException>(() => (typeof(AssemblyTests).Assembly.GetSatelliteAssembly(null)));
Assert.Throws<System.IO.FileNotFoundException>(() => (typeof(AssemblyTests).Assembly.GetSatelliteAssembly(CultureInfo.InvariantCulture)));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.Load(String) not supported on UapAot")]
public void AssemblyLoadFromString()
{
AssemblyName an = typeof(AssemblyTests).Assembly.GetName();
string fullName = an.FullName;
string simpleName = an.Name;
Assembly a1 = Assembly.Load(fullName);
Assert.NotNull(a1);
Assert.Equal(fullName, a1.GetName().FullName);
Assembly a2 = Assembly.Load(simpleName);
Assert.NotNull(a2);
Assert.Equal(fullName, a2.GetName().FullName);
}
[Fact]
public void AssemblyLoadFromStringNeg()
{
    // A null display name is rejected with ArgumentNullException...
    Assert.Throws<ArgumentNullException>(() => Assembly.Load((string)null));

    // ...while an empty name and a name consisting of a single NUL character
    // are both rejected with ArgumentException.
    string nulOnlyName = new string('\0', 1);
    AssertExtensions.Throws<ArgumentException>(null, () => Assembly.Load(string.Empty));
    AssertExtensions.Throws<ArgumentException>(null, () => Assembly.Load(nulOnlyName));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.Load(byte[]) not supported on UapAot")]
public void AssemblyLoadFromBytes()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
Assembly loadedAssembly = Assembly.Load(aBytes);
Assert.NotNull(loadedAssembly);
Assert.Equal(assembly.FullName, loadedAssembly.FullName);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.Load(byte[]) not supported on UapAot")]
public void AssemblyLoadFromBytesNeg()
{
Assert.Throws<ArgumentNullException>(() => Assembly.Load((byte[])null));
Assert.Throws<BadImageFormatException>(() => Assembly.Load(new byte[0]));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.Load(byte[]) not supported on UapAot")]
public void AssemblyLoadFromBytesWithSymbols()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
byte[] symbols = System.IO.File.ReadAllBytes((System.IO.Path.ChangeExtension(assembly.Location, ".pdb")));
Assembly loadedAssembly = Assembly.Load(aBytes, symbols);
Assert.NotNull(loadedAssembly);
Assert.Equal(assembly.FullName, loadedAssembly.FullName);
}
// Fix: the [Fact] attribute was missing, so xUnit never discovered or ran
// this test. Verifies that ReflectionOnlyLoad(string) throws on .NET Core.
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.ReflectionOnlyLoad() not supported on UapAot")]
public void AssemblyReflectionOnlyLoadFromString()
{
    AssemblyName an = typeof(AssemblyTests).Assembly.GetName();
    Assert.Throws<NotSupportedException>(() => Assembly.ReflectionOnlyLoad(an.FullName));
}
// Fix: the [Fact] attribute was missing, so xUnit never discovered or ran
// this test. Verifies that ReflectionOnlyLoad(byte[]) throws on .NET Core.
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.ReflectionOnlyLoad() not supported on UapAot")]
public void AssemblyReflectionOnlyLoadFromBytes()
{
    Assembly assembly = typeof(AssemblyTests).Assembly;
    byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
    Assert.Throws<NotSupportedException>(() => Assembly.ReflectionOnlyLoad(aBytes));
}
// Fix: the [Fact] attribute was missing, so xUnit never discovered or ran
// this test. Verifies the argument-validation paths of ReflectionOnlyLoad:
// null string -> ArgumentNullException, empty string -> ArgumentException,
// null byte[] -> ArgumentNullException.
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.ReflectionOnlyLoad() not supported on UapAot")]
public void AssemblyReflectionOnlyLoadFromNeg()
{
    Assert.Throws<ArgumentNullException>(() => Assembly.ReflectionOnlyLoad((string)null));
    AssertExtensions.Throws<ArgumentException>(null, () => Assembly.ReflectionOnlyLoad(string.Empty));
    Assert.Throws<ArgumentNullException>(() => Assembly.ReflectionOnlyLoad((byte[])null));
}
public static IEnumerable<object[]> GetModules_TestData()
{
yield return new object[] { LoadSystemCollectionsAssembly() };
yield return new object[] { LoadSystemReflectionAssembly() };
}
[Theory]
[MemberData(nameof(GetModules_TestData))]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.GetModules() is not supported on UapAot.")]
public void GetModules_GetModule(Assembly assembly)
{
Assert.NotEmpty(assembly.GetModules());
foreach (Module module in assembly.GetModules())
{
Assert.Equal(module, assembly.GetModule(module.ToString()));
}
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.GetLoadedModules() is not supported on UapAot.")]
public void GetLoadedModules()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
Assert.NotEmpty(assembly.GetLoadedModules());
foreach (Module module in assembly.GetLoadedModules())
{
Assert.NotNull(module);
Assert.Equal(module, assembly.GetModule(module.ToString()));
}
}
public static IEnumerable<object[]> CreateInstance_TestData()
{
yield return new object[] { typeof(AssemblyTests).Assembly, typeof(AssemblyPublicClass).FullName, BindingFlags.CreateInstance, typeof(AssemblyPublicClass) };
yield return new object[] { typeof(int).Assembly, typeof(int).FullName, BindingFlags.Default, typeof(int) };
yield return new object[] { typeof(int).Assembly, typeof(Dictionary<int, string>).FullName, BindingFlags.Default, typeof(Dictionary<int, string>) };
}
[Theory]
[MemberData(nameof(CreateInstance_TestData))]
public void CreateInstance(Assembly assembly, string typeName, BindingFlags bindingFlags, Type expectedType)
{
Assert.IsType(expectedType, assembly.CreateInstance(typeName, true, bindingFlags, null, null, null, null));
Assert.IsType(expectedType, assembly.CreateInstance(typeName, false, bindingFlags, null, null, null, null));
}
public static IEnumerable<object[]> CreateInstance_Invalid_TestData()
{
yield return new object[] { "", typeof(ArgumentException) };
yield return new object[] { null, typeof(ArgumentNullException) };
yield return new object[] { typeof(AssemblyClassWithPrivateCtor).FullName, typeof(MissingMethodException) };
}
[Theory]
[MemberData(nameof(CreateInstance_Invalid_TestData))]
public void CreateInstance_Invalid(string typeName, Type exceptionType)
{
Assembly assembly = typeof(AssemblyTests).Assembly;
Assert.Throws(exceptionType, () => assembly.CreateInstance(typeName, true, BindingFlags.Public, null, null, null, null));
Assert.Throws(exceptionType, () => assembly.CreateInstance(typeName, false, BindingFlags.Public, null, null, null, null));
}
[Fact]
public void GetManifestResourceStream()
{
Assert.NotNull(typeof(AssemblyTests).Assembly.GetManifestResourceStream(typeof(AssemblyTests), "EmbeddedImage.png"));
Assert.NotNull(typeof(AssemblyTests).Assembly.GetManifestResourceStream(typeof(AssemblyTests), "EmbeddedTextFile.txt"));
Assert.Null(typeof(AssemblyTests).Assembly.GetManifestResourceStream(typeof(AssemblyTests), "IDontExist"));
}
[Fact]
public void Test_GlobalAssemblyCache()
{
Assert.False(typeof(AssemblyTests).Assembly.GlobalAssemblyCache);
}
[Fact]
public void Test_HostContext()
{
Assert.Equal(0, typeof(AssemblyTests).Assembly.HostContext);
}
[Fact]
public void Test_IsFullyTrusted()
{
Assert.True(typeof(AssemblyTests).Assembly.IsFullyTrusted);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "The full .NET Framework supports SecurityRuleSet")]
public void Test_SecurityRuleSet_Netcore()
{
Assert.Equal(SecurityRuleSet.None, typeof(AssemblyTests).Assembly.SecurityRuleSet);
}
[Fact]
[SkipOnTargetFramework(~TargetFrameworkMonikers.NetFramework, "SecurityRuleSet is ignored in .NET Core")]
public void Test_SecurityRuleSet_Netfx()
{
Assert.Equal(SecurityRuleSet.Level2, typeof(AssemblyTests).Assembly.SecurityRuleSet);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.LoadFile() not supported on UapAot")]
// Verifies Assembly.LoadFile identity semantics, which differ by runtime:
// the assertions below encode NetFX vs .NET Core behavior for the same path
// and for a textually different but equivalent path.
public void Test_LoadFile()
{
Assembly currentAssembly = typeof(AssemblyTests).Assembly;
const string RuntimeTestsDll = "System.Runtime.Tests.dll";
string fullRuntimeTestsPath = Path.GetFullPath(RuntimeTestsDll);
var loadedAssembly1 = Assembly.LoadFile(fullRuntimeTestsPath);
if (PlatformDetection.IsFullFramework)
{
// NetFX: LoadFile of the canonical path unifies with the already-loaded assembly.
Assert.Equal(currentAssembly, loadedAssembly1);
}
else
{
// .NET Core: LoadFile loads into a separate context, so identities differ.
Assert.NotEqual(currentAssembly, loadedAssembly1);
}
// Build an equivalent but textually different path: <dir>\.\System.Runtime.Tests.dll
string dir = Path.GetDirectoryName(fullRuntimeTestsPath);
fullRuntimeTestsPath = Path.Combine(dir, ".", RuntimeTestsDll);
Assembly loadedAssembly2 = Assembly.LoadFile(fullRuntimeTestsPath);
if (PlatformDetection.IsFullFramework)
{
// NetFX: the "." variant yields a distinct assembly — apparently keyed on the
// literal path string rather than the normalized one (TODO confirm).
Assert.NotEqual(loadedAssembly1, loadedAssembly2);
}
else
{
// .NET Core: both paths resolve to the same loaded assembly.
Assert.Equal(loadedAssembly1, loadedAssembly2);
}
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework | TargetFrameworkMonikers.Uap, "The full .NET Framework has a bug and throws a NullReferenceException")]
public void LoadFile_NullPath_Netcore_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("path", () => Assembly.LoadFile(null));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Assembly.LoadFile() not supported on UapAot")]
public void LoadFile_NoSuchPath_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("path", null, () => Assembly.LoadFile("System.Runtime.Tests.dll"));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework | TargetFrameworkMonikers.UapAot, "The full .NET Framework supports Assembly.LoadFrom")]
public void Test_LoadFromUsingHashValue_Netcore()
{
Assert.Throws<NotSupportedException>(() => Assembly.LoadFrom("abc", null, System.Configuration.Assemblies.AssemblyHashAlgorithm.SHA1));
}
[Fact]
[SkipOnTargetFramework(~TargetFrameworkMonikers.NetFramework, "The implementation of Assembly.LoadFrom is stubbed out in .NET Core")]
public void Test_LoadFromUsingHashValue_Netfx()
{
Assert.Throws<FileNotFoundException>(() => Assembly.LoadFrom("abc", null, System.Configuration.Assemblies.AssemblyHashAlgorithm.SHA1));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework | TargetFrameworkMonikers.UapAot, "The full .NET Framework supports more than one module per assembly")]
public void Test_LoadModule_Netcore()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
Assert.Throws<NotImplementedException>(() => assembly.LoadModule("abc", null));
Assert.Throws<NotImplementedException>(() => assembly.LoadModule("abc", null, null));
}
[Fact]
[SkipOnTargetFramework(~TargetFrameworkMonikers.NetFramework, "The coreclr doesn't support more than one module per assembly")]
public void Test_LoadModule_Netfx()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
AssertExtensions.Throws<ArgumentNullException>(null, () => assembly.LoadModule("abc", null));
AssertExtensions.Throws<ArgumentNullException>(null, () => assembly.LoadModule("abc", null, null));
}
#pragma warning disable 618
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.LoadFromWithPartialName() not supported on UapAot")]
public void Test_LoadWithPartialName()
{
string simplename = typeof(AssemblyTests).Assembly.GetName().Name;
var assem = Assembly.LoadWithPartialName(simplename);
Assert.Equal(typeof(AssemblyTests).Assembly, assem);
}
#pragma warning restore 618
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.LoadFrom() not supported on UapAot")]
public void LoadFrom_SamePath_ReturnsEqualAssemblies()
{
Assembly assembly1 = Assembly.LoadFrom(DestTestAssemblyPath);
Assembly assembly2 = Assembly.LoadFrom(DestTestAssemblyPath);
Assert.Equal(assembly1, assembly2);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.LoadFrom() not supported on UapAot")]
public void LoadFrom_SameIdentityAsAssemblyWithDifferentPath_ReturnsEqualAssemblies()
{
Assembly assembly1 = Assembly.LoadFrom(typeof(AssemblyTests).Assembly.Location);
Assert.Equal(assembly1, typeof(AssemblyTests).Assembly);
Assembly assembly2 = Assembly.LoadFrom(LoadFromTestPath);
if (PlatformDetection.IsFullFramework)
{
Assert.NotEqual(assembly1, assembly2);
}
else
{
Assert.Equal(assembly1, assembly2);
}
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.LoadFrom() not supported on UapAot")]
public void LoadFrom_NullAssemblyFile_ThrowsArgumentNullException()
{
AssertExtensions.Throws<ArgumentNullException>("assemblyFile", () => Assembly.LoadFrom(null));
AssertExtensions.Throws<ArgumentNullException>("assemblyFile", () => Assembly.UnsafeLoadFrom(null));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.LoadFrom() not supported on UapAot")]
public void LoadFrom_EmptyAssemblyFile_ThrowsArgumentException()
{
AssertExtensions.Throws<ArgumentException>("path", null, (() => Assembly.LoadFrom("")));
AssertExtensions.Throws<ArgumentException>("path", null, (() => Assembly.UnsafeLoadFrom("")));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.LoadFrom() not supported on UapAot")]
public void LoadFrom_NoSuchFile_ThrowsFileNotFoundException()
{
Assert.Throws<FileNotFoundException>(() => Assembly.LoadFrom("NoSuchPath"));
Assert.Throws<FileNotFoundException>(() => Assembly.UnsafeLoadFrom("NoSuchPath"));
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.UnsafeLoadFrom() not supported on UapAot")]
public void UnsafeLoadFrom_SamePath_ReturnsEqualAssemblies()
{
Assembly assembly1 = Assembly.UnsafeLoadFrom(DestTestAssemblyPath);
Assembly assembly2 = Assembly.UnsafeLoadFrom(DestTestAssemblyPath);
Assert.Equal(assembly1, assembly2);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework | TargetFrameworkMonikers.UapAot, "The implementation of LoadFrom(string, byte[], AssemblyHashAlgorithm is not supported in .NET Core.")]
public void LoadFrom_WithHashValue_NetCoreCore_ThrowsNotSupportedException()
{
Assert.Throws<NotSupportedException>(() => Assembly.LoadFrom(DestTestAssemblyPath, new byte[0], Configuration.Assemblies.AssemblyHashAlgorithm.None));
}
// Verifies Assembly.GetFile: null -> ArgumentNullException, "" -> ArgumentException,
// a missing file -> null, and the test assembly's own module resolves to its Location.
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.GetFile() not supported on UapAot")]
public void GetFile()
{
    Assembly assembly = typeof(AssemblyTests).Assembly;
    Assert.Throws<ArgumentNullException>(() => assembly.GetFile(null));
    AssertExtensions.Throws<ArgumentException>(null, () => assembly.GetFile(""));
    Assert.Null(assembly.GetFile("NonExistentfile.dll"));
    Assert.NotNull(assembly.GetFile("System.Runtime.Tests.dll"));
    // Fix: xUnit's Assert.Equal takes (expected, actual); the original call
    // had the arguments swapped, which would produce a misleading failure message.
    Assert.Equal(assembly.Location, assembly.GetFile("System.Runtime.Tests.dll").Name);
}
// Verifies Assembly.GetFiles: a single-module test assembly reports exactly
// one file, whose name is the assembly's Location.
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.GetFiles() not supported on UapAot")]
public void GetFiles()
{
    // Call GetFiles() once instead of three times.
    FileStream[] files = typeof(AssemblyTests).Assembly.GetFiles();
    Assert.NotNull(files);
    // Fix: Assert.Equal takes (expected, actual); both original assertions
    // had the arguments swapped (xUnit2000-style defect).
    Assert.Equal(1, files.Length);
    Assert.Equal(typeof(AssemblyTests).Assembly.Location, files[0].Name);
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.UapAot, "Assembly.CodeBase not supported on UapAot")]
public void Load_AssemblyNameWithCodeBase()
{
AssemblyName an = typeof(AssemblyTests).Assembly.GetName();
Assert.NotNull(an.CodeBase);
Assembly a = Assembly.Load(an);
Assert.Equal(a, typeof(AssemblyTests).Assembly);
}
// Helpers
private static Assembly GetGetCallingAssembly()
{
return Assembly.GetCallingAssembly();
}
private static Assembly LoadSystemCollectionsAssembly()
{
// Force System.collections to be linked statically
List<int> li = new List<int>();
li.Add(1);
return Assembly.Load(new AssemblyName(typeof(List<int>).GetTypeInfo().Assembly.FullName));
}
private static Assembly LoadSystemReflectionAssembly()
{
// Force System.Reflection to be linked statically
return Assembly.Load(new AssemblyName(typeof(AssemblyName).GetTypeInfo().Assembly.FullName));
}
public class AssemblyPublicClass
{
public class PublicNestedClass { }
}
private static class AssemblyPrivateClass { }
public class AssemblyClassWithPrivateCtor
{
private AssemblyClassWithPrivateCtor() { }
}
}
public class AssemblyCustomAttributeTest
{
[Fact]
public void Test_Int32AttrSimple()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.Int32Attr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.Int32Attr((Int32)77, name = \"Int32AttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_Int64Attr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.Int64Attr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.Int64Attr((Int64)77, name = \"Int64AttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_StringAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.StringAttr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.StringAttr(\"hello\", name = \"StringAttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_EnumAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.EnumAttr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.EnumAttr((System.Reflection.CustomAttributesTests.Data.MyColorEnum)1, name = \"EnumAttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_TypeAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.TypeAttr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.TypeAttr(typeof(System.Object), name = \"TypeAttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_CompilationRelaxationsAttr()
{
bool result = false;
Type attrType = typeof(System.Runtime.CompilerServices.CompilationRelaxationsAttribute);
string attrstr = "[System.Runtime.CompilerServices.CompilationRelaxationsAttribute((Int32)8)]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_AssemblyIdentityAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.AssemblyTitleAttribute);
string attrstr = "[System.Reflection.AssemblyTitleAttribute(\"System.Reflection.Tests\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_AssemblyDescriptionAttribute()
{
bool result = false;
Type attrType = typeof(System.Reflection.AssemblyDescriptionAttribute);
string attrstr = "[System.Reflection.AssemblyDescriptionAttribute(\"System.Reflection.Tests\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_AssemblyCompanyAttribute()
{
bool result = false;
Type attrType = typeof(System.Reflection.AssemblyCompanyAttribute);
string attrstr = "[System.Reflection.AssemblyCompanyAttribute(\"Microsoft Corporation\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_CLSCompliantAttribute()
{
bool result = false;
Type attrType = typeof(System.CLSCompliantAttribute);
string attrstr = "[System.CLSCompliantAttribute((Boolean)True)]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_DebuggableAttribute()
{
bool result = false;
Type attrType = typeof(System.Diagnostics.DebuggableAttribute);
string attrstr = "[System.Diagnostics.DebuggableAttribute((System.Diagnostics.DebuggableAttribute+DebuggingModes)263)]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_SimpleAttribute()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.Attr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.Attr((Int32)77, name = \"AttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
// Returns true if the test assembly carries an assembly-level custom attribute
// of the given type.
// NOTE(review): the attributeStr parameter is never examined — matching is by
// attribute *type* only. Presumably the string form was intended to be compared
// against the CustomAttributeData as well; confirm before relying on these tests
// to distinguish attribute arguments.
private bool VerifyCustomAttribute(Type type, string attributeStr)
{
Assembly asm = typeof(AssemblyCustomAttributeTest).Assembly;
foreach (CustomAttributeData cad in asm.GetCustomAttributesData())
{
// Match on the declared attribute type only.
if (cad.AttributeType.Equals(type))
{
return true;
}
}
return false;
}
}
// Tests for Assembly.GetType name-resolution scoping.
// NOTE(review): the class name contains a typo ("GetTYpe"); it is public, so
// renaming it could break reflection-based discovery — left as-is.
public class AssemblyTests_GetTYpe
{
[Fact]
public void AssemblyGetTypeNoQualifierAllowed()
{
// Assembly.GetType accepts a type name only; an assembly-qualified name
// is rejected with ArgumentException.
Assembly a = typeof(G<int>).Assembly;
string s = typeof(G<int>).AssemblyQualifiedName;
AssertExtensions.Throws<ArgumentException>(null, () => a.GetType(s, throwOnError: true, ignoreCase: false));
}
[Fact]
public void AssemblyGetTypeDoesntSearchMscorlib()
{
// Lookups are scoped to this assembly: neither System.Object itself nor a
// generic instantiation closed over it resolves here.
Assembly a = typeof(AssemblyTests_GetTYpe).Assembly;
Assert.Throws<TypeLoadException>(() => a.GetType("System.Object", throwOnError: true, ignoreCase: false));
Assert.Throws<TypeLoadException>(() => a.GetType("G`1[[System.Object]]", throwOnError: true, ignoreCase: false));
}
[Fact]
public void AssemblyGetTypeDefaultsToItself()
{
// Unqualified generic arguments ("G`1") resolve against this assembly,
// while the explicitly qualified Int32 resolves against mscorlib.
Assembly a = typeof(AssemblyTests_GetTYpe).Assembly;
Type t = a.GetType("G`1[[G`1[[System.Int32, mscorlib]]]]", throwOnError: true, ignoreCase: false);
Assert.Equal(typeof(G<G<int>>), t);
}
}
}
internal class G<T> { }
| |
/******************************************************************************
* Copyright (C) Leap Motion, Inc. 2011-2017. *
* Leap Motion proprietary and confidential. *
* *
* Use subject to the terms of the Leap Motion SDK Agreement available at *
* https://developer.leapmotion.com/sdk_agreement, or another agreement *
* between Leap Motion and you, your company or other organization. *
******************************************************************************/
using Leap.Unity.Attributes;
using Leap.Unity.Interaction.Internal;
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.VR;
using Leap.Unity.Query;
using Leap.Unity.Space;
using UnityEngine.Serialization;
namespace Leap.Unity.Interaction {
[DisallowMultipleComponent]
public class InteractionVRController : InteractionController {
[Header("Controller Configuration")]
[Tooltip("Read-only. InteractionVRControllers use Unity's built-in VRNode tracking "
+ "API to receive tracking data for VR controllers by default. If you add a "
+ "custom script to set this controller's trackingProvider to something "
+ "other than the DefaultVRNodeTrackingProvider, the change will be reflected "
+ "here. (Hint: Use the ExecuteInEditMode attribute.)")]
[Disable, SerializeField]
#pragma warning disable 0414
private string _trackingProviderType = "DefaultVRNodeTrackingProvider";
#pragma warning restore 0414
public bool isUsingCustomTracking {
get { return !(trackingProvider is DefaultVRNodeTrackingProvider); }
}
[Tooltip("If this string is not empty and does not match a controller in Input.GetJoystickNames()"
+ ", then this game object will disable itself.")]
[SerializeField, EditTimeOnly]
[FormerlySerializedAs("_deviceString")]
private string _deviceJoystickTokens = "oculus touch right"; // or, e.g., "openvr controller right"
public string deviceJoystickTokens { get { return _deviceJoystickTokens; } }
[Tooltip("Which hand will hold this controller? This property cannot be changed "
+ "at runtime.")]
[SerializeField, EditTimeOnly]
private Chirality _chirality;
public Chirality chirality { get { return _chirality; } }
[Header("Hover Configuration")]
[Tooltip("This is the point used to determine the distance to objects for the "
+ "purposes of their 'hovered' state. Generally, it should be somewhere "
+ "between the tip of the controller and the controller's center of mass.")]
[SerializeField]
private Transform _hoverPoint;
[Tooltip("These points refine the hover point when determining distances to "
+ "interaction objects for evaluating which object should be the primary hover "
+ "of this interaction controller. An object's proximity to one of these "
+ "points is interpreted as the user's intention to interact specifically "
+ "with that object, and is important when building less accident-prone user "
+ "interfaces. For example, hands place their primary hover points on the "
+ "thumb, index finger, and middle finger by default. Controllers generally "
+ "should have a primary hover point at any tip of the controller you expect "
+ "users might use to hit a button. Warning: Each point costs distance checks "
+ "against nearby objects, so making this list large is costly!")]
[SerializeField]
public new List<Transform> primaryHoverPoints;
[Header("Grasping Configuration")]
[Tooltip("The point around which to check objects eligible for being grasped. Only "
+ "objects with an InteractionBehaviour component with ignoreGrasping disabled "
+ "are eligible for grasping. Upon attempting to grasp with a controller, the "
+ "object closest to the grasp point is chosen for grasping.")]
public Transform graspPoint;
public float maxGraspDistance = 0.06F;
[Tooltip("This string should match an Axis specified in Edit->Project Settings->"
+ "Input. This is the button to use to listen for grasping.")]
public string graspButtonAxis;
[Tooltip("The duration of time in seconds beyond initially pressing the grasp button "
+ "that the user can move the grasp point within range of a graspable "
+ "interaction object and still trigger a grasp. With a value of zero, objects "
+ "can only be grasped if they are already within the grasp distance of the "
+ "grasp point.")]
public float graspTimingSlop = 0.10F;
private bool _hasTrackedPositionLastFrame = false;
private Vector3 _trackedPositionLastFrame = Vector3.zero;
private Quaternion _trackedRotationLastFrame = Quaternion.identity;
private IVRControllerTrackingProvider _backingTrackingProvider = null;
/// <summary>
/// The provider that supplies tracked pose data for this controller. Defaults
/// lazily to the DefaultVRNodeTrackingProvider on this GameObject; assign a
/// custom provider to override tracking.
/// </summary>
public IVRControllerTrackingProvider trackingProvider {
  get {
    // Fix: the original getter read and wrote _backingDefaultTrackingProvider,
    // so a custom provider assigned via the setter was never returned and
    // custom tracking was silently ignored. Lazily fall back to the default
    // provider, caching it in the *custom* backing field.
    if (_backingTrackingProvider == null) {
      _backingTrackingProvider = _defaultTrackingProvider;
    }
    return _backingTrackingProvider;
  }
  set {
    // Keep the tracking callback subscribed to exactly one provider at a time.
    if (_backingTrackingProvider != null) {
      _backingTrackingProvider.OnTrackingDataUpdate -= refreshControllerTrackingData;
    }
    _backingTrackingProvider = value;
    if (_backingTrackingProvider != null) {
      _backingTrackingProvider.OnTrackingDataUpdate += refreshControllerTrackingData;
    }
  }
}
private IVRControllerTrackingProvider _backingDefaultTrackingProvider;
private IVRControllerTrackingProvider _defaultTrackingProvider {
get {
if (_backingDefaultTrackingProvider == null) {
refreshDefaultTrackingProvider();
}
return _backingDefaultTrackingProvider;
}
set {
_backingDefaultTrackingProvider = value;
}
}
// Finds — or lazily creates — the DefaultVRNodeTrackingProvider component on
// this GameObject, points it at this controller's VRNode, and stores it as
// the default provider. (An explicit null comparison is used deliberately:
// UnityEngine.Object overloads ==, so ?? must not be substituted here.)
private void refreshDefaultTrackingProvider() {
  var provider = gameObject.GetComponent<DefaultVRNodeTrackingProvider>();
  if (provider == null) {
    provider = gameObject.AddComponent<DefaultVRNodeTrackingProvider>();
  }
  provider.vrNode = this.vrNode;
  _defaultTrackingProvider = provider;
}
protected virtual void OnValidate() {
_trackingProviderType = trackingProvider.GetType().ToString();
}
// If a device filter is configured, disable this GameObject unless some
// connected joystick's name contains every (lower-cased, space-separated)
// token from _deviceJoystickTokens.
protected virtual void Awake() {
if (deviceJoystickTokens.Length > 0) {
// Compare case-insensitively against Unity's reported joystick names.
string[] joysticksConnected = Input.GetJoystickNames().Query().Select(s => s.ToLower()).ToArray();
string[] controllerSupportTokens = deviceJoystickTokens.ToLower().Split(" ".ToCharArray());
// A joystick matches only if it contains ALL of the configured tokens.
bool matchesController = joysticksConnected.Query()
.Any(joystick => controllerSupportTokens.Query()
.All(token => joystick.Contains(token)));
if (!matchesController) {
string message = "";
message += "No joystick name containing the tokens ";
// Build an English list: "'a', 'b', and 'c'" — the index checks against
// Length - 2 insert either ", " or ", and " between quoted tokens.
for (int i = 0; i < controllerSupportTokens.Length; i++) {
message += "'" + controllerSupportTokens[i] + "'";
if (i < controllerSupportTokens.Length - 2) {
message += ", ";
}
else if (i == controllerSupportTokens.Length - 2) {
message += ", and ";
}
}
message += " was detected; disabling controller object. Please check the "
+ "device string if this was in error.";
Debug.Log(message, gameObject);
gameObject.SetActive(false);
}
}
}
protected override void Start() {
base.Start();
trackingProvider.OnTrackingDataUpdate += refreshControllerTrackingData;
}
protected override void Reset() {
base.Reset();
hoverEnabled = true;
contactEnabled = true;
graspingEnabled = true;
trackingProvider = _defaultTrackingProvider;
_hoverPoint = null;
primaryHoverPoints.Clear();
graspPoint = null;
maxGraspDistance = 0.06F;
graspTimingSlop = 0.1F;
}
protected override void fixedUpdateController() {
refreshContactBoneTargets();
}
// Callback invoked by the tracking provider with the latest controller pose.
// Caches the previous frame's pose (used by the velocity property and the
// is-being-moved heuristic) BEFORE applying the new pose — the statement
// order here is load-bearing.
private void refreshControllerTrackingData(Vector3 position, Quaternion rotation) {
refreshIsBeingMoved(position, rotation);
// Only cache once we have at least one prior tracked frame.
if (_hasTrackedPositionLastFrame) {
_trackedPositionLastFrame = this.transform.position;
_trackedRotationLastFrame = this.transform.rotation;
}
this.transform.position = position;
this.transform.rotation = rotation;
refreshContactBoneTargets();
// First tracked frame: seed the last-frame cache with the new pose so the
// first computed velocity is zero rather than a spike.
if (!_hasTrackedPositionLastFrame) {
_hasTrackedPositionLastFrame = true;
_trackedPositionLastFrame = this.transform.position;
_trackedRotationLastFrame = this.transform.rotation;
}
}
private const float RIG_LOCAL_MOVEMENT_SPEED_THRESHOLD = 00.07F;
private const float RIG_LOCAL_ROTATION_SPEED_THRESHOLD = 10.00F;
private const float BEING_MOVED_TIMEOUT = 0.5F;
private float _lastTimeMoved = 0F;
private bool _isBeingMoved = false;
// Updates _isBeingMoved: true while the controller is tracked and its
// rig-local linear or angular speed exceeded the thresholds within the last
// BEING_MOVED_TIMEOUT seconds.
// NOTE(review): assumes Camera.main exists and has a parent (the rig root);
// if the camera is un-parented, baseTransform is null and this throws —
// verify against the expected rig setup.
private void refreshIsBeingMoved(Vector3 position, Quaternion rotation) {
Transform baseTransform = Camera.main.transform.parent;
// Compare poses in rig-local space so rig teleports/locomotion don't count
// as controller movement; speeds are per-second (divided by fixedDeltaTime).
if (((baseTransform.InverseTransformPoint(position)
- baseTransform.InverseTransformPoint(_trackedPositionLastFrame)) / Time.fixedDeltaTime).magnitude > RIG_LOCAL_MOVEMENT_SPEED_THRESHOLD
|| Quaternion.Angle(baseTransform.InverseTransformRotation(rotation),
baseTransform.InverseTransformRotation(_trackedRotationLastFrame)) / Time.fixedDeltaTime > RIG_LOCAL_ROTATION_SPEED_THRESHOLD) {
_lastTimeMoved = Time.fixedTime;
}
_isBeingMoved = trackingProvider != null && trackingProvider.isTracked && Time.fixedTime - _lastTimeMoved < BEING_MOVED_TIMEOUT;
}
#region General InteractionController Implementation

/// <summary>
/// Gets whether or not the underlying controller is currently tracked.
/// False when no tracking provider is assigned.
/// </summary>
public override bool isTracked {
  get {
    return trackingProvider != null && trackingProvider.isTracked;
  }
}

/// <summary>
/// Gets whether or not the underlying controller is currently being moved in
/// worldspace. Backed by refreshIsBeingMoved (rig-local speed thresholds).
/// </summary>
public override bool isBeingMoved {
  get {
    return _isBeingMoved;
  }
}

/// <summary>
/// Gets the VRNode associated with this VR controller, chosen by chirality.
/// Note: If the tracking mode for this controller is specified as
/// ControllerTrackingMode.Custom, this value may be ignored.
/// </summary>
public VRNode vrNode {
  get { return chirality == Chirality.Left ? VRNode.LeftHand : VRNode.RightHand; }
}

/// <summary>
/// Gets whether the controller is a left-hand controller.
/// </summary>
public override bool isLeft {
  get { return chirality == Chirality.Left; }
}

/// <summary>
/// Gets the current velocity of the controller, computed as the fixed-frame
/// positional difference over Time.fixedDeltaTime. Zero until a tracked pose
/// has been recorded at least once.
/// </summary>
public override Vector3 velocity {
  get {
    if (_hasTrackedPositionLastFrame) {
      return (this.transform.position - _trackedPositionLastFrame) / Time.fixedDeltaTime;
    }
    else {
      return Vector3.zero;
    }
  }
}

/// <summary>
/// Gets the type of controller this is. For InteractionVRController, the type
/// is always ControllerType.VRController.
/// </summary>
public override ControllerType controllerType {
  get { return ControllerType.VRController; }
}

/// <summary>
/// This implementation of InteractionControllerBase does not represent a Leap
/// hand, so it need not return an InteractionHand object.
/// </summary>
public override InteractionHand intHand {
  get { return null; }
}

/// <summary>
/// InteractionVRController doesn't need to do anything when an object is
/// unregistered.
/// </summary>
protected override void onObjectUnregistered(IInteractionBehaviour intObj) { }

#endregion
#region Hover Implementation

/// <summary>
/// Gets the center point used for hover distance checking. Returns
/// Vector3.zero when no hover point transform has been assigned.
/// </summary>
public override Vector3 hoverPoint {
  get { return _hoverPoint == null ? Vector3.zero : _hoverPoint.position; }
}

/// <summary>
/// Gets the list of points to be used to perform higher-fidelity "primary hover"
/// checks. Only one interaction object may be the primary hover of an interaction
/// controller (Leap hand or otherwise) at a time. Interface objects such as buttons
/// can only be pressed when they are primarily hovered by an interaction controller,
/// so it's best to return points on whatever you expect to be able to use to push
/// buttons with the controller.
/// </summary>
protected override List<Transform> _primaryHoverPoints {
  get { return primaryHoverPoints; }
}
// Offsets applied (and later inverted) by refreshContactBoneTargets when the
// controller operates inside a warped (curved) space.
private Vector3 _pivotingPositionOffset = Vector3.zero;
private Vector3 _unwarpingPositionOffset = Vector3.zero;
private Quaternion _unwarpingRotationOffset = Quaternion.identity;

/// <summary>
/// Computes the "unwarped" pose of the primary hover point within a warped
/// space element and refreshes contact bone targets using that pose.
/// </summary>
protected override void unwarpColliders(Transform primaryHoverPoint, ISpaceComponent warpedSpaceElement) {
  // Extension method calculates "unwarped" pose in world space.
  Vector3 unwarpedPosition;
  Quaternion unwarpedRotation;
  warpedSpaceElement.anchor.transformer.WorldSpaceUnwarp(primaryHoverPoint.position,
                                                         primaryHoverPoint.rotation,
                                                         out unwarpedPosition,
                                                         out unwarpedRotation);

  // Shift the controller to have its origin on the primary hover point so that
  // rotations applied to the hand cause it to pivot around that point, then apply
  // the position and rotation transformation.
  _pivotingPositionOffset = -primaryHoverPoint.position;
  _unwarpingPositionOffset = unwarpedPosition;
  _unwarpingRotationOffset = unwarpedRotation * Quaternion.Inverse(primaryHoverPoint.rotation);

  refreshContactBoneTargets(useUnwarpingData: true);
}

#endregion
#region Contact Implementation

// Parallel arrays indexed by contact bone: rest pose relative to the
// controller transform, and the world-space targets the bones chase.
private Vector3[] _contactBoneLocalPositions;
private Quaternion[] _contactBoneLocalRotations;

private Vector3[] _contactBoneTargetPositions;
private Quaternion[] _contactBoneTargetRotations;

// Contact bones built from this controller's colliders (see initContactBones).
private ContactBone[] _contactBones;
public override ContactBone[] contactBones {
  get { return _contactBones; }
}

// Lazily created parent object that holds all contact bones (see initContact).
private GameObject _contactBoneParent;
protected override GameObject contactBoneParent {
  get { return _contactBoneParent; }
}
/// <summary>
/// Builds contact bones from the colliders already attached to this controller
/// and re-parents them under a dedicated container GameObject. Always returns
/// true (contact initialization cannot fail here).
/// </summary>
protected override bool initContact() {
  initContactBones();

  if (_contactBoneParent == null) {
    // Fix: the right-hand name was missing its closing parenthesis ("(Right").
    _contactBoneParent = new GameObject("VR Controller Contact Bones "
                                      + (isLeft ? "(Left)" : "(Right)"));
  }

  foreach (var contactBone in _contactBones) {
    contactBone.transform.parent = _contactBoneParent.transform;
  }

  return true;
}
/// <summary>
/// Recomputes the world-space target pose for each contact bone from its rest
/// pose relative to the controller transform. When useUnwarpingData is true,
/// the controller transform is temporarily shifted into its "unwarped space"
/// pose (offsets computed by unwarpColliders) and restored afterwards; the
/// apply/undo order of the two moves is intentionally mirrored.
/// </summary>
private void refreshContactBoneTargets(bool useUnwarpingData = false) {
  if (_wasContactInitialized) {

    // Move the controller transform temporarily into its "unwarped space" pose
    // (only if we are using the controller in a curved space)
    if (useUnwarpingData) {
      moveControllerTransform(_pivotingPositionOffset, Quaternion.identity);
      moveControllerTransform(_unwarpingPositionOffset, _unwarpingRotationOffset);
    }

    for (int i = 0; i < _contactBones.Length; i++) {
      _contactBoneTargetPositions[i]
        = this.transform.TransformPoint(_contactBoneLocalPositions[i]);
      _contactBoneTargetRotations[i]
        = this.transform.TransformRotation(_contactBoneLocalRotations[i]);
    }

    // Move the controller transform back to its original pose.
    if (useUnwarpingData) {
      moveControllerTransform(-_unwarpingPositionOffset, Quaternion.Inverse(_unwarpingRotationOffset));
      moveControllerTransform(-_pivotingPositionOffset, Quaternion.identity);
    }
  }
}
/// <summary>
/// Applies a world-space delta to this transform: pre-multiplies the rotation
/// and adds the position offset. Used in matched apply/undo pairs by
/// refreshContactBoneTargets.
/// </summary>
private void moveControllerTransform(Vector3 deltaPosition, Quaternion deltaRotation) {
  this.transform.rotation = deltaRotation * this.transform.rotation;
  this.transform.position = deltaPosition + this.transform.position;
}
// Reused scratch buffers for contact bone construction (avoids per-call
// allocation beyond the component adds themselves).
private List<ContactBone> _contactBoneBuffer = new List<ContactBone>();
private List<Collider> _colliderBuffer = new List<Collider>();

/// <summary>
/// Scans this GameObject's hierarchy for colliders and builds a ContactBone
/// (with a kinematically-tuned Rigidbody) for each one, then records each
/// bone's rest pose relative to the controller transform in the parallel
/// arrays used by refreshContactBoneTargets.
/// </summary>
private void initContactBones() {
  _colliderBuffer.Clear();
  _contactBoneBuffer.Clear();

  // Scan for existing colliders and construct contact bones out of them.
  Utils.FindColliders<Collider>(this.gameObject, _colliderBuffer);

  foreach (var collider in _colliderBuffer) {
    ContactBone contactBone = collider.gameObject.AddComponent<ContactBone>();
    Rigidbody body = collider.gameObject.GetComponent<Rigidbody>();
    if (body == null) {
      body = collider.gameObject.AddComponent<Rigidbody>();
    }

    // Bones are driven by the controller, not physics: no gravity, frozen
    // rotation, continuous-dynamic collision for fast hand motion.
    body.freezeRotation = true;
    body.useGravity = false;
    body.collisionDetectionMode = CollisionDetectionMode.ContinuousDynamic;
    body.mass = 1F;

    contactBone.interactionController = this;
    contactBone.rigidbody = body;
    contactBone.collider = collider;

    _contactBoneBuffer.Add(contactBone);
  }

  int numBones = _colliderBuffer.Count;
  _contactBones = new ContactBone[numBones];
  _contactBoneLocalPositions = new Vector3[numBones];
  _contactBoneLocalRotations = new Quaternion[numBones];
  _contactBoneTargetPositions = new Vector3[numBones];
  _contactBoneTargetRotations = new Quaternion[numBones];
  for (int i = 0; i < numBones; i++) {
    _contactBones[i] = _contactBoneBuffer[i];

    // Capture the bone's rest pose in controller-local space; targets start
    // at the rest pose.
    _contactBoneLocalPositions[i]
      = _contactBoneTargetPositions[i]
      = this.transform.InverseTransformPoint(_contactBones[i].transform.position);
    _contactBoneLocalRotations[i]
      = _contactBoneTargetRotations[i]
      = this.transform.InverseTransformRotation(_contactBones[i].transform.rotation);
  }
}
/// <summary>
/// Returns the world-space target pose for the contact bone at the given
/// index, as last computed by refreshContactBoneTargets.
/// </summary>
protected override void getColliderBoneTargetPositionRotation(int contactBoneIndex,
                                                              out Vector3 targetPosition,
                                                              out Quaternion targetRotation) {
  targetPosition = _contactBoneTargetPositions[contactBoneIndex];
  targetRotation = _contactBoneTargetRotations[contactBoneIndex];
}

#endregion
#region Grasping Implementation

/// <summary>
/// By default, InteractionVRController uses Input.GetAxis(graspButtonAxis) to
/// determine the "depression" state for the grasp button. By setting this value to
/// something other than null, it is possible to modify this behavior to instead
/// retrieve a grasping axis value based on arbitrary code.
///
/// A grasp is attempted when the grasp button axis value returned by this method
/// becomes larger than the graspButtonDepressedValue, and a grasp is released when
/// the grasp button axis value returned by this method becomes smaller than the
/// graspButtonReleasedValue. Both of these values provide public setters.
/// </summary>
public Func<float> graspAxisOverride = null;

private float _graspDepressedValue = 0.8F;
/// <summary>
/// The value between 0 and 1 past which the grasping axis value will cause an
/// attempt to grasp a graspable interaction object near the grasp point.
/// </summary>
public float graspDepressedValue {
  get { return _graspDepressedValue; }
  set { _graspDepressedValue = value; }
}

private float _graspReleasedValue = 0.7F;
/// <summary>
/// If the grasping axis value passes the graspDepressedValue, it must then drop
/// underneath this value in order to release the grasp attempt (potentially
/// releasing a held object) and allow a new grasp attempt to occur.
/// </summary>
public float graspReleasedValue {
  get { return _graspReleasedValue; }
  set { _graspReleasedValue = value; }
}

// Scratch list returned by graspManipulatorPoints; cleared and refilled on
// every access.
private List<Vector3> _graspManipulatorPointsBuffer = new List<Vector3>();
/// <summary>
/// Gets a list returning this controller's hoverPoint. Because the
/// InteractionVRController represents a rigid controller, any two points that
/// rigidly move with the controller position and orientation will provide enough
/// information.
/// </summary>
public override List<Vector3> graspManipulatorPoints {
  get {
    _graspManipulatorPointsBuffer.Clear();
    _graspManipulatorPointsBuffer.Add(hoverPoint);
    _graspManipulatorPointsBuffer.Add(hoverPoint + this.transform.rotation * Vector3.forward * 0.05F);
    _graspManipulatorPointsBuffer.Add(hoverPoint + this.transform.rotation * Vector3.right * 0.05F);
    return _graspManipulatorPointsBuffer;
  }
}

// Grasp-button edge-detection state, updated by fixedUpdateGraspButtonState.
private IInteractionBehaviour _closestGraspableObject = null;
private bool _graspButtonLastFrame = false;
private bool _graspButtonDown = false;
private bool _graspButtonUp = false;
// Counts down from graspTimingSlop after a press so a slightly-early press
// can still start a grasp.
private float _graspButtonDownSlopTimer = 0F;

/// <summary>
/// Gets the world position of the grasp point transform.
/// </summary>
public override Vector3 GetGraspPoint() {
  return graspPoint.transform.position;
}
/// <summary>
/// Per-fixed-frame grasp update: refreshes the nearest graspable candidate,
/// then polls the grasp button for press/release edges.
/// </summary>
protected override void fixedUpdateGraspingState() {
  refreshClosestGraspableObject();

  fixedUpdateGraspButtonState();
}
/// <summary>
/// Scans the current grasp candidates and caches the one nearest the grasp
/// point, provided it lies within maxGraspDistance. Caches null when no
/// candidate qualifies.
/// </summary>
private void refreshClosestGraspableObject() {
  IInteractionBehaviour bestCandidate = null;
  float bestDistance = float.PositiveInfinity;

  foreach (var candidate in graspCandidates) {
    float candidateDistance = candidate.GetHoverDistance(this.graspPoint.position);

    bool withinReach = candidateDistance < maxGraspDistance;
    bool closerThanBest = candidateDistance < bestDistance;
    if (withinReach && closerThanBest) {
      bestCandidate = candidate;
      bestDistance = candidateDistance;
    }
  }

  _closestGraspableObject = bestCandidate;
}
/// <summary>
/// Polls the grasp axis and updates the press/release edge flags
/// (_graspButtonDown / _graspButtonUp) and the press-timing slop timer.
///
/// Hysteresis: a press is detected when the axis rises above
/// graspDepressedValue; a release when it falls below graspReleasedValue.
/// </summary>
private void fixedUpdateGraspButtonState() {
  _graspButtonDown = false;
  _graspButtonUp = false;

  bool graspButton = _graspButtonLastFrame;

  if (!_graspButtonLastFrame) {
    graspButton = readGraspAxis() > graspDepressedValue;

    if (graspButton) {
      // Grasp button was _just_ depressed this frame.
      _graspButtonDown = true;
      _graspButtonDownSlopTimer = graspTimingSlop;
    }
  }
  else {
    // Keep the release threshold at or below the press threshold so the
    // hysteresis band is well-formed.
    if (graspReleasedValue > graspDepressedValue) {
      Debug.LogWarning("The graspReleasedValue should be less than or equal to the "
                     + "graspDepressedValue!", this);
      graspReleasedValue = graspDepressedValue;
    }

    // Fix: the Input.GetAxis path previously compared against
    // graspDepressedValue here, while the override path (and the documented
    // contract on graspReleasedValue) use the release threshold. Both paths
    // now apply the release threshold consistently.
    graspButton = readGraspAxis() > graspReleasedValue;

    if (!graspButton) {
      // Grasp button was _just_ released this frame.
      _graspButtonUp = true;
      _graspButtonDownSlopTimer = 0F;
    }
  }

  if (_graspButtonDownSlopTimer > 0F) {
    _graspButtonDownSlopTimer -= Time.fixedDeltaTime;
  }

  _graspButtonLastFrame = graspButton;
}

/// <summary>
/// Reads the raw grasp axis value: uses graspAxisOverride when set, otherwise
/// Input.GetAxis(graspButtonAxis), falling back to a joystick-button poll
/// (reported as 1 or 0) when the input axis is not configured. This helper
/// replaces two duplicated copies of the same try/catch logic.
/// </summary>
private float readGraspAxis() {
  if (graspAxisOverride != null) {
    return graspAxisOverride();
  }

  try {
    return Input.GetAxis(graspButtonAxis);
  } catch {
    Debug.LogError("INPUT AXIS NOT SET UP. Go to your Input Manager and add a definition for " + graspButtonAxis + " on the " + (isLeft ? "9" : "10") + "th Joystick Axis.");
    return Input.GetKey(isLeft ? KeyCode.JoystickButton14 : KeyCode.JoystickButton15) ? 1F : 0F;
  }
}
/// <summary>
/// Reports whether a grasp should begin this frame: the controller must not
/// already be grasping, the grasp button must have just been pressed (or be
/// within the timing-slop window), and a graspable object must be in range.
/// When true, objectToGrasp is the cached closest graspable object.
/// </summary>
protected override bool checkShouldGrasp(out IInteractionBehaviour objectToGrasp) {
  objectToGrasp = null;

  if (isGraspingObject) return false;
  if (!_graspButtonDown && !(_graspButtonDownSlopTimer > 0F)) return false;
  if (_closestGraspableObject == null) return false;

  objectToGrasp = _closestGraspableObject;
  return true;
}
/// <summary>
/// Reports whether the held object should be released this frame: the grasp
/// button must have just been released while an object is grasped. When true,
/// objectToRelease is the currently grasped object.
/// </summary>
protected override bool checkShouldRelease(out IInteractionBehaviour objectToRelease) {
  if (_graspButtonUp && isGraspingObject) {
    objectToRelease = graspedObject;
    return true;
  }

  objectToRelease = null;
  return false;
}
#endregion
#region Gizmos

/// <summary>
/// Draws debug gizmos: the grasp sphere tinted toward white by the current
/// grasp axis value, and a pulsing sphere on the nearest graspable object.
/// </summary>
public override void OnDrawRuntimeGizmos(RuntimeGizmos.RuntimeGizmoDrawer drawer) {
  base.OnDrawRuntimeGizmos(drawer);

  // Grasp Point
  float graspAmount = 0F;
  if (graspAxisOverride != null) graspAmount = graspAxisOverride();
  else {
    try {
      graspAmount = Input.GetAxis(graspButtonAxis);
    }
    // Axis not configured in the Input Manager; draw with zero grasp amount.
    catch (ArgumentException) { }
  }

  drawer.color = Color.Lerp(GizmoColors.GraspPoint, Color.white, graspAmount);
  drawer.DrawWireSphere(GetGraspPoint(), maxGraspDistance);

  // Nearest graspable object
  if (_closestGraspableObject != null) {
    drawer.color = Color.Lerp(GizmoColors.Graspable, Color.white, Mathf.Sin(Time.time * 2 * Mathf.PI * 2F));
    drawer.DrawWireSphere(_closestGraspableObject.rigidbody.position, maxGraspDistance * 0.75F);
  }
}

#endregion
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.WindowsAzure.Management.RemoteApp;
using Microsoft.WindowsAzure.Management.RemoteApp.Models;
using Newtonsoft.Json.Linq;
namespace Microsoft.WindowsAzure.Management.RemoteApp
{
/// <summary>
/// RemoteApp virtual network operations.
/// </summary>
internal partial class VNetOperations : IServiceOperations<RemoteAppManagementClient>, IVNetOperations
{
/// <summary>
/// Initializes a new instance of the VNetOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal VNetOperations(RemoteAppManagementClient client)
{
    this._client = client;
}

// Backing field for Client; assigned once at construction and never changed.
private RemoteAppManagementClient _client;

/// <summary>
/// Gets a reference to the
/// Microsoft.WindowsAzure.Management.RemoteApp.RemoteAppManagementClient.
/// </summary>
public RemoteAppManagementClient Client
{
    get { return this._client; }
}
/// <summary>
/// Creates or updates a virtual network via an HTTP PUT to
/// /{subscriptionId}/services/{rdfeNamespace}/vnets/{vNetName}.
/// Tool-generated code; OK (200) and Accepted (202) are success statuses.
/// </summary>
/// <param name='vNetName'>
/// Required. RemoteApp virtual network name.
/// </param>
/// <param name='vNetDetails'>
/// Required. Details about the virtual network to create or update.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The response containing the operation tracking id.
/// </returns>
/// <exception cref="ArgumentNullException">vNetName or vNetDetails is null.</exception>
public async Task<OperationResultWithTrackingId> CreateOrUpdateAsync(string vNetName, VNetParameter vNetDetails, CancellationToken cancellationToken)
{
    // Validate
    if (vNetName == null)
    {
        throw new ArgumentNullException("vNetName");
    }
    if (vNetDetails == null)
    {
        throw new ArgumentNullException("vNetDetails");
    }

    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("vNetName", vNetName);
        tracingParameters.Add("vNetDetails", vNetDetails);
        TracingAdapter.Enter(invocationId, this, "CreateOrUpdateAsync", tracingParameters);
    }

    // Construct URL
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/";
    if (this.Client.RdfeNamespace != null)
    {
        url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
    }
    url = url + "/vnets/";
    url = url + Uri.EscapeDataString(vNetName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-09-01");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");

    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Put;
        httpRequest.RequestUri = new Uri(url);

        // Set Headers
        // NOTE(review): x-ms-version (2014-08-01) differs from the api-version
        // query parameter (2014-09-01); generated code, presumed intentional --
        // confirm against the service spec before changing.
        httpRequest.Headers.Add("x-ms-version", "2014-08-01");

        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        // Serialize Request: only non-null properties are emitted; GatewayType
        // is always written as its underlying integer value.
        string requestContent = null;
        JToken requestDoc = null;

        JObject vNetParameterValue = new JObject();
        requestDoc = vNetParameterValue;

        if (vNetDetails.Region != null)
        {
            vNetParameterValue["Region"] = vNetDetails.Region;
        }

        if (vNetDetails.VnetAddressSpaces != null)
        {
            JArray vnetAddressSpacesArray = new JArray();
            foreach (string vnetAddressSpacesItem in vNetDetails.VnetAddressSpaces)
            {
                vnetAddressSpacesArray.Add(vnetAddressSpacesItem);
            }
            vNetParameterValue["VnetAddressSpaces"] = vnetAddressSpacesArray;
        }

        if (vNetDetails.GatewaySubnet != null)
        {
            vNetParameterValue["GatewaySubnet"] = vNetDetails.GatewaySubnet;
        }

        if (vNetDetails.LocalAddressSpaces != null)
        {
            JArray localAddressSpacesArray = new JArray();
            foreach (string localAddressSpacesItem in vNetDetails.LocalAddressSpaces)
            {
                localAddressSpacesArray.Add(localAddressSpacesItem);
            }
            vNetParameterValue["LocalAddressSpaces"] = localAddressSpacesArray;
        }

        if (vNetDetails.VpnAddress != null)
        {
            vNetParameterValue["VpnAddress"] = vNetDetails.VpnAddress;
        }

        if (vNetDetails.DnsServers != null)
        {
            JArray dnsServersArray = new JArray();
            foreach (string dnsServersItem in vNetDetails.DnsServers)
            {
                dnsServersArray.Add(dnsServersItem);
            }
            vNetParameterValue["DnsServers"] = dnsServersArray;
        }

        if (vNetDetails.GatewayIp != null)
        {
            vNetParameterValue["GatewayIp"] = vNetDetails.GatewayIp;
        }

        vNetParameterValue["GatewayType"] = ((int)vNetDetails.GatewayType);

        requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");

        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any status other than 200/202 is surfaced as a CloudException.
            if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }

            // Create Result: the interesting payload is in response headers,
            // not the body (request id and operation tracking id).
            OperationResultWithTrackingId result = null;
            // Deserialize Response
            result = new OperationResultWithTrackingId();
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (httpResponse.Headers.Contains("x-remoteapp-operation-tracking-id"))
            {
                result.TrackingId = httpResponse.Headers.GetValues("x-remoteapp-operation-tracking-id").FirstOrDefault();
            }

            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Deletes the specified RemoteApp virtual network via an HTTP DELETE to
/// /{subscriptionId}/services/{rdfeNamespace}/vnets/{vNetName}.
/// OK (200) and Accepted (202) are success statuses.
/// </summary>
/// <param name='vNetName'>
/// Required. RemoteApp virtual network name.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The response containing the operation tracking id.
/// </returns>
/// <exception cref="ArgumentNullException">vNetName is null.</exception>
public async Task<OperationResultWithTrackingId> DeleteAsync(string vNetName, CancellationToken cancellationToken)
{
    // Validate
    if (vNetName == null)
    {
        throw new ArgumentNullException("vNetName");
    }

    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("vNetName", vNetName);
        TracingAdapter.Enter(invocationId, this, "DeleteAsync", tracingParameters);
    }

    // Construct URL
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/";
    if (this.Client.RdfeNamespace != null)
    {
        url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
    }
    url = url + "/vnets/";
    url = url + Uri.EscapeDataString(vNetName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-09-01");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");

    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Delete;
        httpRequest.RequestUri = new Uri(url);

        // Set Headers
        httpRequest.Headers.Add("x-ms-version", "2014-08-01");

        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }

            // Create Result
            OperationResultWithTrackingId result = null;
            // Deserialize Response
            result = new OperationResultWithTrackingId();
            result.StatusCode = statusCode;
            // Consistency fix: CreateOrUpdateAsync and GetAsync surface the
            // service request id from the x-ms-request-id header; this method
            // previously dropped it.
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (httpResponse.Headers.Contains("x-remoteapp-operation-tracking-id"))
            {
                result.TrackingId = httpResponse.Headers.GetValues("x-remoteapp-operation-tracking-id").FirstOrDefault();
            }

            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Gets the specified virtual network's configuration via an HTTP GET to
/// /{subscriptionId}/services/{rdfeNamespace}/vnets/{vNetName}?includeKey=...
/// Only OK (200) is a success status; the JSON body is deserialized into a
/// VNet field by field, with absent/null properties left at their defaults.
/// </summary>
/// <param name='vNetName'>
/// Required. RemoteApp virtual network name.
/// </param>
/// <param name='includeSharedKey'>
/// Required. Set to true to get back the shared key used to configure
/// the VPN connection
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// RemoteApp virtual network information.
/// </returns>
/// <exception cref="ArgumentNullException">vNetName is null.</exception>
public async Task<VNetResult> GetAsync(string vNetName, bool includeSharedKey, CancellationToken cancellationToken)
{
    // Validate
    if (vNetName == null)
    {
        throw new ArgumentNullException("vNetName");
    }

    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("vNetName", vNetName);
        tracingParameters.Add("includeSharedKey", includeSharedKey);
        TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
    }

    // Construct URL
    string url = "";
    url = url + "/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/services/";
    if (this.Client.RdfeNamespace != null)
    {
        url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
    }
    url = url + "/vnets/";
    url = url + Uri.EscapeDataString(vNetName);
    List<string> queryParameters = new List<string>();
    // The service expects a lowercase boolean ("true"/"false") for includeKey.
    queryParameters.Add("includeKey=" + Uri.EscapeDataString(includeSharedKey.ToString().ToLower()));
    queryParameters.Add("api-version=2014-09-01");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");

    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);

        // Set Headers
        httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
        httpRequest.Headers.Add("x-ms-version", "2014-08-01");

        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }

            // Create Result
            VNetResult result = null;
            // Deserialize Response: each recognized property is copied onto a
            // new VNet instance; enum-typed fields (State, GatewayType) arrive
            // as integers and are cast to their enum types.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new VNetResult();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }

                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    VNet vNetInstance = new VNet();
                    result.VNet = vNetInstance;

                    JToken nameValue = responseDoc["Name"];
                    if (nameValue != null && nameValue.Type != JTokenType.Null)
                    {
                        string nameInstance = ((string)nameValue);
                        vNetInstance.Name = nameInstance;
                    }

                    JToken stateValue = responseDoc["State"];
                    if (stateValue != null && stateValue.Type != JTokenType.Null)
                    {
                        VNetState stateInstance = ((VNetState)(((int)stateValue)));
                        vNetInstance.State = stateInstance;
                    }

                    JToken stateInfoValue = responseDoc["StateInfo"];
                    if (stateInfoValue != null && stateInfoValue.Type != JTokenType.Null)
                    {
                        string stateInfoInstance = ((string)stateInfoValue);
                        vNetInstance.StateInfo = stateInfoInstance;
                    }

                    // SharedKey is only present when includeSharedKey was true.
                    JToken sharedKeyValue = responseDoc["SharedKey"];
                    if (sharedKeyValue != null && sharedKeyValue.Type != JTokenType.Null)
                    {
                        string sharedKeyInstance = ((string)sharedKeyValue);
                        vNetInstance.SharedKey = sharedKeyInstance;
                    }

                    JToken upgradeInProgressValue = responseDoc["UpgradeInProgress"];
                    if (upgradeInProgressValue != null && upgradeInProgressValue.Type != JTokenType.Null)
                    {
                        bool upgradeInProgressInstance = ((bool)upgradeInProgressValue);
                        vNetInstance.UpgradeInProgress = upgradeInProgressInstance;
                    }

                    JToken regionValue = responseDoc["Region"];
                    if (regionValue != null && regionValue.Type != JTokenType.Null)
                    {
                        string regionInstance = ((string)regionValue);
                        vNetInstance.Region = regionInstance;
                    }

                    // Array properties are appended to the VNet's collections
                    // (assumes the VNet model pre-initializes them).
                    JToken vnetAddressSpacesArray = responseDoc["VnetAddressSpaces"];
                    if (vnetAddressSpacesArray != null && vnetAddressSpacesArray.Type != JTokenType.Null)
                    {
                        foreach (JToken vnetAddressSpacesValue in ((JArray)vnetAddressSpacesArray))
                        {
                            vNetInstance.VnetAddressSpaces.Add(((string)vnetAddressSpacesValue));
                        }
                    }

                    JToken gatewaySubnetValue = responseDoc["GatewaySubnet"];
                    if (gatewaySubnetValue != null && gatewaySubnetValue.Type != JTokenType.Null)
                    {
                        string gatewaySubnetInstance = ((string)gatewaySubnetValue);
                        vNetInstance.GatewaySubnet = gatewaySubnetInstance;
                    }

                    JToken localAddressSpacesArray = responseDoc["LocalAddressSpaces"];
                    if (localAddressSpacesArray != null && localAddressSpacesArray.Type != JTokenType.Null)
                    {
                        foreach (JToken localAddressSpacesValue in ((JArray)localAddressSpacesArray))
                        {
                            vNetInstance.LocalAddressSpaces.Add(((string)localAddressSpacesValue));
                        }
                    }

                    JToken vpnAddressValue = responseDoc["VpnAddress"];
                    if (vpnAddressValue != null && vpnAddressValue.Type != JTokenType.Null)
                    {
                        string vpnAddressInstance = ((string)vpnAddressValue);
                        vNetInstance.VpnAddress = vpnAddressInstance;
                    }

                    JToken dnsServersArray = responseDoc["DnsServers"];
                    if (dnsServersArray != null && dnsServersArray.Type != JTokenType.Null)
                    {
                        foreach (JToken dnsServersValue in ((JArray)dnsServersArray))
                        {
                            vNetInstance.DnsServers.Add(((string)dnsServersValue));
                        }
                    }

                    JToken gatewayIpValue = responseDoc["GatewayIp"];
                    if (gatewayIpValue != null && gatewayIpValue.Type != JTokenType.Null)
                    {
                        string gatewayIpInstance = ((string)gatewayIpValue);
                        vNetInstance.GatewayIp = gatewayIpInstance;
                    }

                    JToken gatewayTypeValue = responseDoc["GatewayType"];
                    if (gatewayTypeValue != null && gatewayTypeValue.Type != JTokenType.Null)
                    {
                        GatewayType gatewayTypeInstance = ((GatewayType)(((int)gatewayTypeValue)));
                        vNetInstance.GatewayType = gatewayTypeInstance;
                    }
                }

            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }

            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Gets the status of the call from ResetVpnSharedKey.
/// </summary>
/// <param name='trackingId'>
/// Required. Tracking ID returned by ResetVpnSharedKey
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Current state of a long running operation.
/// </returns>
public async Task<VNetOperationStatusResult> GetResetVpnSharedKeyOperationStatusAsync(string trackingId, CancellationToken cancellationToken)
{
// Validate
if (trackingId == null)
{
throw new ArgumentNullException("trackingId");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("trackingId", trackingId);
TracingAdapter.Enter(invocationId, this, "GetResetVpnSharedKeyOperationStatusAsync", tracingParameters);
}
// Construct URL
// Shape: /{subscriptionId}/services/{rdfeNamespace}/vnets/operationStatus/{trackingId}?api-version=2014-09-01
string url = "";
url = url + "/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/services/";
if (this.Client.RdfeNamespace != null)
{
url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
}
url = url + "/vnets/operationStatus/";
url = url + Uri.EscapeDataString(trackingId);
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
// NOTE(review): x-ms-version header (2014-08-01) differs from the api-version query value
// (2014-09-01) — confirm this mismatch is intentional in the generated client.
httpRequest.Headers.Add("x-ms-version", "2014-08-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Anything other than 200 OK is surfaced as a CloudException built from the response body.
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
VNetOperationStatusResult result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new VNetOperationStatusResult();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
JToken statusValue = responseDoc["Status"];
if (statusValue != null && statusValue.Type != JTokenType.Null)
{
// "Status" arrives as a JSON integer and is cast directly to the VNetOperationStatus enum.
VNetOperationStatus statusInstance = ((VNetOperationStatus)(((int)statusValue)));
result.Status = statusInstance;
}
}
}
// Propagate transport metadata onto the result object.
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Gets a configuration script to configure the VPN device to connect
/// to the given virtual network. Run this script on the VPN device.
/// </summary>
/// <param name='vNetName'>
/// Required. RemoteApp virtual network name.
/// </param>
/// <param name='vendor'>
/// Optional. Vendor
/// </param>
/// <param name='vpnDevice'>
/// Optional. Model of the VPN
/// </param>
/// <param name='osFamily'>
/// Optional. OS Family
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Script that can be used to set up a VPN device.
/// </returns>
public async Task<VNetConfigScriptResult> GetVpnDeviceConfigScriptAsync(string vNetName, string vendor, string vpnDevice, string osFamily, CancellationToken cancellationToken)
{
// Validate
if (vNetName == null)
{
throw new ArgumentNullException("vNetName");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("vNetName", vNetName);
tracingParameters.Add("vendor", vendor);
tracingParameters.Add("vpnDevice", vpnDevice);
tracingParameters.Add("osFamily", osFamily);
TracingAdapter.Enter(invocationId, this, "GetVpnDeviceConfigScriptAsync", tracingParameters);
}
// Construct URL
// Shape: /{subscriptionId}/services/{rdfeNamespace}/vnets/{vNetName}/vpnscript?...
string url = "";
url = url + "/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/services/";
if (this.Client.RdfeNamespace != null)
{
url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
}
url = url + "/vnets/";
url = url + Uri.EscapeDataString(vNetName);
url = url + "/vpnscript";
List<string> queryParameters = new List<string>();
if (vendor != null)
{
queryParameters.Add("vendor=" + Uri.EscapeDataString(vendor));
}
if (vpnDevice != null)
{
// Note: the vpnDevice parameter is sent on the wire as the "platform" query parameter.
queryParameters.Add("platform=" + Uri.EscapeDataString(vpnDevice));
}
if (osFamily != null)
{
queryParameters.Add("osFamily=" + Uri.EscapeDataString(osFamily));
}
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
// NOTE(review): x-ms-version header (2014-08-01) differs from the api-version query value
// (2014-09-01) — confirm this mismatch is intentional in the generated client.
httpRequest.Headers.Add("x-ms-version", "2014-08-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Anything other than 200 OK is surfaced as a CloudException built from the response body.
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
VNetConfigScriptResult result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new VNetConfigScriptResult();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
// The response body is a bare JSON string containing the whole device script.
string configScriptInstance = ((string)responseDoc);
result.ConfigScript = configScriptInstance;
}
}
// Propagate transport metadata onto the result object.
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Gets details of supported VPN devices
/// </summary>
/// <param name='vNetName'>
/// Required. RemoteApp virtual network name.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The VPN device information.
/// </returns>
public async Task<VNetVpnDeviceResult> GetVpnDevicesAsync(string vNetName, CancellationToken cancellationToken)
{
// Validate
if (vNetName == null)
{
throw new ArgumentNullException("vNetName");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("vNetName", vNetName);
TracingAdapter.Enter(invocationId, this, "GetVpnDevicesAsync", tracingParameters);
}
// Construct URL
// Shape: /{subscriptionId}/services/{rdfeNamespace}/vnets/{vNetName}/vpndevices?api-version=2014-09-01
string url = "";
url = url + "/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/services/";
if (this.Client.RdfeNamespace != null)
{
url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
}
url = url + "/vnets/";
url = url + Uri.EscapeDataString(vNetName);
url = url + "/vpndevices";
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
// NOTE(review): x-ms-version header (2014-08-01) differs from the api-version query value
// (2014-09-01) — confirm this mismatch is intentional in the generated client.
httpRequest.Headers.Add("x-ms-version", "2014-08-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Anything other than 200 OK is surfaced as a CloudException built from the response body.
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
VNetVpnDeviceResult result = null;
// Deserialize Response
// Expected document shape: { "Vendors": [ { "name": ..., "Platforms": [ { "name": ..., "OsFamilies": [ { "name": ... } ] } ] } ] }
// NOTE(review): "name" is read lower-case here while sibling payloads use PascalCase keys — confirm against the wire format.
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new VNetVpnDeviceResult();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
JToken vendorsArray = responseDoc["Vendors"];
if (vendorsArray != null && vendorsArray.Type != JTokenType.Null)
{
foreach (JToken vendorsValue in ((JArray)vendorsArray))
{
Vendor vendorInstance = new Vendor();
result.Vendors.Add(vendorInstance);
JToken nameValue = vendorsValue["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
vendorInstance.Name = nameInstance;
}
JToken platformsArray = vendorsValue["Platforms"];
if (platformsArray != null && platformsArray.Type != JTokenType.Null)
{
foreach (JToken platformsValue in ((JArray)platformsArray))
{
Platform platformInstance = new Platform();
vendorInstance.Platforms.Add(platformInstance);
JToken nameValue2 = platformsValue["name"];
if (nameValue2 != null && nameValue2.Type != JTokenType.Null)
{
string nameInstance2 = ((string)nameValue2);
platformInstance.Name = nameInstance2;
}
JToken osFamiliesArray = platformsValue["OsFamilies"];
if (osFamiliesArray != null && osFamiliesArray.Type != JTokenType.Null)
{
foreach (JToken osFamiliesValue in ((JArray)osFamiliesArray))
{
OsFamily osFamilyInstance = new OsFamily();
platformInstance.OsFamilies.Add(osFamilyInstance);
JToken nameValue3 = osFamiliesValue["name"];
if (nameValue3 != null && nameValue3.Type != JTokenType.Null)
{
string nameInstance3 = ((string)nameValue3);
osFamilyInstance.Name = nameInstance3;
}
}
}
}
}
}
}
}
}
// Propagate transport metadata onto the result object.
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Gets a list of virtual network configurations.
/// </summary>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List of virtual networks.
/// </returns>
public async Task<VNetListResult> ListAsync(CancellationToken cancellationToken)
{
// Validate
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
TracingAdapter.Enter(invocationId, this, "ListAsync", tracingParameters);
}
// Construct URL
// Shape: /{subscriptionId}/services/{rdfeNamespace}/vnets?api-version=2014-09-01
string url = "";
url = url + "/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/services/";
if (this.Client.RdfeNamespace != null)
{
url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
}
url = url + "/vnets";
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
// NOTE(review): x-ms-version header (2014-08-01) differs from the api-version query value
// (2014-09-01) — confirm this mismatch is intentional in the generated client.
httpRequest.Headers.Add("x-ms-version", "2014-08-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// Anything other than 200 OK is surfaced as a CloudException built from the response body.
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
VNetListResult result = null;
// Deserialize Response
// The response root is a JSON array of VNet objects (no wrapping property).
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new VNetListResult();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
JToken vNetListArray = responseDoc;
if (vNetListArray != null && vNetListArray.Type != JTokenType.Null)
{
foreach (JToken vNetListValue in ((JArray)vNetListArray))
{
VNet vNetInstance = new VNet();
result.VNetList.Add(vNetInstance);
JToken nameValue = vNetListValue["Name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
vNetInstance.Name = nameInstance;
}
JToken stateValue = vNetListValue["State"];
if (stateValue != null && stateValue.Type != JTokenType.Null)
{
// "State" arrives as a JSON integer and is cast directly to the VNetState enum.
VNetState stateInstance = ((VNetState)(((int)stateValue)));
vNetInstance.State = stateInstance;
}
JToken stateInfoValue = vNetListValue["StateInfo"];
if (stateInfoValue != null && stateInfoValue.Type != JTokenType.Null)
{
string stateInfoInstance = ((string)stateInfoValue);
vNetInstance.StateInfo = stateInfoInstance;
}
JToken sharedKeyValue = vNetListValue["SharedKey"];
if (sharedKeyValue != null && sharedKeyValue.Type != JTokenType.Null)
{
string sharedKeyInstance = ((string)sharedKeyValue);
vNetInstance.SharedKey = sharedKeyInstance;
}
JToken upgradeInProgressValue = vNetListValue["UpgradeInProgress"];
if (upgradeInProgressValue != null && upgradeInProgressValue.Type != JTokenType.Null)
{
bool upgradeInProgressInstance = ((bool)upgradeInProgressValue);
vNetInstance.UpgradeInProgress = upgradeInProgressInstance;
}
JToken regionValue = vNetListValue["Region"];
if (regionValue != null && regionValue.Type != JTokenType.Null)
{
string regionInstance = ((string)regionValue);
vNetInstance.Region = regionInstance;
}
JToken vnetAddressSpacesArray = vNetListValue["VnetAddressSpaces"];
if (vnetAddressSpacesArray != null && vnetAddressSpacesArray.Type != JTokenType.Null)
{
foreach (JToken vnetAddressSpacesValue in ((JArray)vnetAddressSpacesArray))
{
vNetInstance.VnetAddressSpaces.Add(((string)vnetAddressSpacesValue));
}
}
JToken gatewaySubnetValue = vNetListValue["GatewaySubnet"];
if (gatewaySubnetValue != null && gatewaySubnetValue.Type != JTokenType.Null)
{
string gatewaySubnetInstance = ((string)gatewaySubnetValue);
vNetInstance.GatewaySubnet = gatewaySubnetInstance;
}
JToken localAddressSpacesArray = vNetListValue["LocalAddressSpaces"];
if (localAddressSpacesArray != null && localAddressSpacesArray.Type != JTokenType.Null)
{
foreach (JToken localAddressSpacesValue in ((JArray)localAddressSpacesArray))
{
vNetInstance.LocalAddressSpaces.Add(((string)localAddressSpacesValue));
}
}
JToken vpnAddressValue = vNetListValue["VpnAddress"];
if (vpnAddressValue != null && vpnAddressValue.Type != JTokenType.Null)
{
string vpnAddressInstance = ((string)vpnAddressValue);
vNetInstance.VpnAddress = vpnAddressInstance;
}
JToken dnsServersArray = vNetListValue["DnsServers"];
if (dnsServersArray != null && dnsServersArray.Type != JTokenType.Null)
{
foreach (JToken dnsServersValue in ((JArray)dnsServersArray))
{
vNetInstance.DnsServers.Add(((string)dnsServersValue));
}
}
JToken gatewayIpValue = vNetListValue["GatewayIp"];
if (gatewayIpValue != null && gatewayIpValue.Type != JTokenType.Null)
{
string gatewayIpInstance = ((string)gatewayIpValue);
vNetInstance.GatewayIp = gatewayIpInstance;
}
JToken gatewayTypeValue = vNetListValue["GatewayType"];
if (gatewayTypeValue != null && gatewayTypeValue.Type != JTokenType.Null)
{
// "GatewayType" arrives as a JSON integer and is cast directly to the GatewayType enum.
GatewayType gatewayTypeInstance = ((GatewayType)(((int)gatewayTypeValue)));
vNetInstance.GatewayType = gatewayTypeInstance;
}
}
}
}
}
// Propagate transport metadata onto the result object.
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Posts a request to asynchronously generate a new shared key for the
/// specified virtual network and returns a tracking ID for the
/// operation. Use GetResetVpnSharedKeyOperationStatus with this
/// tracking ID to determine when the operation has completed. When
/// the status returns Success, call Get passing the virtual network
/// name and true for the includeSharedKey parameter. This will return
/// a VNetResult containing the new key. Call GetVpnConfigScript to get
/// back the configuration script to configure the VPN device. Run
/// this script using the new key on the VPN device.
/// </summary>
/// <param name='vNetName'>
/// Required. RemoteApp virtual network name.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The response containing the operation tracking id.
/// </returns>
public async Task<OperationResultWithTrackingId> ResetVpnSharedKeyAsync(string vNetName, CancellationToken cancellationToken)
{
// Validate
if (vNetName == null)
{
throw new ArgumentNullException("vNetName");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("vNetName", vNetName);
TracingAdapter.Enter(invocationId, this, "ResetVpnSharedKeyAsync", tracingParameters);
}
// Construct URL
// Shape: /{subscriptionId}/services/{rdfeNamespace}/vnets/{vNetName}/resetsharedkey?api-version=2014-09-01
string url = "";
url = url + "/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/services/";
if (this.Client.RdfeNamespace != null)
{
url = url + Uri.EscapeDataString(this.Client.RdfeNamespace);
}
url = url + "/vnets/";
url = url + Uri.EscapeDataString(vNetName);
url = url + "/resetsharedkey";
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-09-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Post;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
// NOTE(review): x-ms-version header (2014-08-01) differs from the api-version query value
// (2014-09-01) — confirm this mismatch is intentional in the generated client.
httpRequest.Headers.Add("x-ms-version", "2014-08-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
// This long-running operation accepts both 200 OK and 202 Accepted; anything else is an error.
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
OperationResultWithTrackingId result = null;
// Deserialize Response
// No body is parsed; the interesting output is carried in response headers.
result = new OperationResultWithTrackingId();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// The tracking id used with GetResetVpnSharedKeyOperationStatus comes from this custom header.
if (httpResponse.Headers.Contains("x-remoteapp-operation-tracking-id"))
{
result.TrackingId = httpResponse.Headers.GetValues("x-remoteapp-operation-tracking-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
}
}
| |
using System;
using System.ComponentModel;
using EmergeTk.Model;
namespace EmergeTk.Widgets.Html
{
[DefaultProperty("Text")]
// Server-side text input widget. Property setters push state to the browser-side
// counterpart through SetClientAttribute / ClientArguments / InvokeClientMethod,
// so the exact attribute names and call order form a wire protocol with the
// client JavaScript — treat them as part of the contract.
public class TextBox : Widget, IDataBindable
{
//isPassword, isDisabled, rows, cols
// NOTE(review): UpdateFormat appears unused — text updates go through
// InvokeClientMethod("SetText", ...) instead; confirm before removing.
private const string UpdateFormat = "{0}.SetText('{1}');";
private bool isPassword = false, isDisabled = false, isRich = false,
isCodeView = false, isInline = false, autoUpdate = true, saveOnChange = false;
private int rows = 1, cols = 0;
// currentTextOnClient mirrors the value last known to exist in the browser,
// so redundant SetText round-trips can be skipped.
private string text = "", currentTextOnClient;
private string helpText;
// Current text value. Setting pushes the new value to the client only when the
// widget is rendered and the client copy is stale.
public string Text
{
get
{
return text;
}
set
{
if (text != value)
{
//log.Debug( "setting text", text, value );
text = value;
if (this.rendered && text != currentTextOnClient)
InvokeClientMethod("SetText", Util.ToJavaScriptString(value));
currentTextOnClient = text;
ClientArguments["defaultValue"] = Util.Quotize(Util.FormatForClient(value));
RaisePropertyChangedNotification("Text");
}
}
}
// Enables the rich-text (HTML) editor mode on the client.
public bool IsRich
{
get
{
return isRich;
}
set
{
isRich = value;
RaisePropertyChangedNotification("IsRich");
SetClientAttribute("isRich", isRich ? "1" : "0");
}
}
// Renders the editor inline with surrounding content.
public bool IsInline
{
get
{
return isInline;
}
set
{
isInline = value;
SetClientAttribute("isInline", isInline ? "1" : "0");
RaisePropertyChangedNotification("IsInline");
// NOTE(review): re-sends the autoUpdate attribute here — looks like a
// copy-paste remnant of the AutoUpdate setter; confirm whether the client
// actually needs it when isInline changes.
SetClientAttribute("autoUpdate", autoUpdate ? "1" : "0");
}
}
// When true, Changed() persists the bound Record immediately.
public bool SaveOnChange
{
get { return saveOnChange; }
set { saveOnChange = value; }
}
// Controls whether the client pushes edits to the server automatically.
public bool AutoUpdate
{
get
{
return autoUpdate;
}
set
{
autoUpdate = value;
SetClientAttribute("autoUpdate", autoUpdate ? "1" : "0");
RaisePropertyChangedNotification("AutoUpdate");
}
}
// Disables user input on the client control.
public bool IsDisabled
{
get
{
return isDisabled;
}
set
{
isDisabled = value;
ClientArguments["isDisabled"] = isDisabled ? "1" : "0";
RaisePropertyChangedNotification("IsDisabled");
}
}
// Masks input (password field) on the client.
public bool IsPassword
{
get
{
return isPassword;
}
set
{
isPassword = value;
ClientArguments["isPassword"] = isPassword ? "1" : "0";
RaisePropertyChangedNotification("IsPassword");
}
}
// Switches the client editor into a code-view mode.
public bool IsCodeView
{
get
{
return isCodeView;
}
set
{
isCodeView = value;
ClientArguments["isCodeView"] = isCodeView ? "1" : "0";
RaisePropertyChangedNotification("IsCodeView");
}
}
// Number of visible text rows (1 renders a single-line input).
public int Rows
{
get
{
return rows;
}
set
{
rows = value;
ClientArguments["rows"] = rows.ToString();
RaisePropertyChangedNotification("Rows");
}
}
// Visible column width; 0 means unspecified.
public int Columns
{
get
{
return cols;
}
set
{
cols = value;
ClientArguments["cols"] = cols.ToString();
RaisePropertyChangedNotification("Columns");
}
}
// Accepts a new value arriving from the client: records it as the client's
// current text, updates Text, fires change events, and optionally saves the
// bound Record (notifying the user when exactly one binding exists).
public void Changed(string newText)
{
currentTextOnClient = newText;
string oldText = Text;
Text = newText;
InvokeChangedEvent(oldText, newText);
if( saveOnChange && Record != null )
{
Record.Save();
if( this.Bindings != null && this.Bindings.Count == 1 )
{
RootContext.ClearNotifications();
RootContext.SendClientNotification("", this.Bindings[0].Fields[0] + " saved.");
}
}
}
private event EventHandler<KeyPressEventArgs> onKeyUp;
private event EventHandler<ClickEventArgs> onEnter;
// Subscribing flips the matching client argument so the browser starts
// forwarding the corresponding DOM event to the server.
public event EventHandler<KeyPressEventArgs> OnKeyUp
{
add { onKeyUp += value; ClientArguments["onKeyUp"] = "1"; }
remove { onKeyUp -= value; }
}
public event EventHandler<ClickEventArgs> OnEnter
{
add { onEnter += value; ClientArguments["onEnter"] = "1"; }
remove { onEnter -= value; }
}
public TextBox(string id, string text)
{
this.Id = id;
this.text = text;
}
public TextBox() {ClientArguments["defaultValue"] = "''"; }
// Dispatches events posted back from the client widget; unrecognized events
// fall through to the base Widget handler.
public override void HandleEvents(string evt, string args)
{
if (evt == "OnChanged")
{
//System.Console.WriteLine("changing {0} from {1} to {2}", this.Id, this.Text, args);
Changed(args);
RootContext.SendCommand("removeWaitFor({0});", this.ClientId);
}
else if (evt == "OnEnter")
{
ClickEventArgs ea = new ClickEventArgs(this );
Changed(args);
if (onEnter != null)
onEnter(this, ea );
}
else if( evt == "OnKeyUp" )
{
KeyPressEventArgs ea = new KeyPressEventArgs(this, args );
Changed(ea.Value);
if (onKeyUp != null)
onKeyUp(this, ea );
}
else
{
base.HandleEvents(evt, args);
}
}
public override string ToString()
{
return Text;
}
// IDataBindable value; setting routes through Changed() so events fire.
override public object Value
{
get
{
return this.Text;
}
set
{
if (value != null)
Changed(value.ToString());
RaisePropertyChangedNotification("Value");
}
}
public bool IsPassThrough
{
get { return false; }
}
override public string DefaultProperty
{
get { return "Text"; }
}
private Binding binding;
public Binding Binding
{
get { return binding; }
set { binding = value; }
}
// Tooltip/help text pushed to the client as a property.
public string HelpText {
get {
return helpText;
}
set {
helpText = value;
SetClientProperty("helpText",Util.ToJavaScriptString(helpText));
}
}
// Moves client-side focus to this input.
public void Focus()
{
InvokeClientMethod("Focus");
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using Xunit;
namespace System.Linq.Expressions.Tests
{
public static class ArrayLengthTests
{
#region Bool tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckBoolArrayLengthTest(bool useInterpreter)
{
    // Exercise ArrayLength over empty, single-element and multi-element arrays.
    foreach (int size in new[] { 0, 1, 5 })
    {
        CheckBoolArrayLengthExpression(GenerateBoolArray(size), useInterpreter);
    }
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionBoolArrayLengthTest(bool useInterpreter)
{
    // Taking the length of a null array must surface as NullReferenceException.
    Action lengthOfNull = () => CheckBoolArrayLengthExpression(null, useInterpreter);
    Assert.Throws<NullReferenceException>(lengthOfNull);
}
#endregion
#region Byte tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckByteArrayLengthTest(bool useInterpreter)
{
    // Exercise ArrayLength over empty, single-element and multi-element arrays.
    foreach (int size in new[] { 0, 1, 5 })
    {
        CheckByteArrayLengthExpression(GenerateByteArray(size), useInterpreter);
    }
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionByteArrayLengthTest(bool useInterpreter)
{
    // Taking the length of a null array must surface as NullReferenceException.
    Action lengthOfNull = () => CheckByteArrayLengthExpression(null, useInterpreter);
    Assert.Throws<NullReferenceException>(lengthOfNull);
}
#endregion
#region Custom tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckCustomArrayLengthTest(bool useInterpreter)
{
    // Exercise ArrayLength over empty, single-element and multi-element arrays.
    foreach (int size in new[] { 0, 1, 5 })
    {
        CheckCustomArrayLengthExpression(GenerateCustomArray(size), useInterpreter);
    }
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionCustomArrayLengthTest(bool useInterpreter)
{
    // Taking the length of a null array must surface as NullReferenceException.
    Action lengthOfNull = () => CheckCustomArrayLengthExpression(null, useInterpreter);
    Assert.Throws<NullReferenceException>(lengthOfNull);
}
#endregion
#region Char tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckCharArrayLengthTest(bool useInterpreter)
{
CheckCharArrayLengthExpression(GenerateCharArray(0), useInterpreter);
CheckCharArrayLengthExpression(GenerateCharArray(1), useInterpreter);
CheckCharArrayLengthExpression(GenerateCharArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionCharArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckCharArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Custom2 tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckCustom2ArrayLengthTest(bool useInterpreter)
{
CheckCustom2ArrayLengthExpression(GenerateCustom2Array(0), useInterpreter);
CheckCustom2ArrayLengthExpression(GenerateCustom2Array(1), useInterpreter);
CheckCustom2ArrayLengthExpression(GenerateCustom2Array(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionCustom2ArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckCustom2ArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Decimal tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckDecimalArrayLengthTest(bool useInterpreter)
{
CheckDecimalArrayLengthExpression(GenerateDecimalArray(0), useInterpreter);
CheckDecimalArrayLengthExpression(GenerateDecimalArray(1), useInterpreter);
CheckDecimalArrayLengthExpression(GenerateDecimalArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionDecimalArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckDecimalArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Delegate tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckDelegateArrayLengthTest(bool useInterpreter)
{
CheckDelegateArrayLengthExpression(GenerateDelegateArray(0), useInterpreter);
CheckDelegateArrayLengthExpression(GenerateDelegateArray(1), useInterpreter);
CheckDelegateArrayLengthExpression(GenerateDelegateArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionDelegateArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckDelegateArrayLengthExpression(null, useInterpreter));
}
#endregion
#region double tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckdoubleArrayLengthTest(bool useInterpreter)
{
CheckDoubleArrayLengthExpression(GeneratedoubleArray(0), useInterpreter);
CheckDoubleArrayLengthExpression(GeneratedoubleArray(1), useInterpreter);
CheckDoubleArrayLengthExpression(GeneratedoubleArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptiondoubleArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckDoubleArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Enum tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckEnumArrayLengthTest(bool useInterpreter)
{
CheckEnumArrayLengthExpression(GenerateEnumArray(0), useInterpreter);
CheckEnumArrayLengthExpression(GenerateEnumArray(1), useInterpreter);
CheckEnumArrayLengthExpression(GenerateEnumArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionEnumArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckEnumArrayLengthExpression(null, useInterpreter));
}
#endregion
#region EnumLong tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckEnumLongArrayLengthTest(bool useInterpreter)
{
CheckEnumLongArrayLengthExpression(GenerateEnumLongArray(0), useInterpreter);
CheckEnumLongArrayLengthExpression(GenerateEnumLongArray(1), useInterpreter);
CheckEnumLongArrayLengthExpression(GenerateEnumLongArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionEnumLongArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckEnumLongArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Float tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckFloatArrayLengthTest(bool useInterpreter)
{
CheckFloatArrayLengthExpression(GenerateFloatArray(0), useInterpreter);
CheckFloatArrayLengthExpression(GenerateFloatArray(1), useInterpreter);
CheckFloatArrayLengthExpression(GenerateFloatArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionFloatArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckFloatArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Func tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckFuncArrayLengthTest(bool useInterpreter)
{
CheckFuncArrayLengthExpression(GenerateFuncArray(0), useInterpreter);
CheckFuncArrayLengthExpression(GenerateFuncArray(1), useInterpreter);
CheckFuncArrayLengthExpression(GenerateFuncArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionFuncArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckFuncArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Interface tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckInterfaceArrayLengthTest(bool useInterpreter)
{
CheckInterfaceArrayLengthExpression(GenerateInterfaceArray(0), useInterpreter);
CheckInterfaceArrayLengthExpression(GenerateInterfaceArray(1), useInterpreter);
CheckInterfaceArrayLengthExpression(GenerateInterfaceArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionInterfaceArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckInterfaceArrayLengthExpression(null, useInterpreter));
}
#endregion
#region IEquatable tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckIEquatableArrayLengthTest(bool useInterpreter)
{
CheckIEquatableArrayLengthExpression(GenerateIEquatableArray(0), useInterpreter);
CheckIEquatableArrayLengthExpression(GenerateIEquatableArray(1), useInterpreter);
CheckIEquatableArrayLengthExpression(GenerateIEquatableArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionIEquatableArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckIEquatableArrayLengthExpression(null, useInterpreter));
}
#endregion
#region IEquatable2 tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckIEquatable2ArrayLengthTest(bool useInterpreter)
{
CheckIEquatable2ArrayLengthExpression(GenerateIEquatable2Array(0), useInterpreter);
CheckIEquatable2ArrayLengthExpression(GenerateIEquatable2Array(1), useInterpreter);
CheckIEquatable2ArrayLengthExpression(GenerateIEquatable2Array(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionIEquatable2ArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckIEquatable2ArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Int tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckIntArrayLengthTest(bool useInterpreter)
{
CheckIntArrayLengthExpression(GenerateIntArray(0), useInterpreter);
CheckIntArrayLengthExpression(GenerateIntArray(1), useInterpreter);
CheckIntArrayLengthExpression(GenerateIntArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionIntArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckIntArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Long tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckLongArrayLengthTest(bool useInterpreter)
{
CheckLongArrayLengthExpression(GenerateLongArray(0), useInterpreter);
CheckLongArrayLengthExpression(GenerateLongArray(1), useInterpreter);
CheckLongArrayLengthExpression(GenerateLongArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionLongArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckLongArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Object tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckObjectArrayLengthTest(bool useInterpreter)
{
CheckObjectArrayLengthExpression(GenerateObjectArray(0), useInterpreter);
CheckObjectArrayLengthExpression(GenerateObjectArray(1), useInterpreter);
CheckObjectArrayLengthExpression(GenerateObjectArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionObjectArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckObjectArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Struct tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckStructArrayLengthTest(bool useInterpreter)
{
CheckStructArrayLengthExpression(GenerateStructArray(0), useInterpreter);
CheckStructArrayLengthExpression(GenerateStructArray(1), useInterpreter);
CheckStructArrayLengthExpression(GenerateStructArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionStructArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckStructArrayLengthExpression(null, useInterpreter));
}
#endregion
#region SByte tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckSByteArrayLengthTest(bool useInterpreter)
{
CheckSByteArrayLengthExpression(GenerateSByteArray(0), useInterpreter);
CheckSByteArrayLengthExpression(GenerateSByteArray(1), useInterpreter);
CheckSByteArrayLengthExpression(GenerateSByteArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionSByteArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckSByteArrayLengthExpression(null, useInterpreter));
}
#endregion
#region StructWithString tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckStructWithStringArrayLengthTest(bool useInterpreter)
{
CheckStructWithStringArrayLengthExpression(GenerateStructWithStringArray(0), useInterpreter);
CheckStructWithStringArrayLengthExpression(GenerateStructWithStringArray(1), useInterpreter);
CheckStructWithStringArrayLengthExpression(GenerateStructWithStringArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionStructWithStringArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckStructWithStringArrayLengthExpression(null, useInterpreter));
}
#endregion
#region StructWithStringAndStruct tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckStructWithStringAndStructArrayLengthTest(bool useInterpreter)
{
CheckStructWithStringAndStructArrayLengthExpression(GenerateStructWithStringAndStructArray(0), useInterpreter);
CheckStructWithStringAndStructArrayLengthExpression(GenerateStructWithStringAndStructArray(1), useInterpreter);
CheckStructWithStringAndStructArrayLengthExpression(GenerateStructWithStringAndStructArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionStructWithStringAndStructArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckStructWithStringAndStructArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Short tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckShortArrayLengthTest(bool useInterpreter)
{
CheckShortArrayLengthExpression(GenerateShortArray(0), useInterpreter);
CheckShortArrayLengthExpression(GenerateShortArray(1), useInterpreter);
CheckShortArrayLengthExpression(GenerateShortArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionShortArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckShortArrayLengthExpression(null, useInterpreter));
}
#endregion
#region StructWithTwoFields tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckStructWithTwoFieldsArrayLengthTest(bool useInterpreter)
{
CheckStructWithTwoFieldsArrayLengthExpression(GenerateStructWithTwoFieldsArray(0), useInterpreter);
CheckStructWithTwoFieldsArrayLengthExpression(GenerateStructWithTwoFieldsArray(1), useInterpreter);
CheckStructWithTwoFieldsArrayLengthExpression(GenerateStructWithTwoFieldsArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionStructWithTwoFieldsArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckStructWithTwoFieldsArrayLengthExpression(null, useInterpreter));
}
#endregion
#region StructWithValue tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckStructWithValueArrayLengthTest(bool useInterpreter)
{
CheckStructWithValueArrayLengthExpression(GenerateStructWithValueArray(0), useInterpreter);
CheckStructWithValueArrayLengthExpression(GenerateStructWithValueArray(1), useInterpreter);
CheckStructWithValueArrayLengthExpression(GenerateStructWithValueArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionStructWithValueArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckStructWithValueArrayLengthExpression(null, useInterpreter));
}
#endregion
#region String tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckStringArrayLengthTest(bool useInterpreter)
{
CheckStringArrayLengthExpression(GenerateStringArray(0), useInterpreter);
CheckStringArrayLengthExpression(GenerateStringArray(1), useInterpreter);
CheckStringArrayLengthExpression(GenerateStringArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionStringArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckStringArrayLengthExpression(null, useInterpreter));
}
#endregion
#region UInt tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckUIntArrayLengthTest(bool useInterpreter)
{
CheckUIntArrayLengthExpression(GenerateUIntArray(0), useInterpreter);
CheckUIntArrayLengthExpression(GenerateUIntArray(1), useInterpreter);
CheckUIntArrayLengthExpression(GenerateUIntArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionUIntArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckUIntArrayLengthExpression(null, useInterpreter));
}
#endregion
#region ULong tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckULongArrayLengthTest(bool useInterpreter)
{
CheckULongArrayLengthExpression(GenerateULongArray(0), useInterpreter);
CheckULongArrayLengthExpression(GenerateULongArray(1), useInterpreter);
CheckULongArrayLengthExpression(GenerateULongArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionULongArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckULongArrayLengthExpression(null, useInterpreter));
}
#endregion
#region UShort tests
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckUShortArrayLengthTest(bool useInterpreter)
{
CheckUShortArrayLengthExpression(GenerateUShortArray(0), useInterpreter);
CheckUShortArrayLengthExpression(GenerateUShortArray(1), useInterpreter);
CheckUShortArrayLengthExpression(GenerateUShortArray(5), useInterpreter);
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionUShortArrayLengthTest(bool useInterpreter)
{
Assert.Throws<NullReferenceException>(() => CheckUShortArrayLengthExpression(null, useInterpreter));
}
#endregion
#region Generic tests
// Instantiates the generic ArrayLength helpers for representative type arguments
// (custom class C, enums E/El, object, structs S/Scs) under each generic-constraint form.
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericCustomArrayLengthTest(bool useInterpreter) =>
    CheckGenericArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericCustomArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericEnumArrayLengthTest(bool useInterpreter) =>
    CheckGenericArrayLengthTestHelper<E>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericEnumArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericArrayLengthTestHelper<E>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericObjectArrayLengthTest(bool useInterpreter) =>
    CheckGenericArrayLengthTestHelper<object>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericObjectArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericArrayLengthTestHelper<object>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericStructArrayLengthTest(bool useInterpreter) =>
    CheckGenericArrayLengthTestHelper<S>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericStructArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericArrayLengthTestHelper<S>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericStructWithStringAndFieldArrayLengthTest(bool useInterpreter) =>
    CheckGenericArrayLengthTestHelper<Scs>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericStructWithStringAndFieldArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericArrayLengthTestHelper<Scs>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericCustomWithClassRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithClassRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericCustomWithClassRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithClassRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericObjectWithClassRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithClassRestrictionArrayLengthTestHelper<object>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericObjectWithClassRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithClassRestrictionArrayLengthTestHelper<object>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericCustomWithClassAndNewRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithClassAndNewRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericCustomWithClassAndNewRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithClassAndNewRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericObjectWithClassAndNewRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithClassAndNewRestrictionArrayLengthTestHelper<object>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericObjectWithClassAndNewRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithClassAndNewRestrictionArrayLengthTestHelper<object>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericCustomWithSubClassRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithSubClassRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericCustomWithSubClassRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithSubClassRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericCustomWithSubClassAndNewRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithSubClassAndNewRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericCustomWithSubClassAndNewRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithSubClassAndNewRestrictionArrayLengthTestHelper<C>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericEnumWithStructRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithStructRestrictionArrayLengthTestHelper<E>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericEnumWithStructRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithStructRestrictionArrayLengthTestHelper<E>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericStructWithStructRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithStructRestrictionArrayLengthTestHelper<S>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericStructWithStructRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithStructRestrictionArrayLengthTestHelper<S>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckGenericStructWithStringAndFieldWithStructRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckGenericWithStructRestrictionArrayLengthTestHelper<Scs>(useInterpreter);
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckExceptionGenericStructWithStringAndFieldWithStructRestrictionArrayLengthTest(bool useInterpreter) =>
    CheckExceptionGenericWithStructRestrictionArrayLengthTestHelper<Scs>(useInterpreter);
#endregion
#region Generic helpers
// Shared generic drivers: positive helpers exercise lengths 0/1/5;
// exception helpers pass a null array and expect NullReferenceException.
public static void CheckGenericArrayLengthTestHelper<T>(bool useInterpreter)
{
    foreach (int size in new[] { 0, 1, 5 })
        CheckGenericArrayLengthExpression<T>(GenerateGenericArray<T>(size), useInterpreter);
}
public static void CheckExceptionGenericArrayLengthTestHelper<T>(bool useInterpreter) =>
    Assert.Throws<NullReferenceException>(() => CheckGenericArrayLengthExpression<T>(null, useInterpreter));
public static void CheckGenericWithClassRestrictionArrayLengthTestHelper<Tc>(bool useInterpreter) where Tc : class
{
    foreach (int size in new[] { 0, 1, 5 })
        CheckGenericWithClassRestrictionArrayLengthExpression<Tc>(GenerateGenericWithClassRestrictionArray<Tc>(size), useInterpreter);
}
public static void CheckExceptionGenericWithClassRestrictionArrayLengthTestHelper<Tc>(bool useInterpreter) where Tc : class =>
    Assert.Throws<NullReferenceException>(() => CheckGenericWithClassRestrictionArrayLengthExpression<Tc>(null, useInterpreter));
public static void CheckGenericWithClassAndNewRestrictionArrayLengthTestHelper<Tcn>(bool useInterpreter) where Tcn : class, new()
{
    foreach (int size in new[] { 0, 1, 5 })
        CheckGenericWithClassAndNewRestrictionArrayLengthExpression<Tcn>(GenerateGenericWithClassAndNewRestrictionArray<Tcn>(size), useInterpreter);
}
public static void CheckExceptionGenericWithClassAndNewRestrictionArrayLengthTestHelper<Tcn>(bool useInterpreter) where Tcn : class, new() =>
    Assert.Throws<NullReferenceException>(() => CheckGenericWithClassAndNewRestrictionArrayLengthExpression<Tcn>(null, useInterpreter));
public static void CheckGenericWithSubClassRestrictionArrayLengthTestHelper<TC>(bool useInterpreter) where TC : C
{
    foreach (int size in new[] { 0, 1, 5 })
        CheckGenericWithSubClassRestrictionArrayLengthExpression<TC>(GenerateGenericWithSubClassRestrictionArray<TC>(size), useInterpreter);
}
public static void CheckExceptionGenericWithSubClassRestrictionArrayLengthTestHelper<TC>(bool useInterpreter) where TC : C =>
    Assert.Throws<NullReferenceException>(() => CheckGenericWithSubClassRestrictionArrayLengthExpression<TC>(null, useInterpreter));
public static void CheckGenericWithSubClassAndNewRestrictionArrayLengthTestHelper<TCn>(bool useInterpreter) where TCn : C, new()
{
    foreach (int size in new[] { 0, 1, 5 })
        CheckGenericWithSubClassAndNewRestrictionArrayLengthExpression<TCn>(GenerateGenericWithSubClassAndNewRestrictionArray<TCn>(size), useInterpreter);
}
public static void CheckExceptionGenericWithSubClassAndNewRestrictionArrayLengthTestHelper<TCn>(bool useInterpreter) where TCn : C, new() =>
    Assert.Throws<NullReferenceException>(() => CheckGenericWithSubClassAndNewRestrictionArrayLengthExpression<TCn>(null, useInterpreter));
public static void CheckGenericWithStructRestrictionArrayLengthTestHelper<Ts>(bool useInterpreter) where Ts : struct
{
    foreach (int size in new[] { 0, 1, 5 })
        CheckGenericWithStructRestrictionArrayLengthExpression<Ts>(GenerateGenericWithStructRestrictionArray<Ts>(size), useInterpreter);
}
public static void CheckExceptionGenericWithStructRestrictionArrayLengthTestHelper<Ts>(bool useInterpreter) where Ts : struct =>
    Assert.Throws<NullReferenceException>(() => CheckGenericWithStructRestrictionArrayLengthExpression<Ts>(null, useInterpreter));
#endregion
#region Generate array
private static bool[] GenerateBoolArray(int size)
{
bool[] array = new bool[] { true, false };
bool[] result = new bool[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static byte[] GenerateByteArray(int size)
{
byte[] array = new byte[] { 0, 1, byte.MaxValue };
byte[] result = new byte[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static C[] GenerateCustomArray(int size)
{
C[] array = new C[] { null, new C(), new D(), new D(0), new D(5) };
C[] result = new C[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static char[] GenerateCharArray(int size)
{
char[] array = new char[] { '\0', '\b', 'A', '\uffff' };
char[] result = new char[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static D[] GenerateCustom2Array(int size)
{
D[] array = new D[] { null, new D(), new D(0), new D(5) };
D[] result = new D[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static decimal[] GenerateDecimalArray(int size)
{
decimal[] array = new decimal[] { decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue };
decimal[] result = new decimal[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static Delegate[] GenerateDelegateArray(int size)
{
Delegate[] array = new Delegate[] { null, (Func<object>)delegate () { return null; }, (Func<int, int>)delegate (int i) { return i + 1; }, (Action<object>)delegate { } };
Delegate[] result = new Delegate[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static double[] GeneratedoubleArray(int size)
{
double[] array = new double[] { 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN };
double[] result = new double[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static E[] GenerateEnumArray(int size)
{
E[] array = new E[] { (E)0, E.A, E.B, (E)int.MaxValue, (E)int.MinValue };
E[] result = new E[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static El[] GenerateEnumLongArray(int size)
{
El[] array = new El[] { (El)0, El.A, El.B, (El)long.MaxValue, (El)long.MinValue };
El[] result = new El[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static float[] GenerateFloatArray(int size)
{
float[] array = new float[] { 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN };
float[] result = new float[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static Func<object>[] GenerateFuncArray(int size)
{
Func<object>[] array = new Func<object>[] { null, (Func<object>)delegate () { return null; } };
Func<object>[] result = new Func<object>[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static I[] GenerateInterfaceArray(int size)
{
I[] array = new I[] { null, new C(), new D(), new D(0), new D(5) };
I[] result = new I[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static IEquatable<C>[] GenerateIEquatableArray(int size)
{
IEquatable<C>[] array = new IEquatable<C>[] { null, new C(), new D(), new D(0), new D(5) };
IEquatable<C>[] result = new IEquatable<C>[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static IEquatable<D>[] GenerateIEquatable2Array(int size)
{
IEquatable<D>[] array = new IEquatable<D>[] { null, new D(), new D(0), new D(5) };
IEquatable<D>[] result = new IEquatable<D>[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static int[] GenerateIntArray(int size)
{
int[] array = new int[] { 0, 1, -1, int.MinValue, int.MaxValue };
int[] result = new int[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static long[] GenerateLongArray(int size)
{
long[] array = new long[] { 0, 1, -1, long.MinValue, long.MaxValue };
long[] result = new long[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static object[] GenerateObjectArray(int size)
{
object[] array = new object[] { null, new object(), new C(), new D(3) };
object[] result = new object[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static S[] GenerateStructArray(int size)
{
S[] array = new S[] { default(S), new S() };
S[] result = new S[size];
for (int i = 0; i < size; i++)
{
result[i] = array[i % array.Length];
}
return result;
}
private static sbyte[] GenerateSByteArray(int size)
{
    // Fill the result by cycling through a fixed set of boundary values.
    sbyte[] candidates = { 0, 1, -1, sbyte.MinValue, sbyte.MaxValue };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Sc[] GenerateStructWithStringArray(int size)
{
    // Fill the result by cycling through a fixed set of representative values.
    Sc[] candidates = { default(Sc), new Sc(), new Sc(null) };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Scs[] GenerateStructWithStringAndStructArray(int size)
{
    // Fill the result by cycling through a fixed set of representative values.
    Scs[] candidates = { default(Scs), new Scs(), new Scs(null, new S()) };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static short[] GenerateShortArray(int size)
{
    // Fill the result by cycling through a fixed set of boundary values.
    short[] candidates = { 0, 1, -1, short.MinValue, short.MaxValue };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Sp[] GenerateStructWithTwoFieldsArray(int size)
{
    // Fill the result by cycling through a fixed set of representative values.
    Sp[] candidates = { default(Sp), new Sp(), new Sp(5, 5.0) };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Ss[] GenerateStructWithValueArray(int size)
{
    // Fill the result by cycling through a fixed set of representative values.
    Ss[] candidates = { default(Ss), new Ss(), new Ss(new S()) };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static string[] GenerateStringArray(int size)
{
    // Fill the result by cycling through null, empty, and short strings.
    string[] candidates = { null, "", "a", "foo" };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static uint[] GenerateUIntArray(int size)
{
    // Fill the result by cycling through a fixed set of boundary values.
    uint[] candidates = { 0, 1, uint.MaxValue };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static ulong[] GenerateULongArray(int size)
{
    // Fill the result by cycling through a fixed set of boundary values.
    ulong[] candidates = { 0, 1, ulong.MaxValue };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static ushort[] GenerateUShortArray(int size)
{
    // Fill the result by cycling through a fixed set of boundary values.
    ushort[] candidates = { 0, 1, ushort.MaxValue };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static T[] GenerateGenericArray<T>(int size)
{
    // Only one candidate exists for an unconstrained T: its default value.
    T[] candidates = { default(T) };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Tc[] GenerateGenericWithClassRestrictionArray<Tc>(int size) where Tc : class
{
    // For a class-constrained Tc both candidates are null (default(Tc) == null).
    Tc[] candidates = { null, default(Tc) };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Tcn[] GenerateGenericWithClassAndNewRestrictionArray<Tcn>(int size) where Tcn : class, new()
{
    // Candidates: two nulls (null and default(Tcn)) plus a single shared new instance.
    Tcn[] candidates = { null, default(Tcn), new Tcn() };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static TC[] GenerateGenericWithSubClassRestrictionArray<TC>(int size) where TC : C
{
    // Candidates: two nulls and a freshly constructed C downcast to TC.
    TC[] candidates = { null, default(TC), (TC)new C() };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static TCn[] GenerateGenericWithSubClassAndNewRestrictionArray<TCn>(int size) where TCn : C, new()
{
    // Candidates: two nulls, a new TCn, and a freshly constructed C downcast to TCn.
    TCn[] candidates = { null, default(TCn), new TCn(), (TCn)new C() };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
private static Ts[] GenerateGenericWithStructRestrictionArray<Ts>(int size) where Ts : struct
{
    // For a struct-constrained Ts both candidates are the zeroed value.
    Ts[] candidates = { default(Ts), new Ts() };
    return Enumerable.Range(0, size).Select(i => candidates[i % candidates.Length]).ToArray();
}
#endregion
#region Check length expression
private static void CheckBoolArrayLengthExpression(bool[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(bool[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckByteArrayLengthExpression(byte[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(byte[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckCustomArrayLengthExpression(C[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(C[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckCharArrayLengthExpression(char[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(char[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckCustom2ArrayLengthExpression(D[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(D[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckDecimalArrayLengthExpression(decimal[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(decimal[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckDelegateArrayLengthExpression(Delegate[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Delegate[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckDoubleArrayLengthExpression(double[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(double[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckEnumArrayLengthExpression(E[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(E[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckEnumLongArrayLengthExpression(El[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(El[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckFloatArrayLengthExpression(float[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(float[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckFuncArrayLengthExpression(Func<object>[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Func<object>[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckInterfaceArrayLengthExpression(I[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(I[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckIEquatableArrayLengthExpression(IEquatable<C>[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(IEquatable<C>[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckIEquatable2ArrayLengthExpression(IEquatable<D>[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(IEquatable<D>[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckIntArrayLengthExpression(int[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(int[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckLongArrayLengthExpression(long[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(long[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckObjectArrayLengthExpression(object[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(object[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckStructArrayLengthExpression(S[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(S[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckSByteArrayLengthExpression(sbyte[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(sbyte[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckStructWithStringArrayLengthExpression(Sc[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Sc[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckStructWithStringAndStructArrayLengthExpression(Scs[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Scs[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckShortArrayLengthExpression(short[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(short[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckStructWithTwoFieldsArrayLengthExpression(Sp[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Sp[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckStructWithValueArrayLengthExpression(Ss[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Ss[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckStringArrayLengthExpression(string[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(string[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckUIntArrayLengthExpression(uint[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(uint[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckULongArrayLengthExpression(ulong[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(ulong[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckUShortArrayLengthExpression(ushort[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(ushort[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckGenericArrayLengthExpression<T>(T[] array, bool useInterpreter)
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(T[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckGenericWithClassRestrictionArrayLengthExpression<Tc>(Tc[] array, bool useInterpreter) where Tc : class
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Tc[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckGenericWithClassAndNewRestrictionArrayLengthExpression<Tcn>(Tcn[] array, bool useInterpreter) where Tcn : class, new()
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Tcn[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckGenericWithSubClassRestrictionArrayLengthExpression<TC>(TC[] array, bool useInterpreter) where TC : C
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(TC[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckGenericWithSubClassAndNewRestrictionArrayLengthExpression<TCn>(TCn[] array, bool useInterpreter) where TCn : C, new()
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(TCn[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
private static void CheckGenericWithStructRestrictionArrayLengthExpression<Ts>(Ts[] array, bool useInterpreter) where Ts : struct
{
    // Compile () => constant(array).Length and verify it reports the true length.
    var lambda = Expression.Lambda<Func<int>>(
        Expression.ArrayLength(Expression.Constant(array, typeof(Ts[]))),
        Enumerable.Empty<ParameterExpression>());
    Assert.Equal(array.Length, lambda.Compile(useInterpreter)());
}
#endregion
#region ToString
[Fact]
public static void ToStringTest()
{
    // ArrayLength nodes render as "ArrayLength(<operand>)".
    UnaryExpression lengthNode = Expression.ArrayLength(Expression.Parameter(typeof(int[]), "xs"));
    Assert.Equal("ArrayLength(xs)", lengthNode.ToString());
}
#endregion
[Fact]
public static void NullArray()
{
    // A null operand must be rejected up front with the parameter name "array".
    Expression nullOperand = null;
    Assert.Throws<ArgumentNullException>("array", () => Expression.ArrayLength(nullOperand));
}
[Fact]
public static void IsNotArray()
{
    // An operand whose static type is not an array must be rejected.
    Assert.Throws<ArgumentException>("array", () => Expression.ArrayLength(Expression.Constant(8)));
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void ArrayTypeArrayAllowed(bool useInterpreter)
{
    // A constant whose runtime type is a vector (SZ-array) is accepted even
    // when the variable is statically typed as Array.
    Array values = new[] { 1, 2, 3 };
    Expression<Func<int>> lambda =
        Expression.Lambda<Func<int>>(Expression.ArrayLength(Expression.Constant(values)));
    Assert.Equal(3, lambda.Compile(useInterpreter)());
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void ArrayExplicitlyTypeArrayNotAllowed(bool useInterpreter)
{
    // When the constant is explicitly typed as the abstract Array type, the
    // rank is unknown statically, so the operand must be rejected.
    Expression abstractArray = Expression.Constant(new[] { 1, 2, 3 }, typeof(Array));
    Assert.Throws<ArgumentException>("array", () => Expression.ArrayLength(abstractArray));
}
[Fact]
public static void ArrayTypeArrayNotAllowedIfNotSZArray()
{
    // Multi-dimensional arrays are rejected...
    Array rectangular = new[,] { { 1, 2, 3 }, { 1, 2, 2 } };
    Assert.Throws<ArgumentException>("array", () => Expression.ArrayLength(Expression.Constant(rectangular)));
    // ...as are rank-1 arrays with a non-zero lower bound (not SZ-arrays).
    Array nonZeroBased = Array.CreateInstance(typeof(int), new[] { 3 }, new[] { -1 });
    Assert.Throws<ArgumentException>("array", () => Expression.ArrayLength(Expression.Constant(nonZeroBased)));
}
[Fact]
public static void UnreadableArray()
{
    // A write-only property cannot be read, so it is not a valid operand.
    Expression writeOnlyArray =
        Expression.Property(null, typeof(Unreadable<int[]>), nameof(Unreadable<int>.WriteOnly));
    Assert.Throws<ArgumentException>(() => Expression.ArrayLength(writeOnlyArray));
}
private static IEnumerable<object[]> TestArrays()
{
    // Squares give a spread of sizes: 0, 1, 4, 9, 16, 25.
    for (int i = 0; i < 6; i++)
    {
        yield return new object[] { new int[i * i] };
    }
}
[Theory, PerCompilationType(nameof(TestArrays))]
public static void MakeUnaryArrayLength(int[] array, bool useInterpreter)
{
    // MakeUnary with ExpressionType.ArrayLength must route to ArrayLength.
    UnaryExpression node = Expression.MakeUnary(ExpressionType.ArrayLength, Expression.Constant(array), null);
    Func<int> func = Expression.Lambda<Func<int>>(node).Compile(useInterpreter);
    Assert.Equal(array.Length, func());
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Hyak.Common.Internals;
using Microsoft.Azure;
using Microsoft.Azure.Management.Resources;
using Microsoft.Azure.Management.Resources.Models;
using Newtonsoft.Json.Linq;
namespace Microsoft.Azure.Management.Resources
{
/// <summary>
/// Operations for managing resource groups.
/// </summary>
internal partial class ResourceGroupOperations : IServiceOperations<ResourceManagementClient>, IResourceGroupOperations
{
/// <summary>
/// Initializes a new instance of the ResourceGroupOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal ResourceGroupOperations(ResourceManagementClient client)
{
    // NOTE(review): generated code performs no null check here; presumably the
    // owning ResourceManagementClient always passes itself — confirm against
    // the generator template before relying on a non-null Client.
    this._client = client;
}
// Backing field for Client; assigned once in the constructor and never mutated afterwards in this view.
private ResourceManagementClient _client;
/// <summary>
/// Gets a reference to the
/// Microsoft.Azure.Management.Resources.ResourceManagementClient.
/// </summary>
public ResourceManagementClient Client
{
    get { return this._client; }
}
/// <summary>
/// Begin deleting resource group.To determine whether the operation
/// has finished processing the request, call
/// GetLongRunningOperationStatus.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group to be deleted. The name is
/// case insensitive.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response for long running operations.
/// </returns>
public async Task<LongRunningOperationResponse> BeginDeletingAsync(string resourceGroupName, CancellationToken cancellationToken)
{
    // Issues DELETE /subscriptions/{id}/resourcegroups/{name}?api-version=2014-04-01-preview
    // and returns the long-running-operation handle (status link + retry interval).
    // Generated code: edits here will be lost on regeneration.
    // Validate
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    // NOTE(review): the "!= null" below is redundant after the guard above —
    // harmless, typical of the code generator's output.
    if (resourceGroupName != null && resourceGroupName.Length > 1000)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        TracingAdapter.Enter(invocationId, this, "BeginDeletingAsync", tracingParameters);
    }
    // Construct URL
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourcegroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-04-01-preview");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Delete;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Only 200 (deleted synchronously) and 202 (accepted, still running)
            // proceed; everything else becomes a CloudException.
            if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            LongRunningOperationResponse result = null;
            // Deserialize Response
            result = new LongRunningOperationResponse();
            result.StatusCode = statusCode;
            // Location header carries the polling URL for GetLongRunningOperationStatus.
            if (httpResponse.Headers.Contains("Location"))
            {
                result.OperationStatusLink = httpResponse.Headers.GetValues("Location").FirstOrDefault();
            }
            if (httpResponse.Headers.Contains("Retry-After"))
            {
                result.RetryAfter = int.Parse(httpResponse.Headers.GetValues("Retry-After").FirstOrDefault(), CultureInfo.InvariantCulture);
            }
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // NOTE(review): this Conflict branch is unreachable — any status other
            // than OK/Accepted already threw above. Left as generated.
            if (statusCode == HttpStatusCode.Conflict)
            {
                result.Status = OperationStatus.Failed;
            }
            if (statusCode == HttpStatusCode.OK)
            {
                result.Status = OperationStatus.Succeeded;
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Checks whether resource group exists.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group to check. The name is case
/// insensitive.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Resource group information.
/// </returns>
public async Task<ResourceGroupExistsResult> CheckExistenceAsync(string resourceGroupName, CancellationToken cancellationToken)
{
    // Issues HEAD /subscriptions/{id}/resourcegroups/{name}?api-version=2014-04-01-preview;
    // 204 NoContent means the group exists, 404 NotFound means it does not.
    // Generated code: edits here will be lost on regeneration.
    // Validate
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    // NOTE(review): the "!= null" below is redundant after the guard above —
    // harmless, typical of the code generator's output.
    if (resourceGroupName != null && resourceGroupName.Length > 1000)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        TracingAdapter.Enter(invocationId, this, "CheckExistenceAsync", tracingParameters);
    }
    // Construct URL
    // NOTE(review): unlike BeginDeletingAsync this starts without a leading '/';
    // harmless, since the join below normalizes slashes either way.
    string url = "";
    url = url + "subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourcegroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-04-01-preview");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Head;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Both expected outcomes (exists / does not exist) are non-errors here;
            // anything else is surfaced as a CloudException.
            if (statusCode != HttpStatusCode.NoContent && statusCode != HttpStatusCode.NotFound)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            ResourceGroupExistsResult result = null;
            // Deserialize Response
            result = new ResourceGroupExistsResult();
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Exists defaults to false; only a 204 marks the group as present.
            if (statusCode == HttpStatusCode.NoContent)
            {
                result.Exists = true;
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Create a resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group to be created or updated.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the create or update resource
/// group service operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Resource group information.
/// </returns>
public async Task<ResourceGroupCreateOrUpdateResult> CreateOrUpdateAsync(string resourceGroupName, ResourceGroup parameters, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (resourceGroupName != null && resourceGroupName.Length > 1000)
{
throw new ArgumentOutOfRangeException("resourceGroupName");
}
if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false)
{
throw new ArgumentOutOfRangeException("resourceGroupName");
}
if (parameters == null)
{
throw new ArgumentNullException("parameters");
}
if (parameters.Location == null)
{
throw new ArgumentNullException("parameters.Location");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("parameters", parameters);
TracingAdapter.Enter(invocationId, this, "CreateOrUpdateAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + "/subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/resourcegroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2014-04-01-preview");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Put;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Serialize Request
string requestContent = null;
JToken requestDoc = null;
JObject resourceGroupValue = new JObject();
requestDoc = resourceGroupValue;
resourceGroupValue["location"] = parameters.Location;
if (parameters.Properties != null)
{
resourceGroupValue["properties"] = JObject.Parse(parameters.Properties);
}
if (parameters.Tags != null)
{
if (parameters.Tags is ILazyCollection == false || ((ILazyCollection)parameters.Tags).IsInitialized)
{
JObject tagsDictionary = new JObject();
foreach (KeyValuePair<string, string> pair in parameters.Tags)
{
string tagsKey = pair.Key;
string tagsValue = pair.Value;
tagsDictionary[tagsKey] = tagsValue;
}
resourceGroupValue["tags"] = tagsDictionary;
}
}
if (parameters.ProvisioningState != null)
{
resourceGroupValue["provisioningState"] = parameters.ProvisioningState;
}
requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Created)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
ResourceGroupCreateOrUpdateResult result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.OK || statusCode == HttpStatusCode.Created)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new ResourceGroupCreateOrUpdateResult();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
ResourceGroupExtended resourceGroupInstance = new ResourceGroupExtended();
result.ResourceGroup = resourceGroupInstance;
JToken idValue = responseDoc["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
resourceGroupInstance.Id = idInstance;
}
JToken nameValue = responseDoc["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
resourceGroupInstance.Name = nameInstance;
}
JToken propertiesValue = responseDoc["properties"];
if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
{
JToken provisioningStateValue = propertiesValue["provisioningState"];
if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
{
string provisioningStateInstance = ((string)provisioningStateValue);
resourceGroupInstance.ProvisioningState = provisioningStateInstance;
}
}
JToken locationValue = responseDoc["location"];
if (locationValue != null && locationValue.Type != JTokenType.Null)
{
string locationInstance = ((string)locationValue);
resourceGroupInstance.Location = locationInstance;
}
JToken propertiesValue2 = responseDoc["properties"];
if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
{
string propertiesInstance = propertiesValue2.ToString(Newtonsoft.Json.Formatting.Indented);
resourceGroupInstance.Properties = propertiesInstance;
}
JToken tagsSequenceElement = ((JToken)responseDoc["tags"]);
if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
{
foreach (JProperty property in tagsSequenceElement)
{
string tagsKey2 = ((string)property.Name);
string tagsValue2 = ((string)property.Value);
resourceGroupInstance.Tags.Add(tagsKey2, tagsValue2);
}
}
JToken provisioningStateValue2 = responseDoc["provisioningState"];
if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
{
string provisioningStateInstance2 = ((string)provisioningStateValue2);
resourceGroupInstance.ProvisioningState = provisioningStateInstance2;
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Delete resource group and all of its resources.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group to be deleted. The name is
/// case insensitive.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<AzureOperationResponse> DeleteAsync(string resourceGroupName, CancellationToken cancellationToken)
{
    ResourceManagementClient client = this.Client;
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> traceParams = new Dictionary<string, object>
        {
            { "resourceGroupName", resourceGroupName }
        };
        TracingAdapter.Enter(invocationId, this, "DeleteAsync", traceParams);
    }

    // Kick off the long-running delete, then fetch the initial operation status.
    cancellationToken.ThrowIfCancellationRequested();
    LongRunningOperationResponse beginResponse = await client.ResourceGroups.BeginDeletingAsync(resourceGroupName, cancellationToken).ConfigureAwait(false);
    cancellationToken.ThrowIfCancellationRequested();
    LongRunningOperationResponse status = await client.GetLongRunningOperationStatusAsync(beginResponse.OperationStatusLink, cancellationToken).ConfigureAwait(false);

    // Initial polling delay: service's Retry-After hint, 30s when absent,
    // overridden by a non-negative client-level initial timeout.
    int delaySeconds = beginResponse.RetryAfter == 0 ? 30 : beginResponse.RetryAfter;
    if (client.LongRunningOperationInitialTimeout >= 0)
    {
        delaySeconds = client.LongRunningOperationInitialTimeout;
    }

    // Poll until the operation leaves the InProgress state.
    while (status.Status == Microsoft.Azure.OperationStatus.InProgress)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await TaskEx.Delay(delaySeconds * 1000, cancellationToken).ConfigureAwait(false);
        cancellationToken.ThrowIfCancellationRequested();
        status = await client.GetLongRunningOperationStatusAsync(beginResponse.OperationStatusLink, cancellationToken).ConfigureAwait(false);

        // Subsequent polling delay: Retry-After hint, 15s when absent,
        // overridden by a non-negative client-level retry timeout.
        delaySeconds = status.RetryAfter == 0 ? 15 : status.RetryAfter;
        if (client.LongRunningOperationRetryTimeout >= 0)
        {
            delaySeconds = client.LongRunningOperationRetryTimeout;
        }
    }

    if (shouldTrace)
    {
        TracingAdapter.Exit(invocationId, status);
    }
    return status;
}
/// <summary>
/// Get a resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group to get. The name is case
/// insensitive.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Resource group information.
/// </returns>
public async Task<ResourceGroupGetResult> GetAsync(string resourceGroupName, CancellationToken cancellationToken)
{
    // Validate
    // Name must be non-null, at most 1000 characters, and contain only
    // word characters, '-', '.' and '_'.
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    if (resourceGroupName != null && resourceGroupName.Length > 1000)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
    }
    // Construct URL
    // Shape: {baseUri}/subscriptions/{subscriptionId}/resourcegroups/{name}?api-version=2014-04-01-preview
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourcegroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-04-01-preview");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    // Only spaces are escaped here; path/query segments were escaped above.
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any status other than 200 OK is surfaced as a CloudException.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            ResourceGroupGetResult result = null;
            // Deserialize Response
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new ResourceGroupGetResult();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    ResourceGroupExtended resourceGroupInstance = new ResourceGroupExtended();
                    result.ResourceGroup = resourceGroupInstance;
                    JToken idValue = responseDoc["id"];
                    if (idValue != null && idValue.Type != JTokenType.Null)
                    {
                        string idInstance = ((string)idValue);
                        resourceGroupInstance.Id = idInstance;
                    }
                    JToken nameValue = responseDoc["name"];
                    if (nameValue != null && nameValue.Type != JTokenType.Null)
                    {
                        string nameInstance = ((string)nameValue);
                        resourceGroupInstance.Name = nameInstance;
                    }
                    // Provisioning state is read from the nested properties object...
                    JToken propertiesValue = responseDoc["properties"];
                    if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                    {
                        JToken provisioningStateValue = propertiesValue["provisioningState"];
                        if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                        {
                            string provisioningStateInstance = ((string)provisioningStateValue);
                            resourceGroupInstance.ProvisioningState = provisioningStateInstance;
                        }
                    }
                    JToken locationValue = responseDoc["location"];
                    if (locationValue != null && locationValue.Type != JTokenType.Null)
                    {
                        string locationInstance = ((string)locationValue);
                        resourceGroupInstance.Location = locationInstance;
                    }
                    // The whole "properties" object is also kept as a raw JSON string.
                    JToken propertiesValue2 = responseDoc["properties"];
                    if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                    {
                        string propertiesInstance = propertiesValue2.ToString(Newtonsoft.Json.Formatting.Indented);
                        resourceGroupInstance.Properties = propertiesInstance;
                    }
                    JToken tagsSequenceElement = ((JToken)responseDoc["tags"]);
                    if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                    {
                        foreach (JProperty property in tagsSequenceElement)
                        {
                            string tagsKey = ((string)property.Name);
                            string tagsValue = ((string)property.Value);
                            resourceGroupInstance.Tags.Add(tagsKey, tagsValue);
                        }
                    }
                    // ...and from a top-level "provisioningState", which, when
                    // present, overwrites the nested value read above.
                    JToken provisioningStateValue2 = responseDoc["provisioningState"];
                    if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
                    {
                        string provisioningStateInstance2 = ((string)provisioningStateValue2);
                        resourceGroupInstance.ProvisioningState = provisioningStateInstance2;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Response is disposed even when deserialization throws.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        // Request is disposed on every path, including credential failures.
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Gets a collection of resource groups.
/// </summary>
/// <param name='parameters'>
/// Optional. Query parameters. If null is passed returns all resource
/// groups.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List of resource groups.
/// </returns>
public async Task<ResourceGroupListResult> ListAsync(ResourceGroupListParameters parameters, CancellationToken cancellationToken)
{
    // Validate
    // (parameters is optional; null means "all resource groups".)
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("parameters", parameters);
        TracingAdapter.Enter(invocationId, this, "ListAsync", tracingParameters);
    }
    // Construct URL
    // Shape: {baseUri}/subscriptions/{subscriptionId}/resourcegroups
    //        ?$filter=...&$top=...&api-version=2014-04-01-preview
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourcegroups";
    List<string> queryParameters = new List<string>();
    // Optional OData $filter built from tag name/value; clauses joined with "and".
    List<string> odataFilter = new List<string>();
    if (parameters != null && parameters.TagName != null)
    {
        odataFilter.Add("tagname eq '" + Uri.EscapeDataString(parameters.TagName) + "'");
    }
    if (parameters != null && parameters.TagValue != null)
    {
        odataFilter.Add("tagvalue eq '" + Uri.EscapeDataString(parameters.TagValue) + "'");
    }
    if (odataFilter.Count > 0)
    {
        queryParameters.Add("$filter=" + string.Join(" and ", odataFilter));
    }
    if (parameters != null && parameters.Top != null)
    {
        queryParameters.Add("$top=" + Uri.EscapeDataString(parameters.Top.Value.ToString()));
    }
    queryParameters.Add("api-version=2014-04-01-preview");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any status other than 200 OK is surfaced as a CloudException.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            ResourceGroupListResult result = null;
            // Deserialize Response
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new ResourceGroupListResult();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    // Page payload: a "value" array of resource groups plus an
                    // optional "nextLink" continuation token.
                    JToken valueArray = responseDoc["value"];
                    if (valueArray != null && valueArray.Type != JTokenType.Null)
                    {
                        foreach (JToken valueValue in ((JArray)valueArray))
                        {
                            ResourceGroupExtended resourceGroupJsonFormatInstance = new ResourceGroupExtended();
                            result.ResourceGroups.Add(resourceGroupJsonFormatInstance);
                            JToken idValue = valueValue["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                resourceGroupJsonFormatInstance.Id = idInstance;
                            }
                            JToken nameValue = valueValue["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                string nameInstance = ((string)nameValue);
                                resourceGroupJsonFormatInstance.Name = nameInstance;
                            }
                            // Provisioning state read from nested properties first...
                            JToken propertiesValue = valueValue["properties"];
                            if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                            {
                                JToken provisioningStateValue = propertiesValue["provisioningState"];
                                if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                                {
                                    string provisioningStateInstance = ((string)provisioningStateValue);
                                    resourceGroupJsonFormatInstance.ProvisioningState = provisioningStateInstance;
                                }
                            }
                            JToken locationValue = valueValue["location"];
                            if (locationValue != null && locationValue.Type != JTokenType.Null)
                            {
                                string locationInstance = ((string)locationValue);
                                resourceGroupJsonFormatInstance.Location = locationInstance;
                            }
                            // Raw "properties" JSON is also preserved as a string.
                            JToken propertiesValue2 = valueValue["properties"];
                            if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                            {
                                string propertiesInstance = propertiesValue2.ToString(Newtonsoft.Json.Formatting.Indented);
                                resourceGroupJsonFormatInstance.Properties = propertiesInstance;
                            }
                            JToken tagsSequenceElement = ((JToken)valueValue["tags"]);
                            if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                            {
                                foreach (JProperty property in tagsSequenceElement)
                                {
                                    string tagsKey = ((string)property.Name);
                                    string tagsValue = ((string)property.Value);
                                    resourceGroupJsonFormatInstance.Tags.Add(tagsKey, tagsValue);
                                }
                            }
                            // ...then a top-level value, if present, overwrites it.
                            JToken provisioningStateValue2 = valueValue["provisioningState"];
                            if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
                            {
                                string provisioningStateInstance2 = ((string)provisioningStateValue2);
                                resourceGroupJsonFormatInstance.ProvisioningState = provisioningStateInstance2;
                            }
                        }
                    }
                    JToken nextLinkValue = responseDoc["nextLink"];
                    if (nextLinkValue != null && nextLinkValue.Type != JTokenType.Null)
                    {
                        string nextLinkInstance = ((string)nextLinkValue);
                        result.NextLink = nextLinkInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Get a list of deployments.
/// </summary>
/// <param name='nextLink'>
/// Required. NextLink from the previous successful call to List
/// operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List of resource groups.
/// </returns>
public async Task<ResourceGroupListResult> ListNextAsync(string nextLink, CancellationToken cancellationToken)
{
    // Validate
    if (nextLink == null)
    {
        throw new ArgumentNullException("nextLink");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextLink", nextLink);
        TracingAdapter.Enter(invocationId, this, "ListNextAsync", tracingParameters);
    }
    // Construct URL
    // nextLink is an absolute continuation URL returned by the service,
    // so it is used as-is (only spaces are escaped).
    string url = "";
    url = url + nextLink;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any status other than 200 OK is surfaced as a CloudException.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            ResourceGroupListResult result = null;
            // Deserialize Response
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new ResourceGroupListResult();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    // Same page shape as ListAsync: "value" array + optional "nextLink".
                    JToken valueArray = responseDoc["value"];
                    if (valueArray != null && valueArray.Type != JTokenType.Null)
                    {
                        foreach (JToken valueValue in ((JArray)valueArray))
                        {
                            ResourceGroupExtended resourceGroupJsonFormatInstance = new ResourceGroupExtended();
                            result.ResourceGroups.Add(resourceGroupJsonFormatInstance);
                            JToken idValue = valueValue["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                resourceGroupJsonFormatInstance.Id = idInstance;
                            }
                            JToken nameValue = valueValue["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                string nameInstance = ((string)nameValue);
                                resourceGroupJsonFormatInstance.Name = nameInstance;
                            }
                            // Provisioning state read from nested properties first...
                            JToken propertiesValue = valueValue["properties"];
                            if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                            {
                                JToken provisioningStateValue = propertiesValue["provisioningState"];
                                if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                                {
                                    string provisioningStateInstance = ((string)provisioningStateValue);
                                    resourceGroupJsonFormatInstance.ProvisioningState = provisioningStateInstance;
                                }
                            }
                            JToken locationValue = valueValue["location"];
                            if (locationValue != null && locationValue.Type != JTokenType.Null)
                            {
                                string locationInstance = ((string)locationValue);
                                resourceGroupJsonFormatInstance.Location = locationInstance;
                            }
                            // Raw "properties" JSON is also preserved as a string.
                            JToken propertiesValue2 = valueValue["properties"];
                            if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                            {
                                string propertiesInstance = propertiesValue2.ToString(Newtonsoft.Json.Formatting.Indented);
                                resourceGroupJsonFormatInstance.Properties = propertiesInstance;
                            }
                            JToken tagsSequenceElement = ((JToken)valueValue["tags"]);
                            if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                            {
                                foreach (JProperty property in tagsSequenceElement)
                                {
                                    string tagsKey = ((string)property.Name);
                                    string tagsValue = ((string)property.Value);
                                    resourceGroupJsonFormatInstance.Tags.Add(tagsKey, tagsValue);
                                }
                            }
                            // ...then a top-level value, if present, overwrites it.
                            JToken provisioningStateValue2 = valueValue["provisioningState"];
                            if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
                            {
                                string provisioningStateInstance2 = ((string)provisioningStateValue2);
                                resourceGroupJsonFormatInstance.ProvisioningState = provisioningStateInstance2;
                            }
                        }
                    }
                    JToken nextLinkValue = responseDoc["nextLink"];
                    if (nextLinkValue != null && nextLinkValue.Type != JTokenType.Null)
                    {
                        string nextLinkInstance = ((string)nextLinkValue);
                        result.NextLink = nextLinkInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
/// <summary>
/// Resource groups can be updated through a simple PATCH operation to
/// a group address. The format of the request is the same as that for
/// creating a resource groups, though if a field is unspecified
/// current value will be carried over.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group to be created or updated.
/// The name is case insensitive.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the update state resource group
/// service operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Resource group information.
/// </returns>
public async Task<ResourceGroupPatchResult> PatchAsync(string resourceGroupName, ResourceGroup parameters, CancellationToken cancellationToken)
{
    // Validate
    // Name must be non-null, at most 1000 characters, and contain only
    // word characters, '-', '.' and '_'. Location is required even for PATCH.
    if (resourceGroupName == null)
    {
        throw new ArgumentNullException("resourceGroupName");
    }
    if (resourceGroupName != null && resourceGroupName.Length > 1000)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false)
    {
        throw new ArgumentOutOfRangeException("resourceGroupName");
    }
    if (parameters == null)
    {
        throw new ArgumentNullException("parameters");
    }
    if (parameters.Location == null)
    {
        throw new ArgumentNullException("parameters.Location");
    }
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("parameters", parameters);
        TracingAdapter.Enter(invocationId, this, "PatchAsync", tracingParameters);
    }
    // Construct URL
    // Shape: {baseUri}/subscriptions/{subscriptionId}/resourcegroups/{name}?api-version=2014-04-01-preview
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/resourcegroups/";
    url = url + Uri.EscapeDataString(resourceGroupName);
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2014-04-01-preview");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        // PATCH is not a member of HttpMethod on older frameworks, so it is
        // constructed from its verb string.
        httpRequest.Method = new HttpMethod("PATCH");
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Serialize Request
        // Body: { location, properties?, tags?, provisioningState? } as JSON.
        string requestContent = null;
        JToken requestDoc = null;
        JObject resourceGroupValue = new JObject();
        requestDoc = resourceGroupValue;
        resourceGroupValue["location"] = parameters.Location;
        if (parameters.Properties != null)
        {
            // Properties is a raw JSON string on the model; re-parsed here so it
            // nests as an object rather than a quoted string.
            resourceGroupValue["properties"] = JObject.Parse(parameters.Properties);
        }
        if (parameters.Tags != null)
        {
            // Uninitialized lazy collections are skipped to avoid forcing a load.
            if (parameters.Tags is ILazyCollection == false || ((ILazyCollection)parameters.Tags).IsInitialized)
            {
                JObject tagsDictionary = new JObject();
                foreach (KeyValuePair<string, string> pair in parameters.Tags)
                {
                    string tagsKey = pair.Key;
                    string tagsValue = pair.Value;
                    tagsDictionary[tagsKey] = tagsValue;
                }
                resourceGroupValue["tags"] = tagsDictionary;
            }
        }
        if (parameters.ProvisioningState != null)
        {
            resourceGroupValue["provisioningState"] = parameters.ProvisioningState;
        }
        requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any status other than 200 OK is surfaced as a CloudException
            // (the request body is included for diagnostics).
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            ResourceGroupPatchResult result = null;
            // Deserialize Response
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new ResourceGroupPatchResult();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    ResourceGroupExtended resourceGroupInstance = new ResourceGroupExtended();
                    result.ResourceGroup = resourceGroupInstance;
                    JToken idValue = responseDoc["id"];
                    if (idValue != null && idValue.Type != JTokenType.Null)
                    {
                        string idInstance = ((string)idValue);
                        resourceGroupInstance.Id = idInstance;
                    }
                    JToken nameValue = responseDoc["name"];
                    if (nameValue != null && nameValue.Type != JTokenType.Null)
                    {
                        string nameInstance = ((string)nameValue);
                        resourceGroupInstance.Name = nameInstance;
                    }
                    // Provisioning state is read from the nested properties object...
                    JToken propertiesValue = responseDoc["properties"];
                    if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                    {
                        JToken provisioningStateValue = propertiesValue["provisioningState"];
                        if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                        {
                            string provisioningStateInstance = ((string)provisioningStateValue);
                            resourceGroupInstance.ProvisioningState = provisioningStateInstance;
                        }
                    }
                    JToken locationValue = responseDoc["location"];
                    if (locationValue != null && locationValue.Type != JTokenType.Null)
                    {
                        string locationInstance = ((string)locationValue);
                        resourceGroupInstance.Location = locationInstance;
                    }
                    // The whole "properties" object is also kept as a raw JSON string.
                    JToken propertiesValue2 = responseDoc["properties"];
                    if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                    {
                        string propertiesInstance = propertiesValue2.ToString(Newtonsoft.Json.Formatting.Indented);
                        resourceGroupInstance.Properties = propertiesInstance;
                    }
                    JToken tagsSequenceElement = ((JToken)responseDoc["tags"]);
                    if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                    {
                        foreach (JProperty property in tagsSequenceElement)
                        {
                            string tagsKey2 = ((string)property.Name);
                            string tagsValue2 = ((string)property.Value);
                            resourceGroupInstance.Tags.Add(tagsKey2, tagsValue2);
                        }
                    }
                    // ...and from a top-level "provisioningState", which, when
                    // present, overwrites the nested value read above.
                    JToken provisioningStateValue2 = responseDoc["provisioningState"];
                    if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
                    {
                        string provisioningStateInstance2 = ((string)provisioningStateValue2);
                        resourceGroupInstance.ProvisioningState = provisioningStateInstance2;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
| |
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
namespace Gallio.Runtime.Extensibility
{
/// <summary>
/// A service locator implementation based on a registry.
/// </summary>
public class RegistryServiceLocator : IServiceLocator
{
    private readonly IRegistry registry;

    /// <summary>
    /// Creates a service locator based on a registry.
    /// </summary>
    /// <param name="registry">The registry.</param>
    /// <exception cref="ArgumentNullException">Thrown if <paramref name="registry"/> is null.</exception>
    public RegistryServiceLocator(IRegistry registry)
    {
        if (registry == null)
            throw new ArgumentNullException("registry");

        this.registry = registry;
    }

    /// <summary>
    /// Gets the registry.
    /// </summary>
    public IRegistry Registry
    {
        get { return registry; }
    }

    /// <inheritdoc />
    public TService Resolve<TService>()
    {
        return (TService)ResolveImpl(typeof(TService));
    }

    /// <inheritdoc />
    public object Resolve(Type serviceType)
    {
        if (serviceType == null)
            throw new ArgumentNullException("serviceType");

        return ResolveImpl(serviceType);
    }

    /// <inheritdoc />
    public IList<TService> ResolveAll<TService>()
    {
        return ResolveAllImpl<TService>(typeof(TService));
    }

    /// <inheritdoc />
    public IList<object> ResolveAll(Type serviceType)
    {
        if (serviceType == null)
            throw new ArgumentNullException("serviceType");

        return ResolveAllImpl<object>(serviceType);
    }

    /// <inheritdoc />
    public object ResolveByComponentId(string componentId)
    {
        if (componentId == null)
            throw new ArgumentNullException("componentId");

        IComponentDescriptor descriptor = ResolveNonDisabledDescriptorByComponentId(componentId);
        return descriptor.ResolveComponent();
    }

    /// <inheritdoc />
    public ComponentHandle<TService, TTraits> ResolveHandle<TService, TTraits>() where TTraits : Traits
    {
        IComponentDescriptor descriptor = ResolveNonDisabledDescriptor(typeof(TService));
        return ComponentHandle.CreateInstance<TService, TTraits>(descriptor);
    }

    /// <inheritdoc />
    public ComponentHandle ResolveHandle(Type serviceType)
    {
        if (serviceType == null)
            throw new ArgumentNullException("serviceType");

        IComponentDescriptor descriptor = ResolveNonDisabledDescriptor(serviceType);
        return ComponentHandle.CreateInstance(descriptor);
    }

    /// <inheritdoc />
    public IList<ComponentHandle<TService, TTraits>> ResolveAllHandles<TService, TTraits>() where TTraits : Traits
    {
        var result = new List<ComponentHandle<TService, TTraits>>();

        foreach (IComponentDescriptor descriptor in ResolveAllNonDisabledDescriptors(typeof(TService)))
            result.Add(ComponentHandle.CreateInstance<TService, TTraits>(descriptor));

        return result;
    }

    /// <inheritdoc />
    public IList<ComponentHandle> ResolveAllHandles(Type serviceType)
    {
        if (serviceType == null)
            throw new ArgumentNullException("serviceType");

        var result = new List<ComponentHandle>();

        foreach (IComponentDescriptor descriptor in ResolveAllNonDisabledDescriptors(serviceType))
            result.Add(ComponentHandle.CreateInstance(descriptor));

        return result;
    }

    /// <inheritdoc />
    public ComponentHandle ResolveHandleByComponentId(string componentId)
    {
        if (componentId == null)
            throw new ArgumentNullException("componentId");

        IComponentDescriptor descriptor = ResolveNonDisabledDescriptorByComponentId(componentId);
        return ComponentHandle.CreateInstance(descriptor);
    }

    /// <inheritdoc />
    public bool HasService(Type serviceType)
    {
        if (serviceType == null)
            throw new ArgumentNullException("serviceType");

        IServiceDescriptor descriptor = registry.Services.GetByServiceType(serviceType);
        return descriptor != null && !descriptor.IsDisabled;
    }

    /// <inheritdoc />
    public bool HasComponent(string componentId)
    {
        if (componentId == null)
            throw new ArgumentNullException("componentId");

        IComponentDescriptor descriptor = registry.Components[componentId];
        return descriptor != null && !descriptor.IsDisabled;
    }

    // Resolves the unique enabled component for the service type and instantiates it.
    private object ResolveImpl(Type serviceType)
    {
        return ResolveNonDisabledDescriptor(serviceType).ResolveComponent();
    }

    // Instantiates every enabled component registered for the service type.
    private IList<TService> ResolveAllImpl<TService>(Type serviceType)
    {
        var result = new List<TService>();

        foreach (IComponentDescriptor descriptor in ResolveAllNonDisabledDescriptors(serviceType))
            result.Add((TService)descriptor.ResolveComponent());

        return result;
    }

    // Looks up a component by id; throws RuntimeException when missing or disabled.
    private IComponentDescriptor ResolveNonDisabledDescriptorByComponentId(string componentId)
    {
        IComponentDescriptor descriptor = registry.Components[componentId];
        if (descriptor == null)
            throw new RuntimeException(string.Format("Could not resolve component with id '{0}' because it does not appear to be registered.", componentId));
        if (descriptor.IsDisabled)
            throw new RuntimeException(string.Format("Could not resolve component with id '{0}' because it has been disabled.  Reason: {1}", componentId, descriptor.DisabledReason));
        return descriptor;
    }

    // Lazily yields all enabled component descriptors for the service type.
    private IEnumerable<IComponentDescriptor> ResolveAllNonDisabledDescriptors(Type serviceType)
    {
        foreach (IComponentDescriptor descriptor in registry.Components.FindByServiceType(serviceType))
            if (!descriptor.IsDisabled)
                yield return descriptor;
    }

    // Resolves exactly one enabled descriptor for the service type; throws when
    // there are zero (nothing registered/enabled) or more than one (ambiguous).
    private IComponentDescriptor ResolveNonDisabledDescriptor(Type serviceType)
    {
        IEnumerable<IComponentDescriptor> descriptors = ResolveAllNonDisabledDescriptors(serviceType);

        // Fix: IEnumerator<T> is IDisposable and is backed by an iterator method,
        // so dispose it deterministically instead of leaking it to the finalizer.
        using (IEnumerator<IComponentDescriptor> descriptorEnumerator = descriptors.GetEnumerator())
        {
            if (!descriptorEnumerator.MoveNext())
                throw new RuntimeException(string.Format("Could not resolve component for service type '{0}' because there do not appear to be any components registered and enabled for that service type.", serviceType));

            IComponentDescriptor descriptor = descriptorEnumerator.Current;
            if (descriptorEnumerator.MoveNext())
                throw new RuntimeException(string.Format("Could not resolve component for service type '{0}' because there are more than one of them registered and enabled so the request is ambiguous.", serviceType));

            return descriptor;
        }
    }
}
}
| |
/***************************************************************************
* Feed.cs
*
* Copyright (C) 2007 Michael C. Urbanski
* Written by Mike Urbanski <michael.c.urbanski@gmail.com>
****************************************************************************/
/* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
using System;
using System.IO;
using System.Net;
using System.Threading;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Mono.Unix;
using Hyena;
using Hyena.Data.Sqlite;
using Migo.Net;
using Migo.TaskCore;
using Migo.DownloadCore;
namespace Migo.Syndication
{
// Per-feed policy for automatically downloading new enclosures
// (consumed by Feed.CheckForItemsToDownload below).
public enum FeedAutoDownload : int
{
    // Download every new, not-yet-downloaded item.
    All = 0,
    // Download only the first qualifying item, then stop.
    One = 1,
    // Never download automatically.
    None = 2
}
// TODO remove this, way too redundant with DownloadStatus
// Coarse UI-facing activity state for a podcast feed.
// NOTE(review): value 3 is intentionally unused in this declaration.
public enum PodcastFeedActivity : int {
    Updating = 0,
    UpdatePending = 1,
    UpdateFailed = 2,
    ItemsDownloading = 4,
    ItemsQueued = 5,
    None = 6
}
/// <summary>
/// Sqlite-backed provider that persists <see cref="Feed"/> rows in the
/// "PodcastSyndications" table and migrates its schema between versions.
/// </summary>
public class FeedProvider : MigoModelProvider<Feed>
{
    public FeedProvider (HyenaSqliteConnection connection)
        : base (connection, "PodcastSyndications")
    {
    }

    // Current schema version; bump this when MigrateTable gains a new step.
    protected override int ModelVersion {
        get { return 3; }
    }

    protected override void CreateTable ()
    {
        base.CreateTable ();
        AddSubscriptionIndex ();
    }

    protected override void MigrateTable (int old_version)
    {
        CheckTable ();

        // v2 introduced IsSubscribed; mark every pre-existing feed subscribed.
        if (old_version < 2) {
            Connection.Execute (String.Format ("UPDATE {0} SET IsSubscribed=1", TableName));
        }

        // v3 added the (IsSubscribed, Title) index for feed listings.
        if (old_version < 3) {
            AddSubscriptionIndex ();
        }
    }

    // Creates the covering index used by subscription-ordered feed queries.
    private void AddSubscriptionIndex ()
    {
        CreateIndex ("PodcastSyndicationsIndex", "IsSubscribed, Title");
    }
}
// A podcast feed persisted through FeedProvider.  Holds feed-level metadata
// mapped to database columns via [DatabaseColumn], lazily enumerates its
// FeedItems from the database, and drives the auto-download policy.
public class Feed : MigoItem<Feed>
{
    private static FeedProvider provider;
    // Shared provider; valid only after Init () has been called.
    public static FeedProvider Provider {
        get { return provider; }
    }

    // Must be called once at startup, after FeedsManager's connection exists.
    public static void Init () {
        provider = new FeedProvider (FeedsManager.Instance.Connection);
    }

    // True if a feed row with the given url already exists in the table.
    public static bool Exists (string url)
    {
        return Provider.Connection.Query<int> (String.Format ("select count(*) from {0} where url = ?", Provider.TableName), url) != 0;
    }

    //private bool canceled;
    //private bool deleted;
    //private bool updating;
    //private ManualResetEvent updatingHandle = new ManualResetEvent (true);

    // Guards Delete and MarkAllItemsRead while they iterate Items.
    private readonly object sync = new object ();

    private string copyright;
    private string description;
    private string image_url;
    // Defaults to one day between updates.
    private int update_period_minutes = 24 * 60;
    private string language;
    private DateTime last_build_date = DateTime.MinValue;
    private FeedDownloadError lastDownloadError;
    private DateTime last_download_time = DateTime.MinValue;
    private string link;
    //private string local_enclosure_path;
    // -1 until the row has been saved and assigned a primary key.
    private long dbid = -1;
    // NOTE(review): not obviously enforced anywhere in this class — confirm callers.
    private long maxItemCount = 200;
    private DateTime pubDate;
    private FeedSyncSetting syncSetting;
    private string title;
    private string url;
    private string keywords, category;

    #region Database-bound Properties

    [DatabaseColumn ("FeedID", Constraints = DatabaseColumnConstraints.PrimaryKey)]
    public override long DbId {
        get { return dbid; }
        protected set { dbid = value; }
    }

    public static string UnknownPodcastTitle = Catalog.GetString ("Unknown Podcast");

    // Falls back to a localized "Unknown Podcast" when no title is set.
    [DatabaseColumn]
    public string Title {
        get { return title ?? UnknownPodcastTitle; }
        set { title = value; }
    }

    [DatabaseColumn]
    public string Description {
        get { return description; }
        set { description = value; }
    }

    [DatabaseColumn]
    public string Url {
        get { return url; }
        set { url = value; }
    }

    [DatabaseColumn]
    public string Keywords {
        get { return keywords; }
        set { keywords = value; }
    }

    [DatabaseColumn]
    public string Category {
        get { return category; }
        set { category = value; }
    }

    [DatabaseColumn]
    public string Copyright {
        get { return copyright; }
        set { copyright = value; }
    }

    [DatabaseColumn]
    public string ImageUrl {
        get { return image_url; }
        set { image_url = value; }
    }

    [DatabaseColumn]
    public int UpdatePeriodMinutes {
        get { return update_period_minutes; }
        set { update_period_minutes = value; }
    }

    [DatabaseColumn]
    public string Language {
        get { return language; }
        set { language = value; }
    }

    [DatabaseColumn]
    public FeedDownloadError LastDownloadError {
        get { return lastDownloadError; }
        set { lastDownloadError = value; }
    }

    [DatabaseColumn]
    public DateTime LastDownloadTime {
        get { return last_download_time; }
        set { last_download_time = value; }
    }

    [DatabaseColumn]
    public string Link {
        get { return link; }
        set { link = value; }
    }

    // Computed from the (filename-escaped) title; intentionally not persisted.
    //[DatabaseColumn]
    public string LocalEnclosurePath {
        get {
            string escaped = Hyena.StringUtil.EscapeFilename (Title);
            return Path.Combine (FeedsManager.Instance.PodcastStorageDirectory, escaped);
        }
        //set { local_enclosure_path = value; }
    }

    [DatabaseColumn]
    public long MaxItemCount {
        get { return maxItemCount; }
        set { maxItemCount = value; }
    }

    [DatabaseColumn]
    public DateTime PubDate {
        get { return pubDate; }
        set { pubDate = value; }
    }

    [DatabaseColumn]
    public DateTime LastBuildDate {
        get { return last_build_date; }
        set { last_build_date = value; }
    }

    /*private DateTime last_downloaded;
    [DatabaseColumn]
    public DateTime LastDownloaded {
        get { return last_downloaded; }
        set { last_downloaded = value; }
    }*/

    [DatabaseColumn]
    public FeedSyncSetting SyncSetting {
        get { return syncSetting; }
        set { syncSetting = value; }
    }

    // The field (not the property) is column-mapped so the setter below
    // can stay free of persistence side effects.
    [DatabaseColumn]
    protected DateTime last_auto_download = DateTime.MinValue;
    public DateTime LastAutoDownload {
        get { return last_auto_download; }
        set { last_auto_download = value; }
    }

    [DatabaseColumn("AutoDownload")]
    protected FeedAutoDownload auto_download = FeedAutoDownload.None;
    // Changing the policy immediately re-evaluates which items to download.
    public FeedAutoDownload AutoDownload {
        get { return auto_download; }
        set {
            if (value == auto_download)
                return;
            auto_download = value;
            CheckForItemsToDownload ();
        }
    }

    [DatabaseColumn("DownloadStatus")]
    private FeedDownloadStatus download_status;
    public FeedDownloadStatus DownloadStatus {
        get { return download_status; }
        set { download_status = value; }
    }

    [DatabaseColumn("IsSubscribed")]
    private bool is_subscribed;
    public bool IsSubscribed {
        get { return is_subscribed; }
        set { is_subscribed = value; }
    }

    #endregion

    #region Other Properties

    // TODO remove this, way too redundant with DownloadStatus
    /*public PodcastFeedActivity Activity {
        get { return activity; }
        PodcastFeedActivity ret = PodcastFeedActivity.None;

        if (this == All) {
            return ret;
        }

        switch (DownloadStatus) {
        case FeedDownloadStatus.Pending:
            ret = PodcastFeedActivity.UpdatePending;
            break;
        case FeedDownloadStatus.Downloading:
            ret = PodcastFeedActivity.Updating;
            break;
        case FeedDownloadStatus.DownloadFailed:
            ret = PodcastFeedActivity.UpdateFailed;
            break;
        }

        if (ret != PodcastFeedActivity.Updating) {
            if (ActiveDownloadCount > 0) {
                ret = PodcastFeedActivity.ItemsDownloading;
            } else if (QueuedDownloadCount > 0) {
                ret = PodcastFeedActivity.ItemsQueued;
            }
        }

        return ret;
    }
    }*/

    // Lazily streams this feed's items from the database, newest first.
    // Yields nothing until the feed has been saved (DbId > 0).
    public IEnumerable<FeedItem> Items {
        get {
            if (DbId > 0) {
                foreach (FeedItem item in
                         FeedItem.Provider.FetchAllMatching (String.Format ("{0}.FeedID = {1} ORDER BY {0}.PubDate DESC", FeedItem.Provider.TableName, DbId)))
                {
                    yield return item;
                }
            }
        }
    }

    #endregion

    private static FeedManager Manager {
        get { return FeedsManager.Instance.FeedManager; }
    }

    #region Constructors

    public Feed (string url, FeedAutoDownload auto_download) : this ()
    {
        Url = url;
        // Assigns the field directly, bypassing the property setter's
        // CheckForItemsToDownload side effect during construction.
        this.auto_download = auto_download;
    }

    public Feed ()
    {
    }

    #endregion

    #region Internal Methods

    // Removing a FeedItem means removing the downloaded file.
    /*public void Remove (FeedItem item)
    {
        if (item == null) {
            throw new ArgumentNullException ("item");
        }

        if (items.Remove (item)) {
            inactive_items.Add (item);
            OnFeedItemRemoved (item);
        }
    }
    }*/

    /*public void Remove (IEnumerable<FeedItem> itms)
    {
        if (removedItems.Count > 0) {
            OnItemsChanged ();
        }
    }
    }*/

    #endregion

    #region Private Methods

    // Persists any items not already in the database; on success notifies
    // listeners once and re-runs the auto-download policy.
    public void SetItems (IEnumerable<FeedItem> items)
    {
        bool added_any = false;
        foreach (FeedItem item in items) {
            added_any |= AddItem (item);
        }

        if (added_any) {
            Manager.OnFeedsChanged ();
            CheckForItemsToDownload ();
        }
    }

    // Saves the item if its Guid isn't already stored for this feed.
    // Returns true only when a new row was written; exceptions are logged
    // and treated as "not added" (best-effort).
    private bool AddItem (FeedItem item)
    {
        try {
            if (!FeedItem.Exists (this.DbId, item.Guid)) {
                item.Feed = this;
                item.Save ();
                return true;
            }
        } catch (Exception e) {
            Hyena.Log.Exception (e);
        }
        return false;
    }

    /*private void UpdateItems (IEnumerable<FeedItem> new_items)
    {
        ICollection<FeedItem> tmpNew = null;
        List<FeedItem> zombies = new List<FeedItem> ();

        if (items.Count == 0 && inactive_items.Count == 0) {
            tmpNew = new List<FeedItem> (new_items);
        } else {
            // Get remote items that aren't in the items list
            tmpNew = Diff (items, new_items);

            // Of those, remove the ones that are in our inactive list
            tmpNew = Diff (inactive_items, tmpNew);

            // Get a list of inactive items that aren't in the remote list any longer
            ICollection<FeedItem> doubleKilledZombies = Diff (
                new_items, inactive_items
            );

            foreach (FeedItem zombie in doubleKilledZombies) {
                inactive_items.Remove (zombie);
            }

            zombies.AddRange (doubleKilledZombies);

            foreach (FeedItem fi in Diff (new_items, items)) {
                if (fi.Enclosure != null &&
                    !String.IsNullOrEmpty (fi.Enclosure.LocalPath)) {
                    // A hack for the podcast plugin, keeps downloaded items
                    // from being deleted when they are no longer in the feed.
                    continue;
                }

                zombies.Add (fi);
            }
        }

        if (tmpNew.Count > 0) {
            Add (tmpNew);
        }

        // TODO merge...should we really be deleting these items?
        if (zombies.Count > 0) {
            foreach (FeedItem item in zombies) {
                if (item.Active) {
                    zombie.Delete ();
                }
            }
            // TODO merge
            //ItemsTableManager.Delete (zombies);
        }
    }

    // Written before LINQ, will update.
    private ICollection<FeedItem> Diff (IEnumerable<FeedItem> baseSet,
                                        IEnumerable<FeedItem> overlay) {
        bool found;
        List<FeedItem> diff = new List<FeedItem> ();

        foreach (FeedItem opi in overlay) {
            found = false;

            foreach (FeedItem bpi in baseSet) {
                if (opi.Title == bpi.Title &&
                    opi.Description == bpi.Description) {
                    found = true;
                    break;
                }
            }

            if (!found) {
                diff.Add (opi);
            }
        }

        return diff;
    }*/

    #endregion

    #region Public Methods

    // Queues an asynchronous refresh of this feed with the FeedManager.
    public void Update ()
    {
        Manager.QueueUpdate (this);
    }

    // Deletes the feed and its items, including downloaded enclosures.
    // NOTE(review): Manager.OnFeedsChanged () fires twice — once here and
    // once at the end of Delete (bool).  Confirm whether that is intended.
    public void Delete ()
    {
        Delete (true);
        Manager.OnFeedsChanged ();
    }

    // Deletes every item (optionally with its enclosure files) and then
    // this feed's row, all under the sync lock; notifies listeners after.
    public void Delete (bool deleteEnclosures)
    {
        lock (sync) {
            //if (deleted)
            //    return;
            //if (updating) {
            //    Manager.CancelUpdate (this);
            //}

            foreach (FeedItem item in Items) {
                item.Delete (deleteEnclosures);
            }

            Provider.Delete (this);
        }
        //updatingHandle.WaitOne ();
        Manager.OnFeedsChanged ();
    }

    // Marks every item read.  Note each item is saved individually by its
    // IsRead setter (presumably — confirm in FeedItem).
    public void MarkAllItemsRead ()
    {
        lock (sync) {
            foreach (FeedItem i in Items) {
                i.IsRead = true;
            }
        }
    }

    public override string ToString ()
    {
        return String.Format ("Title: {0} - Url: {1}", Title, Url);
    }

    public void Save ()
    {
        Save (true);
    }

    // Persists the row; if the feed has been rebuilt since the last
    // auto-download pass, re-evaluates the download policy; optionally
    // notifies listeners.
    public void Save (bool notify)
    {
        Provider.Save (this);

        if (LastBuildDate > LastAutoDownload) {
            CheckForItemsToDownload ();
        }

        if (notify) {
            Manager.OnFeedsChanged ();
        }
    }

    // Starts async downloads for active, not-yet-downloaded items newer
    // than the last auto-download pass, honoring the AutoDownload policy
    // (One stops after the first match; None or a failed last update is a
    // no-op).  On any start, stamps LastAutoDownload and saves.
    // NOTE(review): uses local DateTime.Now rather than UtcNow; comparisons
    // against item PubDate may shift across timezones/DST — confirm.
    private void CheckForItemsToDownload ()
    {
        if (LastDownloadError != FeedDownloadError.None || AutoDownload == FeedAutoDownload.None)
            return;

        bool only_first = (AutoDownload == FeedAutoDownload.One);

        bool any = false;
        foreach (FeedItem item in Items) {
            if (item.Enclosure != null && item.Active &&
                item.Enclosure.DownloadStatus != FeedDownloadStatus.Downloaded && item.PubDate > LastAutoDownload)
            {
                item.Enclosure.AsyncDownload ();
                any = true;
                if (only_first)
                    break;
            }
        }

        if (any) {
            LastAutoDownload = DateTime.Now;
            Save ();
        }
    }

    /*private bool SetCanceled ()
    {
        bool ret = false;

        if (!canceled && updating) {
            ret = canceled = true;
        }

        return ret;
    }*/

    #endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace SinchBackend.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
// Modify this to add more default documentations.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
private Lazy<IModelDocumentationProvider> _documentationProvider;
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
// Change this to provide different name for the member.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
{
return jsonProperty.PropertyName;
}
if (hasDataContractAttribute)
{
DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
{
return dataMember.Name;
}
}
return member.Name;
}
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
bool hasMemberAttribute = member.DeclaringType.IsEnum ?
member.GetCustomAttribute<EnumMemberAttribute>() != null :
member.GetCustomAttribute<DataMemberAttribute>() != null;
// Display member only if all the followings are true:
// no JsonIgnoreAttribute
// no XmlIgnoreAttribute
// no IgnoreDataMemberAttribute
// no NonSerializedAttribute
// no ApiExplorerSettingsAttribute with IgnoreApi set to true
// no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
return jsonIgnore == null &&
xmlIgnore == null &&
ignoreDataMember == null &&
nonSerialized == null &&
(apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
(!hasDataContractAttribute || hasMemberAttribute);
}
private string CreateDefaultDocumentation(Type type)
{
string documentation;
if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
{
return documentation;
}
if (DocumentationProvider != null)
{
documentation = DocumentationProvider.GetDocumentation(type);
}
return documentation;
}
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
if (collectionModelDescription != null)
{
return new CollectionModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
ElementDescription = collectionModelDescription
};
}
return null;
}
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (ShouldDisplayMember(property, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(property, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
}
GenerateAnnotations(property, propertyModel);
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
}
}
FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo field in fields)
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(field, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
}
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
}
}
return complexModelDescription;
}
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new DictionaryModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
EnumValueDescription enumValue = new EnumValueDescription
{
Name = field.Name,
Value = field.GetRawConstantValue().ToString()
};
if (DocumentationProvider != null)
{
enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
}
enumDescription.Values.Add(enumValue);
}
}
GeneratedModels.Add(enumDescription.Name, enumDescription);
return enumDescription;
}
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new KeyValuePairModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
/// <summary>
/// Builds a description for a simple (scalar-like) type and registers it in the model cache.
/// </summary>
/// <param name="modelType">The simple type to describe.</param>
/// <returns>The newly created <see cref="SimpleTypeModelDescription"/>.</returns>
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
    var description = new SimpleTypeModelDescription
    {
        Name = ModelNameHelper.GetModelName(modelType),
        ModelType = modelType,
        Documentation = CreateDefaultDocumentation(modelType)
    };
    // Register under the generated name so later lookups reuse this instance.
    GeneratedModels.Add(description.Name, description);
    return description;
}
}
}
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Linq;
using Microsoft.WindowsAzure.Commands.Common;
using Microsoft.WindowsAzure.Commands.Utilities.Properties;
using Microsoft.WindowsAzure.Commands.Utilities.Scheduler.Common;
using Microsoft.WindowsAzure.Commands.Utilities.Scheduler.Model;
using Microsoft.WindowsAzure.Scheduler;
using Microsoft.WindowsAzure.Scheduler.Models;
namespace Microsoft.WindowsAzure.Commands.Utilities.Scheduler
{
public partial class SchedulerMgmntClient
{
#region Create Jobs
/// <summary>
/// Populates ErrorAction values from the request
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <returns>Populated JobErrorAction object</returns>
/// <summary>
/// Builds the error action for a job from the request values.
/// An HTTP error action takes precedence over a storage-queue error action.
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <returns>Populated <see cref="JobErrorAction"/>, or null when no error action was requested.</returns>
private JobErrorAction PopulateErrorAction(PSCreateJobParams jobRequest)
{
    bool hasHttpErrorAction = !string.IsNullOrEmpty(jobRequest.ErrorActionMethod) && jobRequest.ErrorActionUri != null;
    if (hasHttpErrorAction)
    {
        var httpErrorAction = new JobErrorAction
        {
            Request = new JobHttpRequest
            {
                Uri = jobRequest.ErrorActionUri,
                Method = jobRequest.ErrorActionMethod
            }
        };
        if (jobRequest.ErrorActionHeaders != null)
        {
            httpErrorAction.Request.Headers = jobRequest.ErrorActionHeaders.ToDictionary();
        }
        // Only PUT/POST requests carry a body.
        bool methodHasBody = jobRequest.ErrorActionMethod.Equals("PUT", StringComparison.OrdinalIgnoreCase)
            || jobRequest.ErrorActionMethod.Equals("POST", StringComparison.OrdinalIgnoreCase);
        if (methodHasBody)
        {
            httpErrorAction.Request.Body = jobRequest.ErrorActionBody;
        }
        return httpErrorAction;
    }
    bool hasQueueErrorAction = !string.IsNullOrEmpty(jobRequest.ErrorActionSasToken)
        && !string.IsNullOrEmpty(jobRequest.ErrorActionStorageAccount)
        && !string.IsNullOrEmpty(jobRequest.ErrorActionQueueName);
    if (hasQueueErrorAction)
    {
        return new JobErrorAction
        {
            QueueMessage = new JobQueueMessage
            {
                QueueName = jobRequest.ErrorActionQueueName,
                StorageAccountName = jobRequest.ErrorActionStorageAccount,
                SasToken = jobRequest.ErrorActionSasToken,
                Message = jobRequest.ErrorActionQueueBody ?? ""
            }
        };
    }
    // No error action values supplied on the request.
    return null;
}
/// <summary>
/// Creates a new Http Scheduler job
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of create action</param>
/// <returns>Created Http Scheduler job</returns>
/// <summary>
/// Creates a new HTTP Scheduler job in the given region / job collection.
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of create action</param>
/// <returns>Created HTTP Scheduler job</returns>
public PSJobDetail CreateHttpJob(PSCreateJobParams jobRequest, out string status)
{
    if (!this.AvailableRegions.Contains(jobRequest.Region, StringComparer.OrdinalIgnoreCase))
        throw new Exception(Resources.SchedulerInvalidLocation);
    SchedulerClient schedulerClient = AzureSession.ClientFactory.CreateCustomClient<SchedulerClient>(jobRequest.Region.ToCloudServiceName(), jobRequest.JobCollectionName, csmClient.Credentials, schedulerManagementClient.BaseUri);
    JobCreateOrUpdateParameters jobCreateParams = new JobCreateOrUpdateParameters
    {
        Action = new JobAction
        {
            Request = new JobHttpRequest
            {
                Uri = jobRequest.Uri,
                Method = jobRequest.Method
            },
        }
    };
    if (jobRequest.Headers != null)
    {
        jobCreateParams.Action.Request.Headers = jobRequest.Headers.ToDictionary();
    }
    // FIX: use the null-safe static string.Equals overload — HttpAuthType (and
    // Method) may be unset on the request, and the instance Equals call would
    // throw a NullReferenceException instead of simply not matching.
    if (string.Equals(jobRequest.HttpAuthType, "ClientCertificate", StringComparison.OrdinalIgnoreCase))
    {
        if (jobRequest.ClientCertPfx != null && jobRequest.ClientCertPassword != null)
        {
            jobCreateParams.Action.Request.Authentication = new ClientCertAuthentication
            {
                Type = HttpAuthenticationType.ClientCertificate,
                Password = jobRequest.ClientCertPassword,
                Pfx = jobRequest.ClientCertPfx
            };
        }
        else
        {
            throw new InvalidOperationException(Resources.SchedulerInvalidClientCertAuthRequest);
        }
    }
    if (string.Equals(jobRequest.HttpAuthType, "None", StringComparison.OrdinalIgnoreCase))
    {
        // "None" auth must not be combined with client-certificate values.
        if (!string.IsNullOrEmpty(jobRequest.ClientCertPfx) || !string.IsNullOrEmpty(jobRequest.ClientCertPassword))
        {
            throw new InvalidOperationException(Resources.SchedulerInvalidNoneAuthRequest);
        }
    }
    // Only PUT/POST requests carry a body.
    if (string.Equals(jobRequest.Method, "PUT", StringComparison.OrdinalIgnoreCase) || string.Equals(jobRequest.Method, "POST", StringComparison.OrdinalIgnoreCase))
        jobCreateParams.Action.Request.Body = jobRequest.Body;
    //Populate job error action
    jobCreateParams.Action.ErrorAction = PopulateErrorAction(jobRequest);
    jobCreateParams.StartTime = jobRequest.StartTime ?? default(DateTime?);
    // Any recurrence-related value on the request turns the job into a recurring job.
    if (jobRequest.Interval != null || jobRequest.ExecutionCount != null || !string.IsNullOrEmpty(jobRequest.Frequency) || jobRequest.EndTime != null)
    {
        jobCreateParams.Recurrence = new JobRecurrence();
        jobCreateParams.Recurrence.Count = jobRequest.ExecutionCount ?? default(int?);
        if (!string.IsNullOrEmpty(jobRequest.Frequency))
            jobCreateParams.Recurrence.Frequency = (JobRecurrenceFrequency)Enum.Parse(typeof(JobRecurrenceFrequency), jobRequest.Frequency);
        jobCreateParams.Recurrence.Interval = jobRequest.Interval ?? default(int?);
        jobCreateParams.Recurrence.EndTime = jobRequest.EndTime ?? default(DateTime?);
    }
    JobCreateOrUpdateResponse jobCreateResponse = schedulerClient.Jobs.CreateOrUpdate(jobRequest.JobName, jobCreateParams);
    // Jobs are created enabled; disable explicitly when requested.
    if (!string.IsNullOrEmpty(jobRequest.JobState) && jobRequest.JobState.Equals("DISABLED", StringComparison.OrdinalIgnoreCase))
        schedulerClient.Jobs.UpdateState(jobRequest.JobName, new JobUpdateStateParameters { State = JobState.Disabled });
    status = jobCreateResponse.StatusCode.ToString().Equals("OK") ? "Job has been updated" : jobCreateResponse.StatusCode.ToString();
    return GetJobDetail(jobRequest.JobCollectionName, jobRequest.JobName, jobRequest.Region.ToCloudServiceName());
}
/// <summary>
/// Creates a Storage Queue Scheduler job
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of create action</param>
/// <returns>Created Storage Queue Scheduler job</returns>
/// <summary>
/// Creates a Storage Queue Scheduler job in the given region / job collection.
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of create action</param>
/// <returns>Created Storage Queue Scheduler job</returns>
public PSJobDetail CreateStorageJob(PSCreateJobParams jobRequest, out string status)
{
    if (!this.AvailableRegions.Contains(jobRequest.Region, StringComparer.OrdinalIgnoreCase))
        throw new Exception(Resources.SchedulerInvalidLocation);
    SchedulerClient schedulerClient = AzureSession.ClientFactory.CreateCustomClient<SchedulerClient>(jobRequest.Region.ToCloudServiceName(), jobRequest.JobCollectionName, csmClient.Credentials, schedulerManagementClient.BaseUri);
    var storageJobParams = new JobCreateOrUpdateParameters
    {
        Action = new JobAction
        {
            Type = JobActionType.StorageQueue,
            QueueMessage = new JobQueueMessage
            {
                Message = jobRequest.Body ?? string.Empty,
                StorageAccountName = jobRequest.StorageAccount,
                QueueName = jobRequest.QueueName,
                SasToken = jobRequest.SasToken
            },
        }
    };
    //Populate job error action
    storageJobParams.Action.ErrorAction = PopulateErrorAction(jobRequest);
    storageJobParams.StartTime = jobRequest.StartTime ?? default(DateTime?);
    // Any recurrence-related value on the request turns the job into a recurring job.
    bool hasRecurrence = jobRequest.Interval != null
        || jobRequest.ExecutionCount != null
        || !string.IsNullOrEmpty(jobRequest.Frequency)
        || jobRequest.EndTime != null;
    if (hasRecurrence)
    {
        var recurrence = new JobRecurrence
        {
            Count = jobRequest.ExecutionCount ?? default(int?),
            Interval = jobRequest.Interval ?? default(int?),
            EndTime = jobRequest.EndTime ?? default(DateTime?)
        };
        if (!string.IsNullOrEmpty(jobRequest.Frequency))
        {
            recurrence.Frequency = (JobRecurrenceFrequency)Enum.Parse(typeof(JobRecurrenceFrequency), jobRequest.Frequency);
        }
        storageJobParams.Recurrence = recurrence;
    }
    JobCreateOrUpdateResponse createResponse = schedulerClient.Jobs.CreateOrUpdate(jobRequest.JobName, storageJobParams);
    // Jobs are created enabled; disable explicitly when requested.
    if (!string.IsNullOrEmpty(jobRequest.JobState) && jobRequest.JobState.Equals("DISABLED", StringComparison.OrdinalIgnoreCase))
    {
        schedulerClient.Jobs.UpdateState(jobRequest.JobName, new JobUpdateStateParameters { State = JobState.Disabled });
    }
    status = createResponse.StatusCode.ToString().Equals("OK") ? "Job has been updated" : createResponse.StatusCode.ToString();
    return GetJobDetail(jobRequest.JobCollectionName, jobRequest.JobName, jobRequest.Region.ToCloudServiceName());
}
#endregion
/// <summary>
/// Updates given Http Scheduler job
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of update operation</param>
/// <returns>Updated Http Scheduler job</returns>
/// <summary>
/// Updates the given HTTP Scheduler job by merging the request over the existing definition.
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of the update operation</param>
/// <returns>Updated HTTP Scheduler job</returns>
public PSJobDetail PatchHttpJob(PSCreateJobParams jobRequest, out string status)
{
    if (!this.AvailableRegions.Contains(jobRequest.Region, StringComparer.OrdinalIgnoreCase))
    {
        throw new Exception(Resources.SchedulerInvalidLocation);
    }
    var schedulerClient = AzureSession.ClientFactory.CreateCustomClient<SchedulerClient>(jobRequest.Region.ToCloudServiceName(), jobRequest.JobCollectionName, csmClient.Credentials, schedulerManagementClient.BaseUri);
    // Fetch the current job and merge the incoming values over it.
    Job existingJob = schedulerClient.Jobs.Get(jobRequest.JobName).Job;
    JobCreateOrUpdateParameters mergedParams = PopulateExistingJobParams(existingJob, jobRequest, existingJob.Action.Type);
    JobCreateOrUpdateResponse updateResponse = schedulerClient.Jobs.CreateOrUpdate(jobRequest.JobName, mergedParams);
    if (!string.IsNullOrEmpty(jobRequest.JobState))
    {
        JobState requestedState = jobRequest.JobState.Equals("Enabled", StringComparison.OrdinalIgnoreCase)
            ? JobState.Enabled
            : JobState.Disabled;
        schedulerClient.Jobs.UpdateState(jobRequest.JobName, new JobUpdateStateParameters { State = requestedState });
    }
    status = updateResponse.StatusCode.ToString().Equals("OK") ? "Job has been updated" : updateResponse.StatusCode.ToString();
    return GetJobDetail(jobRequest.JobCollectionName, jobRequest.JobName, jobRequest.Region.ToCloudServiceName());
}
/// <summary>
/// If a scheduler job already exists, this will merge the existing job config values with the request
/// </summary>
/// <param name="job">Existing Scheduler job</param>
/// <param name="jobRequest">Request values</param>
/// <param name="type">Http or Storage</param>
/// <returns>JobCreateOrUpdateParameters object to use when updating Scheduler job</returns>
private JobCreateOrUpdateParameters PopulateExistingJobParams(Job job, PSCreateJobParams jobRequest, JobActionType type)
{
    // Merges the values supplied on the request over the job's existing
    // configuration; any value not supplied on the request keeps the job's
    // current setting. The merge is section-by-section: action, error action,
    // recurrence, then retry policy / start time.
    JobCreateOrUpdateParameters jobUpdateParams = new JobCreateOrUpdateParameters();
    if (type.Equals(JobActionType.StorageQueue))
    {
        if (jobRequest.IsStorageActionSet())
        {
            jobUpdateParams.Action = new JobAction();
            jobUpdateParams.Action.QueueMessage = new JobQueueMessage();
            if (job.Action != null)
            {
                jobUpdateParams.Action.Type = job.Action.Type;
                if (job.Action.QueueMessage != null)
                {
                    // Merge request values over the existing queue message.
                    jobUpdateParams.Action.QueueMessage.Message = string.IsNullOrEmpty(jobRequest.StorageQueueMessage) ? job.Action.QueueMessage.Message : jobRequest.StorageQueueMessage;
                    jobUpdateParams.Action.QueueMessage.QueueName = jobRequest.QueueName ?? job.Action.QueueMessage.QueueName;
                    jobUpdateParams.Action.QueueMessage.SasToken = jobRequest.SasToken ?? job.Action.QueueMessage.SasToken;
                    jobUpdateParams.Action.QueueMessage.StorageAccountName = job.Action.QueueMessage.StorageAccountName;
                }
                else if (job.Action.QueueMessage == null)
                {
                    jobUpdateParams.Action.QueueMessage.Message = string.IsNullOrEmpty(jobRequest.StorageQueueMessage) ? string.Empty : jobRequest.StorageQueueMessage;
                    jobUpdateParams.Action.QueueMessage.QueueName = jobRequest.QueueName;
                    jobUpdateParams.Action.QueueMessage.SasToken = jobRequest.SasToken;
                    jobUpdateParams.Action.QueueMessage.StorageAccountName = jobRequest.StorageAccount;
                }
            }
            else
            {
                jobUpdateParams.Action.QueueMessage.Message = string.IsNullOrEmpty(jobRequest.StorageQueueMessage) ? string.Empty : jobRequest.StorageQueueMessage;
                jobUpdateParams.Action.QueueMessage.QueueName = jobRequest.QueueName;
                jobUpdateParams.Action.QueueMessage.SasToken = jobRequest.SasToken;
                jobUpdateParams.Action.QueueMessage.StorageAccountName = jobRequest.StorageAccount;
            }
        }
        else
        {
            // No storage values supplied; keep the existing action untouched.
            jobUpdateParams.Action = job.Action;
        }
    }
    else //If it is a HTTP job action type
    {
        if (jobRequest.IsActionSet())
        {
            jobUpdateParams.Action = new JobAction();
            jobUpdateParams.Action.Request = new JobHttpRequest();
            if (job.Action != null)
            {
                jobUpdateParams.Action.Type = job.Action.Type;
                if (job.Action.Request != null)
                {
                    jobUpdateParams.Action.Request.Uri = jobRequest.Uri ?? job.Action.Request.Uri;
                    jobUpdateParams.Action.Request.Method = jobRequest.Method ?? job.Action.Request.Method;
                    jobUpdateParams.Action.Request.Headers = jobRequest.Headers == null ? job.Action.Request.Headers : jobRequest.Headers.ToDictionary();
                    jobUpdateParams.Action.Request.Body = jobRequest.Body ?? job.Action.Request.Body;
                    //Job has existing authentication
                    if (job.Action.Request.Authentication != null)
                    {
                        if (!string.IsNullOrEmpty(jobRequest.HttpAuthType))
                        {
                            jobUpdateParams.Action.Request.Authentication = SetHttpAuthentication(jobRequest, jobUpdateParams);
                        }
                        //the new request doesn't have any changes to auth, so preserve it
                        else
                        {
                            jobUpdateParams.Action.Request.Authentication = job.Action.Request.Authentication;
                        }
                    }
                    else if (job.Action.Request.Authentication == null)
                    {
                        jobUpdateParams.Action.Request.Authentication = SetHttpAuthentication(jobRequest, jobUpdateParams);
                    }
                }
                else if (job.Action.Request == null)
                {
                    jobUpdateParams.Action.Request.Uri = jobRequest.Uri;
                    jobUpdateParams.Action.Request.Method = jobRequest.Method;
                    jobUpdateParams.Action.Request.Headers = jobRequest.Headers.ToDictionary();
                    jobUpdateParams.Action.Request.Body = jobRequest.Body;
                    jobUpdateParams.Action.Request.Authentication = SetHttpAuthentication(jobRequest, jobUpdateParams);
                }
            }
            else
            {
                jobUpdateParams.Action.Request.Uri = jobRequest.Uri;
                jobUpdateParams.Action.Request.Method = jobRequest.Method;
                jobUpdateParams.Action.Request.Headers = jobRequest.Headers.ToDictionary();
                jobUpdateParams.Action.Request.Body = jobRequest.Body;
                jobUpdateParams.Action.Request.Authentication = SetHttpAuthentication(jobRequest, jobUpdateParams);
            }
        }
        else
        {
            jobUpdateParams.Action = job.Action;
        }
    }
    if (jobRequest.IsErrorActionSet())
    {
        jobUpdateParams.Action.ErrorAction = new JobErrorAction();
        jobUpdateParams.Action.ErrorAction.Request = new JobHttpRequest();
        jobUpdateParams.Action.ErrorAction.QueueMessage = new JobQueueMessage();
        if (job.Action.ErrorAction != null)
        {
            if (job.Action.ErrorAction.Request != null)
            {
                jobUpdateParams.Action.ErrorAction.Request.Uri = jobRequest.ErrorActionUri ?? job.Action.ErrorAction.Request.Uri;
                jobUpdateParams.Action.ErrorAction.Request.Method = jobRequest.ErrorActionMethod ?? job.Action.ErrorAction.Request.Method;
                // BUG FIX: previously used jobRequest.Headers.ToDictionary() here,
                // which copied the main action's headers into the ERROR action
                // whenever error-action headers were supplied. Use the
                // error-action headers, matching the null check on the left.
                jobUpdateParams.Action.ErrorAction.Request.Headers = jobRequest.ErrorActionHeaders == null ? job.Action.ErrorAction.Request.Headers : jobRequest.ErrorActionHeaders.ToDictionary();
                jobUpdateParams.Action.ErrorAction.Request.Body = jobRequest.ErrorActionBody ?? job.Action.ErrorAction.Request.Body;
            }
            else if (job.Action.ErrorAction.Request == null)
            {
                jobUpdateParams.Action.ErrorAction.Request.Uri = jobRequest.ErrorActionUri;
                jobUpdateParams.Action.ErrorAction.Request.Method = jobRequest.ErrorActionMethod;
                jobUpdateParams.Action.ErrorAction.Request.Headers = jobRequest.ErrorActionHeaders.ToDictionary();
                jobUpdateParams.Action.ErrorAction.Request.Body = jobRequest.ErrorActionBody;
            }
            if (job.Action.ErrorAction.QueueMessage != null)
            {
                jobUpdateParams.Action.ErrorAction.QueueMessage.Message = jobRequest.ErrorActionQueueBody ?? job.Action.ErrorAction.QueueMessage.Message;
                jobUpdateParams.Action.ErrorAction.QueueMessage.QueueName = jobRequest.ErrorActionQueueName ?? job.Action.ErrorAction.QueueMessage.QueueName;
                jobUpdateParams.Action.ErrorAction.QueueMessage.SasToken = jobRequest.ErrorActionSasToken ?? job.Action.ErrorAction.QueueMessage.SasToken;
                jobUpdateParams.Action.ErrorAction.QueueMessage.StorageAccountName = jobRequest.ErrorActionStorageAccount ?? job.Action.ErrorAction.QueueMessage.StorageAccountName;
            }
            else if (job.Action.ErrorAction.QueueMessage == null)
            {
                jobUpdateParams.Action.ErrorAction.QueueMessage.Message = jobRequest.ErrorActionQueueBody;
                jobUpdateParams.Action.ErrorAction.QueueMessage.QueueName = jobRequest.ErrorActionQueueName;
                jobUpdateParams.Action.ErrorAction.QueueMessage.SasToken = jobRequest.ErrorActionSasToken;
                jobUpdateParams.Action.ErrorAction.QueueMessage.StorageAccountName = jobRequest.ErrorActionStorageAccount;
            }
        }
        else if (job.Action.ErrorAction == null)
        {
            jobUpdateParams.Action.ErrorAction.Request.Uri = jobRequest.ErrorActionUri;
            jobUpdateParams.Action.ErrorAction.Request.Method = jobRequest.ErrorActionMethod;
            jobUpdateParams.Action.ErrorAction.Request.Headers = jobRequest.ErrorActionHeaders.ToDictionary();
            jobUpdateParams.Action.ErrorAction.Request.Body = jobRequest.ErrorActionBody;
            jobUpdateParams.Action.ErrorAction.QueueMessage.Message = jobRequest.ErrorActionQueueBody;
            jobUpdateParams.Action.ErrorAction.QueueMessage.QueueName = jobRequest.ErrorActionQueueName;
            jobUpdateParams.Action.ErrorAction.QueueMessage.SasToken = jobRequest.ErrorActionSasToken;
            jobUpdateParams.Action.ErrorAction.QueueMessage.StorageAccountName = jobRequest.ErrorActionStorageAccount;
        }
    }
    else
    {
        jobUpdateParams.Action.ErrorAction = job.Action.ErrorAction;
    }
    if (jobRequest.IsRecurrenceSet())
    {
        jobUpdateParams.Recurrence = new JobRecurrence();
        if (job.Recurrence != null)
        {
            jobUpdateParams.Recurrence.Count = jobRequest.ExecutionCount ?? job.Recurrence.Count;
            jobUpdateParams.Recurrence.EndTime = jobRequest.EndTime ?? job.Recurrence.EndTime;
            jobUpdateParams.Recurrence.Frequency = string.IsNullOrEmpty(jobRequest.Frequency) ? job.Recurrence.Frequency : (JobRecurrenceFrequency)Enum.Parse(typeof(JobRecurrenceFrequency), jobRequest.Frequency);
            jobUpdateParams.Recurrence.Interval = jobRequest.Interval ?? job.Recurrence.Interval;
            jobUpdateParams.Recurrence.Schedule = SetRecurrenceSchedule(job.Recurrence.Schedule);
        }
        else if (job.Recurrence == null)
        {
            jobUpdateParams.Recurrence.Count = jobRequest.ExecutionCount;
            jobUpdateParams.Recurrence.EndTime = jobRequest.EndTime;
            jobUpdateParams.Recurrence.Frequency = string.IsNullOrEmpty(jobRequest.Frequency) ? default(JobRecurrenceFrequency) : (JobRecurrenceFrequency)Enum.Parse(typeof(JobRecurrenceFrequency), jobRequest.Frequency);
            jobUpdateParams.Recurrence.Interval = jobRequest.Interval;
            jobUpdateParams.Recurrence.Schedule = null;
        }
    }
    else
    {
        // Keep the existing recurrence, but normalise its schedule
        // (see SetRecurrenceSchedule for the SDK empty-list workaround).
        jobUpdateParams.Recurrence = job.Recurrence;
        if (jobUpdateParams.Recurrence != null)
        {
            jobUpdateParams.Recurrence.Schedule = SetRecurrenceSchedule(job.Recurrence.Schedule);
        }
    }
    jobUpdateParams.Action.RetryPolicy = job.Action.RetryPolicy;
    jobUpdateParams.StartTime = jobRequest.StartTime ?? job.StartTime;
    return jobUpdateParams;
}
private HttpAuthentication SetHttpAuthentication(PSCreateJobParams jobRequest, JobCreateOrUpdateParameters jobUpdateParams)
{
    // Builds the HttpAuthentication object for the requested auth type,
    // validating that the matching credential fields were supplied.
    // Returns null when no auth type is set on the request.
    // NOTE(review): jobUpdateParams is never read here; kept for signature
    // compatibility with existing callers.
    HttpAuthentication httpAuthentication = null;
    if (!string.IsNullOrEmpty(jobRequest.HttpAuthType))
    {
        // FIX: ToLowerInvariant — the culture-sensitive ToLower() fails to match
        // "ClientCertificate" etc. under cultures like Turkish ('I' -> 'ı').
        switch (jobRequest.HttpAuthType.ToLowerInvariant())
        {
            case "clientcertificate":
                if (jobRequest.ClientCertPfx != null && jobRequest.ClientCertPassword != null)
                {
                    httpAuthentication = new ClientCertAuthentication
                    {
                        Type = HttpAuthenticationType.ClientCertificate,
                        Password = jobRequest.ClientCertPassword,
                        Pfx = jobRequest.ClientCertPfx
                    };
                }
                else
                {
                    throw new InvalidOperationException(Resources.SchedulerInvalidClientCertAuthRequest);
                }
                break;
            case "activedirectoryoauth":
                if (jobRequest.Tenant != null && jobRequest.Audience != null && jobRequest.ClientId != null && jobRequest.Secret != null)
                {
                    httpAuthentication = new AADOAuthAuthentication
                    {
                        Type = HttpAuthenticationType.ActiveDirectoryOAuth,
                        Tenant = jobRequest.Tenant,
                        Audience = jobRequest.Audience,
                        ClientId = jobRequest.ClientId,
                        Secret = jobRequest.Secret
                    };
                }
                else
                {
                    throw new InvalidOperationException(Resources.SchedulerInvalidAADOAuthRequest);
                }
                break;
            case "basic":
                if (jobRequest.Username != null && jobRequest.Password != null)
                {
                    httpAuthentication = new BasicAuthentication
                    {
                        Type = HttpAuthenticationType.Basic,
                        Username = jobRequest.Username,
                        Password = jobRequest.Password
                    };
                }
                else
                {
                    throw new InvalidOperationException(Resources.SchedulerInvalidBasicAuthRequest);
                }
                break;
            case "none":
                // "None" must not be combined with credentials of any other auth type.
                if (!string.IsNullOrEmpty(jobRequest.ClientCertPfx) || !string.IsNullOrEmpty(jobRequest.ClientCertPassword) ||
                    !string.IsNullOrEmpty(jobRequest.Tenant) || !string.IsNullOrEmpty(jobRequest.Secret) || !string.IsNullOrEmpty(jobRequest.ClientId) || !string.IsNullOrEmpty(jobRequest.Audience) ||
                    !string.IsNullOrEmpty(jobRequest.Username) || !string.IsNullOrEmpty(jobRequest.Password))
                {
                    throw new InvalidOperationException(Resources.SchedulerInvalidNoneAuthRequest);
                }
                break;
        }
    }
    return httpAuthentication;
}
/// <summary>
/// Existing bug in SDK where recurrence counts are set to 0 instead of null
/// </summary>
/// <param name="jobRecurrenceSchedule">The JobRecurrenceSchedule</param>
/// <returns>The JobRecurrenceSchedule</returns>
private JobRecurrenceSchedule SetRecurrenceSchedule(JobRecurrenceSchedule jobRecurrenceSchedule)
{
    // Works around an SDK quirk where schedule lists come back empty instead of
    // null: each empty list is normalised back to null so the service does not
    // interpret it as "no occurrences".
    if (jobRecurrenceSchedule == null)
    {
        return null;
    }
    JobRecurrenceSchedule schedule = new JobRecurrenceSchedule();
    // FIX: also guard against null lists — the original dereferenced .Count
    // unconditionally and threw a NullReferenceException when the SDK returned
    // null for any of these collections.
    schedule.Days = (jobRecurrenceSchedule.Days == null || jobRecurrenceSchedule.Days.Count == 0) ? null : jobRecurrenceSchedule.Days;
    schedule.Hours = (jobRecurrenceSchedule.Hours == null || jobRecurrenceSchedule.Hours.Count == 0) ? null : jobRecurrenceSchedule.Hours;
    schedule.Minutes = (jobRecurrenceSchedule.Minutes == null || jobRecurrenceSchedule.Minutes.Count == 0) ? null : jobRecurrenceSchedule.Minutes;
    schedule.MonthDays = (jobRecurrenceSchedule.MonthDays == null || jobRecurrenceSchedule.MonthDays.Count == 0) ? null : jobRecurrenceSchedule.MonthDays;
    schedule.MonthlyOccurrences = (jobRecurrenceSchedule.MonthlyOccurrences == null || jobRecurrenceSchedule.MonthlyOccurrences.Count == 0) ? null : jobRecurrenceSchedule.MonthlyOccurrences;
    schedule.Months = (jobRecurrenceSchedule.Months == null || jobRecurrenceSchedule.Months.Count == 0) ? null : jobRecurrenceSchedule.Months;
    return schedule;
}
/// <summary>
/// Updates given Storage Queue Scheduler job
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of update operation</param>
/// <returns>Updated Storage Queue Scheduler job</returns>
/// <summary>
/// Updates the given Storage Queue Scheduler job by merging the request over the existing definition.
/// </summary>
/// <param name="jobRequest">Request values</param>
/// <param name="status">Status of the update operation</param>
/// <returns>Updated Storage Queue Scheduler job</returns>
public PSJobDetail PatchStorageJob(PSCreateJobParams jobRequest, out string status)
{
    if (!this.AvailableRegions.Contains(jobRequest.Region, StringComparer.OrdinalIgnoreCase))
    {
        throw new Exception(Resources.SchedulerInvalidLocation);
    }
    var schedulerClient = AzureSession.ClientFactory.CreateCustomClient<SchedulerClient>(jobRequest.Region.ToCloudServiceName(), jobRequest.JobCollectionName, csmClient.Credentials, schedulerManagementClient.BaseUri);
    // Fetch the current job definition and merge the request over it.
    Job currentJob = schedulerClient.Jobs.Get(jobRequest.JobName).Job;
    JobCreateOrUpdateParameters patchedParams = PopulateExistingJobParams(currentJob, jobRequest, currentJob.Action.Type);
    JobCreateOrUpdateResponse patchResponse = schedulerClient.Jobs.CreateOrUpdate(jobRequest.JobName, patchedParams);
    if (!string.IsNullOrEmpty(jobRequest.JobState))
    {
        JobState requestedState = jobRequest.JobState.Equals("Enabled", StringComparison.OrdinalIgnoreCase)
            ? JobState.Enabled
            : JobState.Disabled;
        schedulerClient.Jobs.UpdateState(jobRequest.JobName, new JobUpdateStateParameters { State = requestedState });
    }
    status = patchResponse.StatusCode.ToString().Equals("OK") ? "Job has been updated" : patchResponse.StatusCode.ToString();
    return GetJobDetail(jobRequest.JobCollectionName, jobRequest.JobName, jobRequest.Region.ToCloudServiceName());
}
}
}
| |
namespace Nancy.Bootstrapper
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using Nancy.Extensions;
/// <summary>
/// Scans the app domain for assemblies and types
/// </summary>
public static class AppDomainAssemblyTypeScanner
{
static AppDomainAssemblyTypeScanner()
{
// Eagerly load every assembly that references Nancy so the type and
// assembly caches are populated before the first query.
LoadAssembliesWithNancyReferences();
}
/// <summary>
/// Nancy core assembly
/// </summary>
private static Assembly nancyAssembly = typeof(NancyEngine).Assembly;
/// <summary>
/// App domain type cache (rebuilt by UpdateTypes)
/// </summary>
private static IEnumerable<Type> types;
/// <summary>
/// App domain assemblies cache (rebuilt by UpdateAssemblies)
/// </summary>
private static IEnumerable<Assembly> assemblies;
/// <summary>
/// Indicates whether all assemblies that reference a Nancy assembly have already been loaded
/// </summary>
private static bool nancyReferencingAssembliesLoaded;
// Backing field for AssembliesToScan; null until first read or explicit set.
private static IEnumerable<Func<Assembly, bool>> assembliesToScan;
/// <summary>
/// The default assemblies for scanning.
/// Includes the Nancy assembly and anything referencing a Nancy assembly,
/// excluding the Nancy.Testing assembly.
/// </summary>
public static Func<Assembly, bool>[] DefaultAssembliesToScan = new Func<Assembly, bool>[]
{
x => x == nancyAssembly,
x =>
{
return !x.GetName().Name.StartsWith("Nancy.Testing",StringComparison.OrdinalIgnoreCase) &&
x.GetReferencedAssemblies().Any(r => r.Name.StartsWith("Nancy", StringComparison.OrdinalIgnoreCase));
}
};
/// <summary>
/// Gets or sets a set of rules for which assemblies are scanned
/// Defaults to just assemblies that have references to nancy, and nancy
/// itself.
/// Each item in the enumerable is a delegate that takes the assembly and
/// returns true if it is to be included. Returning false doesn't mean it won't
/// be included as a true from another delegate will take precedence.
/// </summary>
public static IEnumerable<Func<Assembly, bool>> AssembliesToScan
{
    private get
    {
        // Lazily fall back to the default rule set on first read.
        if (assembliesToScan == null)
        {
            assembliesToScan = DefaultAssembliesToScan;
        }
        return assembliesToScan;
    }
    set
    {
        assembliesToScan = value;
        // A new rule set invalidates the cached types.
        UpdateTypes();
    }
}
/// <summary>
/// Gets app domain types.
/// </summary>
public static IEnumerable<Type> Types
{
get
{
// Snapshot built by UpdateTypes(): non-abstract exported types from
// the scanned assemblies.
return types;
}
}
/// <summary>
/// Gets app domain assemblies.
/// </summary>
public static IEnumerable<Assembly> Assemblies
{
get
{
// Snapshot built by UpdateAssemblies(): non-dynamic, non-reflection-only
// assemblies matching the AssembliesToScan rules.
return assemblies;
}
}
/// <summary>
/// Add assemblies to the list of assemblies to scan for Nancy types
/// </summary>
/// <param name="assemblyNames">One or more assembly names</param>
public static void AddAssembliesToScan(params string[] assemblyNames)
{
    // Normalise "Foo.dll"/"Foo.exe" down to "Foo", then try to load both
    // file flavours for each name.
    var normalisedNames = GetNormalisedAssemblyNames(assemblyNames).ToArray();
    foreach (var name in normalisedNames)
    {
        LoadAssemblies(name + ".dll");
        LoadAssemblies(name + ".exe");
    }
    // Register a match-by-simple-name predicate for each requested assembly.
    var predicates = normalisedNames.Select(name =>
        (Func<Assembly, bool>)(candidate => candidate.GetName().Name == name));
    AssembliesToScan = AssembliesToScan.Union(predicates);
}
/// <summary>
/// Add assemblies to the list of assemblies to scan for Nancy types
/// </summary>
/// <param name="assemblies">One of more assemblies</param>
public static void AddAssembliesToScan(params Assembly[] assemblies)
{
    // Makes the given assemblies (and their on-disk files) part of the scan set.
    foreach (var assembly in assemblies)
    {
        // FIX: use AssemblyName.Name. Concatenating GetName() directly stringifies
        // the full display name ("Name, Version=..., Culture=..., PublicKeyToken=...")
        // which can never match an actual file name, so nothing was ever loaded.
        LoadAssemblies(assembly.GetName().Name + ".dll");
        LoadAssemblies(assembly.GetName().Name + ".exe");
    }
    // Match each assembly by reference identity.
    var scanningPredicates = assemblies.Select(an => (Func<Assembly, bool>)(a => a == an));
    AssembliesToScan = AssembliesToScan.Union(scanningPredicates);
}
/// <summary>
/// Add predicates for determining which assemblies to scan for Nancy types
/// </summary>
/// <param name="predicates">One or more predicates</param>
public static void AddAssembliesToScan(params Func<Assembly, bool>[] predicates)
{
// Assigning through the property setter also refreshes the type cache.
AssembliesToScan = AssembliesToScan.Union(predicates);
}
/// <summary>
/// Load assemblies from the app domain base directory matching a given wildcard.
/// Assemblies will only be loaded if they aren't already in the appdomain.
/// </summary>
/// <param name="wildcardFilename">Wildcard to match the assemblies to load</param>
public static void LoadAssemblies(string wildcardFilename)
{
    // Probe every known assembly directory for files matching the wildcard.
    foreach (var candidateDirectory in GetAssemblyDirectories())
    {
        LoadAssemblies(candidateDirectory, wildcardFilename);
    }
}
/// <summary>
/// Load assemblies from a given directory matching a given wildcard.
/// Assemblies will only be loaded if they aren't already in the appdomain.
/// </summary>
/// <param name="containingDirectory">Directory containing the assemblies</param>
/// <param name="wildcardFilename">Wildcard to match the assemblies to load</param>
public static void LoadAssemblies(string containingDirectory, string wildcardFilename)
{
    UpdateAssemblies();
    // Skip anything whose file path is already loaded in the app domain.
    var loadedLocations = assemblies.Select(a => a.Location).ToArray();
    foreach (var candidate in Directory.GetFiles(containingDirectory, wildcardFilename))
    {
        if (loadedLocations.Contains(candidate, StringComparer.InvariantCultureIgnoreCase))
        {
            continue;
        }
        Assembly.Load(AssemblyName.GetAssemblyName(candidate));
    }
    UpdateTypes();
}
/// <summary>
/// Refreshes the type cache if additional assemblies have been loaded.
/// Note: This is called automatically if assemblies are loaded using LoadAssemblies.
/// </summary>
public static void UpdateTypes()
{
    UpdateAssemblies();
    // Cache the concrete (non-abstract) exported types of every scanned assembly.
    types = assemblies
        .SelectMany(assembly => assembly.SafeGetExportedTypes())
        .Where(type => !type.IsAbstract)
        .ToArray();
}
/// <summary>
/// Updates the assembly cache from the appdomain
/// </summary>
private static void UpdateAssemblies()
{
    // Cache the app-domain assemblies accepted by any scan rule, excluding
    // dynamic and reflection-only assemblies (filter order preserved).
    assemblies = AppDomain.CurrentDomain.GetAssemblies()
        .Where(assembly => AssembliesToScan.Any(predicate => predicate(assembly)))
        .Where(assembly => !assembly.IsDynamic)
        .Where(assembly => !assembly.ReflectionOnly)
        .ToArray();
}
/// <summary>
/// Loads any assembly that references a Nancy assembly.
/// </summary>
public static void LoadAssembliesWithNancyReferences()
{
    // Scans the probing directories for not-yet-loaded managed assemblies that
    // reference a Nancy assembly, loads them, then refreshes the type cache.
    // Runs at most once per app domain.
    if (nancyReferencingAssembliesLoaded)
    {
        return;
    }
    UpdateAssemblies();
    var existingAssemblyPaths =
        assemblies.Select(a => a.Location).ToArray();
    foreach (var directory in GetAssemblyDirectories())
    {
        var unloadedAssemblies = Directory
            .GetFiles(directory, "*.dll")
            .Where(f => !existingAssemblyPaths.Contains(f, StringComparer.InvariantCultureIgnoreCase)).ToArray();
        foreach (var unloadedAssembly in unloadedAssemblies)
        {
            Assembly inspectedAssembly = null;
            try
            {
                // Reflection-only load to inspect references without executing code.
                inspectedAssembly = Assembly.ReflectionOnlyLoadFrom(unloadedAssembly);
            }
            // FIX: drop the unused exception variable (compiler warning CS0168).
            catch (BadImageFormatException)
            {
                // Not a managed assembly (e.g. a native dll); skip it.
            }
            catch (FileLoadException)
            {
                // The assembly might already be loaded; skip it.
            }
            if (inspectedAssembly != null && inspectedAssembly.GetReferencedAssemblies().Any(r => r.Name.StartsWith("Nancy", StringComparison.OrdinalIgnoreCase)))
            {
                try
                {
                    Assembly.Load(inspectedAssembly.GetName());
                }
                catch
                {
                    // Best effort: a single failing candidate must not abort the scan.
                }
            }
        }
    }
    UpdateTypes();
    nancyReferencingAssembliesLoaded = true;
}
/// <summary>
/// Gets all types implementing a particular interface/base class
/// </summary>
/// <param name="type">Type to search for</param>
/// <returns>An <see cref="IEnumerable{T}"/> of types.</returns>
/// <remarks>Will scan with <see cref="ScanMode.All"/>.</remarks>
public static IEnumerable<Type> TypesOf(Type type)
{
// Convenience overload: scan the full cached type set (ScanMode.All).
return TypesOf(type, ScanMode.All);
}
/// <summary>
/// Gets all types implementing a particular interface/base class
/// </summary>
/// <param name="type">Type to search for</param>
/// <param name="mode">A <see cref="ScanMode"/> value to determine which type set to scan in.</param>
/// <returns>An <see cref="IEnumerable{T}"/> of types.</returns>
public static IEnumerable<Type> TypesOf(Type type, ScanMode mode)
{
    // Returns the cached types assignable to 'type', filtered by scan mode.
    var returnTypes =
        Types.Where(type.IsAssignableFrom);
    switch (mode)
    {
        case ScanMode.OnlyNancy:
            return returnTypes.Where(t => t.Assembly == nancyAssembly);
        case ScanMode.ExcludeNancy:
            return returnTypes.Where(t => t.Assembly != nancyAssembly);
        // FIX: Namespace is null for types in the global namespace; the original
        // dereferenced it unconditionally and threw a NullReferenceException.
        // Also use an ordinal comparison for the non-linguistic prefix check.
        case ScanMode.OnlyNancyNamespace:
            return returnTypes.Where(t => t.Namespace != null && t.Namespace.StartsWith("Nancy", StringComparison.Ordinal));
        case ScanMode.ExcludeNancyNamespace:
            return returnTypes.Where(t => t.Namespace == null || !t.Namespace.StartsWith("Nancy", StringComparison.Ordinal));
        default://mode == ScanMode.All
            return returnTypes;
    }
}
/// <summary>
/// Gets all types implementing a particular interface/base class.
/// </summary>
/// <typeparam name="TType">Type to search for</typeparam>
/// <returns>An <see cref="IEnumerable{T}"/> of types.</returns>
/// <remarks>Convenience overload that scans with <see cref="ScanMode.All"/>.</remarks>
public static IEnumerable<Type> TypesOf<TType>()
{
    // Forward to the ScanMode-aware generic overload with the widest scan.
    return TypesOf<TType>(ScanMode.All);
}
/// <summary>
/// Gets all types implementing a particular interface/base class.
/// </summary>
/// <typeparam name="TType">Type to search for</typeparam>
/// <param name="mode">A <see cref="ScanMode"/> value to determine which type set to scan in.</param>
/// <returns>An <see cref="IEnumerable{T}"/> of types.</returns>
public static IEnumerable<Type> TypesOf<TType>(ScanMode mode)
{
    // Resolve the generic argument once and reuse the non-generic implementation.
    var searchedType = typeof(TType);
    return TypesOf(searchedType, mode);
}
/// <summary>
/// Returns the directories containing dll files. It uses the default convention as stated by microsoft.
/// </summary>
/// <see cref="http://msdn.microsoft.com/en-us/library/system.appdomainsetup.privatebinpathprobe.aspx"/>
private static IEnumerable<string> GetAssemblyDirectories()
{
    var setup = AppDomain.CurrentDomain.SetupInformation;

    // Probe every non-blank entry of the semicolon-separated private bin path.
    var privateBinPath = setup.PrivateBinPath;
    if (privateBinPath != null)
    {
        foreach (var directory in privateBinPath.Split(';'))
        {
            if (!string.IsNullOrWhiteSpace(directory))
            {
                yield return directory;
            }
        }
    }

    // Per the probing convention, only fall back to the application base
    // when PrivateBinPathProbe is not set.
    if (setup.PrivateBinPathProbe == null)
    {
        yield return setup.ApplicationBase;
    }
}
/// <summary>
/// Strips the ".dll"/".exe" extension from each assembly name, leaving
/// already-extensionless names untouched.
/// </summary>
/// <param name="assemblyNames">File names or bare assembly names.</param>
/// <returns>The normalised (extensionless) assembly names, in input order.</returns>
private static IEnumerable<string> GetNormalisedAssemblyNames(string[] assemblyNames)
{
    foreach (var assemblyName in assemblyNames)
    {
        // File extensions are case-insensitive on Windows, so ".DLL"/".EXE" must be
        // treated like ".dll"/".exe"; use an ordinal comparison (CA1310) rather than
        // the culture-sensitive default.
        if (assemblyName.EndsWith(".dll", StringComparison.OrdinalIgnoreCase) ||
            assemblyName.EndsWith(".exe", StringComparison.OrdinalIgnoreCase))
        {
            yield return Path.GetFileNameWithoutExtension(assemblyName);
        }
        else
        {
            yield return assemblyName;
        }
    }
}
}
/// <summary>
/// LINQ-style helpers for filtering type sequences produced by the scanner.
/// </summary>
public static class AppDomainAssemblyTypeScannerExtensions
{
    /// <summary>
    /// Filters out every type that is assignable to <typeparamref name="TType"/>.
    /// </summary>
    /// <param name="types">Sequence of candidate types.</param>
    /// <returns>The types that are not <typeparamref name="TType"/> (or a subtype of it).</returns>
    public static IEnumerable<Type> NotOfType<TType>(this IEnumerable<Type> types)
    {
        var excluded = typeof(TType);
        return types.Where(candidate => !excluded.IsAssignableFrom(candidate));
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using JsonApiDotNetCore.Configuration;
using JsonApiDotNetCore.Serialization.Objects;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using TestBuildingBlocks;
using Xunit;
namespace JsonApiDotNetCoreTests.IntegrationTests.ZeroKeys
{
/// <summary>
/// Integration tests asserting that a resource whose numeric primary key is 0 (the CLR
/// default for int) is still addressable through all JSON:API endpoints: filtering,
/// fetch-with-include, create, update, delete, and to-one/to-many relationship writes.
/// </summary>
public sealed class ZeroAsKeyTests : IClassFixture<IntegrationTestContext<TestableStartup<ZeroKeyDbContext>, ZeroKeyDbContext>>
{
private readonly IntegrationTestContext<TestableStartup<ZeroKeyDbContext>, ZeroKeyDbContext> _testContext;
private readonly ZeroKeyFakers _fakers = new();
public ZeroAsKeyTests(IntegrationTestContext<TestableStartup<ZeroKeyDbContext>, ZeroKeyDbContext> testContext)
{
_testContext = testContext;
testContext.UseController<GamesController>();
testContext.UseController<MapsController>();
testContext.UseController<PlayersController>();
var options = (JsonApiOptions)testContext.Factory.Services.GetRequiredService<IJsonApiOptions>();
// Relative links make the asserted "self" links below predictable, and client-generated
// IDs are required so tests can POST a resource with id "0".
options.UseRelativeLinks = true;
options.AllowClientGeneratedIds = true;
}
[Fact]
public async Task Can_filter_by_zero_ID_on_primary_resources()
{
// Arrange
List<Game> games = _fakers.Game.Generate(2);
games[0].Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Games.AddRange(games);
await dbContext.SaveChangesAsync();
});
const string route = "/games?filter=equals(id,'0')";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
responseDocument.Data.ManyValue.Should().HaveCount(1);
responseDocument.Data.ManyValue[0].Id.Should().Be("0");
responseDocument.Data.ManyValue[0].Links.Self.Should().Be("/games/0");
}
[Fact]
public async Task Can_get_primary_resource_by_zero_ID_with_include()
{
// Arrange
Game game = _fakers.Game.Generate();
game.Id = 0;
game.ActivePlayers = _fakers.Player.Generate(1);
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Games.Add(game);
await dbContext.SaveChangesAsync();
});
const string route = "/games/0?include=activePlayers";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
responseDocument.Data.SingleValue.Should().NotBeNull();
responseDocument.Data.SingleValue.Id.Should().Be("0");
responseDocument.Data.SingleValue.Links.Self.Should().Be("/games/0");
responseDocument.Included.Should().HaveCount(1);
responseDocument.Included[0].Id.Should().Be(game.ActivePlayers.ElementAt(0).StringId);
}
[Fact]
public async Task Can_create_resource_with_zero_ID()
{
// Arrange
string newTitle = _fakers.Game.Generate().Title;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
});
var requestBody = new
{
data = new
{
type = "games",
id = "0",
attributes = new
{
title = newTitle
}
}
};
const string route = "/games";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecutePostAsync<Document>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.Created);
httpResponse.Headers.Location.Should().Be("/games/0");
responseDocument.Data.SingleValue.Should().NotBeNull();
responseDocument.Data.SingleValue.Id.Should().Be("0");
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Game gameInDatabase = await dbContext.Games.FirstWithIdAsync((int?)0);
gameInDatabase.Should().NotBeNull();
gameInDatabase.Title.Should().Be(newTitle);
});
}
[Fact]
public async Task Can_update_resource_with_zero_ID()
{
// Arrange
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
string newTitle = _fakers.Game.Generate().Title;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Games.Add(existingGame);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new
{
type = "games",
id = "0",
attributes = new
{
title = newTitle
}
}
};
const string route = "/games/0";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecutePatchAsync<Document>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
responseDocument.Data.SingleValue.Should().NotBeNull();
responseDocument.Data.SingleValue.Id.Should().Be("0");
responseDocument.Data.SingleValue.Attributes["title"].Should().Be(newTitle);
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Game gameInDatabase = await dbContext.Games.FirstWithIdAsync((int?)0);
gameInDatabase.Should().NotBeNull();
gameInDatabase.Title.Should().Be(newTitle);
});
}
[Fact]
public async Task Can_clear_ToOne_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
existingPlayer.ActiveGame = _fakers.Game.Generate();
existingPlayer.ActiveGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Players.Add(existingPlayer);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = (object)null
};
string route = $"/players/{existingPlayer.StringId}/relationships/activeGame";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePatchAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.ActiveGame).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.ActiveGame.Should().BeNull();
});
}
[Fact]
public async Task Can_assign_ToOne_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.AddInRange(existingPlayer, existingGame);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new
{
type = "games",
id = "0"
}
};
string route = $"/players/{existingPlayer.StringId}/relationships/activeGame";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePatchAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.ActiveGame).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.ActiveGame.Id.Should().Be(0);
});
}
[Fact]
public async Task Can_replace_ToOne_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
existingPlayer.ActiveGame = _fakers.Game.Generate();
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.AddInRange(existingPlayer, existingGame);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new
{
type = "games",
id = "0"
}
};
string route = $"/players/{existingPlayer.StringId}/relationships/activeGame";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePatchAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.ActiveGame).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.ActiveGame.Id.Should().Be(0);
});
}
[Fact]
public async Task Can_clear_ToMany_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
existingPlayer.RecentlyPlayed = _fakers.Game.Generate(2);
existingPlayer.RecentlyPlayed.ElementAt(0).Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Players.Add(existingPlayer);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = Array.Empty<object>()
};
string route = $"/players/{existingPlayer.StringId}/relationships/recentlyPlayed";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePatchAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.RecentlyPlayed).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.RecentlyPlayed.Should().BeEmpty();
});
}
[Fact]
public async Task Can_assign_ToMany_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.AddInRange(existingPlayer, existingGame);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new[]
{
new
{
type = "games",
id = "0"
}
}
};
string route = $"/players/{existingPlayer.StringId}/relationships/recentlyPlayed";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePatchAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.RecentlyPlayed).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.RecentlyPlayed.Should().HaveCount(1);
playerInDatabase.RecentlyPlayed.ElementAt(0).Id.Should().Be(0);
});
}
[Fact]
public async Task Can_replace_ToMany_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
existingPlayer.RecentlyPlayed = _fakers.Game.Generate(2);
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.AddInRange(existingPlayer, existingGame);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new[]
{
new
{
type = "games",
id = "0"
}
}
};
string route = $"/players/{existingPlayer.StringId}/relationships/recentlyPlayed";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePatchAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.RecentlyPlayed).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.RecentlyPlayed.Should().HaveCount(1);
playerInDatabase.RecentlyPlayed.ElementAt(0).Id.Should().Be(0);
});
}
[Fact]
public async Task Can_add_to_ToMany_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
existingPlayer.RecentlyPlayed = _fakers.Game.Generate(1);
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.AddInRange(existingPlayer, existingGame);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new[]
{
new
{
type = "games",
id = "0"
}
}
};
string route = $"/players/{existingPlayer.StringId}/relationships/recentlyPlayed";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePostAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.RecentlyPlayed).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.RecentlyPlayed.Should().HaveCount(2);
playerInDatabase.RecentlyPlayed.Should().ContainSingle(game => game.Id == 0);
});
}
[Fact]
public async Task Can_remove_from_ToMany_relationship_with_zero_ID()
{
// Arrange
Player existingPlayer = _fakers.Player.Generate();
existingPlayer.RecentlyPlayed = _fakers.Game.Generate(2);
existingPlayer.RecentlyPlayed.ElementAt(0).Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Players.Add(existingPlayer);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
data = new[]
{
new
{
type = "games",
id = "0"
}
}
};
string route = $"/players/{existingPlayer.StringId}/relationships/recentlyPlayed";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecuteDeleteAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Player playerInDatabase = await dbContext.Players.Include(player => player.RecentlyPlayed).FirstWithIdAsync(existingPlayer.Id);
playerInDatabase.Should().NotBeNull();
playerInDatabase.RecentlyPlayed.Should().HaveCount(1);
playerInDatabase.RecentlyPlayed.Should().ContainSingle(game => game.Id != 0);
});
}
[Fact]
public async Task Can_delete_resource_with_zero_ID()
{
// Arrange
Game existingGame = _fakers.Game.Generate();
existingGame.Id = 0;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<Game>();
dbContext.Games.Add(existingGame);
await dbContext.SaveChangesAsync();
});
const string route = "/games/0";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecuteDeleteAsync<string>(route);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
Game gameInDatabase = await dbContext.Games.FirstWithIdOrDefaultAsync(existingGame.Id);
gameInDatabase.Should().BeNull();
});
}
}
}
| |
#region Licence...
/*
The MIT License (MIT)
Copyright (c) 2014 Oleg Shilo
Permission is hereby granted,
free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#endregion Licence...
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using Microsoft.Deployment.WindowsInstaller;
using WixSharp.CommonTasks;
using IO = System.IO;
using Reflection = System.Reflection;
namespace WixSharp
{
/// <summary>
/// Performs pre-build sanity checks on a <c>Project</c> definition and on custom-action
/// assemblies, throwing validation exceptions for configurations WiX cannot build correctly.
/// </summary>
class ProjectValidator
{
/// <summary>
/// Returns true when <paramref name="versionText"/> is empty, the special "%this%"
/// placeholder, or parses as a <see cref="Version"/>; false otherwise.
/// </summary>
static bool IsValidVersion(string versionText)
{
if (string.IsNullOrEmpty(versionText))
return true;
if (versionText == "%this%")
return true;
try
{
new Version(versionText);
return true;
}
catch
{ return false; }
}
/// <summary>
/// Validates the project definition: media vs. media-template exclusivity, major-upgrade
/// version strings, ManagedUI background-image aspect ratio, a single InstallDir,
/// environment-constant directory names, and consistent ManagedAction ref-assemblies.
/// Throws a validation exception on the first violation found.
/// </summary>
public static void Validate(Project project)
{
if (project.Media.Any() && project.GenericItems.Any(item => item is MediaTemplate))
{
throw new ValidationException("Project contains both Media and MediaTemplate elements. Use only one or another (e.g. call project.Media.Clear()).");
}
if (project.MajorUpgradeStrategy != null)
{
if (project.MajorUpgradeStrategy.UpgradeVersions == null && project.MajorUpgradeStrategy.PreventDowngradingVersions == null)
{
throw new UpgradeStrategyValidationException("Project MajorUpgradeStrategy.UpgradeVersions and PreventDowngradingVersions are not defined.");
}
if (project.MajorUpgradeStrategy.UpgradeVersions != null)
{
if (!IsValidVersion(project.MajorUpgradeStrategy.UpgradeVersions.Minimum))
throw new UpgradeStrategyValidationException("Project MajorUpgradeStrategy.UpgradeVersions.Minimum value is invalid.");
if (!IsValidVersion(project.MajorUpgradeStrategy.UpgradeVersions.Maximum))
throw new UpgradeStrategyValidationException("Project MajorUpgradeStrategy.UpgradeVersions.Maximum value is invalid.");
}
if (project.MajorUpgradeStrategy.PreventDowngradingVersions != null)
{
if (!IsValidVersion(project.MajorUpgradeStrategy.PreventDowngradingVersions.Minimum))
throw new UpgradeStrategyValidationException("Project MajorUpgradeStrategy.PreventDowngradingVersions.Minimum value is invalid.");
if (!IsValidVersion(project.MajorUpgradeStrategy.PreventDowngradingVersions.Maximum))
throw new UpgradeStrategyValidationException("Project MajorUpgradeStrategy.PreventDowngradingVersions.Maximum value is invalid.");
}
}
if (project is ManagedProject && !project.BackgroundImage.IsEmpty() && project.ValidateBackgroundImage)
{
bool invalidAspectRatio = false;
try
{
string imageFile = Utils.PathCombine(project.SourceBaseDir, project.BackgroundImage);
if (IO.File.Exists(imageFile))
{
// ManagedUI's left-side banner must be taller than wide; a landscape image
// pushes the other dialog controls off to the right.
var img = Bitmap.FromFile(imageFile);
if (img.Width > img.Height)
invalidAspectRatio = true;
}
}
catch { } // best-effort check: unreadable image files are ignored
if (invalidAspectRatio)
throw new ValidationException(
"Project.BackgroundImage has incompatible (with ManagedUI default dialogs) aspect ratio. The expected ratio is close to W(156)/H(312). " +
"The background image (left side banner) in ManagedUI dialogs is left-docked at runtime and if it's too wide it can push away (to right) " +
"the all other UI elements. " +
"You can suppress image validation by setting Project.ValidateBackgroundImage to 'false'.");
}
//important to use RawId to avoid triggering Id premature auto-generation
if (project.AllDirs.Count(x => (x.RawId == Compiler.AutoGeneration.InstallDirDefaultId && Compiler.AutoGeneration.InstallDirDefaultId != null) || x.IsInstallDir) > 1)
throw new ValidationException("More than a single Dir marked as InstallDir. Ensure that only a single directory marked as InstallDir with Dir.IsInstallDir property or with the id 'INSTALLDIR' value");
// Directory names wrapped in '%' must map to a known environment constant.
foreach (Dir dir in project.AllDirs)
if (dir.Name.StartsWith("%") || dir.Name.EndsWith("%"))
if (!Compiler.EnvironmentConstantsMapping.ContainsKey(dir.Name))
throw new ValidationException("WixSharp.Dir.Name is set to unknown environment constant '" + dir.Name + "'.\n" +
"For the list of supported constants analyze WixSharp.Compiler.EnvironmentConstantsMapping.Keys.");
// All ManagedActions hosted in the same assembly must resolve to the same set of
// referenced assemblies, otherwise MakeSfxCA would package them inconsistently.
var incosnistentRefAsmActions =
project.Actions.OfType<ManagedAction>()
.GroupBy(a => a.ActionAssembly)
.Where(g => g.Count() > 1)
.Select(g => new
{
Assembly = g.Key,
Info = g.Select(a => new { Name = a.MethodName, RefAsms = a.RefAssemblies.Select(r => Path.GetFileName(r)).ToArray() }).ToArray(),
IsInconsistent = g.Select(action => action.GetRefAssembliesHashCode(project.DefaultRefAssemblies)).Distinct().Count() > 1,
})
.Where(x => x.IsInconsistent)
.FirstOrDefault();
if (incosnistentRefAsmActions != null)
{
var errorInfo = new StringBuilder();
errorInfo.Append(">>>>>>>>>>>>\n");
errorInfo.Append("Asm: " + incosnistentRefAsmActions.Assembly + "\n");
foreach (var item in incosnistentRefAsmActions.Info)
{
errorInfo.Append(" ----------\n");
errorInfo.Append(" Action: " + item.Name + "\n");
errorInfo.AppendFormat(" RefAsms: {0} items\n", item.RefAsms.Length);
foreach (var name in item.RefAsms)
errorInfo.Append(" - " + name + "\n");
}
errorInfo.Append(">>>>>>>>>>>>\n");
throw new ApplicationException(string.Format("Assembly '{0}' is used by multiple ManagedActions but with the inconsistent set of referenced assemblies. " +
"Ensure that all declarations have the same referenced assemblies by either using identical declarations or by using " +
"Project.DefaultRefAssemblies.\n{1}", incosnistentRefAsmActions.Assembly, errorInfo));
}
// https://wixsharp.codeplex.com/discussions/646085
// Have to disable validation as it only considers 'install' but not 'uninstall'.
// Possible solution is to analyse the action.condition and determine if it is
// install /uninstall but it is impossible to do. At least with the adequate accuracy.
// var incosnistentInstalledFileActions = project.Actions
// .OfType<InstalledFileAction>()
// .Where(x => x.When != When.After || x.Step != Step.InstallExecute)
// .Any();
// if (incosnistentInstalledFileActions)
// try
// {
// var msg = "Warning: InstalledFileAction should be scheduled for after InstallExecute. Otherwise it may produce undesired side effects.";
// Debug.WriteLine(msg);
// Console.WriteLine(msg);
// }
// catch { }
}
/// <summary>
/// Validates a custom-action assembly according to
/// <c>Compiler.AutoGeneration.ValidateCAAssemblies</c>: in a temporary AppDomain
/// (no file lock), in the current AppDomain (locks the file), or not at all.
/// </summary>
public static void ValidateCAAssembly(string caAssembly)
{
string dtfAssembly = typeof(CustomActionAttribute).Assembly.Location;
// need to do it in a separate domain as we do not want to lock the assembly and
// `ReflectionOnlyLoadFrom` is incompatible with the task
if (Compiler.AutoGeneration.ValidateCAAssemblies == CAValidation.InRemoteAppDomain)
{
// will not lock the file and will unload the assembly
Utils.ExecuteInTempDomain<AsmReflector>(asmReflector =>
{
asmReflector.ValidateCAAssembly(caAssembly, dtfAssembly);
});
}
else if (Compiler.AutoGeneration.ValidateCAAssemblies == CAValidation.InCurrentAppDomain)
{
// will not lock the file but will not unload the assembly
new AsmReflector().ValidateCAAssemblyLocally(caAssembly, dtfAssembly);
}
else
{
// disabled
}
}
}
/// <summary>
/// Reflection helper deriving from <see cref="MarshalByRefObject"/> so it can be executed
/// inside a temporary AppDomain, allowing assemblies to be inspected without locking them
/// in the host domain.
/// </summary>
class AsmReflector : MarshalByRefObject
{
/// <summary>
/// Delegates to <c>Utils.OriginalAssemblyFile</c> for the given file.
/// </summary>
public string OriginalAssemblyFile(string file)
{
return Utils.OriginalAssemblyFile(file);
}
/// <summary>
/// Reads the assembly bytes (reflection-only, so the file is not locked) and returns
/// the manifest module's scope name.
/// </summary>
public string AssemblyScopeName(string file)
{
return Reflection.Assembly.ReflectionOnlyLoad(System.IO.File.ReadAllBytes(file)).ManifestModule.ScopeName;
}
/// <summary>
/// Validates the custom-action assembly <paramref name="file"/>, temporarily redirecting
/// assembly resolution to <paramref name="dtfAsm"/> while the validation runs.
/// </summary>
public void ValidateCAAssembly(string file, string dtfAsm)
{
// `ValidateCAAssemblyImpl` will load assembly from `file` for validation. Though for this to happen
// the AppDomain will need to be able resolve the only dependence assembly `file` has - dtfAsm.
// Thus always resolve it to dtfAsm (regardless of `args.Name` value) when AssemblyResolve is fired.
Reflection.Assembly resolver(object sender, ResolveEventArgs args)
{
return System.Reflection.Assembly.LoadFrom(dtfAsm);
}
AppDomain.CurrentDomain.AssemblyResolve += resolver;
ValidateCAAssemblyImpl(file, dtfAsm, loadFromMemory: false);
AppDomain.CurrentDomain.AssemblyResolve -= resolver;
}
/// <summary>
/// Validates the assembly in the current AppDomain; loading from memory avoids
/// locking the file (at the cost of the assembly staying loaded).
/// </summary>
internal void ValidateCAAssemblyLocally(string file, string dtfAsm)
{
ValidateCAAssemblyImpl(file, dtfAsm, loadFromMemory: true);
}
/// <summary>
/// Scans the assembly for members marked with [CustomAction] and warns (via compiler
/// output) about any that do not meet MakeSfxCA's requirement of being a public static
/// method of a public type. Never throws; all failures are swallowed as best-effort.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "<Pending>")]
internal void ValidateCAAssemblyImpl(string file, string dtfAsm, bool loadFromMemory)
{
//Debug.Assert(false);
try
{
var bf = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.InvokeMethod | BindingFlags.Static;
// `ReflectionOnlyLoadFrom` cannot preload all required assemblies and triggers
// "System.InvalidOperationException: 'It is illegal to reflect on the custom attributes
// of a Type loaded via ReflectionOnlyGetType (see Assembly.ReflectionOnly) -- use CustomAttributeData
// instead.'" exception. Thus need to use `LoadFrom`, which locks the assembly unless the operation is
// performed in the temp AppDomain, which is unloaded after at the end.
// Unfortunately `AppDomain.CurrentDomain.ReflectionOnlyAssemblyResolve` does not help (does not get fired).
// var assembly = System.Reflection.Assembly.ReflectionOnlyLoadFrom(file);
var assembly = loadFromMemory ?
Reflection.Assembly.Load(System.IO.File.ReadAllBytes(file)) :
Reflection.Assembly.LoadFrom(file);
// Members carrying the DTF [CustomAction] attribute (matched by attribute type name
// to avoid a hard type identity dependency across load contexts).
var caMembers = assembly.GetTypes()
.SelectMany(t => t.GetMembers(bf)
.Where(mem => mem.GetCustomAttributes(false)
.Where(x => x.ToString() == "Microsoft.Deployment.WindowsInstaller.CustomActionAttribute")
.Any()))
.ToArray();
var invalidMembers = new List<string>();
foreach (MemberInfo mi in caMembers)
{
string fullName = mi.DeclaringType.FullName + "." + mi.Name;
// declaring type must be public
if (!mi.DeclaringType.IsPublic)
if (!invalidMembers.Contains(fullName))
invalidMembers.Add(fullName);
// the member must be a method...
if (mi.MemberType != MemberTypes.Method)
{
if (!invalidMembers.Contains(fullName))
invalidMembers.Add(fullName);
}
else
{
// ...and a public static one
var method = (mi as MethodInfo);
if (!method.IsPublic || !method.IsStatic)
if (!invalidMembers.Contains(fullName))
invalidMembers.Add(fullName);
}
}
if (invalidMembers.Any())
{
Compiler.OutputWriteLine("Warning: some of the type members are marked with [CustomAction] attribute but they don't meet the MakeSfxCA criteria of being public static method of a public type:\n");
foreach (var member in invalidMembers)
Compiler.OutputWriteLine(" " + member);
Compiler.OutputWriteLine("");
}
}
catch { } // validation is advisory only; never fail the build because of it
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.IO;
using System.Diagnostics;
using Microsoft.Research.DataStructures;
using System.Text;
using System.Diagnostics.Contracts;
using System.Linq;
using System.Reflection;
namespace Microsoft.Research.CodeAnalysis
{
#region Enumerations
// How aggressively warnings are reported.
public enum WarningLevelOptions { low, mediumlow, medium, full }
// Overall inference aggressiveness.
public enum InferenceMode { Normal, Aggressive}
// Kinds of contracts the analysis may infer.
public enum InferOptions { arrayrequires, arraypurity, methodensures, nonnullreturn, symbolicreturn, propertyensures, requires, objectinvariants, objectinvariantsforward, assumefalse, autopropertiesensures }
// Kinds of contracts the analysis may suggest to the user.
public enum SuggestOptions { requires, propertyensures, methodensures, nonnullreturn, necessaryensures, arrayrequires, arraypurity, objectinvariants, objectinvariantsforward, assumes, codefixes, codefixesshort, readonlyfields, requiresbase, callinvariants, calleeassumes, asserttocontracts }
// Suggestions that can be escalated to warnings.
public enum SuggestionsAsWarnings { requires, propertyensures, methodensures, nonnullreturn, necessaryensures, arrayrequires, arraypurity, objectinvariants, objectinvariantsforward, assumes, codefixes, codefixesshort, readonlyfields, requiresbase, callinvariants, calleeassumes, redundantassume, unusedsuppress, asserttocontracts }
// Statistics that can be collected/reported.
public enum StatOptions { valid, time, mem, perMethod, arithmetic, asserts, methodasserts, slowmethods, abstractdomains, program, egraph, phases, inference, timeperMethod }
// Categories of checks to perform.
public enum CheckOptions { assertions, exists, assumptions, falseassumptions, inferredrequires, conditionsvalidity, falsepostconditions, entrycontradictions }
// Compiler-generated code categories that can be analyzed.
public enum AnalyzeOptions { closures, movenext, compilergenerated }
// Diagnostic trace channels.
public enum TraceOptions { dfa, heap, expressions, egraph, assumptions, partitions, wp, arrays, numerical, timings, memory, cache, checks, inference, loading, cachehashing, warningcontexts, movenext }
// What progress/result information to show.
public enum ShowOptions { progress, il, errors, validations, unreached, progressnum, progressbar, obligations, paths, invariants, warnranks, analysisphases, scores, inferencetrace, externallyvisiblemembersonly, cachemisses }
// Contract-library binding mode.
public enum AssemblyMode { standard, legacy }
// Strategy used for precondition inference.
public enum PreconditionInferenceMode { aggressive, allPaths, backwards, combined}
// Baselining strategy for warning comparison.
public enum BaseLiningOptions { mixed, ilBased, typeBased }
// Experiment with different implementations for the sparsearrays
public enum SparseArrayOptions { time, mem, exp }
#endregion
/// <summary>
/// Put any option as a public field. The name is what gets parsed on the command line.
/// It can be bool, int, string, or List<string>, or List<int> or enum type
///
/// Derived options are public const strings
/// </summary>
public class GeneralOptions : OptionParsing, ILogOptions, Caching.IClousotCacheOptions {
#region Parsing
/// <summary>
/// Factory: parses the raw command-line tokens into a fully populated option set.
/// </summary>
/// <param name="args">Raw command-line tokens.</param>
/// <param name="analyzers">Method-level analyses that may be enabled by name.</param>
/// <param name="classanalyzers">Class-level analyses that may be enabled by name.</param>
/// <returns>The populated options; parse errors are recorded on the instance.</returns>
public static GeneralOptions ParseCommandLineArguments(string[] args, Dictionary<string, IMethodAnalysis> analyzers, Dictionary<string, IClassAnalysis> classanalyzers)
{
Contract.Requires(args != null);
Contract.Requires(analyzers != null);
Contract.Requires(classanalyzers != null);
Contract.Ensures(Contract.Result<GeneralOptions>() != null);
var options = new GeneralOptions(analyzers, classanalyzers);
options.Parse(args);
#if DEBUG && false
Console.WriteLine("[Slicer @ {0}] Slice TimeStamp: {1}", DateTime.Now, DateTime.FromBinary(options.sliceTime));
#endif
return options;
}
// Registered per-method analyses; looked up by lower-cased option name in ParseUnknown.
private readonly Dictionary<string, IMethodAnalysis> analyzers;
// Registered per-class analyses; looked up by lower-cased option name in ParseUnknown.
private readonly Dictionary<string, IClassAnalysis> classanalyzers;
// Protected: instances are created via ParseCommandLineArguments.
protected GeneralOptions(Dictionary<string, IMethodAnalysis> analyzers, Dictionary<string, IClassAnalysis> classanalyzers)
{
this.analyzers = analyzers;
this.classanalyzers = classanalyzers;
this.sliceTime = DateTime.Now.ToBinary(); // F: setting it here so that we can debug
}
/// <summary>
/// Handles command-line arguments not recognized by the base parser:
/// the debugging helpers "break", "echo" and "echocmd", and the names of
/// the registered method/class analyzers (optionally followed by ":opt1,opt2").
/// </summary>
/// <param name="arg">The option name, without the leading option character.</param>
/// <param name="args">The full argument vector; used only by "echo".</param>
/// <param name="index">Index of the current argument; never advanced here.</param>
/// <param name="equalArgument">The text after '=' / ':', or null if none was given.</param>
/// <returns>true if the argument was recognized and handled.</returns>
protected override bool ParseUnknown(string arg, string[] args, ref int index, string equalArgument)
{
  // don't care about case. Use the invariant culture so that analyzer-name
  // lookup cannot break under culture-sensitive casing rules (e.g. the
  // Turkish 'I', which ToLower() would map outside the dictionary's keys).
  arg = arg.ToLowerInvariant();
  // see if it is a break
  if (arg == "break")
  {
    System.Diagnostics.Debugger.Launch();
    return true;
  }
  // diagnostic echo of the raw arguments and the working directory
  if (arg == "echo")
  {
    int j = 0;
    foreach (var s in args)
    {
      Console.WriteLine("arg{0} = '{1}'", j, s);
      j++;
    }
    Console.WriteLine("current dir = '{0}'", Environment.CurrentDirectory);
    return true;
  }
  if (arg == "echocmd")
  {
    Console.WriteLine(Environment.CommandLine);
    return true;
  }
  // lookup in analyzers
  // find arguments
  string options = equalArgument ?? "";
  IMethodAnalysis candidate;
  if (analyzers.TryGetValue(arg, out candidate))
  {
    // found analyzer
    bool success = candidate.Initialize(this, options.Split(','));
    if (!success)
    {
      this.AddError("Analysis '{0}' failed to initialize", candidate.Name);
    }
    else
    {
      if (!this.Analyses.Contains(candidate))
      {
        this.Analyses.Add(candidate);
      }
    }
    return true;
  }
  // lookup for class analyzer
  IClassAnalysis clcandidate;
  if (classanalyzers.TryGetValue(arg, out clcandidate))
  {
    // found analyzer
    bool success = clcandidate.Initialize(this, options.Split(','));
    if (!success)
    {
      // Report a proper error message, consistent with the method-analyzer
      // branch above. Previously this only did "this.errors++" with no text.
      this.AddError("Analysis '{0}' failed to initialize", arg);
    }
    else
    {
      if (!this.ClassAnalyses.Contains(clcandidate))
      {
        this.ClassAnalyses.Add(clcandidate);
      }
    }
    return true;
  }
  return false;
}
/// <summary>
/// Writes the command-line usage message to <paramref name="output"/>:
/// general options, derived options, and the per-analyzer options of every
/// registered method and class analysis.
/// </summary>
/// <param name="output">The destination writer (e.g. Console.Out).</param>
public void PrintUsage(TextWriter output)
{
  output.WriteLine("usage: <general-option>* [<analysis> <analysis-options>]+ <assembly>+");
  output.WriteLine(Environment.NewLine + "where <general-option> is one of");
  this.PrintOptions("", output);
  output.WriteLine(Environment.NewLine + "where derived options are of");
  this.PrintDerivedOptions("", output);
  output.WriteLine(Environment.NewLine + "where <analysis> is one of");
  // Enumerate the key/value pairs directly instead of Keys + indexer:
  // same order, one dictionary lookup fewer per entry.
  foreach (var entry in analyzers)
  {
    output.WriteLine(" -{0}[:<comma-separated-options>]", entry.Key);
    entry.Value.PrintOptions(" ", output);
  }
  foreach (var entry in classanalyzers)
  {
    output.WriteLine(" -{0}[:<comma-separated-options>]", entry.Key);
    entry.Value.PrintOptions(" ", output);
  }
}
#endregion
#region Atomic Options, i.e. actual state of options
// NOTE(review): these public fields are bound to command-line switches by name
// by the OptionParsing base class; [DoNotHashInCache] presumably excludes an
// option from the analysis-cache key because it does not change results — confirm.
[OptionDescription("Filters the warnings according to their score")]
[DoNotHashInCache]
public WarningLevelOptions warninglevel = WarningLevelOptions.full;
[OptionDescription("Optional Checks")]
public List<CheckOptions> check = new List<CheckOptions>() { CheckOptions.assertions, CheckOptions.exists, CheckOptions.entrycontradictions };
[OptionDescription("Special methods")]
[DoNotHashInCache]
public List<AnalyzeOptions> analyze = new List<AnalyzeOptions>() { };
[DoNotHashInCache]
public List<TraceOptions> trace = new List<TraceOptions>();
[DoNotHashInCache]
public List<ShowOptions> show = new List<ShowOptions>(new ShowOptions[] { ShowOptions.errors });
[DoNotHashInCache]
public List<StatOptions> stats = new List<StatOptions>(new StatOptions[]{StatOptions.valid, StatOptions.time, StatOptions.inference});
#region Inference options
[OptionDescription("Infer preconditions from exit states")]
public bool prefrompost = false;
public InferenceMode inferencemode = InferenceMode.Normal;
public List<InferOptions> infer = new List<InferOptions>() { InferOptions.propertyensures, InferOptions.nonnullreturn, InferOptions.symbolicreturn, InferOptions.arraypurity };
public List<SuggestOptions> suggest = new List<SuggestOptions>() {};
[OptionDescription("Disable the inference of object invariants from constructors. Only useful to recover from bugs in the analyzer")]
public bool disableForwardObjectInvariantInference = false;
[OptionDescription("Emit a warning instead of a suggestion")]
[DoNotHashInCache]
public List<SuggestionsAsWarnings> warnIfSuggest = new List<SuggestionsAsWarnings>();
[OptionDescription("Allow inference of disjunctive preconditions")]
public bool infdisjunctions = true;
[OptionDescription("When -suggest callee assumes, show also disjunctions")]
public bool suggestcalleeassumeswithdisjunctions = false;
[OptionDescription("Generate object invariants only for readonly fields")]
public bool infreadonlyonly = true;
[OptionDescription("Allow inference of requires from throws of ArgumentException")]
public bool throwArgExceptionAsAssert = false;
[OptionDescription("Missing Requires for public surface methods generate warnings")]
public bool missingPublicRequiresAreErrors = false;
[OptionDescription("Suggest ensures for externally visible members only")]
public bool suggestionsForExternalVisibleOnly = false;
#endregion
#region Abstract domains options
[OptionDescription("Select the precondition inference algorithm")]
public PreconditionInferenceMode premode = PreconditionInferenceMode.allPaths;
[OptionDescription("Hints to infer bounds for the analysis")]
public List<Int32> thresholds = new List<int>() { -1, 1 };
[OptionDescription("Optimized representation")]
public SparseArrayOptions rep = SparseArrayOptions.time;
[OptionDescription("Caching of expressions during fixpoint computation")]
public ExpressionCacheMode expcache = ExpressionCacheMode.Time;
[OptionDescription("Enable decompilation of disjunctions")]
public bool refinedisjunctions = true;
[OptionDescription("Run in the extract method refactoring mode to discover (Ps, Qs)")]
public bool extractmethodmode = false;
[OptionDescription("Run in the extract method mode to refine (Pm, Qm)")]
public string extractmethodmodeRefine = null;
[OptionDescription("Run in the suggest invariant at mode")]
public bool invariantsuggestmode = false;
[OptionDescription("Run some abstract domains algorithms in parallel")]
public bool adpar = false;
[OptionDescription("Internal cache size for fact queries")]
public int cachesize = 10000;
[OptionDescription("Number of joins before applying the widening")]
public int joinsBeforeWiden = 1;
// Help-text typo fixed: was "Enforce the at lease one join for each loop path".
[OptionDescription("Enforce at least one join for each loop path")]
public bool enforceFairJoin = false;
// Help-text typo fixed: was "Threshold to for Octagonal constraints".
[OptionDescription("Threshold for Octagonal constraints")]
public int maxVarsForOctagon = 8;
[OptionDescription("Threshold for the renamings")]
public int maxVarsInRenaming = 20;
[OptionWitness]
[OptionDescription("Number of closure steps while checking assertions")]
public int steps=0;
#endregion
#region WPs
[OptionDescription("Use weakest preconditions")]
public bool wp = true;
[OptionDescription("Limit backward WP computation length")]
public int maxPathSize = 50;
[OptionDescription("Emit the path condition we cannot prove in the SMT-LIB format")]
public bool emitSMT2Formula = false;
#endregion
#region Egraph
[OptionDescription("Use incremental joins in egraph computation (internal)")]
public bool incrementalEgraphJoin = false;
#endregion
#region Paths and output files
[DoNotHashInCache]
[OptionDescription("Set .NET core library")]
public string platform;
[DoNotHashInCache]
[OptionDescription(".NET framework used")]
public string framework = "v4.0";
[DoNotHashInCache]
public List<string> define = new List<string>();
[OptionDescription("Search paths for reference assemblies")]
[DoNotHashInCache]
[OptionWithPaths]
public List<string> libPaths = new List<string>(new string[] { "." });
[OptionDescription("Assemblies needed to compile the input if it is a source file", ShortForm="r")]
[DoNotHashInCache]
[OptionWithPaths]
public List<string> reference = new List<string>();
[OptionDescription("Candidate paths to dlls/exes for resolving references")]
[DoNotHashInCache]
[OptionWithPaths]
public List<string> resolvedPaths = new List<string>();
[OptionDescription("Shared contract class library")]
[DoNotHashInCache]
public string cclib = "Microsoft.Contracts";
[OptionDescription("Extract the source text for the contracts")]
public bool extractSourceText = true;
[OptionDescription("Redirect the output to this output file")]
[DoNotHashInCache]
public string outFile;
[OptionDescription("Send the output also to Console.Out -- to be used with the outfile option")]
[DoNotHashInCache]
public bool WriteAlsoOnOutput = true;
[OptionDescription("use baseline file, or create if absent")]
[DoNotHashInCache]
public string baseLine;
// Help-text typo fixed: was "clear exisiting baseline file".
[OptionDescription("clear existing baseline file before starting (default: false)")]
[DoNotHashInCache]
public bool clearBaseLine = false;
[OptionDescription("set this analysis as the baseline in the cache")]
public bool setCacheBaseLine;
[OptionDescription("Strategy for suppressing warnings")]
[DoNotHashInCache]
public BaseLiningOptions baseLineStrategy = BaseLiningOptions.mixed;
[OptionDescription("If a method is identical to baseline, suppress all its warnings")]
[DoNotHashInCache]
public bool skipIdenticalMethods = true;
[OptionDescription("Skip methods without a baseline")]
[DoNotHashInCache]
public bool skipNewMethods = false;
[OptionDescription("Use semantic baseline from cache")]
[DoNotHashInCache]
public string useSemanticBaseline = null;
[OptionDescription("Use semantic baseline for method classification but don't apply the baseline")]
[DoNotHashInCache]
public bool ignoreBaselineAssumptions;
[OptionDescription("Save semantic baseline to cache")]
[DoNotHashInCache]
public string saveSemanticBaseline = null;
[OptionDescription("For testing automatic suppression inference")]
public bool ignoreExplicitAssumptions = false;
[OptionDescription("Write xml output")]
[DoNotHashInCache]
public bool xml = false;
[OptionDescription("Use contract reference assembly")]
[DoNotHashInCache]
public List<string> contract = new List<string>();
[OptionDescription("The filename of the custom basic scores for warnings")]
[DoNotHashInCache]
public string customScores = null;
[OptionDescription("Be optimistic on external API? We will assign proof obligations depending on that a low score")]
[DoNotHashInCache]
public bool lowScoreForExternal = true;
#endregion
#region Method selection
[OptionDescription("Build the call graph, and use it to determine analysis order")]
[DoNotHashInCache] // We do not hash it, as we want to reuse results already in the cache
public bool usecallgraph = true;
[OptionDescription("Analyze only selected methods (adds dependencies).")]
[DoNotHashInCache]
public List<int> select;
[DoNotHashInCache]
public int analyzeFrom = 0;
[DoNotHashInCache]
public int analyzeTo = Int32.MaxValue;
[DoNotHashInCache]
[OptionDescription("Split the analysis in several processes")]
public bool splitanalysis = false;
[DoNotHashInCache]
[OptionDescription("Bucket size for the parallel analysis (negative ==> let the analyzer pick)")]
public int bucketSize = -1;
// Spelling normalized ("Analyse" -> "Analyze") in the three descriptions below,
// for consistency with the sibling option descriptions above.
[OptionDescription("Analyze only the members with this full name (adds dependencies).")]
[DoNotHashInCache]
public string memberNameSelect;
[OptionDescription("Analyze only the methods in this type, given its full name (adds dependencies).")]
[DoNotHashInCache]
public string typeNameSelect;
[OptionDescription("Analyze only the methods in this namespace (adds dependencies).")]
[DoNotHashInCache]
public string namespaceSelect;
[OptionDescription("Break at selected methods")]
[DoNotHashInCache]
public List<int> breakAt;
[OptionDescription("Include (transitively) the callees of the selected methods")]
[DoNotHashInCache]
public bool includeCalleesTransitively = true;
[OptionDescription("Show il for focused methods")]
[DoNotHashInCache]
public List<int> focus;
[OptionDescription("Show the hash only for the focused methods")]
[DoNotHashInCache]
public int focusHash = -1;
#endregion
#region Analyses selection
[DoNotHashInCache]
List<IMethodAnalysis> analyses = new List<IMethodAnalysis>(); // Analyses to run
[DoNotHashInCache]
List<IClassAnalysis> classanalyses = new List<IClassAnalysis>(); // Class analyses to run
#endregion
#region Trade offs
[OptionDescription("Analysis timeout per method (in seconds)")]
public int timeout = 180;
[OptionDescription("Analysis timeout per method (in symbolic ticks)")]
public int symbolicTimeout = -1;
[OptionDescription("Adaptive analyses (Use weaker domains for huge methods)")]
public bool adaptive = false;
[OptionDescription("Remove a method from the internal method cache when all the method callers have been analyzed")]
public bool gcMethodCache = false;
#endregion
#region Output CSharp
[OptionDescription("Output inferred contracts as C# code")]
public bool outputPrettycs;
[OptionDescription("Output folder for inferred contracts as C# code")]
[DoNotHashInCache]
public string outputPrettycsFolder = ".";
[OptionDescription("Output contracts as C# code, one file per class (default)")]
[DoNotHashInCache]
public bool outputPrettyFileClass;
[OptionDescription("Output contracts as C# code, one file per namespace")]
[DoNotHashInCache]
public bool outputPrettyFileNamespace;
[OptionDescription("Output contracts as C# code, one file per toplevel classes (other classes nested)")]
[DoNotHashInCache]
public bool outputPrettyFileToplevelClass;
[OptionDescription("Output all members as C# code, not just members visible outside assembly")]
public bool outputPrettyFull;
#endregion
#region Caching
[OptionDescription("Clear the warnings cache")]
[DoNotHashInCache]
public bool clearCache = false;
[OptionDescription("Use the cache to avoid analysis when possible.")]
[DoNotHashInCache]
public bool useCache = false;
[OptionDescription("Write the outcome of the analysis to the cache, so it can be used in a future analysis.")]
[DoNotHashInCache]
public bool saveToCache = false;
[OptionDescription("The name for the cache database (defaults to assembly name)")]
[DoNotHashInCache]
public string cacheName = null;
[OptionDescription("The directory in which the cache database will be written (unless -cacheserver is used)")]
[DoNotHashInCache]
public string cacheDirectory = null;
[OptionDescription("The SQL Server to use for the cache (SQL Server Compact Edition is used locally otherwise, unless forceCacheServer=true)")]
[DoNotHashInCache]
public string cacheServer = Environment.GetEnvironmentVariable("CODECONTRACTS_CACHESERVER");
[OptionDescription("The connection timeout for cache servers")]
[DoNotHashInCache]
public int cacheServerTimeout = 5;
[OptionDescription("Abort the analysis if cannot connect to the cacheserver")]
[DoNotHashInCache]
public bool forceCacheServer = false;
[OptionDescription("Emit an error when we read the cache (for regressions)")]
[DoNotHashInCache]
public bool emitErrorOnCacheLookup = false;
[OptionDescription("The maximum number of methods for which warnings are cached")]
[DoNotHashInCache]
public int cacheMaxSize = Int32.MaxValue;
[OptionDescription("Version identifier for assembly information in database")]
[DoNotHashInCache]
public string sourceControlInfo = null;
[OptionDescription("Name the cache database using a version prefix to guard against version mismatches")]
[DoNotHashInCache]
public bool cacheDBVersionPrefix = true;
[OptionDescription("DateTime.ToBinary() of the slice being analyzed")]
[DoNotHashInCache]
// Initialized in the constructor instead of here (see the remark there).
public long sliceTime /*= DateTime.Now.ToBinary()*/;
#endregion
#region Basic switches
[DoNotHashInCache]
public bool nologo;
[OptionDescription("Don't pop-up IDE boxes")]
[DoNotHashInCache]
public bool nobox;
[OptionDescription("Run regression test on input assemblies")]
[DoNotHashInCache]
public bool regression;
[OptionDescription("Compute scores for warnings")]
[DoNotHashInCache]
public bool warnscores = true;
[OptionDescription("Include suggestions in regression")]
[DoNotHashInCache]
public bool includesuggestionsinregression = false;
[OptionDescription("Prioritize the warnings")]
[DoNotHashInCache]
public bool sortwarns = true;
[OptionDescription("Enable suppression of warnings")]
[DoNotHashInCache]
public bool maskwarns = true;
[OptionDescription("Mask the suggestions from the verified repairs")]
[DoNotHashInCache]
public bool maskverifiedrepairs = true;
[OptionDescription("Outputs the masks to suppress warnings")]
[DoNotHashInCache]
public bool outputwarnmasks = false;
[OptionDescription("Outputs the warnings with the related fixes")]
[DoNotHashInCache]
public bool groupactions = false;
[OptionDescription("Don't try to talk to VS Pex")]
[DoNotHashInCache]
public bool nopex;
[OptionDescription("Limit number of issued warnings overall")]
[DoNotHashInCache]
public int maxWarnings = Int32.MaxValue;
[OptionDescription("Write output formatted for remoting")]
[DoNotHashInCache]
public bool remote;
[OptionDescription("Select whether legacy if-then-throw or Requires<E> are supported")]
public AssemblyMode assemblyMode = AssemblyMode.legacy;
[OptionDescription("Write repro.bat for debugging")]
[DoNotHashInCache]
public bool repro = false;
[OptionDescription("produce non-zero return code when warnings are found")]
[DoNotHashInCache]
public bool failOnWarnings = false;
#endregion
#endregion
#region Derived options, define as const strings
// Shorthand options: each constant is a literal command-line fragment,
// presumably expanded by the OptionParsing base class — confirm.
public const string statsOnly = "-show=!! -suggest=!!";
public const string ide = "-stats=!! -trace=!!";
public const string silent = "-show=!! -stats=!! -trace=!! -nologo";
public const string cache = "-useCache -saveToCache";
public const string repairs = "-suggest codefixes -maskverifiedrepairs=false";
public const string missingPublicEnsuresAreErrors = "-suggestionsForExternalVisibleOnly=true -suggest nonnullreturn -warnifSuggest nonnullreturn";
public const string scores = "-show warnranks -trace warningcontexts";
#endregion
#region Public accessors
public WarningLevelOptions WarningLevel { get { return this.warninglevel; } }
#region Tracing
// Each Trace* flag tests membership of the corresponding value in the -trace list.
public bool TraceChecks { get { return this.trace.Contains(TraceOptions.checks); } }
public bool TraceDFA { get { return this.trace.Contains(TraceOptions.dfa); } }
public bool TraceHeapAnalysis { get { return this.trace.Contains(TraceOptions.heap); } }
public bool TraceExpressionAnalysis { get { return this.trace.Contains(TraceOptions.expressions); } }
public bool TraceEGraph { get { return this.trace.Contains(TraceOptions.egraph); } }
public bool TraceAssumptions { get { return this.trace.Contains(TraceOptions.assumptions); } }
public bool TraceWP { get { return this.trace.Contains(TraceOptions.wp); } }
public bool TracePartitionAnalysis { get { return this.trace.Contains(TraceOptions.partitions); } }
public bool TraceTimings { get { return this.trace.Contains(TraceOptions.timings); } }
public bool TraceMemoryConsumption { get { return this.trace.Contains(TraceOptions.memory); } }
public bool TraceMoveNext { get { return this.trace.Contains(TraceOptions.movenext); } }
public bool TraceNumericalAnalysis { get { return this.trace.Contains(TraceOptions.numerical); } }
public bool TraceArrayAnalysis { get { return this.trace.Contains(TraceOptions.arrays); } }
public bool TraceCache { get { return this.trace.Contains(TraceOptions.cache); } }
public bool TraceCacheHashing { get { return this.trace.Contains(TraceOptions.cachehashing); } }
//public bool TraceCacheHashing(int methodNumber) { return this.trace.Contains(TraceOptions.cachehashing) || methodNumber == this.focusHash; }
public bool TraceInference { get { return this.trace.Contains(TraceOptions.inference); } }
public bool TraceLoading { get { return this.trace.Contains(TraceOptions.loading); } }
#endregion
public bool EmitSMT2Formula { get { return this.emitSMT2Formula;} }
public bool EmitErrorOnCacheLookup { get { return this.emitErrorOnCacheLookup; } }
public bool PrintIL { get { return this.show.Contains(ShowOptions.il); } }
public int Timeout { get { return this.timeout; } }
// TODO(wuestholz): Propagate this value just like 'Timeout'.
public int SymbolicTimeout { get { return this.symbolicTimeout; } }
public int AnalyzeTo { get { return this.analyzeTo; } }
public int AnalyzeFrom { get { return this.analyzeFrom; } }
public int IterationsBeforeWidening { get { return this.joinsBeforeWiden; } }
public bool EnforceFairJoin { get { return this.enforceFairJoin; } }
public int MaxVarsForOctagonInference { get { return this.maxVarsForOctagon; } }
public int MaxVarsInSingleRenaming { get { return this.maxVarsInRenaming; } }
public bool IsAdaptiveAnalysis { get { return this.adaptive; } }
public int Steps { get { return this.steps; } }
public List<IMethodAnalysis> Analyses { get { return this.analyses; } }
public List<IClassAnalysis> ClassAnalyses { get { return this.classanalyses; } }
// GeneralArguments is inherited — presumably the positional (non-option) arguments; confirm in OptionParsing.
public List<string> Assemblies { get { return this.GeneralArguments; } }
public List<string> ContractAssemblies { get { return this.contract; } }
public bool TurnArgumentExceptionThrowsIntoAssertFalse { get { return this.throwArgExceptionAsAssert; } }
public bool IgnoreExplicitAssumptions { get { return this.ignoreExplicitAssumptions; } }
/// <summary>True when methodNumber was selected via the -breakAt option.</summary>
public bool WantToBreak(int methodNumber)
{
  return this.breakAt != null && this.breakAt.Contains(methodNumber);
}
#region Show*
// Each Show* flag tests membership of the corresponding value in the -show list.
public bool ShowProgress { get { return this.show.Contains(ShowOptions.progress); } }
public bool ShowProgressBar { get { return this.show.Contains(ShowOptions.progressbar); } }
public bool ShowProgressNum { get { return this.show.Contains(ShowOptions.progressnum); } }
public bool ShowInvariants { get { return this.show.Contains(ShowOptions.invariants); } }
public bool ShowInferenceTrace { get { return this.show.Contains(ShowOptions.inferencetrace); } }
#endregion
#region Output
public bool IsXMLOutput
{
get { return this.xml; }
}
public bool IsRemoteOutput
{
get { return remote; }
}
// Backing writer for OutFile; created lazily on first access.
TextWriter outWriter = null;
// The writer all analysis output goes to: Console.Out by default, or a
// StreamWriter on -outFile (optionally tee'd to the console via WriteAlsoOnOutput).
// NOTE(review): the StreamWriter is never explicitly disposed/flushed here;
// presumably output is flushed at process exit — confirm.
public TextWriter OutFile
{
get
{
if (outWriter == null)
{
if (outFile != null)
{
outWriter = new StreamWriter(outFile);
if (this.WriteAlsoOnOutput)
{
outWriter = new TextWriterWithDoubleWrite<TextWriter, TextWriter>(Console.Out, outWriter);
}
}
else
{
outWriter = Console.Out;
}
}
return outWriter;
}
}
public const string DefaultOutFileName = "Console.Out";
// Display name of the output destination (the file path, or "Console.Out").
public string OutFileName
{
get
{
if (outFile != null)
{
return outFile;
}
else
{
return DefaultOutFileName;
}
}
}
#endregion
#region Regression
public bool IsRegression { get { return this.regression; } }
public bool IncludeSuggestionMessagesInRegression { get { return this.IsRegression && this.includesuggestionsinregression; } }
#endregion
#region Warnings
public bool PrioritizeWarnings { get { return this.sortwarns; } }
public bool MaskedWarnings { get { return this.maskwarns; } }
public bool MaskedVerifiedRepairs { get { return this.maskverifiedrepairs; } }
public bool OutputWarningMasks { get { return this.outputwarnmasks; } }
public bool WarningsWithSuggestions { get { return this.groupactions; } }
#endregion
/// <summary>
/// Decides whether a proof outcome should be reported, based on the -show options.
/// Unknown outcome values are always reported.
/// </summary>
public bool PrintOutcome(ProofOutcome outcome)
{
  if (outcome == ProofOutcome.True)
  {
    return this.show.Contains(ShowOptions.validations);
  }
  if (outcome == ProofOutcome.Bottom)
  {
    // An unreachable obligation counts both as a validation and as unreached code.
    return this.show.Contains(ShowOptions.validations) || this.show.Contains(ShowOptions.unreached);
  }
  if (outcome == ProofOutcome.False || outcome == ProofOutcome.Top)
  {
    return this.show.Contains(ShowOptions.errors);
  }
  return true;
}
#region Inference and suggestions
// The flags below are plain membership tests on the -infer / -suggest / -check
// option lists; they are written one per line for readability.
public bool AllowInferenceOfDisjunctions { get { return this.infdisjunctions; } }
public bool InferObjectInvariantsOnlyForReadonlyFields { get { return this.infreadonlyonly; } }
public bool InferObjectInvariantsForward { get { return this.infer.Contains(InferOptions.objectinvariantsforward); } }
public bool SuggestAssumes { get { return this.suggest.Contains(SuggestOptions.assumes); } }
public bool SuggestAssumesForCallees { get { return this.suggest.Contains(SuggestOptions.calleeassumes); } }
public bool SuggestNecessaryPostconditions { get { return this.suggest.Contains(SuggestOptions.necessaryensures); } }
public bool SuggestCodeFixes { get { return this.suggest.Contains(SuggestOptions.codefixes) || this.suggest.Contains(SuggestOptions.codefixesshort); } }
public bool SuggestRequires { get { return this.suggest.Contains(SuggestOptions.requires); } }
public bool SuggestRequiresBase { get { return this.suggest.Contains(SuggestOptions.requiresbase); } }
public bool SuggestAssertToContracts { get { return this.suggest.Contains(SuggestOptions.asserttocontracts); } }
public bool SuggestRequiresForArrays { get { return this.suggest.Contains(SuggestOptions.arrayrequires); } }
public bool SuggestRequiresPurityForArrays { get { return this.suggest.Contains(SuggestOptions.arraypurity); } }
public bool SuggestObjectInvariants { get { return this.suggest.Contains(SuggestOptions.objectinvariants); } }
public bool SuggestObjectInvariantsForward { get { return this.suggest.Contains(SuggestOptions.objectinvariantsforward); } }
public bool SuggestCallInvariants { get { return this.suggest.Contains(SuggestOptions.callinvariants); } }

/// <summary>Whether to suggest ensures; property-specific ensures count only for properties.</summary>
public bool SuggestEnsures(bool isProperty)
{
  return this.suggest.Contains(SuggestOptions.methodensures)
      || (isProperty && this.suggest.Contains(SuggestOptions.propertyensures));
}

public bool SuggestNonNullReturn { get { return this.suggest.Contains(SuggestOptions.nonnullreturn); } }
public bool SuggestReadonlyFields { get { return this.suggest.Contains(SuggestOptions.readonlyfields); } }
public bool CheckInferredRequires { get { return this.check.Contains(CheckOptions.inferredrequires); } }
public bool TreatMissingPublicRequiresAsErrors { get { return this.missingPublicRequiresAreErrors; } }
public PreconditionInferenceMode PreconditionInferenceAlgorithm { get { return this.premode; } }

/// <summary>True when any -infer option is on that may propagate contracts to callers.</summary>
public bool MayPropagateInferredRequiresOrEnsures
{
  get
  {
    return this.infer.Contains(InferOptions.requires)
        || this.infer.Contains(InferOptions.propertyensures)
        || this.infer.Contains(InferOptions.methodensures)
        || this.infer.Contains(InferOptions.nonnullreturn)
        || this.infer.Contains(InferOptions.symbolicreturn)
        || this.infer.Contains(InferOptions.arraypurity)
        || this.infer.Contains(InferOptions.arrayrequires)
        || this.infer.Contains(InferOptions.objectinvariants);
  }
}

// The isCurrentMethodAProperty argument is deliberately ignored here.
public bool PropagateInferredRequires(bool isCurrentMethodAProperty)
{
  return this.infer.Contains(InferOptions.requires);
}

public bool PropagateInferredEnsures(bool isCurrentMethodAProperty)
{
  return this.infer.Contains(InferOptions.methodensures)
      || (isCurrentMethodAProperty && this.infer.Contains(InferOptions.propertyensures));
}

public bool PropagateInferredInvariants { get { return this.infer.Contains(InferOptions.objectinvariants); } }
public bool PropagateInferredNonNullReturn { get { return this.infer.Contains(InferOptions.nonnullreturn); } }
public bool PropagateInferredSymbolicReturn { get { return this.infer.Contains(InferOptions.symbolicreturn); } }
public bool PropagateInferredEnsuresForProperties { get { return this.infer.Contains(InferOptions.autopropertiesensures); } }
public bool PropagateInferredArrayRequires { get { return this.infer.Contains(InferOptions.arrayrequires); } }
public bool PropagateRequiresPurityForArrays { get { return this.infer.Contains(InferOptions.arraypurity); } }
public bool PropagateObjectInvariants { get { return this.infer.Contains(InferOptions.objectinvariants); } }

public bool PropagatedRequiresAreSufficient
{
  get
  {
    // The AllPaths is the only precondition inference analysis to be guaranteed to be sufficient
    return this.premode == PreconditionInferenceMode.allPaths;
  }
}

public bool InferPreconditionsFromPostconditions { get { return this.prefrompost; } }
#endregion
public bool NoLogo { get { return this.nologo; } }
// -regression also suppresses dialog boxes.
public bool NoBox { get { return this.nobox || this.regression; } }
#region Checking
// Membership tests on the -check option list.
public bool CheckAssertions { get { return this.check.Contains(CheckOptions.assertions); } }
public bool CheckExistentials { get { return this.check.Contains(CheckOptions.exists); } }
public bool CheckAssumptions { get { return this.check.Contains(CheckOptions.assumptions) || this.check.Contains(CheckOptions.falseassumptions); } }
public bool CheckConditions { get { return this.check.Contains(CheckOptions.conditionsvalidity); } }
public bool CheckAssumptionsAndContradictions { get { return this.check.Contains(CheckOptions.falseassumptions); } }
public bool CheckFalsePostconditions { get { return this.check.Contains(CheckOptions.falsepostconditions); } }
// Entry-contradiction checking is suppressed while replaying a semantic baseline.
public bool CheckEntryContradictions { get { return !this.UseSemanticBaseline && this.check.Contains(CheckOptions.entrycontradictions); } }
public bool InferAssumesForBaseLining { get { return this.UseSemanticBaseline || this.SaveSemanticBaseline; } }
#endregion
public ExpressionCacheMode ExpCaching { get { return this.expcache; } }
#region Print
// Each Print* flag tests membership of the corresponding value in the -stats list.
public bool PrintPerMethodStatistics { get { return this.stats.Contains(StatOptions.perMethod); } }
public bool PrintValidationStats { get { return this.stats.Contains(StatOptions.valid); } }
public bool PrintTimeStats { get { return this.stats.Contains(StatOptions.time); } }
public bool PrintProgramStats { get { return this.stats.Contains(StatOptions.program); } }
public bool PrintSlowMethods { get { return this.stats.Contains(StatOptions.slowmethods); } }
public bool PrintPerMethodAnalysisTime { get { return this.stats.Contains(StatOptions.timeperMethod); } }
public bool PrintMemStats { get { return this.stats.Contains(StatOptions.mem); } }
public bool PrintArithmeticStats { get { return this.stats.Contains(StatOptions.arithmetic); } }
public bool PrintAssertStats { get { return this.stats.Contains(StatOptions.asserts); } }
public bool PrintMethodAssertStats { get { return this.stats.Contains(StatOptions.methodasserts); } }
public bool PrintAbstractDomainsStats { get { return this.stats.Contains(StatOptions.abstractdomains); } }
public bool PrintEGraphStats { get { return this.stats.Contains(StatOptions.egraph); } }
public bool PrintPhaseStats { get { return this.stats.Contains(StatOptions.phases); } }
public bool PrintInferenceStats { get { return this.stats.Contains(StatOptions.inference); } }
#endregion
public bool OutputOnlyExternallyVisibleMembers { get { return !this.outputPrettyFull; } }
public bool ShowOnlyExternallyVisibleMethods { get { return this.show.Contains(ShowOptions.externallyvisiblemembersonly); } }
public bool UseWeakestPreconditions { get { return this.wp; } }
public bool ShowUnprovenObligations { get { return this.show.Contains(ShowOptions.obligations); } }
public bool ShowPaths { get { return this.show.Contains(ShowOptions.paths); } }
public bool ShowPhases { get { return this.show.Contains(ShowOptions.analysisphases); } }
public bool ShowCacheMisses { get { return this.show.Contains(ShowOptions.cachemisses); } }
public int MaxPathSize { get { return this.maxPathSize; } }
public int MaxWarnings { get { return this.maxWarnings; } }
#region Cache
// The cache backend is needed whenever any caching or semantic-baseline feature is on.
public bool NeedCache
{
get
{
return this.UseCache || this.SaveToCache || this.ClearCache || this.UseSemanticBaseline || this.SaveSemanticBaseline;
}
}
public bool ClearCache { get { return this.clearCache; } }
public bool UseCache { get { return this.useCache; } }
public bool SaveToCache { get { return this.saveToCache; } }
// Explicit interface implementation: saving a semantic baseline also implies writing to the cache.
bool Caching.IClousotCacheOptions.SaveToCache { get { return this.SaveToCache || this.SaveSemanticBaseline; } }
public int CacheMaxSize { get { return this.cacheMaxSize; } }
public string SourceControlInfo { get { return this.sourceControlInfo; } }
public string CacheServer { get { return this.cacheServer; } }
public int CacheServerTimeout { get { return this.cacheServerTimeout; } }
public bool SetCacheBaseLine { get { return this.setCacheBaseLine; } }
public string TypeNameSelect { get { return this.typeNameSelect; } }
public string NamespaceSelect { get { return this.namespaceSelect; } }
public string MemberNameSelect { get { return this.memberNameSelect; } }
#endregion
// True when methodNumber was selected via the -focus option.
public bool IsFocused(int methodNumber)
{
return (this.focus != null && this.focus.Contains(methodNumber));
}
private List<ShowOptions> savedShow;
private List<TraceOptions> savedTrace;
private Stack<WarningLevelOptions> savedWarningLevel;
public void Save()
{
savedShow = this.show;
savedTrace = this.trace;
this.show = new List<ShowOptions>(this.show);
this.trace = new List<TraceOptions>(this.trace);
}
// Reverts the show/trace option lists to the snapshot taken by the last Save().
public void Restore()
{
    this.show = savedShow;
    this.trace = savedTrace;
}
// Appends an extra "show" option at runtime.
public void Add(ShowOptions option)
{
    this.show.Add(option);
}
// Appends an extra "trace" option at runtime.
public void Add(TraceOptions option)
{
    this.trace.Add(option);
}
// Temporarily switches the warning level, remembering the previous one on a stack.
public void Push(WarningLevelOptions level)
{
    EnsureWarningLevel();
#if DEBUG
    Console.WriteLine("Changing the warning level to {0}", level);
#endif
    this.savedWarningLevel.Push(this.warninglevel);
    this.warninglevel = level;
}
// Reverts to the warning level saved by the matching Push().
// Assumes Push() was called first (the stack is non-null and non-empty).
public void Pop()
{
    Contract.Assume(this.savedWarningLevel != null);
    this.warninglevel = this.savedWarningLevel.Pop();
#if DEBUG
    Console.WriteLine("Warning level restored to {0}", this.warninglevel);
#endif
}
/// <summary>
/// Lazily allocates the warning-level stack the first time it is needed.
/// </summary>
private void EnsureWarningLevel()
{
    Contract.Ensures(this.savedWarningLevel != null);

    // Already created: nothing to do.
    if (this.savedWarningLevel != null)
    {
        return;
    }
    this.savedWarningLevel = new Stack<WarningLevelOptions>();
}
public bool IsLegacyAssemblyMode { get { return this.assemblyMode == AssemblyMode.legacy; } }
#endregion
#region CacheFileName
// Analyzer version string, captured once from this assembly's metadata.
private readonly string clousotVersion = typeof(GeneralOptions).Assembly.GetName().Version.ToString();
public string ClousotVersion { get { return this.clousotVersion; } }
// Directory used to store the analysis cache: an explicitly configured
// directory wins; otherwise %LocalAppData%\CodeContracts is created on
// demand. Returns null when the fallback directory cannot be created.
public string CacheDirectory
{
    get
    {
        if (!string.IsNullOrWhiteSpace(this.cacheDirectory))
        {
            return this.cacheDirectory;
        }
        try
        {
            var folder = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData, Environment.SpecialFolderOption.Create);
            folder = Path.Combine(folder, "CodeContracts");
            if (!Directory.Exists(folder))
            {
                Directory.CreateDirectory(folder);
            }
            return folder;
        }
        catch
        {
            // Best effort: signal "no usable cache directory" rather than failing.
            return null;
        }
    }
}
/// <summary>
/// Computes the name of the cache database: an optional version-dependent
/// prefix followed by either the explicitly configured cache name or the
/// concatenated file names of the analyzed assemblies.
/// </summary>
[ContractVerification(true)]
public string GetCacheDBName()
{
    Contract.Ensures(Contract.Result<string>() != null);

    // Optionally tie the name to the analyzer version so caches written by
    // different builds never collide.
    string prefix;
    if (this.cacheDBVersionPrefix)
    {
        prefix = "cccheck" + clousotVersion + "Cache.";
    }
    else
    {
        prefix = "";
    }

    // A different name was set by a command line option
    if (this.cacheName != null)
    {
        return prefix + this.cacheName;
    }

    // Otherwise derive the name from the analyzed assemblies.
    Contract.Assume(this.Assemblies != null, "Missing postcondition");
    var result = prefix;
    foreach (var assemblyPath in this.Assemblies)
    {
        try
        {
            // Best effort: an assembly whose path cannot be parsed is skipped.
            result += Path.GetFileNameWithoutExtension(assemblyPath);
        }
        catch
        {
        }
    }
    return result;
}
// Cache tracing flag, surfaced through the caching interface.
bool Caching.IClousotCacheOptions.Trace { get { return this.TraceCache; } }
#endregion
// Semantic baselining: the fields hold the baseline names, so "enabled"
// simply means a name was supplied.
public bool UseSemanticBaseline
{
    get
    {
        return this.useSemanticBaseline != null;
    }
}
public bool SaveSemanticBaseline
{
    get
    {
        return this.saveSemanticBaseline != null;
    }
}
// Name of the baseline to read from; null when not baselining.
public string SemanticBaselineReadName
{
    get
    {
        return this.useSemanticBaseline;
    }
}
// Name of the baseline to write to; null when not saving a baseline.
public string SemanticBaselineSaveName
{
    get
    {
        return this.saveSemanticBaseline;
    }
}
public bool SkipIdenticalMethods { get { return this.skipIdenticalMethods; } }
public bool SufficientConditions
{
    get
    {
        return this.infer.Contains(InferOptions.assumefalse);
    }
}
// Analysis scope toggles for compiler-generated method bodies.
public bool AnalyzeClosures
{
    get
    {
        return this.analyze.Contains(AnalyzeOptions.closures);
    }
}
public bool AnalyzeMoveNext
{
    get
    {
        return this.analyze.Contains(AnalyzeOptions.movenext);
    }
}
public bool AnalyzeCompilerGeneratedCode
{
    get
    {
        return this.analyze.Contains(AnalyzeOptions.compilergenerated);
    }
}
public bool WarningsAsErrors { get { return failOnWarnings; } }
/// <summary>
/// Returns the names of all instance fields of GeneralOptions that are
/// marked with OptionWithPathsAttribute (i.e. options whose values are
/// file-system paths).
/// </summary>
public static List<string> GetClousotOptionsWithPaths()
{
    var pathOptionNames = new List<string>();
    var allFields = typeof(GeneralOptions).GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
    foreach (var candidate in allFields)
    {
        if (!candidate.IsDefined(typeof(OptionWithPathsAttribute), false))
        {
            continue;
        }
        pathOptionNames.Add(candidate.Name);
    }
    return pathOptionNames;
}
}
}
| |
// $Id$
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Org.Apache.Etch.Bindings.Csharp.Util;
namespace org.apache.etch.examples.chat
{
/// <summary>
/// Server-side implementation of the chat service. One instance exists per
/// connected client; all instances share the whoIsOnline map.
/// </summary>
public class ImplChatServer : BaseChatServer
{
    private readonly RemoteChatClient _client;
    private Dictionary<String, ImplChatServer> _whoIsOnline;

    /// <summary>
    /// Constructs the ImplChatServer.
    /// </summary>
    /// <param name="client">the client to use for callbacks.</param>
    /// <param name="whoIsOnline">the shared map of user name to online session.</param>
    public ImplChatServer( RemoteChatClient client, Dictionary<String, ImplChatServer> whoIsOnline )
    {
        _client = client;
        _whoIsOnline = whoIsOnline;
    }

    #region ChatServer Members

    // Name of the user logged in on this connection; null while logged out.
    private string _user;

    /// <summary>
    /// Logs the user in. If the same name is already logged in elsewhere,
    /// the old session is kicked and this session takes its place. Then
    /// broadcasts our status and sends the who-is-online list to the client.
    /// </summary>
    /// <exception cref="types.Chat.Failure">on invalid credentials or when already logged in.</exception>
    [ MethodImpl ( MethodImplOptions.Synchronized ) ]
    public override void login( string name, string pwd )
    {
        if ( (bool) isLoggedIn() )
            throw new org.apache.etch.examples.chat.types.Chat.Failure( "Already logged in" );

        if ( name == null )
            throw new types.Chat.Failure("Name is null");

        if ( name.StartsWith("bad"))
            throw new types.Chat.Failure("Username is not valid");

        if ( name.Length == 0 )
            throw new types.Chat.Failure("Username is empty");

        if ( pwd == null )
            throw new types.Chat.Failure("Password is not valid");

        if ( pwd.StartsWith("bad"))
            throw new types.Chat.Failure("Password is not valid");

        // TODO check user authentication.

        // add this user to the who is online map.
        lock ( _whoIsOnline )
        {
            ImplChatServer other;
            // user is logged in already: kick the old session first
            if ( _whoIsOnline.TryGetValue( name, out other ) )
            {
                try
                {
                    other.Deliver( "SYSTEM", "You have been logged out because you logged in somewhere else." );
                }
                catch (Exception)
                {
                    // ignore failure to send this message
                }
                // removes the stale entry for name from the map
                other.logout();
            }
            // BUGFIX: the old code only added this session in the
            // not-already-logged-in branch; after kicking an old session the
            // new one was never registered, so send()/status notifications
            // could not reach it. Register unconditionally.
            _whoIsOnline[ name ] = this;
        }

        // mark as logged in
        _user = name;

        // say we're online
        SayStatusChanged( _user, true );

        // tell the client who's online
        List<String> who;
        lock ( _whoIsOnline )
        {
            who = new List<string>( _whoIsOnline.Keys );
        }
        who.Remove( name );
        _client.whoIsOnline( who.ToArray() );
        Console.WriteLine( "login : " + name );
    }

    /// <summary>
    /// Logs the user out: removes this session from the online map (only if
    /// it is still the registered session for the user) and broadcasts the
    /// status change. Safe to call when not logged in.
    /// </summary>
    [MethodImpl( MethodImplOptions.Synchronized )]
    public override void logout()
    {
        if ( _user != null )
        {
            bool tookUsOffline = false;
            lock ( _whoIsOnline )
            {
                if ( _whoIsOnline.ContainsKey( _user ) )
                {
                    // only remove the entry if it still refers to this session;
                    // a newer login may have replaced us already
                    if ( _whoIsOnline[ _user ] == this )
                    {
                        // remove us from the online map
                        _whoIsOnline.Remove( _user );
                        tookUsOffline = true;
                    }
                }
            }
            if ( tookUsOffline )
            {
                // say we're offline
                SayStatusChanged( _user, false );
                Console.WriteLine( "logout : " + _user );
            }
        }
        _user = null;
    }

    #endregion

    #region Chat Members

    /// <summary>Returns true when a user is logged in on this connection.</summary>
    public override bool? isLoggedIn()
    {
        return ( _user != null );
    }

    // Notifies every other online session that user u went on/offline.
    // Copies the values first so the map is not enumerated while other
    // sessions may mutate it.
    private void SayStatusChanged( String u, bool isOnline )
    {
        Dictionary<String, ImplChatServer>.ValueCollection who = _whoIsOnline.Values;
        ImplChatServer[] cArray = new ImplChatServer[ _whoIsOnline.Count ];
        who.CopyTo( cArray, 0 );
        foreach ( ImplChatServer other in cArray )
        {
            if ( other != this )
                other._client.statusChange( u, isOnline );
        }
    }

    /// <summary>
    /// Sends a message from the logged-in user to another online user.
    /// Silently ignored when we are not logged in.
    /// </summary>
    /// <exception cref="types.Chat.Failure">when the recipient is not online.</exception>
    public override void send( string who, string msg )
    {
        String me = _user;
        if ( me == null )
            return;

        ImplChatServer other;
        lock ( _whoIsOnline )
        {
            if ( !_whoIsOnline.TryGetValue( who, out other ) )
                other = null;
        }

        if ( other == null )
            throw new types.Chat.Failure( "user is not online : " + who );

        other.Deliver( me, msg );
    }

    #endregion

    // Pushes a message to this session's client, logging and propagating
    // any delivery failure.
    private void Deliver( String from, String msg )
    {
        try
        {
            _client.send(from, msg );
        }
        catch( types.Chat.Failure e )
        {
            Console.WriteLine( e );
            // BUGFIX: rethrow with "throw;" (not "throw e;") so the original
            // stack trace is preserved.
            throw;
        }
        catch ( Exception e )
        {
            Console.WriteLine( e );
            throw;
        }
    }

    #region Session Members

    /// <summary>
    /// Transport notification hook: logs the user out when the connection
    /// goes down, and dumps any reported exception.
    /// </summary>
    public override void _SessionNotify(object eventObj)
    {
        if (eventObj.Equals(SessionConsts.UP))
            return;

        if (eventObj.Equals(SessionConsts.DOWN))
        {
            logout();
            return;
        }

        if (eventObj is Exception)
            Console.WriteLine(((Exception) eventObj).StackTrace);
    }

    #endregion
}
}
| |
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
using System;
using System.Threading;
using System.Threading.Tasks;
using Windows.Media.MediaProperties;
using Windows.Media.Transcoding;
using Windows.Storage;
using Windows.Storage.Pickers;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
namespace SDKTemplate
{
/// <summary>
/// Transcoding scenario page that lets the user edit the encoding parameters
/// (resolution, bitrates, frame rate, audio settings) of a custom
/// MediaEncodingProfile before transcoding a picked video file.
/// </summary>
public sealed partial class Scenario2_Custom : Page
{
    MainPage rootPage = MainPage.Current;
    Windows.UI.Core.CoreDispatcher _dispatcher = Window.Current.Dispatcher;
    CancellationTokenSource _cts;                 // cancels an in-flight transcode
    string _OutputFileName = "TranscodeSampleOutput";
    string _OutputFileExtension = ".mp4";
    string _OutputType = "MP4";
    Windows.Media.MediaProperties.MediaEncodingProfile _Profile;   // built by GetCustomProfile()
    Windows.Storage.StorageFile _InputFile = null;
    Windows.Storage.StorageFile _OutputFile = null;
    Windows.Media.Transcoding.MediaTranscoder _Transcoder = new Windows.Media.Transcoding.MediaTranscoder();
    bool _UseMp4 = true;                          // target container: MP4 vs WMV

    /// <summary>
    /// Wires up the UI event handlers and seeds the input fields with the
    /// defaults of the built-in WVGA MP4 profile.
    /// </summary>
    public Scenario2_Custom()
    {
        this.InitializeComponent();
        _cts = new CancellationTokenSource();

        // Hook up UI
        PickFileButton.Click += new RoutedEventHandler(PickFile);
        SetOutputButton.Click += new RoutedEventHandler(PickOutput);
        TargetFormat.SelectionChanged += new SelectionChangedEventHandler(OnTargetFormatChanged);
        Transcode.Click += new RoutedEventHandler(TranscodeCustom);
        Cancel.Click += new RoutedEventHandler(TranscodeCancel);

        // Media Controls
        InputPlayButton.Click += new RoutedEventHandler(InputPlayButton_Click);
        InputPauseButton.Click += new RoutedEventHandler(InputPauseButton_Click);
        InputStopButton.Click += new RoutedEventHandler(InputStopButton_Click);
        OutputPlayButton.Click += new RoutedEventHandler(OutputPlayButton_Click);
        OutputPauseButton.Click += new RoutedEventHandler(OutputPauseButton_Click);
        OutputStopButton.Click += new RoutedEventHandler(OutputStopButton_Click);

        // Initialize UI with default settings
        MediaEncodingProfile defaultProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Wvga);
        VideoW.Text = defaultProfile.Video.Width.ToString();
        VideoH.Text = defaultProfile.Video.Height.ToString();
        VideoBR.Text = defaultProfile.Video.Bitrate.ToString();
        VideoFR.Text = defaultProfile.Video.FrameRate.Numerator.ToString();
        AudioBPS.Text = defaultProfile.Audio.BitsPerSample.ToString();
        AudioCC.Text = defaultProfile.Audio.ChannelCount.ToString();
        AudioBR.Text = defaultProfile.Audio.Bitrate.ToString();
        AudioSR.Text = defaultProfile.Audio.SampleRate.ToString();

        // File is not selected, disable all buttons but PickFileButton
        DisableButtons();
        SetPickFileButton(true);
        SetOutputFileButton(false);
        SetCancelButton(false);
    }

    // NOTE(review): the class does not declare IDisposable, so this Dispose
    // must be invoked explicitly by the host — confirm a caller exists.
    public void Dispose()
    {
        _cts.Dispose();
    }

    /// <summary>
    /// Invoked when this page is about to be displayed in a Frame.
    /// </summary>
    /// <param name="e">Event data that describes how this page was reached. The Parameter
    /// property is typically used to configure the page.</param>
    protected override void OnNavigatedTo(NavigationEventArgs e)
    {
    }

    // Click handler: builds the custom profile from the text boxes and runs
    // the transcode, reporting progress/cancellation/failure to the UI.
    // (async void is acceptable here: it is a top-level event handler.)
    async void TranscodeCustom(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        StopPlayers();
        DisableButtons();
        GetCustomProfile();

        // Clear messages
        StatusMessage.Text = "";

        try
        {
            // GetCustomProfile() sets _Profile to null on parse failure, so
            // nothing runs unless input, output and profile are all valid.
            if (_InputFile != null && _Profile != null && _OutputFile != null)
            {
                var preparedTranscodeResult = await _Transcoder.PrepareFileTranscodeAsync(_InputFile, _OutputFile, _Profile);

                if (EnableMrfCrf444.IsChecked.HasValue && (bool)EnableMrfCrf444.IsChecked)
                {
                    _Transcoder.VideoProcessingAlgorithm = MediaVideoProcessingAlgorithm.MrfCrf444;
                }
                else
                {
                    _Transcoder.VideoProcessingAlgorithm = MediaVideoProcessingAlgorithm.Default;
                }

                if (preparedTranscodeResult.CanTranscode)
                {
                    SetCancelButton(true);
                    var progress = new Progress<double>(TranscodeProgress);
                    await preparedTranscodeResult.TranscodeAsync().AsTask(_cts.Token, progress);
                    TranscodeComplete();
                }
                else
                {
                    TranscodeFailure(preparedTranscodeResult.FailureReason);
                }
            }
        }
        catch (TaskCanceledException)
        {
            OutputText("");
            TranscodeError("Transcode Canceled");
        }
        catch (Exception exception)
        {
            TranscodeError(exception.Message);
        }
    }

    // Builds _Profile from the current text-box values; on any parse error
    // reports it and leaves _Profile null so TranscodeCustom does nothing.
    void GetCustomProfile()
    {
        if (_UseMp4)
        {
            _Profile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Wvga);
        }
        else
        {
            _Profile = MediaEncodingProfile.CreateWmv(VideoEncodingQuality.Wvga);
        }

        try
        {
            _Profile.Video.Width = UInt32.Parse(VideoW.Text);
            _Profile.Video.Height = UInt32.Parse(VideoH.Text);
            _Profile.Video.Bitrate = UInt32.Parse(VideoBR.Text);
            _Profile.Video.FrameRate.Numerator = UInt32.Parse(VideoFR.Text);
            _Profile.Video.FrameRate.Denominator = 1;
            _Profile.Audio.BitsPerSample = UInt32.Parse(AudioBPS.Text);
            _Profile.Audio.ChannelCount = UInt32.Parse(AudioCC.Text);
            _Profile.Audio.Bitrate = UInt32.Parse(AudioBR.Text);
            _Profile.Audio.SampleRate = UInt32.Parse(AudioSR.Text);

            // Video sources providing more than about 250 megapixels per second require the
            // H.264 encoder to be set to level 5.2. Information about H.264 encoding levels:
            // https://en.wikipedia.org/wiki/Advanced_Video_Coding#Levels
            // Windows doesn't always set the higher level automatically so it should be set
            // explicitly to avoid encoding failures. Constants needed to set the encoding level:
            // https://docs.microsoft.com/en-us/windows/win32/medfound/mf-mt-video-level
            // https://docs.microsoft.com/en-us/windows/win32/api/codecapi/ne-codecapi-eavench264vlevel
            if (_UseMp4)
            {
                const int c_PixelsPerSecondRequiringLevel52 = 250000000;
                if (_Profile.Video.Width * _Profile.Video.Height *
                    _Profile.Video.FrameRate.Numerator / _Profile.Video.FrameRate.Denominator >
                    c_PixelsPerSecondRequiringLevel52)
                {
                    _Profile.Video.Properties[MediaFoundationConstants.MF_MT_VIDEO_LEVEL] =
                        (UInt32)MediaFoundationConstants.eAVEncH264VLevel.eAVEncH264VLevel5_2;
                }
            }
        }
        catch (Exception exception)
        {
            TranscodeError(exception.Message);
            _Profile = null;
        }
    }

    // Progress callback from the transcode task (percent in 0..100).
    void TranscodeProgress(double percent)
    {
        OutputText("Progress: " + percent.ToString().Split('.')[0] + "%");
    }

    // Shows the completed output file in the output player and re-enables the UI.
    async void TranscodeComplete()
    {
        OutputText("Transcode completed.");
        OutputPathText("Output (" + _OutputFile.Path + ")");
        Windows.Storage.Streams.IRandomAccessStream stream = await _OutputFile.OpenAsync(Windows.Storage.FileAccessMode.Read);
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            OutputVideo.SetSource(stream, _OutputFile.ContentType);
        });

        EnableButtons();
        SetCancelButton(false);
    }

    // Cancel click handler: cancels the running transcode, resets the token
    // source and deletes the partial output file.
    // NOTE(review): _cts is disposed and replaced while the transcode task may
    // still hold the old token — confirm this ordering is safe.
    async void TranscodeCancel(object sender, RoutedEventArgs e)
    {
        try
        {
            _cts.Cancel();
            _cts.Dispose();
            _cts = new CancellationTokenSource();

            if (_OutputFile != null)
            {
                await _OutputFile.DeleteAsync();
            }
        }
        catch (Exception exception)
        {
            TranscodeError(exception.Message);
        }
    }

    // Deletes the output file and maps the failure reason to a user message.
    async void TranscodeFailure(TranscodeFailureReason reason)
    {
        try
        {
            if (_OutputFile != null)
            {
                await _OutputFile.DeleteAsync();
            }
        }
        catch (Exception exception)
        {
            TranscodeError(exception.Message);
        }

        switch (reason)
        {
            case TranscodeFailureReason.CodecNotFound:
                TranscodeError("Codec not found.");
                break;
            case TranscodeFailureReason.InvalidProfile:
                TranscodeError("Invalid profile.");
                break;
            default:
                TranscodeError("Unknown failure.");
                break;
        }
    }

    // Lets the user pick the input video file and starts previewing it.
    async void PickFile(object sender, RoutedEventArgs e)
    {
        Windows.Storage.Pickers.FileOpenPicker picker = new Windows.Storage.Pickers.FileOpenPicker();
        picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.VideosLibrary;
        picker.FileTypeFilter.Add(".wmv");
        picker.FileTypeFilter.Add(".mp4");
        Windows.Storage.StorageFile file = await picker.PickSingleFileAsync();
        if (file != null)
        {
            Windows.Storage.Streams.IRandomAccessStream stream = await file.OpenAsync(Windows.Storage.FileAccessMode.Read);
            _InputFile = file;
            InputVideo.SetSource(stream, file.ContentType);
            InputVideo.Play();

            // Enable buttons
            EnableButtons();
        }
    }

    // Lets the user choose the output file; enables Transcode once chosen.
    async void PickOutput(object sender, RoutedEventArgs e)
    {
        FileSavePicker picker = new FileSavePicker();
        picker.SuggestedStartLocation = PickerLocationId.VideosLibrary;
        picker.SuggestedFileName = _OutputFileName;
        picker.FileTypeChoices.Add(_OutputType, new System.Collections.Generic.List<string>() { _OutputFileExtension });
        _OutputFile = await picker.PickSaveFileAsync();
        if (_OutputFile != null)
        {
            SetTranscodeButton(true);
        }
    }

    // Combo-box handler: index 0 selects MP4, anything else selects WMV.
    void OnTargetFormatChanged(object sender, SelectionChangedEventArgs e)
    {
        if (TargetFormat.SelectedIndex > 0)
        {
            _OutputFileExtension = ".wmv";
            _OutputType = "WMV";
            _UseMp4 = false;
        }
        else
        {
            _OutputFileExtension = ".mp4";
            _OutputType = "MP4";
            _UseMp4 = true;
        }
    }

    // Input/output preview player transport controls. The play handlers reset
    // the playback rate in case a previous Stop left it at 0.
    void InputPlayButton_Click(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        if (InputVideo.DefaultPlaybackRate == 0)
        {
            InputVideo.DefaultPlaybackRate = 1.0;
            InputVideo.PlaybackRate = 1.0;
        }
        InputVideo.Play();
    }

    void InputStopButton_Click(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        InputVideo.Stop();
    }

    void InputPauseButton_Click(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        InputVideo.Pause();
    }

    void OutputPlayButton_Click(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        if (OutputVideo.DefaultPlaybackRate == 0)
        {
            OutputVideo.DefaultPlaybackRate = 1.0;
            OutputVideo.PlaybackRate = 1.0;
        }
        OutputVideo.Play();
    }

    void OutputStopButton_Click(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        OutputVideo.Stop();
    }

    void OutputPauseButton_Click(Object sender, Windows.UI.Xaml.RoutedEventArgs e)
    {
        OutputVideo.Pause();
    }

    // UI-thread helpers: each marshals a single control update onto the
    // dispatcher. NOTE(review): these are async void helpers (not event
    // handlers), so exceptions in them are unobservable — confirm acceptable.
    async void SetPickFileButton(bool isEnabled)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            PickFileButton.IsEnabled = isEnabled;
        });
    }

    async void SetOutputFileButton(bool isEnabled)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            SetOutputButton.IsEnabled = isEnabled;
        });
    }

    async void SetTranscodeButton(bool isEnabled)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            Transcode.IsEnabled = isEnabled;
        });
    }

    async void SetCancelButton(bool isEnabled)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            Cancel.IsEnabled = isEnabled;
        });
    }

    async void EnableButtons()
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            PickFileButton.IsEnabled = true;
            SetOutputButton.IsEnabled = true;
            TargetFormat.IsEnabled = true;
            EnableMrfCrf444.IsEnabled = true;

            // The transcode button's initial state should be disabled until an output
            // file has been set.
            Transcode.IsEnabled = false;
        });
    }

    async void DisableButtons()
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            Transcode.IsEnabled = false;
            PickFileButton.IsEnabled = false;
            SetOutputButton.IsEnabled = false;
            TargetFormat.IsEnabled = false;
            EnableMrfCrf444.IsEnabled = false;
        });
    }

    // Pauses both preview players (e.g. before starting a transcode).
    async void StopPlayers()
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            if (InputVideo.CurrentState != MediaElementState.Paused)
            {
                InputVideo.Pause();
            }
            if (OutputVideo.CurrentState != MediaElementState.Paused)
            {
                OutputVideo.Pause();
            }
        });
    }

    // Opens the given file in the output player.
    async void PlayFile(Windows.Storage.StorageFile MediaFile)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
        {
            try
            {
                Windows.Storage.Streams.IRandomAccessStream stream = await MediaFile.OpenAsync(FileAccessMode.Read);
                OutputVideo.SetSource(stream, MediaFile.ContentType);
                OutputVideo.Play();
            }
            catch (Exception exception)
            {
                TranscodeError(exception.Message);
            }
        });
    }

    // Shows an error in red in the status area and re-enables the UI.
    async void TranscodeError(string error)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            StatusMessage.Foreground = new Windows.UI.Xaml.Media.SolidColorBrush(Windows.UI.Colors.Red);
            StatusMessage.Text = error;
        });

        EnableButtons();
        SetCancelButton(false);
    }

    // Shows a progress/informational message in green.
    async void OutputText(string text)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            OutputMsg.Foreground = new Windows.UI.Xaml.Media.SolidColorBrush(Windows.UI.Colors.Green);
            OutputMsg.Text = text;
        });
    }

    // Shows the output file path under the output player.
    async void OutputPathText(string text)
    {
        await _dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
        {
            OutputPath.Text = text;
        });
    }
}
}
| |
//
// COPYRIGHT: Copyright 2007
// Infralution
// www.infralution.com
//
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Resources;
using System.Windows.Data;
namespace Infralution.Localization.Wpf
{
/// <summary>
/// Defines a type converter for enum values that converts enum values to
/// and from string representations using resources
/// </summary>
/// <remarks>
/// This class makes localization of display values for enums in a project easy. Simply
/// derive a class from this class and pass the ResourceManager in the constructor.
///
/// <code lang="C#" escaped="true" >
/// class LocalizedEnumConverter : ResourceEnumConverter
/// {
/// public LocalizedEnumConverter(Type type)
/// : base(type, Properties.Resources.ResourceManager)
/// {
/// }
/// }
/// </code>
///
/// <code lang="Visual Basic" escaped="true" >
/// Public Class LocalizedEnumConverter
///
/// Inherits ResourceEnumConverter
/// Public Sub New(ByVal sType as Type)
/// MyBase.New(sType, My.Resources.ResourceManager)
/// End Sub
/// End Class
/// </code>
///
/// Then define the enum values in the resource editor. The names of
/// the resources are simply the enum value prefixed by the enum type name with an
/// underscore separator eg MyEnum_MyValue. You can then use the TypeConverter attribute
/// to make the LocalizedEnumConverter the default TypeConverter for the enums in your
/// project.
/// </remarks>
public class ResourceEnumConverter : EnumConverter, IValueConverter
{
    // Maps localized display text back to the corresponding enum value.
    private class LookupTable : Dictionary<string, object> { }

    private Dictionary<CultureInfo, LookupTable> _lookupTables = new Dictionary<CultureInfo, LookupTable>();
    private ResourceManager _resourceManager;
    private bool _isFlagEnum = false;
    private Array _flagValues;   // all declared values, set only for [Flags] enums

    /// <summary>
    /// Get the lookup table for the given culture (creating if necessary)
    /// </summary>
    /// <param name="culture"></param>
    /// <returns></returns>
    private LookupTable GetLookupTable(CultureInfo culture)
    {
        LookupTable result = null;
        if (culture == null)
            culture = CultureInfo.CurrentCulture;

        if (!_lookupTables.TryGetValue(culture, out result))
        {
            result = new LookupTable();
            foreach (object value in GetStandardValues())
            {
                string text = GetValueText(culture, value);
                if (text != null)
                {
                    result.Add(text, value);
                }
            }
            _lookupTables.Add(culture, result);
        }
        return result;
    }

    /// <summary>
    /// Return the name of the resource to use
    /// </summary>
    /// <param name="value">The value to get</param>
    /// <returns>The name of the resource to use (TypeName_ValueName)</returns>
    protected virtual string GetResourceName(object value)
    {
        Type type = value.GetType();
        return string.Format("{0}_{1}", type.Name, value.ToString());
    }

    /// <summary>
    /// Return the text to display for a simple value in the given culture
    /// </summary>
    /// <param name="culture">The culture to get the text for</param>
    /// <param name="value">The enum value to get the text for</param>
    /// <returns>The localized text; falls back to the resource name when no resource exists</returns>
    private string GetValueText(CultureInfo culture, object value)
    {
        string resourceName = GetResourceName(value);
        string result = _resourceManager.GetString(resourceName, culture);
        if (result == null)
            result = resourceName;
        return result;
    }

    /// <summary>
    /// Return true if the given value can be represented using a single bit
    /// </summary>
    /// <param name="value"></param>
    /// <returns></returns>
    private bool IsSingleBitValue(ulong value)
    {
        switch (value)
        {
            case 0:
                return false;
            case 1:
                return true;
        }
        // power of two iff exactly one bit is set
        return ((value & (value - 1)) == 0);
    }

    /// <summary>
    /// Return the text to display for a flag value in the given culture
    /// </summary>
    /// <param name="culture">The culture to get the text for</param>
    /// <param name="value">The flag enum value to get the text for</param>
    /// <returns>The localized text (comma-separated flag names)</returns>
    private string GetFlagValueText(CultureInfo culture, object value)
    {
        // if there is a standard value then use it
        //
        if (Enum.IsDefined(value.GetType(), value))
        {
            return GetValueText(culture, value);
        }

        // otherwise find the combination of flag bit values
        // that makes up the value
        //
        // BUGFIX: was Convert.ToUInt32, which truncates/overflows for enums
        // with a 64-bit underlying type despite the ulong declaration.
        ulong lValue = Convert.ToUInt64(value);
        string result = null;
        foreach (object flagValue in _flagValues)
        {
            ulong lFlagValue = Convert.ToUInt64(flagValue);
            if (IsSingleBitValue(lFlagValue))
            {
                if ((lFlagValue & lValue) == lFlagValue)
                {
                    string valueText = GetValueText(culture, flagValue);
                    if (result == null)
                    {
                        result = valueText;
                    }
                    else
                    {
                        result = string.Format("{0}, {1}", result, valueText);
                    }
                }
            }
        }
        return result;
    }

    /// <summary>
    /// Return the Enum value for a simple (non-flagged enum)
    /// </summary>
    /// <param name="culture">The culture to convert using</param>
    /// <param name="text">The text to convert</param>
    /// <returns>The enum value, or null when the text is not recognized</returns>
    private object GetValue(CultureInfo culture, string text)
    {
        LookupTable lookupTable = GetLookupTable(culture);
        object result = null;
        lookupTable.TryGetValue(text, out result);
        return result;
    }

    /// <summary>
    /// Return the Enum value for a flagged enum
    /// </summary>
    /// <param name="culture">The culture to convert using</param>
    /// <param name="text">The comma-separated text to convert</param>
    /// <returns>The enum value, or null when any part is not recognized</returns>
    private object GetFlagValue(CultureInfo culture, string text)
    {
        LookupTable lookupTable = GetLookupTable(culture);
        string[] textValues = text.Split(',');
        ulong result = 0;
        foreach (string textValue in textValues)
        {
            object value = null;
            string trimmedTextValue = textValue.Trim();
            if (!lookupTable.TryGetValue(trimmedTextValue, out value))
            {
                return null;
            }
            // BUGFIX: was Convert.ToUInt32 — see GetFlagValueText.
            result |= Convert.ToUInt64(value);
        }
        return Enum.ToObject(EnumType, result);
    }

    /// <summary>
    /// Create a new instance of the converter using translations from the given resource manager
    /// </summary>
    /// <param name="type"></param>
    /// <param name="resourceManager"></param>
    public ResourceEnumConverter(Type type, ResourceManager resourceManager)
        : base(type)
    {
        _resourceManager = resourceManager;
        object[] flagAttributes = type.GetCustomAttributes(typeof(FlagsAttribute), true);
        _isFlagEnum = flagAttributes.Length > 0;
        if (_isFlagEnum)
        {
            _flagValues = Enum.GetValues(type);
        }
    }

    /// <summary>
    /// Convert string values to enum values
    /// </summary>
    /// <param name="context"></param>
    /// <param name="culture"></param>
    /// <param name="value"></param>
    /// <returns></returns>
    public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
    {
        if (culture == null)
            culture = CultureInfo.CurrentCulture;

        if (value is string)
        {
            object result = (_isFlagEnum) ?
                GetFlagValue(culture, (string)value): GetValue(culture, (string)value);
            if (result == null)
            {
                // fall back to the standard (non-localized) enum parsing
                result = base.ConvertFrom(context, culture, value);
            }
            return result;
        }
        else
        {
            return base.ConvertFrom(context, culture, value);
        }
    }

    /// <summary>
    /// Convert the enum value to a string
    /// </summary>
    /// <param name="context"></param>
    /// <param name="culture"></param>
    /// <param name="value"></param>
    /// <param name="destinationType"></param>
    /// <returns></returns>
    public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
    {
        if (culture == null)
            culture = CultureInfo.CurrentCulture;

        if (value == null) return null;

        if (destinationType == typeof(string) || destinationType == typeof(object))
        {
            object result = (_isFlagEnum) ?
                GetFlagValueText(culture, value) : GetValueText(culture, value);
            return result;
        }
        else
        {
            return base.ConvertTo(context, culture, value, destinationType);
        }
    }

    /// <summary>
    /// Convert the given enum value to string using the registered type converter
    /// </summary>
    /// <param name="value">The enum value to convert to string</param>
    /// <returns>The localized string value for the enum</returns>
    static public string ConvertToString(Enum value)
    {
        TypeConverter converter = TypeDescriptor.GetConverter(value.GetType());
        return converter.ConvertToString(value);
    }

    /// <summary>
    /// Return a list of the enum values and their associated display text for the given enum type
    /// </summary>
    /// <param name="enumType">The enum type to get the values for</param>
    /// <param name="culture">The culture to get the text for</param>
    /// <returns>
    /// A list of KeyValuePairs where the key is the enum value and the value is the text to display
    /// </returns>
    /// <remarks>
    /// This method can be used to provide localized binding to enums in ASP.NET applications. Unlike
    /// windows forms the standard ASP.NET controls do not use TypeConverters to convert from enum values
    /// to the displayed text. You can bind an ASP.NET control to the list returned by this method by setting
    /// the DataValueField to "Key" and the DataTextField to "Value".
    /// </remarks>
    static public List<KeyValuePair<Enum, string>> GetValues(Type enumType, CultureInfo culture)
    {
        List<KeyValuePair<Enum, string>> result = new List<KeyValuePair<Enum, string>>();
        TypeConverter converter = TypeDescriptor.GetConverter(enumType);
        foreach (Enum value in Enum.GetValues(enumType))
        {
            KeyValuePair<Enum, string> pair = new KeyValuePair<Enum, string>(value, converter.ConvertToString(null, culture, value));
            result.Add(pair);
        }
        return result;
    }

    /// <summary>
    /// Return a list of the enum values and their associated display text for the given enum type in the current UI Culture
    /// </summary>
    /// <param name="enumType">The enum type to get the values for</param>
    /// <returns>
    /// A list of KeyValuePairs where the key is the enum value and the value is the text to display
    /// </returns>
    /// <remarks>
    /// This method can be used to provide localized binding to enums in ASP.NET applications. Unlike
    /// windows forms the standard ASP.NET controls do not use TypeConverters to convert from enum values
    /// to the displayed text. You can bind an ASP.NET control to the list returned by this method by setting
    /// the DataValueField to "Key" and the DataTextField to "Value".
    /// </remarks>
    static public List<KeyValuePair<Enum, string>> GetValues(Type enumType)
    {
        return GetValues(enumType, CultureInfo.CurrentUICulture);
    }

    /// <summary>
    /// Handle XAML Conversion from this type to other types
    /// </summary>
    /// <param name="value">The value to convert</param>
    /// <param name="targetType">The target type</param>
    /// <param name="parameter">not used</param>
    /// <param name="culture">The culture to convert</param>
    /// <returns>The converted value</returns>
    object IValueConverter.Convert(object value, Type targetType, object parameter, CultureInfo culture)
    {
        return ConvertTo(null, culture, value, targetType);
    }

    /// <summary>
    /// Handle XAML Conversion from other types back to this type
    /// </summary>
    /// <param name="value">The value to convert</param>
    /// <param name="targetType">The target type</param>
    /// <param name="parameter">not used</param>
    /// <param name="culture">The culture to convert</param>
    /// <returns>The converted value</returns>
    object IValueConverter.ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
    {
        return ConvertFrom(null, culture, value);
    }
}
}
| |
using System;
using System.Collections;
using System.Globalization;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Utilities;
namespace Org.BouncyCastle.Crypto.Engines
{
/**
* implementation of GOST 28147-89
*/
/**
 * implementation of GOST 28147-89
 */
public class Gost28147Engine
    : IBlockCipher
{
    // GOST 28147-89 is a 64-bit (8-byte) block cipher.
    private const int BlockSize = 8;

    // Eight 32-bit subkeys derived from the 256-bit user key; null until Init() is given a key.
    private int[] workingKey = null;

    // Direction selected by the most recent key setup (see generateWorkingKey).
    private bool forEncryption;

    // The active 8x16 S-box table (flattened to 128 nibble entries); replaceable via ParametersWithSBox.
    private byte[] S = Sbox_Default;

    // these are the S-boxes given in Applied Cryptography 2nd Ed., p. 333
    // This is default S-box!
    private static readonly byte[] Sbox_Default = {
        0x4,0xA,0x9,0x2,0xD,0x8,0x0,0xE,0x6,0xB,0x1,0xC,0x7,0xF,0x5,0x3,
        0xE,0xB,0x4,0xC,0x6,0xD,0xF,0xA,0x2,0x3,0x8,0x1,0x0,0x7,0x5,0x9,
        0x5,0x8,0x1,0xD,0xA,0x3,0x4,0x2,0xE,0xF,0xC,0x7,0x6,0x0,0x9,0xB,
        0x7,0xD,0xA,0x1,0x0,0x8,0x9,0xF,0xE,0x4,0x6,0xC,0xB,0x2,0x5,0x3,
        0x6,0xC,0x7,0x1,0x5,0xF,0xD,0x8,0x4,0xA,0x9,0xE,0x0,0x3,0xB,0x2,
        0x4,0xB,0xA,0x0,0x7,0x2,0x1,0xD,0x3,0x6,0x8,0x5,0x9,0xC,0xF,0xE,
        0xD,0xB,0x4,0x1,0x3,0xF,0x5,0x9,0x0,0xA,0xE,0x7,0x6,0x8,0x2,0xC,
        0x1,0xF,0xD,0x0,0x5,0x7,0xA,0x4,0x9,0x2,0x3,0xE,0x6,0xB,0x8,0xC
    };

    /*
     * class content S-box parameters for encrypting
     * getting from, see: http://tools.ietf.org/id/draft-popov-cryptopro-cpalgs-01.txt
     *                    http://tools.ietf.org/id/draft-popov-cryptopro-cpalgs-02.txt
     */
    private static readonly byte[] ESbox_Test = {
        0x4,0x2,0xF,0x5,0x9,0x1,0x0,0x8,0xE,0x3,0xB,0xC,0xD,0x7,0xA,0x6,
        0xC,0x9,0xF,0xE,0x8,0x1,0x3,0xA,0x2,0x7,0x4,0xD,0x6,0x0,0xB,0x5,
        0xD,0x8,0xE,0xC,0x7,0x3,0x9,0xA,0x1,0x5,0x2,0x4,0x6,0xF,0x0,0xB,
        0xE,0x9,0xB,0x2,0x5,0xF,0x7,0x1,0x0,0xD,0xC,0x6,0xA,0x4,0x3,0x8,
        0x3,0xE,0x5,0x9,0x6,0x8,0x0,0xD,0xA,0xB,0x7,0xC,0x2,0x1,0xF,0x4,
        0x8,0xF,0x6,0xB,0x1,0x9,0xC,0x5,0xD,0x3,0x7,0xA,0x0,0xE,0x2,0x4,
        0x9,0xB,0xC,0x0,0x3,0x6,0x7,0x5,0x4,0x8,0xE,0xF,0x1,0xA,0x2,0xD,
        0xC,0x6,0x5,0x2,0xB,0x0,0x9,0xD,0x3,0xE,0x7,0xA,0xF,0x4,0x1,0x8
    };

    private static readonly byte[] ESbox_A = {
        0x9,0x6,0x3,0x2,0x8,0xB,0x1,0x7,0xA,0x4,0xE,0xF,0xC,0x0,0xD,0x5,
        0x3,0x7,0xE,0x9,0x8,0xA,0xF,0x0,0x5,0x2,0x6,0xC,0xB,0x4,0xD,0x1,
        0xE,0x4,0x6,0x2,0xB,0x3,0xD,0x8,0xC,0xF,0x5,0xA,0x0,0x7,0x1,0x9,
        0xE,0x7,0xA,0xC,0xD,0x1,0x3,0x9,0x0,0x2,0xB,0x4,0xF,0x8,0x5,0x6,
        0xB,0x5,0x1,0x9,0x8,0xD,0xF,0x0,0xE,0x4,0x2,0x3,0xC,0x7,0xA,0x6,
        0x3,0xA,0xD,0xC,0x1,0x2,0x0,0xB,0x7,0x5,0x9,0x4,0x8,0xF,0xE,0x6,
        0x1,0xD,0x2,0x9,0x7,0xA,0x6,0x0,0x8,0xC,0x4,0x5,0xF,0x3,0xB,0xE,
        0xB,0xA,0xF,0x5,0x0,0xC,0xE,0x8,0x6,0x2,0x3,0x9,0x1,0x7,0xD,0x4
    };

    private static readonly byte[] ESbox_B = {
        0x8,0x4,0xB,0x1,0x3,0x5,0x0,0x9,0x2,0xE,0xA,0xC,0xD,0x6,0x7,0xF,
        0x0,0x1,0x2,0xA,0x4,0xD,0x5,0xC,0x9,0x7,0x3,0xF,0xB,0x8,0x6,0xE,
        0xE,0xC,0x0,0xA,0x9,0x2,0xD,0xB,0x7,0x5,0x8,0xF,0x3,0x6,0x1,0x4,
        0x7,0x5,0x0,0xD,0xB,0x6,0x1,0x2,0x3,0xA,0xC,0xF,0x4,0xE,0x9,0x8,
        0x2,0x7,0xC,0xF,0x9,0x5,0xA,0xB,0x1,0x4,0x0,0xD,0x6,0x8,0xE,0x3,
        0x8,0x3,0x2,0x6,0x4,0xD,0xE,0xB,0xC,0x1,0x7,0xF,0xA,0x0,0x9,0x5,
        0x5,0x2,0xA,0xB,0x9,0x1,0xC,0x3,0x7,0x4,0xD,0x0,0x6,0xF,0x8,0xE,
        0x0,0x4,0xB,0xE,0x8,0x3,0x7,0x1,0xA,0x2,0x9,0x6,0xF,0xD,0x5,0xC
    };

    private static readonly byte[] ESbox_C = {
        0x1,0xB,0xC,0x2,0x9,0xD,0x0,0xF,0x4,0x5,0x8,0xE,0xA,0x7,0x6,0x3,
        0x0,0x1,0x7,0xD,0xB,0x4,0x5,0x2,0x8,0xE,0xF,0xC,0x9,0xA,0x6,0x3,
        0x8,0x2,0x5,0x0,0x4,0x9,0xF,0xA,0x3,0x7,0xC,0xD,0x6,0xE,0x1,0xB,
        0x3,0x6,0x0,0x1,0x5,0xD,0xA,0x8,0xB,0x2,0x9,0x7,0xE,0xF,0xC,0x4,
        0x8,0xD,0xB,0x0,0x4,0x5,0x1,0x2,0x9,0x3,0xC,0xE,0x6,0xF,0xA,0x7,
        0xC,0x9,0xB,0x1,0x8,0xE,0x2,0x4,0x7,0x3,0x6,0x5,0xA,0x0,0xF,0xD,
        0xA,0x9,0x6,0x8,0xD,0xE,0x2,0x0,0xF,0x3,0x5,0xB,0x4,0x1,0xC,0x7,
        0x7,0x4,0x0,0x5,0xA,0x2,0xF,0xE,0xC,0x6,0x1,0xB,0xD,0x9,0x3,0x8
    };

    private static readonly byte[] ESbox_D = {
        0xF,0xC,0x2,0xA,0x6,0x4,0x5,0x0,0x7,0x9,0xE,0xD,0x1,0xB,0x8,0x3,
        0xB,0x6,0x3,0x4,0xC,0xF,0xE,0x2,0x7,0xD,0x8,0x0,0x5,0xA,0x9,0x1,
        0x1,0xC,0xB,0x0,0xF,0xE,0x6,0x5,0xA,0xD,0x4,0x8,0x9,0x3,0x7,0x2,
        0x1,0x5,0xE,0xC,0xA,0x7,0x0,0xD,0x6,0x2,0xB,0x4,0x9,0x3,0xF,0x8,
        0x0,0xC,0x8,0x9,0xD,0x2,0xA,0xB,0x7,0x3,0x6,0x5,0x4,0xE,0xF,0x1,
        0x8,0x0,0xF,0x3,0x2,0x5,0xE,0xB,0x1,0xA,0x4,0x7,0xC,0x9,0xD,0x6,
        0x3,0x0,0x6,0xF,0x1,0xE,0x9,0x2,0xD,0x8,0xC,0x4,0xB,0xA,0x5,0x7,
        0x1,0xA,0x6,0x8,0xF,0xB,0x0,0x4,0xC,0x3,0x5,0x9,0x7,0xD,0x2,0xE
    };

    //S-box for digest
    private static readonly byte[] DSbox_Test = {
        0x4,0xA,0x9,0x2,0xD,0x8,0x0,0xE,0x6,0xB,0x1,0xC,0x7,0xF,0x5,0x3,
        0xE,0xB,0x4,0xC,0x6,0xD,0xF,0xA,0x2,0x3,0x8,0x1,0x0,0x7,0x5,0x9,
        0x5,0x8,0x1,0xD,0xA,0x3,0x4,0x2,0xE,0xF,0xC,0x7,0x6,0x0,0x9,0xB,
        0x7,0xD,0xA,0x1,0x0,0x8,0x9,0xF,0xE,0x4,0x6,0xC,0xB,0x2,0x5,0x3,
        0x6,0xC,0x7,0x1,0x5,0xF,0xD,0x8,0x4,0xA,0x9,0xE,0x0,0x3,0xB,0x2,
        0x4,0xB,0xA,0x0,0x7,0x2,0x1,0xD,0x3,0x6,0x8,0x5,0x9,0xC,0xF,0xE,
        0xD,0xB,0x4,0x1,0x3,0xF,0x5,0x9,0x0,0xA,0xE,0x7,0x6,0x8,0x2,0xC,
        0x1,0xF,0xD,0x0,0x5,0x7,0xA,0x4,0x9,0x2,0x3,0xE,0x6,0xB,0x8,0xC
    };

    private static readonly byte[] DSbox_A = {
        0xA,0x4,0x5,0x6,0x8,0x1,0x3,0x7,0xD,0xC,0xE,0x0,0x9,0x2,0xB,0xF,
        0x5,0xF,0x4,0x0,0x2,0xD,0xB,0x9,0x1,0x7,0x6,0x3,0xC,0xE,0xA,0x8,
        0x7,0xF,0xC,0xE,0x9,0x4,0x1,0x0,0x3,0xB,0x5,0x2,0x6,0xA,0x8,0xD,
        0x4,0xA,0x7,0xC,0x0,0xF,0x2,0x8,0xE,0x1,0x6,0x5,0xD,0xB,0x9,0x3,
        0x7,0x6,0x4,0xB,0x9,0xC,0x2,0xA,0x1,0x8,0x0,0xE,0xF,0xD,0x3,0x5,
        0x7,0x6,0x2,0x4,0xD,0x9,0xF,0x0,0xA,0x1,0x5,0xB,0x8,0xE,0xC,0x3,
        0xD,0xE,0x4,0x1,0x7,0x0,0x5,0xA,0x3,0xC,0x8,0xF,0x6,0x2,0x9,0xB,
        0x1,0x3,0xA,0x9,0x5,0xB,0x4,0xF,0x8,0x6,0x7,0xE,0xD,0x0,0x2,0xC
    };

    //
    // pre-defined sbox table
    //
    // Maps upper-cased S-box names ("DEFAULT", "E-A", ...) to the tables above;
    // populated once by the static constructor and queried by GetSBox().
    private static readonly IDictionary sBoxes = Platform.CreateHashtable();

    static Gost28147Engine()
    {
        AddSBox("Default", Sbox_Default);
        AddSBox("E-TEST", ESbox_Test);
        AddSBox("E-A", ESbox_A);
        AddSBox("E-B", ESbox_B);
        AddSBox("E-C", ESbox_C);
        AddSBox("E-D", ESbox_D);
        AddSBox("D-TEST", DSbox_Test);
        AddSBox("D-A", DSbox_A);
    }

    // Registers a named S-box table; the name is upper-cased so lookups are case-insensitive.
    private static void AddSBox(string sBoxName, byte[] sBox)
    {
        sBoxes.Add(Platform.StringToUpper(sBoxName), sBox);
    }

    /**
     * standard constructor.
     */
    public Gost28147Engine()
    {
    }

    /**
     * initialise an Gost28147 cipher.
     *
     * @param forEncryption whether or not we are for encryption.
     * @param parameters the parameters required to set up the cipher.
     * @exception ArgumentException if the parameters argument is inappropriate.
     */
    public void Init(
        bool forEncryption,
        ICipherParameters parameters)
    {
        if (parameters is ParametersWithSBox)
        {
            ParametersWithSBox param = (ParametersWithSBox)parameters;

            //
            // Set the S-Box
            //
            byte[] sBox = param.GetSBox();
            // Table must be exactly 128 nibble entries (8 rows x 16 columns).
            if (sBox.Length != Sbox_Default.Length)
                throw new ArgumentException("invalid S-box passed to GOST28147 init");

            // Clone so later mutation of the caller's array cannot affect the engine.
            this.S = Arrays.Clone(sBox);

            //
            // set key if there is one
            //
            if (param.Parameters != null)
            {
                workingKey = generateWorkingKey(forEncryption,
                    ((KeyParameter)param.Parameters).GetKey());
            }
        }
        else if (parameters is KeyParameter)
        {
            workingKey = generateWorkingKey(forEncryption,
                ((KeyParameter)parameters).GetKey());
        }
        else if (parameters != null)
        {
            // Non-null parameters of any other type are rejected; a null argument
            // leaves the engine state (key and direction) unchanged.
            throw new ArgumentException("invalid parameter passed to Gost28147 init - " + parameters.GetType().Name);
        }
    }

    public string AlgorithmName
    {
        get { return "Gost28147"; }
    }

    public bool IsPartialBlockOkay
    {
        get { return false; }
    }

    public int GetBlockSize()
    {
        return BlockSize;
    }

    // Processes exactly one 8-byte block from input[inOff..] into output[outOff..].
    // Throws InvalidOperationException if Init() was never called with a key,
    // and DataLengthException if either buffer is too short for a full block.
    // Returns the number of bytes processed (always BlockSize).
    public int ProcessBlock(
        byte[] input,
        int inOff,
        byte[] output,
        int outOff)
    {
        if (workingKey == null)
        {
            throw new InvalidOperationException("Gost28147 engine not initialised");
        }

        if ((inOff + BlockSize) > input.Length)
        {
            throw new DataLengthException("input buffer too short");
        }

        if ((outOff + BlockSize) > output.Length)
        {
            throw new DataLengthException("output buffer too short");
        }

        Gost28147Func(workingKey, input, inOff, output, outOff);

        return BlockSize;
    }

    // GOST 28147-89 in ECB mode keeps no state between blocks, so there is nothing to reset.
    public void Reset()
    {
    }

    // Splits the 32-byte (256-bit) user key into eight little-endian 32-bit subkeys
    // and records the requested direction. Throws ArgumentException on a wrong key size.
    private int[] generateWorkingKey(
        bool forEncryption,
        byte[] userKey)
    {
        this.forEncryption = forEncryption;

        if (userKey.Length != 32)
        {
            throw new ArgumentException("Key length invalid. Key needs to be 32 byte - 256 bit!!!");
        }

        int[] key = new int[8];
        for(int i=0; i!=8; i++)
        {
            key[i] = bytesToint(userKey,i*4);
        }

        return key;
    }

    // One round function: add subkey mod 2^32 (CM1), substitute each of the eight
    // 4-bit nibbles through its own S-box row, then rotate left by 11 bits.
    private int Gost28147_mainStep(int n1, int key)
    {
        int cm = (key + n1); // CM1

        // S-box replacing
        int om = S[  0 + ((cm >> (0 * 4)) & 0xF)] << (0 * 4);
        om += S[ 16 + ((cm >> (1 * 4)) & 0xF)] << (1 * 4);
        om += S[ 32 + ((cm >> (2 * 4)) & 0xF)] << (2 * 4);
        om += S[ 48 + ((cm >> (3 * 4)) & 0xF)] << (3 * 4);
        om += S[ 64 + ((cm >> (4 * 4)) & 0xF)] << (4 * 4);
        om += S[ 80 + ((cm >> (5 * 4)) & 0xF)] << (5 * 4);
        om += S[ 96 + ((cm >> (6 * 4)) & 0xF)] << (6 * 4);
        om += S[112 + ((cm >> (7 * 4)) & 0xF)] << (7 * 4);

        // return om << 11 | om >>> (32-11); // 11-leftshift
        int omLeft = om << 11;
        int omRight = (int)(((uint) om) >> (32 - 11)); // Note: Casts required to get unsigned bit rotation

        return omLeft | omRight;
    }

    // Full 32-round transformation of one block. The key schedule order differs
    // between directions: encryption runs the subkeys forward three times then
    // backward once; decryption runs them forward once then backward three times.
    private void Gost28147Func(
        int[] workingKey,
        byte[] inBytes,
        int inOff,
        byte[] outBytes,
        int outOff)
    {
        int N1, N2, tmp; //tmp -> for saving N1

        // Load the two 32-bit halves (little-endian) of the block.
        N1 = bytesToint(inBytes, inOff);
        N2 = bytesToint(inBytes, inOff + 4);

        if (this.forEncryption)
        {
            for(int k = 0; k < 3; k++) // 1-24 steps
            {
                for(int j = 0; j < 8; j++)
                {
                    tmp = N1;
                    int step = Gost28147_mainStep(N1, workingKey[j]);
                    N1 = N2 ^ step; // CM2
                    N2 = tmp;
                }
            }
            for(int j = 7; j > 0; j--) // 25-31 steps
            {
                tmp = N1;
                N1 = N2 ^ Gost28147_mainStep(N1, workingKey[j]); // CM2
                N2 = tmp;
            }
        }
        else //decrypt
        {
            for(int j = 0; j < 8; j++) // 1-8 steps
            {
                tmp = N1;
                N1 = N2 ^ Gost28147_mainStep(N1, workingKey[j]); // CM2
                N2 = tmp;
            }
            for(int k = 0; k < 3; k++) //9-31 steps
            {
                for(int j = 7; j >= 0; j--)
                {
                    if ((k == 2) && (j==0))
                    {
                        break; // break 32 step
                    }
                    tmp = N1;
                    N1 = N2 ^ Gost28147_mainStep(N1, workingKey[j]); // CM2
                    N2 = tmp;
                }
            }
        }

        // Final (32nd) step: the halves are not swapped.
        N2 = N2 ^ Gost28147_mainStep(N1, workingKey[0]);  // 32 step (N1=N1)

        intTobytes(N1, outBytes, outOff);
        intTobytes(N2, outBytes, outOff + 4);
    }

    //array of bytes to type int
    // Reads a 32-bit little-endian value: inBytes[inOff] is the least significant byte.
    private static int bytesToint(
        byte[] inBytes,
        int inOff)
    {
        return (int)((inBytes[inOff + 3] << 24) & 0xff000000) + ((inBytes[inOff + 2] << 16) & 0xff0000) +
            ((inBytes[inOff + 1] << 8) & 0xff00) + (inBytes[inOff] & 0xff);
    }

    //int to array of bytes
    // Writes a 32-bit value little-endian: outBytes[outOff] receives the least significant byte.
    private static void intTobytes(
        int num,
        byte[] outBytes,
        int outOff)
    {
        outBytes[outOff + 3] = (byte)(num >> 24);
        outBytes[outOff + 2] = (byte)(num >> 16);
        outBytes[outOff + 1] = (byte)(num >> 8);
        outBytes[outOff] = (byte)num;
    }

    /**
     * Return the S-Box associated with SBoxName
     * @param sBoxName name of the S-Box
     * @return byte array representing the S-Box
     */
    public static byte[] GetSBox(
        string sBoxName)
    {
        byte[] sBox = (byte[])sBoxes[Platform.StringToUpper(sBoxName)];

        if (sBox == null)
        {
            throw new ArgumentException("Unknown S-Box - possible types: "
                + "\"Default\", \"E-Test\", \"E-A\", \"E-B\", \"E-C\", \"E-D\", \"D-Test\", \"D-A\".");
        }

        // Return a copy so callers cannot mutate the shared static table.
        return Arrays.Clone(sBox);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
namespace System.Reflection.Metadata.Ecma335
{
/// <summary>
/// Aggregates a base metadata reader with a sequence of Edit-and-Continue delta
/// metadata readers and maps handles in the aggregate metadata back to the
/// generation (base = 0, first delta = 1, ...) that defines them.
/// </summary>
public sealed class MetadataAggregator
{
    // For each heap handle and each delta contains aggregate heap lengths.
    // heapSizes[heap kind][reader index] == Sum { 0..index | reader[i].XxxHeapLength }
    private readonly ImmutableArray<ImmutableArray<int>> _heapSizes;
    private readonly ImmutableArray<ImmutableArray<RowCounts>> _rowCounts;

    // internal for testing
    internal struct RowCounts : IComparable<RowCounts>
    {
        // Total number of rows of a table inserted by this and all preceding generations.
        public int AggregateInserts;

        // Number of rows of a table updated (not inserted) by this generation.
        public int Updates;

        public int CompareTo(RowCounts other)
        {
            // Row counts are bounded by the metadata RID space, so the subtraction cannot overflow.
            return AggregateInserts - other.AggregateInserts;
        }

        public override string ToString()
        {
            return string.Format("+0x{0:x} ~0x{1:x}", AggregateInserts, Updates);
        }
    }

    /// <summary>
    /// Creates an aggregator from a full base metadata reader and minimal-delta readers.
    /// </summary>
    public MetadataAggregator(MetadataReader baseReader, IReadOnlyList<MetadataReader> deltaReaders)
        : this(baseReader, null, null, deltaReaders)
    {
    }

    /// <summary>
    /// Creates an aggregator from precomputed base table row counts and heap sizes
    /// (used when the base reader itself is not available).
    /// </summary>
    public MetadataAggregator(
        IReadOnlyList<int> baseTableRowCounts,
        IReadOnlyList<int> baseHeapSizes,
        IReadOnlyList<MetadataReader> deltaReaders)
        : this(null, baseTableRowCounts, baseHeapSizes, deltaReaders)
    {
    }

    // Shared constructor: exactly one of baseReader / (baseTableRowCounts, baseHeapSizes)
    // must be supplied; validates arguments and precomputes the per-generation tables.
    private MetadataAggregator(
        MetadataReader baseReader,
        IReadOnlyList<int> baseTableRowCounts,
        IReadOnlyList<int> baseHeapSizes,
        IReadOnlyList<MetadataReader> deltaReaders)
    {
        if (baseTableRowCounts == null)
        {
            if (baseReader == null)
            {
                throw new ArgumentNullException("baseReader");
            }

            // A full metadata image never has an EncMap table; its presence marks a delta.
            if (baseReader.GetTableRowCount(TableIndex.EncMap) != 0)
            {
                throw new ArgumentException("Base reader must be a full metadata reader.", "baseReader");
            }

            CalculateBaseCounts(baseReader, out baseTableRowCounts, out baseHeapSizes);
            Debug.Assert(baseTableRowCounts != null);
        }
        else
        {
            if (baseTableRowCounts.Count != MetadataTokens.TableCount)
            {
                throw new ArgumentException("Must have " + MetadataTokens.TableCount + " elements", "baseTableRowCounts");
            }

            if (baseHeapSizes == null)
            {
                throw new ArgumentNullException("baseHeapSizes");
            }

            if (baseHeapSizes.Count != MetadataTokens.HeapCount)
            {
                // Fixed: this check validates baseHeapSizes, so report that parameter
                // (previously mis-reported as "baseTableRowCounts").
                throw new ArgumentException("Must have " + MetadataTokens.HeapCount + " elements", "baseHeapSizes");
            }
        }

        if (deltaReaders == null || deltaReaders.Count == 0)
        {
            throw new ArgumentException("Must not be empty.", "deltaReaders");
        }

        for (int i = 0; i < deltaReaders.Count; i++)
        {
            if (deltaReaders[i].GetTableRowCount(TableIndex.EncMap) == 0 || !deltaReaders[i].IsMinimalDelta)
            {
                throw new ArgumentException("All delta readers must be minimal delta metadata readers.", "deltaReaders");
            }
        }

        _heapSizes = CalculateHeapSizes(baseHeapSizes, deltaReaders);
        _rowCounts = CalculateRowCounts(baseTableRowCounts, deltaReaders);
    }

    // for testing only
    internal MetadataAggregator(RowCounts[][] rowCounts, int[][] heapSizes)
    {
        _rowCounts = ToImmutable(rowCounts);
        _heapSizes = ToImmutable(heapSizes);
    }

    // Snapshots all table row counts and heap sizes from the base reader.
    private static void CalculateBaseCounts(
        MetadataReader baseReader,
        out IReadOnlyList<int> baseTableRowCounts,
        out IReadOnlyList<int> baseHeapSizes)
    {
        int[] rowCounts = new int[MetadataTokens.TableCount];
        int[] heapSizes = new int[MetadataTokens.HeapCount];

        for (int i = 0; i < rowCounts.Length; i++)
        {
            rowCounts[i] = baseReader.GetTableRowCount((TableIndex)i);
        }

        for (int i = 0; i < heapSizes.Length; i++)
        {
            heapSizes[i] = baseReader.GetHeapSize((HeapIndex)i);
        }

        baseTableRowCounts = rowCounts;
        baseHeapSizes = heapSizes;
    }

    // Builds per-generation cumulative sizes for the UserString, String, Blob and
    // GUID heaps. GUID sizes are expressed in GUID units rather than bytes.
    private static ImmutableArray<ImmutableArray<int>> CalculateHeapSizes(
        IReadOnlyList<int> baseSizes,
        IReadOnlyList<MetadataReader> deltaReaders)
    {
        // GUID heap index is multiple of sizeof(Guid) == 16
        const int guidSize = 16;

        int generationCount = 1 + deltaReaders.Count;
        var userStringSizes = new int[generationCount];
        var stringSizes = new int[generationCount];
        var blobSizes = new int[generationCount];
        var guidSizes = new int[generationCount];

        userStringSizes[0] = baseSizes[(int)HeapIndex.UserString];
        stringSizes[0] = baseSizes[(int)HeapIndex.String];
        blobSizes[0] = baseSizes[(int)HeapIndex.Blob];
        guidSizes[0] = baseSizes[(int)HeapIndex.Guid] / guidSize;

        for (int r = 0; r < deltaReaders.Count; r++)
        {
            userStringSizes[r + 1] = userStringSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.UserString);
            stringSizes[r + 1] = stringSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.String);
            blobSizes[r + 1] = blobSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.Blob);
            guidSizes[r + 1] = guidSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.Guid) / guidSize;
        }

        return ImmutableArray.Create(
            userStringSizes.ToImmutableArray(),
            stringSizes.ToImmutableArray(),
            blobSizes.ToImmutableArray(),
            guidSizes.ToImmutableArray());
    }

    // Builds per-table, per-generation aggregate insert/update counts by replaying
    // each delta's EncMap table on top of the base row counts.
    private static ImmutableArray<ImmutableArray<RowCounts>> CalculateRowCounts(
        IReadOnlyList<int> baseRowCounts,
        IReadOnlyList<MetadataReader> deltaReaders)
    {
        // TODO: optimize - we don't need to allocate all these arrays
        var rowCounts = GetBaseRowCounts(baseRowCounts, generations: 1 + deltaReaders.Count);

        for (int generation = 1; generation <= deltaReaders.Count; generation++)
        {
            CalculateDeltaRowCountsForGeneration(rowCounts, generation, ref deltaReaders[generation - 1].EncMapTable);
        }

        return ToImmutable(rowCounts);
    }

    // Converts a jagged array into nested ImmutableArrays (outer and inner).
    private static ImmutableArray<ImmutableArray<T>> ToImmutable<T>(T[][] array)
    {
        var immutable = new ImmutableArray<T>[array.Length];
        for (int i = 0; i < array.Length; i++)
        {
            immutable[i] = array[i].ToImmutableArray();
        }

        return immutable.ToImmutableArray();
    }

    // internal for testing
    internal static RowCounts[][] GetBaseRowCounts(IReadOnlyList<int> baseRowCounts, int generations)
    {
        var rowCounts = new RowCounts[TableIndexExtensions.Count][];

        for (int t = 0; t < rowCounts.Length; t++)
        {
            rowCounts[t] = new RowCounts[generations];

            // Generation 0 starts with the base image's row count; no updates.
            rowCounts[t][0].AggregateInserts = baseRowCounts[t];
        }

        return rowCounts;
    }

    // internal for testing
    internal static void CalculateDeltaRowCountsForGeneration(RowCounts[][] rowCounts, int generation, ref EnCMapTableReader encMapTable)
    {
        // Start each table's aggregate at the previous generation's total.
        foreach (var tableRowCounts in rowCounts)
        {
            tableRowCounts[generation].AggregateInserts = tableRowCounts[generation - 1].AggregateInserts;
        }

        int mapRowCount = encMapTable.NumberOfRows;
        for (int mapRid = 1; mapRid <= mapRowCount; mapRid++)
        {
            uint token = encMapTable.GetToken(mapRid);
            int rid = (int)(token & TokenTypeIds.RIDMask);

            var tableRowCounts = rowCounts[token >> TokenTypeIds.RowIdBitCount];

            if (rid > tableRowCounts[generation].AggregateInserts)
            {
                // Inserted rows must appear contiguously: each new RID extends the table by one.
                if (rid != tableRowCounts[generation].AggregateInserts + 1)
                {
                    throw new BadImageFormatException(SR.EnCMapNotSorted);
                }

                // insert:
                tableRowCounts[generation].AggregateInserts = rid;
            }
            else
            {
                // update:
                tableRowCounts[generation].Updates++;
            }
        }
    }

    /// <summary>
    /// Translates a handle in the aggregate metadata to the generation that defines
    /// it and the handle relative to that generation's delta.
    /// </summary>
    /// <param name="handle">Handle in the aggregate metadata.</param>
    /// <param name="generation">Receives the zero-based generation index.</param>
    /// <returns>The handle relative to the returned generation.</returns>
    /// <exception cref="ArgumentException">The handle belongs to a generation newer than those aggregated.</exception>
    /// <exception cref="NotSupportedException">The handle is virtual.</exception>
    public Handle GetGenerationHandle(Handle handle, out int generation)
    {
        if (handle.IsVirtual)
        {
            // TODO: if a virtual handle is connected to real handle then translate the rid,
            // otherwise return vhandle and base.
            throw new NotSupportedException();
        }

        if (handle.IsHeapHandle)
        {
            int heapOffset = handle.Offset;

            HeapIndex heapIndex;
            MetadataTokens.TryGetHeapIndex(handle.Kind, out heapIndex);

            var sizes = _heapSizes[(int)heapIndex];

            generation = sizes.BinarySearch(heapOffset);
            if (generation >= 0)
            {
                Debug.Assert(sizes[generation] == heapOffset);

                // the index points to the start of the next generation that added data to the heap:
                do
                {
                    generation++;
                }
                while (generation < sizes.Length && sizes[generation] == heapOffset);
            }
            else
            {
                generation = ~generation;
            }

            if (generation >= sizes.Length)
            {
                throw new ArgumentException(SR.HandleBelongsToFutureGeneration, "handle");
            }

            // GUID heap accumulates - previous heap is copied to the next generation
            int relativeHeapOffset = (handle.Type == HandleType.Guid || generation == 0) ? heapOffset : heapOffset - sizes[generation - 1];

            return new Handle((byte)handle.Type, relativeHeapOffset);
        }
        else
        {
            int rowId = handle.RowId;

            var sizes = _rowCounts[(int)handle.Type];
            generation = sizes.BinarySearch(new RowCounts { AggregateInserts = rowId });
            if (generation >= 0)
            {
                Debug.Assert(sizes[generation].AggregateInserts == rowId);

                // the row is in a generation that inserted exactly one row -- the one that we are looking for;
                // or it's in a preceding generation if the current one didn't insert any rows of the kind:
                while (generation > 0 && sizes[generation - 1].AggregateInserts == rowId)
                {
                    generation--;
                }
            }
            else
            {
                // the row is in a generation that inserted multiple new rows:
                generation = ~generation;

                if (generation >= sizes.Length)
                {
                    throw new ArgumentException(SR.HandleBelongsToFutureGeneration, "handle");
                }
            }

            // In each delta table updates always precede inserts.
            int relativeRowId = (generation == 0) ? rowId :
                rowId -
                sizes[generation - 1].AggregateInserts +
                sizes[generation].Updates;

            return new Handle((byte)handle.Type, relativeRowId);
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.StubHelpers;
using System.Reflection;
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
using Internal.Runtime.CompilerServices;
namespace System.Runtime.InteropServices.WindowsRuntime
{
//
// ICustomProperty Implementation helpers
//
internal static class ICustomPropertyProviderImpl
{
    //
    // Creates a ICustomProperty implementation for Jupiter
    // Called from ICustomPropertyProvider_GetProperty from within runtime
    //
    internal static ICustomProperty? CreateProperty(object target, string propertyName)
    {
        Debug.Assert(target != null);
        Debug.Assert(propertyName != null);

        // Unwrap proxies so reflection runs against the real bound object.
        if (target is IGetProxyTarget proxy)
            target = proxy.GetTarget();

        // Only return public instance/static properties
        PropertyInfo? property = target.GetType().GetProperty(
            propertyName,
            BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public);

        return property == null ? null : new CustomPropertyImpl(property);
    }

    //
    // Creates a ICustomProperty implementation for Jupiter
    // Called from ICustomPropertyProvider_GetIndexedProperty from within runtime
    //
    internal static unsafe ICustomProperty? CreateIndexedProperty(object target, string propertyName, TypeNameNative* pIndexedParamType)
    {
        Debug.Assert(target != null);
        Debug.Assert(propertyName != null);

        // Marshal the native type name into a managed Type before looking up the indexer.
        Type? indexerParamType = null;
        SystemTypeMarshaler.ConvertToManaged(pIndexedParamType, ref indexerParamType);

        return CreateIndexedProperty(target, propertyName, indexerParamType!);
    }

    internal static ICustomProperty? CreateIndexedProperty(object target, string propertyName, Type indexedParamType)
    {
        Debug.Assert(target != null);
        Debug.Assert(propertyName != null);

        // Unwrap proxies so reflection runs against the real bound object.
        if (target is IGetProxyTarget proxy)
            target = proxy.GetTarget();

        // Only return public instance/static properties
        PropertyInfo? property = target.GetType().GetProperty(
            propertyName,
            BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public,
            null,                               // default binder
            null,                               // ignore return type
            new Type[] { indexedParamType },    // indexed parameter type
            null                                // ignore type modifier
            );

        return property == null ? null : new CustomPropertyImpl(property);
    }

    // Marshals the (possibly proxied) target's runtime type to native.
    internal static unsafe void GetType(object target, TypeNameNative* pIndexedParamType)
    {
        if (target is IGetProxyTarget proxy)
            target = proxy.GetTarget();

        SystemTypeMarshaler.ConvertToNative(target.GetType(), pIndexedParamType);
    }
}
/// <summary>
/// Flags describing which data-binding interfaces the proxy's target supports,
/// and therefore which QIs the proxy should forward.
/// </summary>
[Flags]
internal enum InterfaceForwardingSupport
{
    None                         = 0,
    IBindableVector              = 1 << 0,  // IBindableVector     -> IBindableVector
    IVector                      = 1 << 1,  // IBindableVector     -> IVector<T>
    IBindableVectorView          = 1 << 2,  // IBindableVectorView -> IBindableVectorView
    IVectorView                  = 1 << 3,  // IBindableVectorView -> IVectorView<T>
    IBindableIterableOrIIterable = 1 << 4   // IBindableIterable   -> IBindableIterable/IIterable<T>
}
//
// Interface for data binding code (CustomPropertyImpl) to retrieve the target object
// See CustomPropertyImpl.InvokeInternal for details
//
internal interface IGetProxyTarget
{
    // Returns the real object the proxy wraps, so data binding (CustomPropertyImpl)
    // can reflect over the target instead of the proxy itself.
    object GetTarget();
}
//
// Proxy that supports data binding on another object
//
// This serves two purposes:
//
// 1. Delegate data binding interfaces to another object
// Note that this proxy implements the native interfaces directly to avoid unnecessary overhead
// (such as the adapter code that addresses behavior differences between IBindableVector & List
// as well as simplify forwarding code (except for IEnumerable)
//
// 2. ICLRServices.GetTrackerTarget will hand out ICCW* of a new instance of this object
// and will hold the other object alive
//
//
internal class ICustomPropertyProviderProxy<T1, T2> : IGetProxyTarget,
ICustomQueryInterface,
IEnumerable, // IBindableIterable -> IBindableIterable/IIterable<T>
IBindableVector, // IBindableVector -> IBindableVector/IVector<T>
IBindableVectorView // IBindableVectorView -> IBindableVectorView/IVectorView<T>
{
private readonly object _target;
private readonly InterfaceForwardingSupport _flags;
internal ICustomPropertyProviderProxy(object target, InterfaceForwardingSupport flags)
{
_target = target;
_flags = flags;
}
//
// Creates a new instance of ICustomPropertyProviderProxy<T1, T2> and assign appropriate
// flags
//
internal static object CreateInstance(object target)
{
InterfaceForwardingSupport supportFlags = InterfaceForwardingSupport.None;
//
// QI and figure out the right flags
//
if (target is IList)
supportFlags |= InterfaceForwardingSupport.IBindableVector;
// NOTE: We need to use the directed type here
// If we use IVector_Raw<T1> here, it derives from a different IIterable<T> which the runtime
// doesn't recognize, and therefore IEnumerable cast won't be able to take advantage of this QI
if (target is IList<T1>)
supportFlags |= InterfaceForwardingSupport.IVector;
if (target is IBindableVectorView)
supportFlags |= InterfaceForwardingSupport.IBindableVectorView;
// NOTE: We need to use the redirected type here
// If we use IVector_Raw<T1> here, it derives from a different IIterable<T> which the runtime
// doesn't recognize, and therefore IEnumerable cast won't be able to take advantage of this QI
if (target is IReadOnlyList<T2>)
supportFlags |= InterfaceForwardingSupport.IVectorView;
// Verify IEnumerable last because the first few QIs might succeed and we need
// IEnumerable cast to use that cache (instead of having ICustomPropertyProvider to
// forward it manually)
// For example, if we try to shoot in the dark by trying IVector<IInspectable> and it
// succeeded, IEnumerable needs to know that
if (target is IEnumerable)
supportFlags |= InterfaceForwardingSupport.IBindableIterableOrIIterable;
return new ICustomPropertyProviderProxy<T1, T2>(target, supportFlags);
}
//
// override ToString() to make sure callers get correct IStringable.ToString() behavior in native code
//
public override string? ToString()
{
return WindowsRuntime.IStringableHelper.ToString(_target);
}
//
// IGetProxyTarget - unwraps the target object and use it for data binding
//
object IGetProxyTarget.GetTarget()
{
return _target;
}
//
// ICustomQueryInterface methods
//
public CustomQueryInterfaceResult GetInterface([In]ref Guid iid, out IntPtr ppv)
{
ppv = IntPtr.Zero;
if (iid == typeof(IBindableIterable).GUID)
{
// Reject the QI if target doesn't implement IEnumerable
if ((_flags & (InterfaceForwardingSupport.IBindableIterableOrIIterable)) == 0)
return CustomQueryInterfaceResult.Failed;
}
if (iid == typeof(IBindableVector).GUID)
{
// Reject the QI if target doesn't implement IBindableVector/IVector
if ((_flags & (InterfaceForwardingSupport.IBindableVector | InterfaceForwardingSupport.IVector)) == 0)
return CustomQueryInterfaceResult.Failed;
}
if (iid == typeof(IBindableVectorView).GUID)
{
// Reject the QI if target doesn't implement IBindableVectorView/IVectorView
if ((_flags & (InterfaceForwardingSupport.IBindableVectorView | InterfaceForwardingSupport.IVectorView)) == 0)
return CustomQueryInterfaceResult.Failed;
}
return CustomQueryInterfaceResult.NotHandled;
}
//
// IEnumerable methods
//
public IEnumerator GetEnumerator()
{
return ((IEnumerable)_target).GetEnumerator();
}
//
// IBindableVector implementation (forwards to IBindableVector / IVector<T>)
//
object? IBindableVector.GetAt(uint index)
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
return bindableVector.GetAt(index);
}
else
{
// IBindableVector -> IVector<T>
return GetVectorOfT().GetAt(index);
}
}
uint IBindableVector.Size
{
get
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
return bindableVector.Size;
}
else
{
// IBindableVector -> IVector<T>
return GetVectorOfT().Size;
}
}
}
IBindableVectorView IBindableVector.GetView()
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
return bindableVector.GetView();
}
else
{
// IBindableVector -> IVector<T>
return new IVectorViewToIBindableVectorViewAdapter<T1>(GetVectorOfT().GetView());
}
}
private sealed class IVectorViewToIBindableVectorViewAdapter<T> : IBindableVectorView
{
private readonly IVectorView<T> _vectorView;
public IVectorViewToIBindableVectorViewAdapter(IVectorView<T> vectorView)
{
_vectorView = vectorView;
}
object? IBindableVectorView.GetAt(uint index)
{
return _vectorView.GetAt(index);
}
uint IBindableVectorView.Size => _vectorView.Size;
bool IBindableVectorView.IndexOf(object value, out uint index)
{
return _vectorView.IndexOf(ConvertTo<T>(value), out index);
}
IBindableIterator IBindableIterable.First()
{
return new IteratorOfTToIteratorAdapter<T>(_vectorView.First());
}
}
bool IBindableVector.IndexOf(object value, out uint index)
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
return bindableVector.IndexOf(value, out index);
}
else
{
// IBindableVector -> IVector<T>
return GetVectorOfT().IndexOf(ConvertTo<T1>(value), out index);
}
}
void IBindableVector.SetAt(uint index, object value)
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
bindableVector.SetAt(index, value);
}
else
{
// IBindableVector -> IVector<T>
GetVectorOfT().SetAt(index, ConvertTo<T1>(value));
}
}
void IBindableVector.InsertAt(uint index, object value)
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
bindableVector.InsertAt(index, value);
}
else
{
// IBindableVector -> IVector<T>
GetVectorOfT().InsertAt(index, ConvertTo<T1>(value));
}
}
void IBindableVector.RemoveAt(uint index)
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
bindableVector.RemoveAt(index);
}
else
{
// IBindableVector -> IVector<T>
GetVectorOfT().RemoveAt(index);
}
}
void IBindableVector.Append(object value)
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
bindableVector.Append(value);
}
else
{
// IBindableVector -> IVector<T>
GetVectorOfT().Append(ConvertTo<T1>(value));
}
}
void IBindableVector.RemoveAtEnd()
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
bindableVector.RemoveAtEnd();
}
else
{
// IBindableVector -> IVector<T>
GetVectorOfT().RemoveAtEnd();
}
}
void IBindableVector.Clear()
{
IBindableVector? bindableVector = GetIBindableVectorNoThrow();
if (bindableVector != null)
{
// IBindableVector -> IBindableVector
bindableVector.Clear();
}
else
{
// IBindableVector -> IVector<T>
GetVectorOfT().Clear();
}
}
private IBindableVector? GetIBindableVectorNoThrow()
{
if ((_flags & InterfaceForwardingSupport.IBindableVector) != 0)
return Unsafe.As<IBindableVector>(_target);
else
return null;
}
private IVector_Raw<T1> GetVectorOfT()
{
if ((_flags & InterfaceForwardingSupport.IVector) != 0)
return Unsafe.As<IVector_Raw<T1>>(_target);
else
throw new InvalidOperationException(); // We should not go down this path, unless Jupiter pass this out to managed code
// and managed code use reflection to do the cast
}
//
// IBindableVectorView implementation (forwarding to IBindableVectorView or IVectorView<T>)
//
object? IBindableVectorView.GetAt(uint index)
{
IBindableVectorView? bindableVectorView = GetIBindableVectorViewNoThrow();
if (bindableVectorView != null)
return bindableVectorView.GetAt(index);
else
return GetVectorViewOfT().GetAt(index);
}
uint IBindableVectorView.Size
{
get
{
IBindableVectorView? bindableVectorView = GetIBindableVectorViewNoThrow();
if (bindableVectorView != null)
return bindableVectorView.Size;
else
return GetVectorViewOfT().Size;
}
}
bool IBindableVectorView.IndexOf(object value, out uint index)
{
IBindableVectorView? bindableVectorView = GetIBindableVectorViewNoThrow();
if (bindableVectorView != null)
return bindableVectorView.IndexOf(value, out index);
else
return GetVectorViewOfT().IndexOf(ConvertTo<T2>(value), out index);
}
IBindableIterator IBindableIterable.First()
{
IBindableVectorView? bindableVectorView = GetIBindableVectorViewNoThrow();
if (bindableVectorView != null)
return bindableVectorView.First();
else
return new IteratorOfTToIteratorAdapter<T2>(GetVectorViewOfT().First());
}
private sealed class IteratorOfTToIteratorAdapter<T> : IBindableIterator
{
private readonly IIterator<T> _iterator;
public IteratorOfTToIteratorAdapter(IIterator<T> iterator)
{ _iterator = iterator; }
public bool HasCurrent => _iterator.HasCurrent;
public object? Current => _iterator.Current;
public bool MoveNext() { return _iterator.MoveNext(); }
}
private IBindableVectorView? GetIBindableVectorViewNoThrow()
{
if ((_flags & InterfaceForwardingSupport.IBindableVectorView) != 0)
return Unsafe.As<IBindableVectorView>(_target);
else
return null;
}
// Reinterprets the wrapped target as IVectorView<T2>.
// Only valid when the IVectorView forwarding flag was recorded at construction time.
private IVectorView<T2> GetVectorViewOfT()
{
    if ((_flags & InterfaceForwardingSupport.IVectorView) == 0)
    {
        // We should not go down this path, unless Jupiter pass this out to managed code
        // and managed code use reflection to do the cast
        throw new InvalidOperationException();
    }

    return Unsafe.As<IVectorView<T2>>(_target);
}
//
// Convert to type T
//

// Casts value to T, but first throws ArgumentNullException when value is null and
// nulls are illegal for T — otherwise the bare cast below would surface as a
// NullReferenceException instead.
private static T ConvertTo<T>(object value)
{
    ThrowHelper.IfNullAndNullsAreIllegalThenThrow<T>(value, ExceptionArgument.value);

    // No coersion support needed. If we need coersion later, this is the place
    return (T)value;
}
}
}
| |
using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace Lucene.Net.Index
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using AttributeSource = Lucene.Net.Util.AttributeSource;
using IBits = Lucene.Net.Util.IBits;
using BytesRef = Lucene.Net.Util.BytesRef;
/// <summary>
/// Abstract class for enumerating a subset of all terms.
/// <para/>
/// Term enumerations are always ordered by
/// <see cref="Comparer"/>. Each term in the enumeration is
/// greater than all that precede it.
/// <para/><c>Please note:</c> Consumers of this enumeration cannot
/// call <c>Seek()</c>, it is forward only; it throws
/// <see cref="NotSupportedException"/> when a seeking method
/// is called.
/// </summary>
public abstract class FilteredTermsEnum : TermsEnum
{
    // Seek term handed out exactly once by the default NextSeekTerm implementation,
    // then cleared. Null (the default) makes the enum empty unless NextSeekTerm is overridden.
    private BytesRef initialSeekTerm = null;

    // When true, the next MoveNext() call repositions the wrapped enum via
    // NextSeekTerm/SeekCeil instead of simply stepping it forward.
    private bool doSeek;

    // Term the wrapped enum is currently positioned at; null once it is exhausted.
    private BytesRef actualTerm = null;

    // The wrapped, unfiltered terms enumeration being filtered.
    private readonly TermsEnum tenum;

    /// <summary>
    /// Return value, if term should be accepted or the iteration should
    /// <see cref="END"/>. The <c>*_SEEK</c> values denote, that after handling the current term
    /// the enum should call <see cref="NextSeekTerm(BytesRef)"/> and step forward. </summary>
    /// <seealso cref="Accept(BytesRef)"/>
    protected internal enum AcceptStatus
    {
        /// <summary>
        /// Accept the term and position the enum at the next term. </summary>
        YES,

        /// <summary>
        /// Accept the term and advance (<see cref="FilteredTermsEnum.NextSeekTerm(BytesRef)"/>)
        /// to the next term.
        /// </summary>
        YES_AND_SEEK,

        /// <summary>
        /// Reject the term and position the enum at the next term. </summary>
        NO,

        /// <summary>
        /// Reject the term and advance (<see cref="FilteredTermsEnum.NextSeekTerm(BytesRef)"/>)
        /// to the next term.
        /// </summary>
        NO_AND_SEEK,

        /// <summary>
        /// Reject the term and stop enumerating. </summary>
        END
    }

    /// <summary>
    /// Returns whether the term is accepted, not accepted, or whether the iteration
    /// should end (and possibly seek).
    /// </summary>
    protected abstract AcceptStatus Accept(BytesRef term);

    /// <summary>
    /// Creates a filtered <see cref="TermsEnum"/> on a terms enum. </summary>
    /// <param name="tenum"> the terms enumeration to filter. </param>
    protected FilteredTermsEnum(TermsEnum tenum) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
        : this(tenum, true)
    {
    }

    /// <summary>
    /// Creates a filtered <see cref="TermsEnum"/> on a terms enum. </summary>
    /// <param name="tenum"> the terms enumeration to filter. </param>
    /// <param name="startWithSeek"> start with seek </param>
    protected FilteredTermsEnum(TermsEnum tenum, bool startWithSeek) // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(tenum != null);
        this.tenum = tenum;
        doSeek = startWithSeek;
    }

    /// <summary>
    /// Use this method to set the initial <see cref="BytesRef"/>
    /// to seek before iterating. This is a convenience method for
    /// subclasses that do not override <see cref="NextSeekTerm(BytesRef)"/>.
    /// If the initial seek term is <c>null</c> (default),
    /// the enum is empty.
    /// <para/>You can only use this method, if you keep the default
    /// implementation of <see cref="NextSeekTerm(BytesRef)"/>.
    /// </summary>
    protected void SetInitialSeekTerm(BytesRef term)
    {
        this.initialSeekTerm = term;
    }

    /// <summary>
    /// On the first call to <see cref="MoveNext()"/> or if <see cref="Accept(BytesRef)"/> returns
    /// <see cref="AcceptStatus.YES_AND_SEEK"/> or <see cref="AcceptStatus.NO_AND_SEEK"/>,
    /// this method will be called to eventually seek the underlying <see cref="TermsEnum"/>
    /// to a new position.
    /// On the first call, <paramref name="currentTerm"/> will be <c>null</c>, later
    /// calls will provide the term the underlying enum is positioned at.
    /// This method returns per default only one time the initial seek term
    /// and then <c>null</c>, so no repositioning is ever done.
    /// <para/>
    /// Override this method, if you want a more sophisticated <see cref="TermsEnum"/>,
    /// that repositions the iterator during enumeration.
    /// If this method always returns <c>null</c> the enum is empty.
    /// <para/><c>Please note:</c> this method should always provide a greater term
    /// than the last enumerated term, else the behavior of this enum
    /// violates the contract for <see cref="TermsEnum"/>s.
    /// </summary>
    protected virtual BytesRef NextSeekTerm(BytesRef currentTerm)
    {
        // Hand out the initial seek term once, then always null (no more repositioning).
        BytesRef t = initialSeekTerm;
        initialSeekTerm = null;
        return t;
    }

    /// <summary>
    /// Returns the related attributes, the returned <see cref="AttributeSource"/>
    /// is shared with the delegate <see cref="TermsEnum"/>.
    /// </summary>
    public override AttributeSource Attributes => tenum.Attributes;

    // The following members simply delegate to the wrapped enum, whose position
    // this class controls via MoveNext().
    public override BytesRef Term => tenum.Term;

    public override IComparer<BytesRef> Comparer => tenum.Comparer;

    public override int DocFreq => tenum.DocFreq;

    public override long TotalTermFreq => tenum.TotalTermFreq;

    /// <summary>
    /// this enum does not support seeking! </summary>
    /// <exception cref="NotSupportedException"> In general, subclasses do not
    /// support seeking. </exception>
    public override bool SeekExact(BytesRef term)
    {
        throw UnsupportedOperationException.Create(this.GetType().Name + " does not support seeking");
    }

    /// <summary>
    /// this enum does not support seeking! </summary>
    /// <exception cref="NotSupportedException"> In general, subclasses do not
    /// support seeking. </exception>
    public override SeekStatus SeekCeil(BytesRef term)
    {
        throw UnsupportedOperationException.Create(this.GetType().Name + " does not support seeking");
    }

    /// <summary>
    /// this enum does not support seeking! </summary>
    /// <exception cref="NotSupportedException"> In general, subclasses do not
    /// support seeking. </exception>
    public override void SeekExact(long ord)
    {
        throw UnsupportedOperationException.Create(this.GetType().Name + " does not support seeking");
    }

    public override long Ord => tenum.Ord;

    // Postings access delegates directly to the wrapped enum at its current position.
    public override DocsEnum Docs(IBits bits, DocsEnum reuse, DocsFlags flags)
    {
        return tenum.Docs(bits, reuse, flags);
    }

    public override DocsAndPositionsEnum DocsAndPositions(IBits bits, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
    {
        return tenum.DocsAndPositions(bits, reuse, flags);
    }

    /// <summary>
    /// this enum does not support seeking! </summary>
    /// <exception cref="NotSupportedException"> In general, subclasses do not
    /// support seeking. </exception>
    public override void SeekExact(BytesRef term, TermState state)
    {
        throw UnsupportedOperationException.Create(this.GetType().Name + " does not support seeking");
    }

    /// <summary>
    /// Returns the filtered enums term state
    /// </summary>
    public override TermState GetTermState()
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(tenum != null);
        return tenum.GetTermState();
    }

    /// <summary>
    /// Advances to the next accepted term. Alternates between repositioning the
    /// wrapped enum (when <c>doSeek</c> is set) and stepping it forward, asking
    /// <see cref="Accept(BytesRef)"/> about each candidate until one is accepted
    /// or the enumeration ends. Returns <c>false</c> when exhausted.
    /// </summary>
    public override bool MoveNext()
    {
        //System.out.println("FTE.next doSeek=" + doSeek);
        //new Throwable().printStackTrace(System.out);
        for (; ; )
        {
            // Seek or forward the iterator
            if (doSeek)
            {
                doSeek = false;
                BytesRef t = NextSeekTerm(actualTerm);
                //System.out.println("  seek to t=" + (t == null ? "null" : t.utf8ToString()) + " tenum=" + tenum);
                // Make sure we always seek forward:
                if (Debugging.AssertsEnabled) Debugging.Assert(actualTerm == null || t == null || Comparer.Compare(t, actualTerm) > 0, "curTerm={0} seekTerm={1}", actualTerm, t);
                if (t == null || tenum.SeekCeil(t) == SeekStatus.END)
                {
                    // no more terms to seek to or enum exhausted
                    //System.out.println("  return null");
                    return false;
                }
                actualTerm = tenum.Term;
                //System.out.println("  got term=" + actualTerm.utf8ToString());
            }
            else
            {
                if (tenum.MoveNext())
                {
                    actualTerm = tenum.Term;
                }
                else
                {
                    // enum exhausted
                    actualTerm = null;
                    return false;
                }
            }

            // check if term is accepted
            switch (Accept(actualTerm))
            {
                case FilteredTermsEnum.AcceptStatus.YES_AND_SEEK:
                    doSeek = true;
                    // term accepted, but we need to seek so fall-through
                    goto case FilteredTermsEnum.AcceptStatus.YES;

                case FilteredTermsEnum.AcceptStatus.YES:
                    // term accepted
                    return true;

                case FilteredTermsEnum.AcceptStatus.NO_AND_SEEK:
                    // invalid term, seek next time
                    doSeek = true;
                    break;

                case FilteredTermsEnum.AcceptStatus.END:
                    // we are supposed to end the enum
                    return false;
            }
            // AcceptStatus.NO (and NO_AND_SEEK, after arming doSeek above) falls out
            // of the switch and loops again to examine the next candidate term.
        }
    }

    [Obsolete("Use MoveNext() and Term instead. This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
    public override BytesRef Next()
    {
        if (MoveNext())
            return actualTerm;
        return null;
    }
}
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Collections.Specialized;
using OpenXmlSdk.Xamarin.Packaging;
using OpenXmlSdk;
namespace DocumentFormat.OpenXml.Packaging
{
/// <summary>
/// Defines WordprocessingDocument - an OpenXmlPackage represents a Word document.
/// </summary>
public class WordprocessingDocument : OpenXmlPackage
{
#region implemented abstract members of OpenXmlPartContainer
/// <summary>
/// Not implemented on this platform; relationship creation is unsupported in this port.
/// </summary>
internal override PackageRelationship CreateRelationship (Uri targetUri, TargetMode targetMode, string relationshipType)
{
    throw new NotImplementedException ();
}
/// <summary>
/// Not implemented on this platform; relationship creation with an explicit id is unsupported in this port.
/// </summary>
internal override PackageRelationship CreateRelationship (Uri targetUri, TargetMode targetMode, string relationshipType, string id)
{
    throw new NotImplementedException ();
}
#endregion
// Lazily built, process-wide caches for the constraint tables served below.
private static Dictionary<string, PartConstraintRule> _partConstraint;
private static Dictionary<string, PartConstraintRule> _dataPartReferenceConstraint;

/// <summary>
/// Gets part constraint data.
/// </summary>
/// <returns>The constraint data of the part.</returns>
internal sealed override IDictionary<string, PartConstraintRule> GetPartConstraint()
{
    if (_partConstraint == null)
    {
        // Most parts are valid across all three supported Office versions.
        const FileFormatVersions allVersions = FileFormatVersions.Office2007 | FileFormatVersions.Office2010 | FileFormatVersions.Office2013;

        _partConstraint = new Dictionary<string, PartConstraintRule>
        {
            { "http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument", new PartConstraintRule("MainDocumentPart", null, true, false, allVersions) },
            { "http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties", new PartConstraintRule("CoreFilePropertiesPart", CoreFilePropertiesPart.ContentTypeConstant, false, false, allVersions) },
            { "http://schemas.openxmlformats.org/officeDocument/2006/relationships/extended-properties", new PartConstraintRule("ExtendedFilePropertiesPart", ExtendedFilePropertiesPart.ContentTypeConstant, false, false, allVersions) },
            { "http://schemas.openxmlformats.org/officeDocument/2006/relationships/custom-properties", new PartConstraintRule("CustomFilePropertiesPart", CustomFilePropertiesPart.ContentTypeConstant, false, false, allVersions) },
            { "http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail", new PartConstraintRule("ThumbnailPart", null, false, false, allVersions) },
            { "http://schemas.openxmlformats.org/package/2006/relationships/digital-signature/origin", new PartConstraintRule("DigitalSignatureOriginPart", DigitalSignatureOriginPart.ContentTypeConstant, false, false, allVersions) },
            { "http://schemas.microsoft.com/office/2006/relationships/ui/userCustomization", new PartConstraintRule("QuickAccessToolbarCustomizationsPart", QuickAccessToolbarCustomizationsPart.ContentTypeConstant, false, false, allVersions) },
            { "http://schemas.microsoft.com/office/2006/relationships/ui/extensibility", new PartConstraintRule("RibbonExtensibilityPart", RibbonExtensibilityPart.ContentTypeConstant, false, false, allVersions) },
            { "http://schemas.microsoft.com/office/2007/relationships/ui/extensibility", new PartConstraintRule("RibbonAndBackstageCustomizationsPart", RibbonAndBackstageCustomizationsPart.ContentTypeConstant, false, false, FileFormatVersions.Office2010 | FileFormatVersions.Office2013) },
            { "http://schemas.microsoft.com/office/2011/relationships/webextensiontaskpanes", new PartConstraintRule("WebExTaskpanesPart", WebExTaskpanesPart.ContentTypeConstant, false, false, FileFormatVersions.Office2013) },
        };
    }
    return _partConstraint;
}
/// <summary>
/// Gets the constraint rule of DataPartReferenceRelationship.
/// </summary>
/// <returns>The constraint data of the part.</returns>
internal sealed override IDictionary<string, PartConstraintRule> GetDataPartReferenceConstraint()
{
    if (_dataPartReferenceConstraint == null)
    {
        // WordprocessingDocument defines no data part reference relationships,
        // so the cached table is simply an empty dictionary.
        _dataPartReferenceConstraint = new Dictionary<string, PartConstraintRule>();
    }

    return _dataPartReferenceConstraint;
}
/// <summary>
/// Gets the relationship type of the main part.
/// </summary>
internal sealed override string MainPartRelationshipType
{
    get
    {
        // Delegates to the constant declared on MainDocumentPart so the two never diverge.
        return MainDocumentPart.RelationshipTypeConstant;
    }
}
// Lazily built cache mapping each document type to its main part content type.
private static Dictionary<WordprocessingDocumentType, string> _validMainPartContentType;

/// <summary>
/// Maps each <c>WordprocessingDocumentType</c> to the content type string of its main part.
/// The table is built on first access and cached for the lifetime of the process.
/// </summary>
private static Dictionary<WordprocessingDocumentType, string> MainPartContentTypes
{
    get
    {
        if (_validMainPartContentType == null)
        {
            _validMainPartContentType = new Dictionary<WordprocessingDocumentType, string>
            {
                { WordprocessingDocumentType.Document, @"application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml" },
                { WordprocessingDocumentType.Template, @"application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml" },
                { WordprocessingDocumentType.MacroEnabledDocument, @"application/vnd.ms-word.document.macroEnabled.main+xml" },
                { WordprocessingDocumentType.MacroEnabledTemplate, @"application/vnd.ms-word.template.macroEnabledTemplate.main+xml" },
            };
        }

        return _validMainPartContentType;
    }
}
/// <summary>
/// Gets the list of valid content type for main part.
/// </summary>
internal sealed override ICollection<string> ValidMainPartContentTypes
{
    get
    {
        // The values of the type->content-type map are exactly the valid main part content types.
        return MainPartContentTypes.Values;
    }
}
/// <summary>
/// Creates a WordprocessingDocument.
/// </summary>
/// <remarks>
/// The instance is not usable until one of the static Create/Open methods has
/// initialized its document type, settings, and underlying package.
/// </remarks>
public WordprocessingDocument()
    //: base()
{
}
// Backing store for DocumentType; only touched through the property below.
private WordprocessingDocumentType _documentType;

/// <summary>
/// Gets the type of the WordprocessingDocument.
/// </summary>
public WordprocessingDocumentType DocumentType
{
    get
    {
        // Both accessors guard against use after the package has been disposed.
        ThrowIfObjectDisposed();
        return _documentType;
    }
    private set
    {
        ThrowIfObjectDisposed();
        _documentType = value;
    }
}
/// <summary>
/// Re-derives <see cref="DocumentType"/> from the package's main part content type.
/// Called after opening when the stored content type disagrees with the assumed type.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the main part content type is null.</exception>
private void UpdateDocumentTypeFromContentType()
{
    if (this.MainPartContentType == null)
    {
        throw new InvalidOperationException();
    }

    foreach (KeyValuePair<WordprocessingDocumentType, string> types in MainPartContentTypes)
    {
        if (types.Value == this.MainPartContentType)
        {
            this.DocumentType = types.Key;
            // Content types map 1:1 to document types, so stop at the first match
            // instead of scanning the remaining entries.
            break;
        }
    }
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the specified file.
/// </summary>
/// <param name="path">The path and file name of the target WordprocessingDocument.</param>
/// <param name="type">The type of the WordprocessingDocument.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "path" is null reference.</exception>
public static WordprocessingDocument Create(string path, WordprocessingDocumentType type)
{
    // Convenience overload: auto-save is enabled by default.
    return WordprocessingDocument.Create(path, type, true);
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the IO stream.
/// </summary>
/// <param name="stream">The IO stream on which to create the WordprocessingDocument.</param>
/// <param name="type">The type of the WordprocessingDocument.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "stream" is null reference.</exception>
/// <exception cref="IOException">Thrown when "stream" is not opened with Write access.</exception>
public static WordprocessingDocument Create(Stream stream, WordprocessingDocumentType type)
{
    // Convenience overload: auto-save is enabled by default.
    return WordprocessingDocument.Create(stream, type, true);
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the spcified package.
/// </summary>
/// <param name="package">The specified OpenXml package.</param>
/// <param name="type">The type of the WordprocessingDocument.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "package" is null reference.</exception>
/// <exception cref="IOException">Thrown when "package" is not opened with Write access.</exception>
public static WordprocessingDocument Create(Package package, WordprocessingDocumentType type)
{
    // Convenience overload: auto-save is enabled by default.
    return WordprocessingDocument.Create(package, type, true);
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the specified file.
/// </summary>
/// <param name="path">The path and file name of the target WordprocessingDocument.</param>
/// <param name="type">The type of the WordprocessingDocument.</param>
/// <param name="autoSave">Whether to auto save the created document.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "path" is null reference.</exception>
public static WordprocessingDocument Create(string path, WordprocessingDocumentType type, bool autoSave)
{
    if (string.IsNullOrEmpty(path))
    {
        throw new ArgumentNullException("path");
    }

    WordprocessingDocument doc = new WordprocessingDocument
    {
        DocumentType = type,
        OpenSettings = new OpenSettings { AutoSave = autoSave },
        MainPartContentType = MainPartContentTypes[type],
    };
    doc.CreateCore(path);
    return doc;
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the IO stream.
/// </summary>
/// <param name="stream">The IO stream on which to create the WordprocessingDocument.</param>
/// <param name="type">The type of the WordprocessingDocument.</param>
/// <param name="autoSave">Whether to auto save the created document.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "stream" is null reference.</exception>
/// <exception cref="IOException">Thrown when "stream" is not opened with Write access.</exception>
public static WordprocessingDocument Create(Stream stream, WordprocessingDocumentType type, bool autoSave)
{
    // Enforce the documented ArgumentNullException contract up front instead of
    // failing with a less descriptive error deeper inside CreateCore.
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }

    WordprocessingDocument doc = new WordprocessingDocument();
    doc.DocumentType = type;
    doc.OpenSettings = new OpenSettings();
    doc.OpenSettings.AutoSave = autoSave;
    doc.MainPartContentType = MainPartContentTypes[type];
    doc.CreateCore(stream);
    return doc;
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the spcified package.
/// </summary>
/// <param name="package">The specified OpenXml package</param>
/// <param name="type">The type of the WordprocessingDocument.</param>
/// <param name="autoSave">Whether to auto save the created document.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "package" is null reference.</exception>
/// <exception cref="IOException">Thrown when "package" is not opened with Write access.</exception>
public static WordprocessingDocument Create(Package package, WordprocessingDocumentType type, bool autoSave)
{
    // Enforce the documented ArgumentNullException contract up front instead of
    // failing with a less descriptive error deeper inside CreateCore.
    if (package == null)
    {
        throw new ArgumentNullException("package");
    }

    WordprocessingDocument doc = new WordprocessingDocument();
    doc.DocumentType = type;
    doc.OpenSettings = new OpenSettings();
    doc.OpenSettings.AutoSave = autoSave;
    doc.MainPartContentType = MainPartContentTypes[type];
    doc.CreateCore(package);
    return doc;
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the specified file.
/// </summary>
/// <param name="path">The path and file name of the target WordprocessingDocument.</param>
/// <param name="isEditable">In ReadWrite mode. False for Read only mode.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "path" is null reference.</exception>
/// <exception cref="OpenXmlPackageException">Thrown when the package is not valid Open XML WordprocessingDocument.</exception>
public static WordprocessingDocument Open(string path, bool isEditable)
{
    // Open with default settings (no markup-compatibility processing).
    return Open(path, isEditable, new OpenSettings());
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the IO stream.
/// </summary>
/// <param name="stream">The IO stream on which to open the WordprocessingDocument.</param>
/// <param name="isEditable">In ReadWrite mode. False for Read only mode.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "stream" is null reference.</exception>
/// <exception cref="IOException">Thrown when "stream" is not opened with Read (ReadWrite) access.</exception>
/// <exception cref="OpenXmlPackageException">Thrown when the package is not valid Open XML WordprocessingDocument.</exception>
public static WordprocessingDocument Open(System.IO.Stream stream, bool isEditable)
{
    // Open with default settings (no markup-compatibility processing).
    return Open(stream, isEditable, new OpenSettings());
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the specified file.
/// </summary>
/// <param name="path">The path and file name of the target WordprocessingDocument.</param>
/// <param name="isEditable">In ReadWrite mode. False for Read only mode.</param>
/// <param name="openSettings">The advanced settings for opening a document.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "path" is null reference.</exception>
/// <exception cref="OpenXmlPackageException">Thrown when the package is not valid Open XML WordprocessingDocument.</exception>
/// <exception cref="ArgumentException">Thrown when specified to process the markup compatibility but the given target FileFormatVersion is incorrect.</exception>
public static WordprocessingDocument Open(string path, bool isEditable, OpenSettings openSettings)
{
    var mcSettings = openSettings.MarkupCompatibilityProcessSettings;

    // When MC processing is requested, the target version must be one of the
    // supported Office releases.
    bool mcRequested = mcSettings.ProcessMode != MarkupCompatibilityProcessMode.NoProcess;
    bool supportedTarget =
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2007 ||
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2010 ||
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2013;
    if (mcRequested && !supportedTarget)
    {
        throw new ArgumentException(ExceptionMessages.InvalidMCMode);
    }

    // Copy the caller's settings onto a fresh document instance.
    WordprocessingDocument doc = new WordprocessingDocument();
    doc.OpenSettings = new OpenSettings();
    doc.OpenSettings.AutoSave = openSettings.AutoSave;
    doc.OpenSettings.MarkupCompatibilityProcessSettings.ProcessMode = mcSettings.ProcessMode;
    doc.OpenSettings.MarkupCompatibilityProcessSettings.TargetFileFormatVersions = mcSettings.TargetFileFormatVersions;
    doc.MaxCharactersInPart = openSettings.MaxCharactersInPart;
    doc.OpenCore(path, isEditable);

    // If the stored content type disagrees with the assumed document type, re-derive it.
    if (MainPartContentTypes[doc.DocumentType] != doc.MainPartContentType)
    {
        doc.UpdateDocumentTypeFromContentType();
    }

    return doc;
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the IO stream.
/// </summary>
/// <param name="stream">The IO stream on which to open the WordprocessingDocument.</param>
/// <param name="isEditable">In ReadWrite mode. False for Read only mode.</param>
/// <param name="openSettings">The advanced settings for opening a document.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "stream" is null reference.</exception>
/// <exception cref="IOException">Thrown when "stream" is not opened with Read (ReadWrite) access.</exception>
/// <exception cref="OpenXmlPackageException">Thrown when the package is not valid Open XML WordprocessingDocument.</exception>
/// <exception cref="ArgumentException">Thrown when specified to process the markup compatibility but the given target FileFormatVersion is incorrect.</exception>
public static WordprocessingDocument Open(System.IO.Stream stream, bool isEditable, OpenSettings openSettings)
{
    var mcSettings = openSettings.MarkupCompatibilityProcessSettings;

    // When MC processing is requested, the target version must be one of the
    // supported Office releases.
    bool mcRequested = mcSettings.ProcessMode != MarkupCompatibilityProcessMode.NoProcess;
    bool supportedTarget =
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2007 ||
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2010 ||
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2013;
    if (mcRequested && !supportedTarget)
    {
        throw new ArgumentException(ExceptionMessages.InvalidMCMode);
    }

    // Copy the caller's settings onto a fresh document instance.
    WordprocessingDocument doc = new WordprocessingDocument();
    doc.OpenSettings = new OpenSettings();
    doc.OpenSettings.AutoSave = openSettings.AutoSave;
    doc.OpenSettings.MarkupCompatibilityProcessSettings.ProcessMode = mcSettings.ProcessMode;
    doc.OpenSettings.MarkupCompatibilityProcessSettings.TargetFileFormatVersions = mcSettings.TargetFileFormatVersions;
    doc.MaxCharactersInPart = openSettings.MaxCharactersInPart;
    doc.OpenCore(stream, isEditable);

    // If the stored content type disagrees with the assumed document type, re-derive it.
    if (MainPartContentTypes[doc.DocumentType] != doc.MainPartContentType)
    {
        doc.UpdateDocumentTypeFromContentType();
    }

    return doc;
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the spcified package.
/// </summary>
/// <param name="package">The specified OpenXml package</param>
/// <param name="openSettings">The advanced settings for opening a document.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "package" is null reference.</exception>
/// <exception cref="IOException">Thrown when "package" is not opened with Read (ReadWrite) access.</exception>
/// <exception cref="OpenXmlPackageException">Thrown when the package is not valid Open XML WordprocessingDocument.</exception>
/// <exception cref="ArgumentException">Thrown when specified to process the markup compatibility but the given target FileFormatVersion is incorrect.</exception>
public static WordprocessingDocument Open(Package package, OpenSettings openSettings)
{
    var mcSettings = openSettings.MarkupCompatibilityProcessSettings;

    // When MC processing is requested, the target version must be one of the
    // supported Office releases.
    bool mcRequested = mcSettings.ProcessMode != MarkupCompatibilityProcessMode.NoProcess;
    bool supportedTarget =
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2007 ||
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2010 ||
        mcSettings.TargetFileFormatVersions == FileFormatVersions.Office2013;
    if (mcRequested && !supportedTarget)
    {
        throw new ArgumentException(ExceptionMessages.InvalidMCMode);
    }

    // Copy the caller's settings onto a fresh document instance.
    WordprocessingDocument doc = new WordprocessingDocument();
    doc.OpenSettings = new OpenSettings();
    doc.OpenSettings.AutoSave = openSettings.AutoSave;
    doc.OpenSettings.MarkupCompatibilityProcessSettings.ProcessMode = mcSettings.ProcessMode;
    doc.OpenSettings.MarkupCompatibilityProcessSettings.TargetFileFormatVersions = mcSettings.TargetFileFormatVersions;
    doc.MaxCharactersInPart = openSettings.MaxCharactersInPart;
    doc.OpenCore(package);

    // If the stored content type disagrees with the assumed document type, re-derive it.
    if (MainPartContentTypes[doc.DocumentType] != doc.MainPartContentType)
    {
        doc.UpdateDocumentTypeFromContentType();
    }

    return doc;
}
/// <summary>
/// Creates a new instance of the WordprocessingDocument class from the spcified package.
/// </summary>
/// <param name="package">The specified OpenXml package.</param>
/// <returns>A new instance of WordprocessingDocument.</returns>
/// <exception cref="ArgumentNullException">Thrown when "package" is null reference.</exception>
/// <exception cref="IOException">Thrown when "package" is not opened with Read (ReadWrite) access.</exception>
/// <exception cref="OpenXmlPackageException">Thrown when the package is not valid Open XML WordprocessingDocument.</exception>
public static WordprocessingDocument Open(Package package)
{
    // Open with default settings (no markup-compatibility processing).
    return Open(package, new OpenSettings());
}
/// <summary>
/// Changes the document type.
/// </summary>
/// <param name="newType">The new type of the document.</param>
/// <remarks>The MainDocumentPart will be changed.</remarks>
public void ChangeDocumentType(WordprocessingDocumentType newType)
{
    ThrowIfObjectDisposed();

    // Nothing to do when the requested type is already current.
    if (this.DocumentType == newType)
    {
        return;
    }

    if (this.FileOpenAccess == FileAccess.Read)
    {
        throw new IOException(ExceptionMessages.PackageAccessModeIsReadonly);
    }

    WordprocessingDocumentType previousType = this.DocumentType;
    this.DocumentType = newType;
    this.MainPartContentType = MainPartContentTypes[newType];

    // Without a main part there is nothing further to convert.
    if (this.MainDocumentPart == null)
    {
        return;
    }

    try
    {
        ChangeDocumentTypeInternal<MainDocumentPart>();
    }
    catch (OpenXmlPackageException ex)
    {
        // Roll back the type/content-type change when the conversion itself was
        // rejected, then surface the original exception to the caller.
        if (ex.Message == ExceptionMessages.CannotChangeDocumentType)
        {
            this.DocumentType = previousType;
            this.MainPartContentType = MainPartContentTypes[previousType];
        }
        throw;
    }
}
/// <summary>
/// Creates an instance of OpenXmlPart according to the given relationship type.
/// </summary>
/// <param name="relationshipType">Relationship type.</param>
/// <returns>An instance of OpenXmlPart.</returns>
internal sealed override OpenXmlPart CreatePartCore(string relationshipType)
{
    this.ThrowIfObjectDisposed();

    if (relationshipType == null)
    {
        throw new ArgumentNullException("relationshipType");
    }

    switch (relationshipType)
    {
        case MainDocumentPart.RelationshipTypeConstant:
            return new MainDocumentPart();
        case CoreFilePropertiesPart.RelationshipTypeConstant:
            return new CoreFilePropertiesPart();
        case ExtendedFilePropertiesPart.RelationshipTypeConstant:
            return new ExtendedFilePropertiesPart();
        case CustomFilePropertiesPart.RelationshipTypeConstant:
            return new CustomFilePropertiesPart();
        case ThumbnailPart.RelationshipTypeConstant:
            return new ThumbnailPart();
        case DigitalSignatureOriginPart.RelationshipTypeConstant:
            return new DigitalSignatureOriginPart();
        case QuickAccessToolbarCustomizationsPart.RelationshipTypeConstant:
            return new QuickAccessToolbarCustomizationsPart();
        case RibbonExtensibilityPart.RelationshipTypeConstant:
            return new RibbonExtensibilityPart();
        // Fix for O15:#258840
        case RibbonAndBackstageCustomizationsPart.RelationshipTypeConstant:
            return new RibbonAndBackstageCustomizationsPart();
        case WebExTaskpanesPart.RelationshipTypeConstant:
            return new WebExTaskpanesPart();
        default:
            // Unknown relationship type for a word-processing package.
            throw new ArgumentOutOfRangeException("relationshipType");
    }
}
/// <summary>
/// Adds a new part of type <typeparamref name="T"/>.
/// </summary>
/// <typeparam name="T">The class of the part.</typeparam>
/// <param name="contentType">The content type of the part. Must match the defined content type if the part is fixed content type.</param>
/// <param name="id">The relationship id. The id will be automaticly generated if this param is null.</param>
/// <returns>The added part.</returns>
/// <exception cref="OpenXmlPackageException">When the part is not allowed to be referenced by this part, or when a MainDocumentPart is requested with a content type that does not match the current document type.</exception>
/// <exception cref="ArgumentOutOfRangeException">When the part is fixed content type and the passed in contentType does not match the defined content type.</exception>
/// <exception cref="ArgumentNullException">Thrown when "contentType" is null reference.</exception>
/// <remarks>Mainly used for adding not-fixed content type part - ImagePart, etc</remarks>
public override T AddNewPart<T>(string contentType, string id)
{
    if (contentType == null)
    {
        throw new ArgumentNullException("contentType");
    }
    // A MainDocumentPart's content type is dictated by the current document type.
    if (typeof(MainDocumentPart).IsAssignableFrom(typeof(T)) && contentType != WordprocessingDocument.MainPartContentTypes[this._documentType])
    {
        throw new OpenXmlPackageException(ExceptionMessages.ErrorContentType);
    }
    return base.AddNewPart<T>(contentType, id);
}
/// <summary>
/// Creates the MainDocumentPart and adds it to this document.
/// </summary>
/// <returns>The newly added MainDocumentPart.</returns>
public MainDocumentPart AddMainDocumentPart()
{
    // The main part's content type depends on the current document type.
    MainDocumentPart part = new MainDocumentPart();
    InitPart(part, this.MainPartContentType);
    return part;
}
/// <summary>
/// Creates a CoreFilePropertiesPart and adds it to this document.
/// </summary>
/// <returns>The newly added CoreFilePropertiesPart.</returns>
public CoreFilePropertiesPart AddCoreFilePropertiesPart()
{
    CoreFilePropertiesPart part = new CoreFilePropertiesPart();
    InitPart(part, CoreFilePropertiesPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates an ExtendedFilePropertiesPart and adds it to this document.
/// </summary>
/// <returns>The newly added ExtendedFilePropertiesPart.</returns>
public ExtendedFilePropertiesPart AddExtendedFilePropertiesPart()
{
    ExtendedFilePropertiesPart part = new ExtendedFilePropertiesPart();
    InitPart(part, ExtendedFilePropertiesPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates a CustomFilePropertiesPart and adds it to this document.
/// </summary>
/// <returns>The newly added CustomFilePropertiesPart.</returns>
public CustomFilePropertiesPart AddCustomFilePropertiesPart()
{
    CustomFilePropertiesPart part = new CustomFilePropertiesPart();
    InitPart(part, CustomFilePropertiesPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates a DigitalSignatureOriginPart and adds it to this document.
/// </summary>
/// <returns>The newly added DigitalSignatureOriginPart.</returns>
public DigitalSignatureOriginPart AddDigitalSignatureOriginPart()
{
    DigitalSignatureOriginPart part = new DigitalSignatureOriginPart();
    InitPart(part, DigitalSignatureOriginPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates a ThumbnailPart with the given content type and adds it to this document.
/// </summary>
/// <param name="contentType">The content type of the ThumbnailPart.</param>
/// <returns>The newly added ThumbnailPart.</returns>
public ThumbnailPart AddThumbnailPart(string contentType)
{
    ThumbnailPart part = new ThumbnailPart();
    InitPart(part, contentType);
    return part;
}
/// <summary>
/// Creates a ThumbnailPart of the given well-known type and adds it to this document.
/// </summary>
/// <param name="partType">The type of the ThumbnailPart.</param>
/// <returns>The newly added ThumbnailPart.</returns>
public ThumbnailPart AddThumbnailPart(ThumbnailPartType partType)
{
    // Resolve the content type and file extension for the requested thumbnail kind,
    // register the extension, then delegate to the string-based overload.
    string resolvedContentType = ThumbnailPartTypeInfo.GetContentType(partType);
    PartExtensionProvider.MakeSurePartExtensionExist(resolvedContentType, ThumbnailPartTypeInfo.GetTargetExtension(partType));
    return AddThumbnailPart(resolvedContentType);
}
/// <summary>
/// Creates a QuickAccessToolbarCustomizationsPart and adds it to this document.
/// </summary>
/// <returns>The newly added QuickAccessToolbarCustomizationsPart.</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", MessageId = "Toolbar")]
public QuickAccessToolbarCustomizationsPart AddQuickAccessToolbarCustomizationsPart()
{
    QuickAccessToolbarCustomizationsPart part = new QuickAccessToolbarCustomizationsPart();
    InitPart(part, QuickAccessToolbarCustomizationsPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates a RibbonExtensibilityPart and adds it to this document.
/// </summary>
/// <returns>The newly added RibbonExtensibilityPart.</returns>
public RibbonExtensibilityPart AddRibbonExtensibilityPart()
{
    RibbonExtensibilityPart part = new RibbonExtensibilityPart();
    InitPart(part, RibbonExtensibilityPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates a RibbonAndBackstageCustomizationsPart and adds it to this document; this part is only available in Office2010.
/// </summary>
/// <returns>The newly added RibbonAndBackstageCustomizationsPart.</returns>
public RibbonAndBackstageCustomizationsPart AddRibbonAndBackstageCustomizationsPart()
{
    RibbonAndBackstageCustomizationsPart part = new RibbonAndBackstageCustomizationsPart();
    InitPart(part, RibbonAndBackstageCustomizationsPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Creates a WebExTaskpanesPart and adds it to this document; this part is only available in Office2013.
/// </summary>
/// <returns>The newly added WebExTaskpanesPart.</returns>
public WebExTaskpanesPart AddWebExTaskpanesPart()
{
    WebExTaskpanesPart part = new WebExTaskpanesPart();
    InitPart(part, WebExTaskpanesPart.ContentTypeConstant);
    return part;
}
/// <summary>
/// Gets the MainDocumentPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;MainDocumentPart&gt;().</remarks>
public MainDocumentPart MainDocumentPart
{
    get { return GetSubPartOfType<MainDocumentPart>(); }
}
/// <summary>
/// Gets the CoreFilePropertiesPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;CoreFilePropertiesPart&gt;().</remarks>
public CoreFilePropertiesPart CoreFilePropertiesPart
{
    get { return GetSubPartOfType<CoreFilePropertiesPart>(); }
}
/// <summary>
/// Gets the ExtendedFilePropertiesPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;ExtendedFilePropertiesPart&gt;().</remarks>
public ExtendedFilePropertiesPart ExtendedFilePropertiesPart
{
    get { return GetSubPartOfType<ExtendedFilePropertiesPart>(); }
}
/// <summary>
/// Gets the CustomFilePropertiesPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;CustomFilePropertiesPart&gt;().</remarks>
public CustomFilePropertiesPart CustomFilePropertiesPart
{
    get { return GetSubPartOfType<CustomFilePropertiesPart>(); }
}
/// <summary>
/// Gets the ThumbnailPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;ThumbnailPart&gt;().</remarks>
public ThumbnailPart ThumbnailPart
{
    get { return GetSubPartOfType<ThumbnailPart>(); }
}
/// <summary>
/// Gets the DigitalSignatureOriginPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;DigitalSignatureOriginPart&gt;().</remarks>
public DigitalSignatureOriginPart DigitalSignatureOriginPart
{
    get { return GetSubPartOfType<DigitalSignatureOriginPart>(); }
}
/// <summary>
/// Gets the RibbonExtensibilityPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;RibbonExtensibilityPart&gt;().</remarks>
public RibbonExtensibilityPart RibbonExtensibilityPart
{
    get { return GetSubPartOfType<RibbonExtensibilityPart>(); }
}
/// <summary>
/// Gets the QuickAccessToolbarCustomizationsPart of the WordprocessingDocument.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;QuickAccessToolbarCustomizationsPart&gt;().</remarks>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", MessageId = "Toolbar")]
public QuickAccessToolbarCustomizationsPart QuickAccessToolbarCustomizationsPart
{
    get { return GetSubPartOfType<QuickAccessToolbarCustomizationsPart>(); }
}
/// <summary>
/// Gets the RibbonAndBackstageCustomizationsPart of the WordprocessingDocument, only available in Office2010.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;RibbonAndBackstageCustomizationsPart&gt;().</remarks>
[OfficeAvailability(FileFormatVersions.Office2010)]
public RibbonAndBackstageCustomizationsPart RibbonAndBackstageCustomizationsPart
{
    get { return GetSubPartOfType<RibbonAndBackstageCustomizationsPart>(); }
}
/// <summary>
/// Gets the WebExTaskpanesPart of the WordprocessingDocument, only available in Office2013.
/// </summary>
/// <remarks>Resolved via GetSubPartOfType&lt;WebExTaskpanesPart&gt;().</remarks>
[OfficeAvailability(FileFormatVersions.Office2013)]
public WebExTaskpanesPart WebExTaskpanesPart
{
    get { return GetSubPartOfType<WebExTaskpanesPart>(); }
}
}
/// <summary>
/// Defines SpreadsheetDocument - an OpenXmlPackage represents a Spreadsheet document.
/// </summary>
}
| |
//
// ForestDBCouchStore.cs
//
// Author:
// Jim Borden <jim.borden@couchbase.com>
//
// Copyright (c) 2015 Couchbase, Inc All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using CBForest;
using Couchbase.Lite.Db;
using Couchbase.Lite.Internal;
using Couchbase.Lite.Revisions;
using Couchbase.Lite.Storage.ForestDB.Internal;
using Couchbase.Lite.Store;
using Couchbase.Lite.Util;
namespace Couchbase.Lite.Storage.ForestDB
{
/// <summary>
/// This class will register this storage engine for use with Couchbase Lite
/// </summary>
public static class Plugin
{
    /// <summary>
    /// Register this class for use as the storage engine for the ForestDB storage type
    /// (be careful, once you set this you cannot change it)
    /// </summary>
    public static void Register()
    {
        // Maps the ForestDB storage-type key to the ForestDBCouchStore implementation.
        Database.RegisterStorageEngine(StorageEngineTypes.ForestDB, typeof(ForestDBCouchStore));
    }
}
#region Delegates
// Callback invoked with a native C4Document pointer (may be null when the document was not found).
internal unsafe delegate void C4DocumentActionDelegate(C4Document* doc);
// Callback invoked with a native C4RawDocument pointer (may be null when the document was not found).
internal unsafe delegate void C4RawDocumentActionDelegate(C4RawDocument* doc);
// Predicate used to select a revision on a native C4Document.
internal unsafe delegate bool C4RevisionSelector(C4Document* doc);
#endregion
#region ForestDBBridge
// Thin wrappers that run a native CBForest call through the busy-retry handler,
// converting native error codes into CBForestException on failure.
internal unsafe static class ForestDBBridge
{
    // Executes a native call that reports success/failure via a C4Error out-parameter.
    public static void Check(C4TryLogicDelegate1 block)
    {
        RetryHandler.RetryIfBusy().Execute(block);
    }
    // Executes a native call that returns a pointer (null signals failure).
    public static void* Check(C4TryLogicDelegate2 block)
    {
        return RetryHandler.RetryIfBusy().Execute(block);
    }
    // Executes a native call variant with a third delegate shape.
    public static void Check(C4TryLogicDelegate3 block)
    {
        RetryHandler.RetryIfBusy().Execute(block);
    }
}
#endregion
#region ForestDBCouchStore
#if __IOS__
[Foundation.Preserve(AllMembers = true)]
#endif
internal unsafe sealed class ForestDBCouchStore : ICouchStore
{
#region Constants
private const int DEFAULT_MAX_REV_TREE_DEPTH = 20;
private const string DB_FILENAME = "db.forest";
private const string TAG = "ForestDBCouchStore";
#endregion
#region Variables
// One native C4Database connection per managed thread id (see the Forest property).
private ConcurrentDictionary<int, IntPtr> _fdbConnections =
    new ConcurrentDictionary<int, IntPtr>();
// Flags used when (re)opening the native database (read-only / create / auto-compact).
private C4DatabaseFlags _config;
// Optional encryption key applied when opening or rekeying the database.
private SymmetricKey _encryptionKey;
// LRU cache of view stores, keyed by view name (capacity 100).
private LruCache<string, ForestDBViewStore> _views = new LruCache<string, ForestDBViewStore>(100);
private object _closeLock = new object();
#endregion
#region Properties
// The encryption key set via SetEncryptionKey (null when the database is unencrypted).
public SymmetricKey EncryptionKey
{
    get {
        return _encryptionKey;
    }
}
// Whether the native database is opened with the AutoCompact flag (see Open).
public bool AutoCompact { get; set; }
// Maximum depth kept when saving revision trees (see SaveDocument).
public int MaxRevTreeDepth { get; set; }
// Notified when a transaction ends (see RunInTransaction).
public ICouchStoreDelegate Delegate { get; set; }
// Number of documents in the native database; 0 when the store is closed.
public int DocumentCount
{
    get {
        if(Forest == null) {
            return 0;
        }
        return (int)Native.c4db_getDocumentCount(Forest);
    }
}
// Latest sequence number in the native database; 0 when the store is closed.
public long LastSequence
{
    get {
        if(Forest == null) {
            return 0L;
        }
        return (long)Native.c4db_getLastSequence(Forest);
    }
}
// Whether the current thread's native connection has an open transaction; false when closed.
public bool InTransaction
{
    get {
        if(Forest == null) {
            return false;
        }
        return Native.c4db_isInTransaction(Forest);
    }
}
// Set by Open()/Close(); gates access to the native connections.
public bool IsOpen
{
    get;
    private set;
}
// Directory containing the db.forest file; set by Open().
public string Directory { get; private set; }
// The native database connection for the *calling thread*. A connection is lazily
// opened per managed thread id and cached in _fdbConnections. Returns null when closed.
public C4Database* Forest
{
    get {
        if(!IsOpen) {
            return null;
        }
        var threadId = Thread.CurrentThread.ManagedThreadId;
        var retVal = _fdbConnections.GetOrAdd(threadId, x => (IntPtr)Reopen());
        return (C4Database*)retVal.ToPointer();
    }
}
#endregion
#region Constructors
// Routes native CBForest log output into the Couchbase Lite logger and enables
// old-style revision ID generation (when the native binaries support it).
static ForestDBCouchStore()
{
    Native.c4log_register(C4LogLevel.Debug, (level, msg) =>
    {
        // Map each native log level onto the corresponding managed log sink.
        switch(level) {
            case C4LogLevel.Debug:
                Log.To.Database.D("ForestDB", msg);
                break;
            case C4LogLevel.Info:
                Log.To.Database.V("ForestDB", msg);
                break;
            case C4LogLevel.Warning:
                Log.To.Database.W("ForestDB", msg);
                break;
            case C4LogLevel.Error:
                Log.To.Database.E("ForestDB", msg);
                break;
        }
    });
    try {
        Native.c4doc_generateOldStyleRevID(true);
    } catch {
        // The native entry point is missing in older binaries; warn but keep going.
        Log.W(TAG, "Out of date ForestDB native binaries detected!");
    }
}
// Initializes defaults: auto-compaction on, revision tree depth capped at 20.
public ForestDBCouchStore()
{
    AutoCompact = true;
    MaxRevTreeDepth = DEFAULT_MAX_REV_TREE_DEPTH;
}
#endregion
#region Public Methods
// Loads the revision of a document at a specific sequence number.
// Returns null when no document exists at that sequence.
public RevisionInternal GetDocument(string docId, long sequence)
{
    var retVal = default(RevisionInternal);
    WithC4Document(docId, sequence, doc =>
    {
        Log.To.Database.D(TAG, "Read {0} seq {1}", docId, sequence);
        retVal = new ForestRevisionInternal(doc, true);
    });
    return retVal;
}
#endregion
#region Internal Methods
// Evicts the named view store from the LRU cache so the next lookup re-creates it.
internal void ForgetViewStorage(string name)
{
    _views.Remove(name);
}
#endregion
#region Private Methods
// Enumerates revision history starting from the given sequence number.
private CBForestHistoryEnumerator GetHistoryFromSequence(long sequence)
{
    return new CBForestHistoryEnumerator(Forest, sequence, true);
}
// Snapshot of the last sequence number of every per-thread native connection.
private long[] GetLastSequenceNumbers()
{
    return _fdbConnections
        .Select(entry => (long)Native.c4db_getLastSequence((C4Database*)entry.Value.ToPointer()))
        .ToArray();
}
// Snapshot of the in-transaction state of every per-thread native connection.
private bool[] GetIsInTransactions()
{
    return _fdbConnections
        .Select(entry => Native.c4db_isInTransaction((C4Database*)entry.Value.ToPointer()))
        .ToArray();
}
// Builds a document enumerator for the given query options. When options.Keys is set,
// enumerates exactly those IDs (remainingIDs receives the full key list); otherwise
// enumerates the start/end key range, applying prefix matching on the appropriate end.
private CBForestDocEnumerator GetDocEnumerator(QueryOptions options, out List<string> remainingIDs)
{
    var forestOps = options.AsC4EnumeratorOptions();
    var enumerator = default(CBForestDocEnumerator);
    remainingIDs = new List<string>();
    if(options.Keys != null) {
        try {
            remainingIDs = options.Keys.Cast<string>().ToList();
            enumerator = new CBForestDocEnumerator(Forest, remainingIDs.ToArray(), forestOps);
        } catch(InvalidCastException) {
            // Keys for an all-docs query must be document ID strings.
            Log.To.Database.E(TAG, "options.keys must contain strings");
            throw;
        }
    } else {
        string startKey, endKey;
        // Prefix matching applies to the logical end of the range, which is the
        // start key when descending and the end key when ascending.
        if(options.Descending) {
            startKey = Misc.KeyForPrefixMatch(options.StartKey, options.PrefixMatchLevel) as string;
            endKey = options.EndKey as string;
        } else {
            startKey = options.StartKey as string;
            endKey = Misc.KeyForPrefixMatch(options.EndKey, options.PrefixMatchLevel) as string;
        }
        enumerator = new CBForestDocEnumerator(Forest, startKey, endKey, forestOps);
    }
    return enumerator;
}
// Returns a history enumerator for the given revision's document, or null when the
// revision is first-generation or the document does not exist.
private CBForestHistoryEnumerator GetHistoryEnumerator(RevisionInternal rev, int generation, bool onlyCurrent = false)
{
    if(generation <= 1) {
        // A generation-1 revision has no ancestors to enumerate.
        return null;
    }
    var doc = default(C4Document*);
    try {
        doc = (C4Document*)ForestDBBridge.Check(err => Native.c4doc_get(Forest, rev.DocID, true, err));
        ForestDBBridge.Check(err => Native.c4doc_selectCurrentRevision(doc));
    } catch(CBForestException e) {
        // Missing document is an expected outcome, not an error.
        if(e.Domain == C4ErrorDomain.ForestDB && e.Code == (int)ForestDBStatus.KeyNotFound) {
            return null;
        }
        throw;
    }
    // The enumerator takes ownership of doc (owner: true) and will free it.
    return new CBForestHistoryEnumerator(doc, onlyCurrent, true);
}
// Fetches the native document for docId, optionally selects revId and loads its body,
// invokes block with the result (null when not found), and always frees the native doc.
private void WithC4Document(string docId, RevisionID revId, bool withBody, bool create, C4DocumentActionDelegate block)
{
    if(!IsOpen) {
        return;
    }
    var doc = default(C4Document*);
    try {
        // HTTP 404 / ForestDB KeyNotFound are expected outcomes; they leave doc == null.
        doc = (C4Document *)RetryHandler.RetryIfBusy().AllowErrors(
            new C4Error() { code = 404, domain = C4ErrorDomain.HTTP },
            new C4Error() { code = (int)ForestDBStatus.KeyNotFound, domain = C4ErrorDomain.ForestDB })
            .Execute(err => Native.c4doc_get(Forest, docId, !create, err));
        if(doc != null) {
            var selected = true;
            if(revId != null) {
                // Select the requested revision; a 404 here means the revision does not
                // exist, so the native doc is freed and block receives null instead.
                selected = RetryHandler.RetryIfBusy().HandleExceptions(e =>
                {
                    if(e.Code == 404) {
                        Native.c4doc_free(doc);
                        doc = null;
                        return;
                    }
                    throw e;
                }).AllowError(410, C4ErrorDomain.HTTP).Execute(err =>
                {
                    bool result = false;
                    // Pin the revision ID bytes while the native call uses the slice.
                    revId.PinAndUse(slice =>
                    {
                        result = Native.c4doc_selectRevision(doc, slice, withBody, err);
                    });
                    return result;
                });
            }
            if(selected && withBody) {
                // 410 (body no longer available) is tolerated when loading the body.
                RetryHandler.RetryIfBusy().AllowError(410, C4ErrorDomain.HTTP).Execute((err => Native.c4doc_loadRevisionBody(doc, err)));
            }
        }
        block(doc);
    } finally {
        Native.c4doc_free(doc);
    }
}
// Fetches the native document at the given sequence, invokes block with it
// (null when no document exists at that sequence), and always frees the native doc.
// The docId parameter is unused here; it is kept for signature symmetry with the
// revision-based overload.
private void WithC4Document(string docId, long sequence, C4DocumentActionDelegate block)
{
    var doc = default(C4Document*);
    try {
        doc = (C4Document*)ForestDBBridge.Check(err => Native.c4doc_getBySequence(Forest, (ulong)sequence, err));
    } catch(CBForestException e) {
        // Only a ForestDB KeyNotFound ("no such sequence") is tolerated; everything
        // else is rethrown. The original condition used && which also swallowed every
        // other ForestDB-domain error — GetAllDocsEntry shows the intended || form.
        if(e.Domain != C4ErrorDomain.ForestDB || (ForestDBStatus)e.Code != ForestDBStatus.KeyNotFound) {
            throw;
        }
    }
    try {
        block(doc);
    } finally {
        Native.c4doc_free(doc);
    }
}
// Fetches the raw (non-versioned) document docId from the named store, invokes block
// with it (null when not found), and always frees the native doc.
private void WithC4Raw(string docId, string storeName, C4RawDocumentActionDelegate block)
{
    var doc = default(C4RawDocument*);
    try {
        doc = (C4RawDocument*)ForestDBBridge.Check(err => Native.c4raw_get(Forest, storeName, docId, err));
    } catch(CBForestException e) {
        // Only a ForestDB KeyNotFound ("no such document") is tolerated; everything
        // else is rethrown. The original condition used && which also swallowed every
        // other ForestDB-domain error — GetAllDocsEntry shows the intended || form.
        if(e.Domain != C4ErrorDomain.ForestDB || (ForestDBStatus)e.Code != ForestDBStatus.KeyNotFound) {
            throw;
        }
    }
    try {
        block(doc);
    } finally {
        Native.c4raw_free(doc);
    }
}
// Opens a fresh native connection to <Directory>/db.forest using the stored flags and
// encryption key. Called once per thread via the Forest property.
private C4Database* Reopen()
{
    if(_encryptionKey != null) {
        Log.To.Database.I(TAG, "Database is encrypted; setting CBForest encryption key");
    }
    var forestPath = Path.Combine(Directory, DB_FILENAME);
    try {
        return (C4Database*)ForestDBBridge.Check(err =>
        {
            var nativeKey = default(C4EncryptionKey);
            if(_encryptionKey != null) {
                nativeKey = new C4EncryptionKey(_encryptionKey.KeyData);
            }
            return Native.c4db_open(forestPath, _config, &nativeKey, err);
        });
    } catch(CBForestException e) {
        // NoDbHeaders after supplying a key means the key is wrong (or the file is corrupt).
        if(e.Domain == C4ErrorDomain.ForestDB && e.Code == (int)ForestDBStatus.NoDbHeaders) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Unauthorized, TAG,
                "Failed to decrypt database, or it is corrupt");
        }
        Log.To.Database.E(TAG, "Got exception while opening database, rethrowing...");
        throw;
    }
}
// Deletes a local (non-replicated) document. When obeyMVCC is true the caller must
// supply the current revision ID or the delete is rejected.
private void DeleteLocalRevision(string docId, RevisionID revId, bool obeyMVCC)
{
    if(!docId.StartsWith("_local/")) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadId, TAG,
            "Local revision IDs must start with _local/");
    }
    if(obeyMVCC && revId == null) {
        // Didn't specify a revision to delete: NotFound or a Conflict, depending
        var gotLocalDoc = GetLocalDocument(docId, null);
        if(gotLocalDoc == null) {
            // NOTE(review): both branches use the same message text even though the
            // status codes differ (NotFound vs Conflict) — confirm whether intended.
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.NotFound, TAG,
                "No revision ID specified in local delete operation");
        }
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Conflict, TAG,
            "No revision ID specified in local delete operation");
    }
    RunInTransaction(() =>
    {
        WithC4Raw(docId, "_local", doc =>
        {
            if(doc == null) {
                throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.NotFound, TAG,
                    "Specified revision ({0}) in delete operation not found", revId);
            }
            var currentRevID = doc->meta.AsRevID();
            if(obeyMVCC && (revId != currentRevID)) {
                throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Conflict, TAG,
                    "Specified revision ({0}) in delete operation != current revision ({1})", revId, currentRevID);
            }
            // Writing null meta/body removes the raw document.
            ForestDBBridge.Check(err => Native.c4raw_put(Forest, "_local", docId, null, null, err));
        });
        return true;
    });
}
// Saves the native document, updating its stored "type" from the winning revision's
// properties. Returns true when revId is the winning revision.
private bool SaveDocument(C4Document* doc, RevisionID revId, IDictionary<string, object> properties)
{
    // Is the new revision the winner?
    var winningRevID = doc->revID.AsRevID();
    bool isWinner = winningRevID.Equals(revId);
    // Update the documentType:
    if(!isWinner) {
        // The saved revision lost the conflict; read the type from the actual winner's body.
        Native.c4doc_selectCurrentRevision(doc);
        properties = Manager.GetObjectMapper().ReadValue<IDictionary<string, object>>(doc->selectedRev.body);
    }
    Native.c4doc_setType(doc, properties?.GetCast<string>("type"));
    // Save:
    ForestDBBridge.Check(err => Native.c4doc_save(doc, (uint)MaxRevTreeDepth, err));
    return isWinner;
}
// Builds the DocumentChange notification for a newly added revision. The winning
// revision ID is the incoming one when it won, otherwise the doc's current winner.
private DocumentChange ChangeWithNewRevision(RevisionInternal inRev, bool isWinningRev, C4Document* doc, Uri source)
{
    var winningRevId = isWinningRev ? inRev.RevID : doc->revID.AsRevID();
    return new DocumentChange(inRev, winningRevId, doc->IsConflicted, source);
}
// Selects the current (winning) revision on the enumerated document.
private void SelectCurrentRevision(CBForestDocStatus status)
{
    ForestDBBridge.Check(err => Native.c4doc_selectCurrentRevision(status.GetDocument()));
}
// Loads the body of the currently selected revision of the enumerated document.
private void LoadRevisionBody(CBForestDocStatus status)
{
    ForestDBBridge.Check(err => Native.c4doc_loadRevisionBody(status.GetDocument(), err));
}
// Builds the all-docs entry for a (deleted) document: { "rev": ..., "deleted": true }.
// Returns null when the document does not exist.
private IDictionary<string, object> GetAllDocsEntry(string docId)
{
    var value = default(IDictionary<string, object>);
    var existingDoc = default(C4Document*);
    try {
        existingDoc = (C4Document*)ForestDBBridge.Check(err => Native.c4doc_get(Forest, docId, true, err));
        if(existingDoc != null) {
            value = new NonNullDictionary<string, object> {
                { "rev", (string)existingDoc->revID },
                { "deleted", true }
            };
        }
    } catch(CBForestException e) {
        // A missing document is expected; any other error is rethrown.
        if(e.Domain != C4ErrorDomain.ForestDB || e.Code != (int)ForestDBStatus.KeyNotFound) {
            throw;
        }
    } finally {
        Native.c4doc_free(existingDoc);
    }
    return value;
}
// Wraps an enumerated native document as a managed revision, optionally loading its body.
private ForestRevisionInternal CreateRevision(CBForestDocStatus s, bool loadBody)
{
    return new ForestRevisionInternal(s.GetDocument(), loadBody);
}
// Enumerates documents changed after the given sequence number.
private CBForestDocEnumerator EnumeratorFromSequence(long lastSequence, C4EnumeratorOptions options)
{
    return new CBForestDocEnumerator(Forest, lastSequence, options);
}
// Enumerates the revision history of an already-enumerated document.
private CBForestHistoryEnumerator EnumeratorUsingDoc(CBForestDocStatus doc, bool onlyLeaf, bool owner)
{
    return new CBForestHistoryEnumerator(doc.GetDocument(), onlyLeaf, owner);
}
#endregion
#region ICouchStore
// Schema upgrades are not applicable to the ForestDB storage engine.
public IDatabaseUpgrader CreateUpgrader(Database upgradeTo, string upgradeFrom)
{
    throw new NotSupportedException("Upgrades not supported on ForestDB");
}
// Checks whether a ForestDB database file exists in the given directory.
public bool DatabaseExistsIn(string directory)
{
    var dbPath = Path.Combine(directory, DB_FILENAME);
    if(File.Exists(dbPath)) {
        return true;
    }
    // Auto-compaction changes the filename, so also look for the ".meta" variant.
    return File.Exists(dbPath + ".meta");
}
// Opens the store: records the directory, builds the native open flags, and eagerly
// creates the calling thread's native connection. No-op when already open.
public void Open(string directory, Manager manager, bool readOnly)
{
    if(IsOpen) {
        return;
    }
    IsOpen = true;
    Directory = directory;
    if(!System.IO.Directory.Exists(directory)) {
        System.IO.Directory.CreateDirectory(Directory);
    }
    _config = readOnly ? C4DatabaseFlags.ReadOnly : C4DatabaseFlags.Create;
    if(AutoCompact) {
        _config |= C4DatabaseFlags.AutoCompact;
    }
    // Open the current thread's connection now so open errors surface immediately.
    _fdbConnections.GetOrAdd(Thread.CurrentThread.ManagedThreadId, x => (IntPtr)Reopen());
}
// Closes the store and frees every per-thread native connection that is not in a
// transaction (in-transaction connections are closed by RunInTransaction's finally).
public void Close()
{
    IsOpen = false;
    // Swap out the connection map so no new connections attach to the old set.
    var connections = _fdbConnections;
    _fdbConnections = new ConcurrentDictionary<int, IntPtr>();
    foreach(var ptr in connections) {
        if(!Native.c4db_isInTransaction((C4Database*)ptr.Value)) {
            ForestDBBridge.Check(err => Native.c4db_close((C4Database*)ptr.Value.ToPointer(), err));
            Native.c4db_free((C4Database*)ptr.Value.ToPointer());
        }
    }
}
// Stores the key used when opening new native connections (see Reopen).
public void SetEncryptionKey(SymmetricKey key)
{
    _encryptionKey = key;
}
// Builds an atomic action that rekeys the database (null newKey removes encryption)
// and chains the equivalent rekey action for every existing view store.
public AtomicAction ActionToChangeEncryptionKey(SymmetricKey newKey)
{
    var retVal = new AtomicAction(() =>
        ForestDBBridge.Check(err =>
        {
            var newc4key = default(C4EncryptionKey);
            if(newKey != null) {
                newc4key = new C4EncryptionKey(newKey.KeyData);
            }
            return Native.c4db_rekey(Forest, &newc4key, err);
        }), null, null);
    // Views have their own storage files and must be rekeyed alongside the database.
    foreach(var viewName in GetAllViews()) {
        var store = GetViewStorage(viewName, false) as ForestDBViewStore;
        if(store == null) {
            continue;
        }
        retVal.AddLogic(store.ActionToChangeEncryptionKey(newKey));
    }
    return retVal;
}
// Compacts the native database file for the calling thread's connection.
public void Compact()
{
    ForestDBBridge.Check(err => Native.c4db_compact(Forest, err));
}
// Runs block inside a native transaction, committing when it returns true and
// aborting otherwise. Returns false (without throwing) when the store is closed.
public bool RunInTransaction(RunInTransactionDelegate block)
{
    var nativeDb = default(C4Database*);
    nativeDb = Forest;
    if(nativeDb == null) {
        Log.To.Database.W(TAG, "RunInTransaction called on a closed database, returning false...");
        return false;
    }
    Log.To.Database.V(TAG, "BEGIN transaction...");
    try {
        ForestDBBridge.Check(err => Native.c4db_beginTransaction(nativeDb, err));
    } catch(CBForestException e) {
        if(e.Code == (int)ForestDBStatus.InvalidHandle) {
            // Database was closed between the start of the method and now
            Log.To.Database.W(TAG, "RunInTransaction called on a closed database, caught InvalidHandle and returning false...");
            return false;
        }
        throw;
    }
    // At this point we can rest assured that the connection won't be closed from under us
    var success = false;
    try {
        success = block();
    } catch(CouchbaseLiteException) {
        // Already a CouchbaseLiteException: mark the transaction failed and propagate as-is.
        Log.To.Database.W(TAG, "Failed to run transaction");
        success = false;
        throw;
    } catch(CBForestException e) {
        success = false;
        // HTTP-domain native errors map directly to status codes; others are wrapped.
        if(e.Domain == C4ErrorDomain.HTTP) {
            var code = e.Code;
            throw Misc.CreateExceptionAndLog(Log.To.Database, (StatusCode)code, TAG, "Failed to run transaction");
        }
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error running transaction");
    } catch(Exception e) {
        success = false;
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error running transaction");
    } finally {
        // Always end the transaction; success decides commit vs abort.
        Log.To.Database.V(TAG, "END transaction (success={0})", success);
        ForestDBBridge.Check(err => Native.c4db_endTransaction(nativeDb, success, err));
        if(!InTransaction && Delegate != null) {
            Delegate.StorageExitedTransaction(success);
            // Close() skips connections that were mid-transaction; finish the cleanup here.
            if(!IsOpen) {
                ForestDBBridge.Check(err => Native.c4db_close(nativeDb, err));
                Native.c4db_free(nativeDb);
            }
        }
    }
    return success;
}
// Loads a document revision. When revId is null the current revision is used; a
// current revision that is deleted reports StatusCode.Deleted. Returns null (with
// outStatus set) when the document or revision is not found.
public RevisionInternal GetDocument(string docId, RevisionID revId, bool withBody, Status outStatus = null)
{
    if(outStatus == null) {
        outStatus = new Status();
    }
    var retVal = default(RevisionInternal);
    WithC4Document(docId, revId, withBody, false, doc =>
    {
        Log.To.Database.D(TAG, "Read {0} rev {1}", docId, revId);
        if(doc == null) {
            outStatus.Code = StatusCode.NotFound;
            return;
        }
        if(revId == null && doc->IsDeleted) {
            // No specific revision was requested and the current one is a tombstone.
            // (The original re-tested revId == null in a ternary here; inside this
            // branch it is always null, so the result is always Deleted.)
            outStatus.Code = StatusCode.Deleted;
            return;
        }
        outStatus.Code = StatusCode.Ok;
        retVal = new ForestRevisionInternal(doc, withBody);
    });
    return retVal;
}
// Loads the stored body for the given revision into rev; throws NotFound when the
// document or revision does not exist.
public void LoadRevisionBody(RevisionInternal rev)
{
    WithC4Document(rev.DocID, rev.RevID, true, false, doc =>
    {
        if(doc == null) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.NotFound, TAG,
                "Cannot load revision body for non-existent revision {0}", rev);
        }
        rev.SetBody(new Body(doc->selectedRev.body));
    });
}
// Returns the sequence number of the given revision, or 0 when it is not found.
public long GetRevisionSequence(RevisionInternal rev)
{
    var retVal = 0L;
    WithC4Document(rev.DocID, rev.RevID, false, false, doc => retVal = (long)doc->selectedRev.sequence);
    return retVal;
}
// Returns the time of the next scheduled document expiration, or null when the
// store is closed or no document has an expiration set (native timestamp == 0).
public DateTime? NextDocumentExpiry()
{
    if(!IsOpen) {
        return null;
    }
    var timestamp = Native.c4db_nextDocExpiration(Forest);
    if(timestamp == 0UL) {
        return null;
    }
    return Misc.OffsetFromEpoch(TimeSpan.FromSeconds(timestamp));
}
// Returns the expiration time of the given document, or null when the store is
// closed or the document has no expiration set (native timestamp == 0).
public DateTime? GetDocumentExpiration(string documentId)
{
    if(!IsOpen) {
        return null;
    }
    var timestamp = Native.c4doc_getExpiration(Forest, documentId);
    if(timestamp == 0UL) {
        return null;
    }
    return Misc.OffsetFromEpoch(TimeSpan.FromSeconds(timestamp));
}
// Sets (or, when expiration is null, clears) the expiration time of a document.
// No-op when the store is closed.
public void SetDocumentExpiration(string documentId, DateTime? expiration)
{
    if(!IsOpen) {
        return;
    }
    // A native timestamp of 0 clears the expiration.
    var timestamp = expiration.HasValue
        ? (ulong)expiration.Value.ToUniversalTime().TimeSinceEpoch().TotalSeconds
        : 0UL;
    ForestDBBridge.Check(err => Native.c4doc_setExpiration(Forest, documentId, timestamp, err));
}
// Returns the parent of the given revision with its body loaded, or null when the
// revision has no parent (or the document is missing).
public RevisionInternal GetParentRevision(RevisionInternal rev)
{
    var retVal = default(RevisionInternal);
    WithC4Document(rev.DocID, rev.RevID, false, false, doc =>
    {
        if(!Native.c4doc_selectParentRevision(doc)) {
            // No parent: rev is a root revision.
            return;
        }
        ForestDBBridge.Check(err => Native.c4doc_loadRevisionBody(doc, err));
        retVal = new RevisionInternal((string)doc->docID, doc->selectedRev.revID.AsRevID(), doc->selectedRev.IsDeleted);
        retVal.Sequence = (long)doc->selectedRev.sequence;
        retVal.SetBody(new Body(doc->selectedRev.body));
    });
    return retVal;
}
// Returns all (or only current/leaf) revisions of a document, optionally filtering
// out deleted ones. Returns null when the document is not found.
public RevisionList GetAllDocumentRevisions(string docId, bool onlyCurrent, bool includeDeleted)
{
    var retVal = default(RevisionList);
    WithC4Document(docId, null, false, false, doc =>
    {
        using(var enumerator = new CBForestHistoryEnumerator(doc, onlyCurrent, false)) {
            var expression = includeDeleted ?
                enumerator.Select (x => new ForestRevisionInternal (x.GetDocument (), false)) :
                enumerator.Where (x => !x.SelectedRev.IsDeleted).Select (x => new ForestRevisionInternal (x.GetDocument (), false));
            retVal = new RevisionList(expression.Cast<RevisionInternal>().ToList());
        }
    });
    return retVal;
}
// Yields up to limit revision IDs of earlier generations that could serve as
// ancestors of rev, preferring leaf revisions (pass 1) over non-leaves (pass 0).
// haveBodies is cleared when any returned revision lacks a stored body.
public IEnumerable<RevisionID> GetPossibleAncestors(RevisionInternal rev, int limit, ValueTypePtr<bool> haveBodies)
{
    haveBodies.Value = true;
    var returnedCount = 0;
    var generation = rev.RevID.Generation;
    // current == 1: only leaf revisions; current == 0: only non-leaf revisions.
    for(int current = 1; current >= 0; current--) {
        var enumerator = GetHistoryEnumerator(rev, generation, current == 1);
        if(enumerator == null) {
            yield break;
        }
        foreach(var next in enumerator) {
            var flags = next.SelectedRev.flags;
            // Compute the generation once (the original computed it into an unused
            // local and then repeated the native call inside the condition).
            var nextGen = Native.c4rev_getGeneration(next.SelectedRev.revID);
            if(flags.HasFlag(C4RevisionFlags.RevLeaf) == (current == 1) &&
                nextGen < generation) {
                if(haveBodies && !next.HasRevisionBody) {
                    haveBodies.Value = false;
                }
                yield return next.SelectedRev.revID.AsRevID();
                if(limit > 0 && ++returnedCount >= limit) {
                    break;
                }
            }
        }
        // Stop after the first pass that produced results.
        if(returnedCount != 0) {
            yield break;
        }
    }
}
// Returns the first ID from revIds that exists in rev's document with a generation
// no greater than rev's, or null when there is no match (or rev is generation 1).
public RevisionID FindCommonAncestor(RevisionInternal rev, IEnumerable<RevisionID> revIds)
{
    var generation = rev.RevID.Generation;
    var revIdArray = revIds == null ? null : revIds.ToList();
    if(generation <= 1 || revIdArray == null || revIdArray.Count == 0) {
        return null;
    }
    // NOTE(review): revIdArray is sorted here, but the loop below iterates the
    // original revIds enumerable, so the sort has no observable effect — confirm
    // whether the loop was meant to iterate revIdArray.
    revIdArray.Sort();
    var commonAncestor = default(RevisionID);
    WithC4Document(rev.DocID, null, false, false, doc =>
    {
        foreach(var possibleRevId in revIds) {
            if(possibleRevId.Generation <= generation &&
                Native.c4doc_selectRevision(doc, possibleRevId.ToString(), false, null)) {
                commonAncestor = possibleRevId;
                return;
            }
        }
    });
    return commonAncestor;
}
// Walks rev's ancestry from newest to oldest, stopping early once an ID from
// ancestorRevIds is reached (that ID is included in the result).
public IList<RevisionID> GetRevisionHistory(RevisionInternal rev, ICollection<RevisionID> ancestorRevIds)
{
    var history = new List<RevisionID>();
    WithC4Document(rev.DocID, rev.RevID, false, false, doc =>
    {
        var enumerator = new CBForestHistoryEnumerator(doc, false);
        foreach(var next in enumerator) {
            var revId = next.SelectedRev.revID.AsRevID();
            history.Add(revId);
            if(ancestorRevIds != null && ancestorRevIds.Contains(revId)) {
                break;
            }
        }
    });
    return history;
}
// Returns all revisions changed after lastSequence, honoring the given options and
// optional filter. Descending order is not supported by this storage engine.
public RevisionList ChangesSince(long lastSequence, ChangesOptions options, RevisionFilter filter)
{
    // http://wiki.apache.org/couchdb/HTTP_database_API#Changes
    // Translate options to ForestDB:
    if(options.Descending) {
        // https://github.com/couchbase/couchbase-lite-ios/issues/641
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.NotImplemented, TAG,
            "Descending ChangesSince is not currently implemented " +
            "(see https://github.com/couchbase/couchbase-lite-ios/issues/641)");
    }
    // Bodies are needed when the caller wants them or a filter must inspect them.
    var revsWithBodies = (options.IncludeDocs || filter != null);
    var loadC4Doc = (revsWithBodies || options.IncludeConflicts);
    var limit = options.Limit;
    var c4Opts = C4EnumeratorOptions.DEFAULT;
    c4Opts.flags |= C4EnumeratorFlags.IncludeDeleted;
    if(!loadC4Doc) {
        c4Opts.flags &= ~C4EnumeratorFlags.IncludeBodies;
    }
    var e = new CBForestDocEnumerator(Forest, lastSequence, c4Opts);
    var changes = new RevisionList();
    foreach(var doc in e) {
        if(options.IncludeConflicts) {
            // Emit every leaf revision of the document, not just the winner.
            using(var enumerator = new CBForestHistoryEnumerator(doc.GetDocument(), true, false)) {
                var includeBody = c4Opts.flags.HasFlag(C4EnumeratorFlags.IncludeBodies);
                var selection = enumerator.Select<CBForestDocStatus, RevisionInternal>(x => new ForestRevisionInternal(x.GetDocument(), includeBody));
                if(filter != null) {
                    selection = selection.Where(x => filter(x));
                }
                foreach(var rev in selection) {
                    if(!options.IncludeDocs) {
                        // Body was only loaded for filtering; drop it before returning.
                        rev.SetBody(null);
                    }
                    changes.Add(rev);
                }
            }
        } else {
            var rev = new ForestRevisionInternal(doc.GetDocument(), c4Opts.flags.HasFlag(C4EnumeratorFlags.IncludeBodies));
            if(filter == null || filter(rev)) {
                if(!options.IncludeDocs) {
                    rev.SetBody(null);
                }
                changes.Add(rev);
            }
        }
    }
    return changes;
}
// Streaming variant of ChangesSince: identical selection logic, but yields
// revisions lazily instead of building a RevisionList.
public IEnumerable<RevisionInternal> ChangesSinceStreaming(long lastSequence, ChangesOptions options, RevisionFilter filter)
{
    // http://wiki.apache.org/couchdb/HTTP_database_API#Changes
    // Translate options to ForestDB:
    if(options.Descending) {
        // https://github.com/couchbase/couchbase-lite-ios/issues/641
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.NotImplemented, TAG,
            "Descending ChangesSince is not currently implemented " +
            "(see https://github.com/couchbase/couchbase-lite-ios/issues/641)");
    }

    // Bodies are only needed when the caller wants docs or will run a filter.
    var revsWithBodies = (options.IncludeDocs || filter != null);
    var loadC4Doc = (revsWithBodies || options.IncludeConflicts);
    // NOTE(review): options.Limit is not enforced here (the dead local that
    // captured it has been removed, as has an unused RevisionList) -- confirm
    // truncation happens in the caller.
    var c4Opts = C4EnumeratorOptions.DEFAULT;
    c4Opts.flags |= C4EnumeratorFlags.IncludeDeleted;
    if(!loadC4Doc) {
        c4Opts.flags &= ~C4EnumeratorFlags.IncludeBodies;
    }

    var e = EnumeratorFromSequence(lastSequence, c4Opts);
    foreach(var doc in e) {
        if(options.IncludeConflicts) {
            using(var enumerator = EnumeratorUsingDoc(doc, true, false)) {
                var includeBody = c4Opts.flags.HasFlag(C4EnumeratorFlags.IncludeBodies);
                var selection = enumerator.Select<CBForestDocStatus, RevisionInternal>(x => CreateRevision(x, includeBody));
                if(filter != null) {
                    selection = selection.Where(x => filter(x));
                }

                foreach(var rev in selection) {
                    if(!options.IncludeDocs) {
                        rev.SetBody(null);
                    }

                    yield return rev;
                }
            }
        } else {
            var rev = CreateRevision(doc, c4Opts.flags.HasFlag(C4EnumeratorFlags.IncludeBodies));
            if(filter == null || filter(rev)) {
                if(!options.IncludeDocs) {
                    rev.SetBody(null);
                }

                yield return rev;
            }
        }
    }
}
// Iterates all documents matching `options`, yielding one QueryRow per doc.
// Explicitly requested doc IDs that the enumerator never produced are emitted
// at the end as not-found rows.
public IEnumerable<QueryRow> GetAllDocs(QueryOptions options)
{
    var remainingIDs = default(List<string>);
    var enumerator = GetDocEnumerator(options, out remainingIDs);
    var current = 0;
    foreach(var next in enumerator) {
        // Stop once the requested number of rows has been produced.
        if(current++ >= options.Limit) {
            yield break;
        }

        var sequenceNumber = 0L;
        var docID = next.CurrentDocID;
        remainingIDs.Remove(docID);
        var value = default(IDictionary<string, object>);
        if(next.Exists) {
            sequenceNumber = (long)next.SelectedRev.sequence;
            var conflicts = default(IList<string>);
            if(options.AllDocsMode >= AllDocsMode.ShowConflicts && next.IsConflicted) {
                SelectCurrentRevision(next);
                LoadRevisionBody(next);
                using(var innerEnumerator = GetHistoryFromSequence(next.Sequence)) {
                    conflicts = innerEnumerator.Select(x => (string)x.SelectedRev.revID).ToList();
                }

                // A single entry means there is no actual conflict to report.
                if(conflicts.Count == 1) {
                    conflicts = null;
                }
            }

            // In OnlyConflicts mode, docs without conflicts are skipped entirely.
            bool valid = conflicts != null || options.AllDocsMode != AllDocsMode.OnlyConflicts;
            if(!valid) {
                continue;
            }

            value = new NonNullDictionary<string, object> {
                { "rev", next.CurrentRevID },
                { "deleted", next.IsDeleted ? (object)true : null },
                { "_conflicts", conflicts }
            };
            Log.To.Query.V(TAG, "AllDocs: Found row with key=\"{0}\", value={1}",
                new SecureLogString(docID, LogMessageSensitivity.PotentiallyInsecure),
                new SecureLogJsonString(value, LogMessageSensitivity.PotentiallyInsecure));
        } else {
            Log.To.Query.V(TAG, "AllDocs: No such row with key=\"{0}\"", new SecureLogString(docID, LogMessageSensitivity.PotentiallyInsecure));
        }

        // A null `value` marks a missing doc: such rows get a null key and no revision.
        var row = new QueryRow(value == null ? null : docID, sequenceNumber, docID, value,
            value == null ? null : new ForestRevisionInternal(next, options.IncludeDocs), null);
        if(options.Filter == null || options.Filter(row)) {
            yield return row;
        } else {
            Log.To.Query.V(TAG, " ... on 2nd thought, filter predicate skipped that row");
        }
    }

    // Emit rows for requested IDs the main enumerator never returned.
    foreach(var docId in remainingIDs) {
        var value = GetAllDocsEntry(docId);
        var row = new QueryRow(value != null ? docId as string : null, 0, docId, value, null, null);
        if(options.Filter == null || options.Filter(row)) {
            yield return row;
        }
    }
}
// Removes from `revs` every revision that already exists in the local store
// and returns how many entries were removed.  A doc-ID-sorted copy is walked
// so each document only needs to be fetched once.
public int FindMissingRevisions(RevisionList revs)
{
    var sortedRevs = new RevisionList(revs);
    sortedRevs.SortByDocID();
    var lastDocId = (string)null;
    var doc = (C4Document*)null;
    var removedCount = 0;
    try {
        foreach(var rev in sortedRevs) {
            if(rev.DocID != lastDocId) {
                // New document: free the previous native handle, fetch this one.
                lastDocId = rev.DocID;
                Native.c4doc_free(doc);
                doc = Native.c4doc_get(Forest, lastDocId, true, null);
            }

            // Document not present locally -- all of its revisions are missing.
            if(doc == null) {
                continue;
            }

            rev.RevID.PinAndUse(slice =>
            {
                if(Native.c4doc_selectRevision(doc, slice, false, null)) {
                    // The revision exists locally; drop every copy of it from
                    // the caller's list (which may contain duplicates).
                    while (revs.Contains (rev)) {
                        removedCount++;
                        revs.Remove (rev);
                    }
                }
            });
        }
    } finally {
        // Always release the last fetched document handle.
        Native.c4doc_free(doc);
    }

    return removedCount;
}
// Collects the blob-store keys of all attachments referenced by current leaf
// revisions of every document (deleted docs are only scanned if conflicted).
public ICollection<BlobKey> FindAllAttachmentKeys()
{
    var keys = new HashSet<BlobKey>();
    var options = C4EnumeratorOptions.DEFAULT;
    // Bodies are loaded lazily per-revision below; include deleted docs so
    // conflicted deletions are still scanned.
    options.flags &= ~C4EnumeratorFlags.IncludeBodies;
    options.flags |= C4EnumeratorFlags.IncludeDeleted;
    var e = new CBForestDocEnumerator(Forest, null, null, options);
    foreach(var next in e) {
        var docInfo = next.DocumentInfo;
        if(!docInfo->HasAttachments || (docInfo->IsDeleted && !docInfo->IsConflicted)) {
            continue;
        }

        var doc = next.GetDocument();
        // Since db is assumed to have just been compacted, we know that non-current revisions
        // won't have any bodies. So only scan the current revs.
        do {
            if(doc->selectedRev.IsActive && doc->selectedRev.HasAttachments) {
                ForestDBBridge.Check(err => Native.c4doc_loadRevisionBody(doc, err));
                var body = doc->selectedRev.body;
                if(body.size > 0) {
                    var rev = Manager.GetObjectMapper().ReadValue<IDictionary<string, object>>(body);
                    var attachments = rev.Get("_attachments").AsDictionary<string, IDictionary<string, object>>();
                    if(attachments == null) {
                        // NOTE: this `continue` jumps to the do/while condition,
                        // i.e. it advances to the next leaf revision.
                        continue;
                    }

                    foreach(var entry in attachments) {
                        try {
                            var key = new BlobKey(entry.Value.GetCast<string>("digest"));
                            keys.Add(key);
                        } catch(Exception) {
                            // Malformed digest: log and keep scanning the rest.
                            Log.To.Database.W(TAG, "Invalid digest {0}; skipping", entry.Value.GetCast<string>("digest"));
                        }
                    }
                }
            }
        } while(Native.c4doc_selectNextLeafRevision(doc, true, true, null));
    }

    return keys;
}
// Purges revisions of the given documents without leaving tombstones.  The
// magic revision ID "*" purges the whole document.  Returns a map of
// docID -> list of revision IDs actually purged.
// <http://wiki.apache.org/couchdb/Purge_Documents>
public IDictionary<string, object> PurgeRevisions(IDictionary<string, IList<string>> docsToRev)
{
    IDictionary<string, object> result = new Dictionary<string, object>();
    if(docsToRev.Count == 0) {
        return result;
    }

    Log.To.Database.I(TAG, "Purging {0} docs...", docsToRev.Count);
    RunInTransaction(() =>
    {
        foreach(var docRevPair in docsToRev) {
            var docID = docRevPair.Key;
            WithC4Document(docID, null, false, false, doc => {
                if(!doc->Exists) {
                    throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.NotFound, TAG,
                        "Invalid attempt to purge revisions of a nonexistent document (ID={0})",
                        new SecureLogString(docID, LogMessageSensitivity.PotentiallyInsecure));
                }

                var revsPurged = default(IList<string>);
                var revIDs = docRevPair.Value;
                if(revIDs.Count == 0) {
                    revsPurged = new List<string>();
                } else if(revIDs.Contains("*")) {
                    // Delete all revisions if magic "*" revision ID is given:
                    ForestDBBridge.Check(err => Native.c4db_purgeDoc(Forest, doc->docID, err));
                    revsPurged = new List<string> { "*" };
                    Log.To.Database.I(TAG, "Purged document '{0}'", new SecureLogString(docID, LogMessageSensitivity.PotentiallyInsecure));
                } else {
                    var purged = new List<string>();
                    foreach(var revID in revIDs) {
                        if(Native.c4doc_purgeRevision(doc, revID, null) > 0) {
                            purged.Add(revID);
                        }
                    }

                    if(purged.Count > 0) {
                        // Only persist the document when something was removed.
                        ForestDBBridge.Check(err => Native.c4doc_save(doc, (uint)MaxRevTreeDepth, err));
                        Log.To.Database.I(TAG, "Purged doc '{0}' revs {1}",
                            new SecureLogString(docID, LogMessageSensitivity.PotentiallyInsecure),
                            new LogJsonString(revIDs));
                    }

                    revsPurged = purged;
                }

                result[docID] = revsPurged;
            });
        }

        return true;
    });

    return result;
}
// Purges every document whose expiration time has passed, returning the IDs
// of the purged documents.
public IList<string> PurgeExpired()
{
    var expiredIds = new List<string>();
    RunInTransaction (() => {
        var expiryEnumerator = new CBForestExpiryEnumerator (Forest, true);
        foreach (var docId in expiryEnumerator) {
            expiredIds.Add (docId);
        }

        // Purge every revision ("*") of each expired document.
        var purgeMap = expiredIds.ToDictionary<string, string, IList<string>> (x => x, x => new List<string> { "*" });
        PurgeRevisions (purgeMap);
        return true;
    });

    return expiredIds;
}
// Reads a local ("_local/") document from the raw store.  Returns null when
// the doc doesn't exist, has an empty body, or (if revId is given) the stored
// revision doesn't match.
public RevisionInternal GetLocalDocument(string docId, RevisionID revId)
{
    if(!docId.StartsWith("_local/")) {
        return null;
    }

    var retVal = default(RevisionInternal);
    WithC4Raw(docId, "_local", doc =>
    {
        if(doc == null) {
            return;
        }

        var gotRevId = doc->meta.AsRevID();
        // A requested revision must match the stored one, and an empty body
        // means there is nothing to return.
        if(revId != null && revId != gotRevId || doc->body.size == 0) {
            return;
        }

        var properties = default(IDictionary<string, object>);
        try {
            properties = Manager.GetObjectMapper().ReadValue<IDictionary<string, object>>(doc->body);
        } catch(CouchbaseLiteException) {
            Log.To.Database.W(TAG, "Invalid JSON for document {0}\n{1}",
                new SecureLogString(docId, LogMessageSensitivity.PotentiallyInsecure),
                new SecureLogString(doc->body.ToArray(), LogMessageSensitivity.PotentiallyInsecure));
            return;
        }

        properties.SetDocRevID(docId, gotRevId);
        // BUG FIX: the returned revision must carry the *stored* revision ID
        // (gotRevId).  Previously the caller-supplied revId was used, which is
        // null whenever the caller did not request a specific revision.
        retVal = new RevisionInternal(docId, gotRevId, false);
        retVal.SetProperties(properties);
    });

    return retVal;
}
// Inserts or updates a local ("_local/") revision.  Local revisions use
// simple "<generation>-local" revision IDs; when obeyMVCC is set, prevRevId
// must match the currently stored revision.
public RevisionInternal PutLocalRevision(RevisionInternal revision, RevisionID prevRevId, bool obeyMVCC)
{
    var docId = revision.DocID;
    if(!docId.StartsWith("_local/")) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadId, TAG,
            "Invalid document ID ({0}) in write operation, it must start with _local/",
            new SecureLogString(docId, LogMessageSensitivity.PotentiallyInsecure));
    }

    if(revision.Deleted) {
        // Deletion is handled separately; the input revision is returned untouched.
        DeleteLocalRevision(docId, prevRevId, obeyMVCC);
        return revision;
    }

    var result = default(RevisionInternal);
    RunInTransaction(() =>
    {
        var json = Manager.GetObjectMapper().WriteValueAsString(revision.GetProperties(), true);
        WithC4Raw(docId, "_local", doc =>
        {
            var generation = prevRevId == null ? 0 : prevRevId.Generation;
            if(obeyMVCC) {
                var currentRevId = (doc != null ? doc->meta.AsRevID() : null);
                if(prevRevId != null) {
                    // Caller claims to update an existing revision: it must match
                    // the stored revision and have a valid (non-zero) generation.
                    if(prevRevId != currentRevId) {
                        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Conflict, TAG,
                            "Attempt to write new revision on {0} of {1} when a newer revision ({2}) exists",
                            prevRevId, new SecureLogString(docId, LogMessageSensitivity.PotentiallyInsecure),
                            currentRevId);
                    }

                    if(generation == 0) {
                        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadId, TAG,
                            "Attempt to write new revision on invalid revision ID ({0}) for document {1}",
                            prevRevId, new SecureLogString(docId, LogMessageSensitivity.PotentiallyInsecure));
                    }
                } else if(doc != null) {
                    // No previous revision specified, but the doc already exists.
                    throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Conflict, TAG,
                        "Revision ID not specified, but document {0} already exists (current rev: {1})",
                        new SecureLogString(docId, LogMessageSensitivity.PotentiallyInsecure), currentRevId);
                }
            }

            // New revision ID: next generation with the "-local" suffix.
            var newRevId = String.Format("{0}-local", ++generation).AsRevID();
            ForestDBBridge.Check(err => Native.c4raw_put(Forest, "_local", docId, newRevId.ToString(), json, err));
            result = revision.Copy(docId, newRevId);
        });

        return true;
    });

    return result;
}
// Reads the raw "info" document stored under `key`; returns null when absent.
public string GetInfo(string key)
{
    string result = null;
    WithC4Raw(key, "info", doc =>
    {
        if(doc != null) {
            result = (string)doc->body;
        }
    });

    return result;
}
// Stores `info` in the raw "info" store under `key` (null rev ID = no MVCC check).
public void SetInfo(string key, string info)
{
    ForestDBBridge.Check(c4err => Native.c4raw_put(Forest, "info", key, null, info, c4err));
}
// Creates a new revision on top of inPrevRevId (or a brand-new document when
// inDocId is null), runs the optional validation block, saves, and notifies
// the delegate.  Returns the created revision, or null if the transaction
// failed.
public RevisionInternal PutRevision(string inDocId, RevisionID inPrevRevId, IDictionary<string, object> properties,
    bool deleting, bool allowConflict, Uri source, StoreValidation validationBlock)
{
    if(_config.HasFlag(C4DatabaseFlags.ReadOnly)) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Forbidden, TAG,
            "Attempting to write to a readonly database (PutRevision)");
    }

    if(inDocId == null) {
        inDocId = Misc.CreateGUID();
    }

    C4Document* doc = null;
    var putRev = default(RevisionInternal);
    var change = default(DocumentChange);
    var nativeDb = Forest;
    var success = RunInTransaction(() =>
    {
        try {
            var docId = inDocId;
            var prevRevId = inPrevRevId;
            var attachments = properties.CblAttachments();
            if(attachments != null) {
                // https://github.com/couchbase/couchbase-lite-net/issues/749
                // Need to ensure revpos is correct for a revision inserted on top
                // of a deletion
                var existing = (C4Document*)ForestDBBridge.Check(err => Native.c4doc_getForPut(nativeDb, docId, prevRevId?.ToString(), deleting,
                    allowConflict, err));
                if(existing->IsDeleted) {
                    foreach(var attach in attachments) {
                        var metadata = attach.Value.AsDictionary<string, object>();
                        if(metadata != null) {
                            metadata["revpos"] = existing->revID.AsRevID().Generation + 1;
                        }
                    }
                }

                Native.c4doc_free(existing);
            }

            // Serialize the (stripped) properties; an empty body for null input.
            var json = default(string);
            if(properties != null) {
                json = Manager.GetObjectMapper().WriteValueAsString(Database.StripDocumentJSON(properties), true);
            } else {
                json = "{}";
            }

            C4DocPutRequest rq = new C4DocPutRequest {
                body = json,
                docID = docId,
                deletion = deleting,
                hasAttachments = properties?.Get("_attachments") != null,
                existingRevision = false,
                allowConflict = allowConflict,
                history = prevRevId == null ? null : new[] { prevRevId.ToString() },
                save = false
            };

            // c4doc_put reports (via the out parameter) the index of the common
            // ancestor in the supplied history.
            UIntPtr commonAncestorIndex = UIntPtr.Zero;
            doc = (C4Document*)ForestDBBridge.Check(err =>
            {
                UIntPtr tmp;
                var retVal = Native.c4doc_put(nativeDb, rq, &tmp, err);
                commonAncestorIndex = tmp;
                return retVal;
            });

            if(docId == null) {
                docId = (string)doc->docID;
            }

            var newRevID = doc->selectedRev.revID.AsRevID();
            Body body = null;
            if(properties != null) {
                properties.SetDocRevID(docId, newRevID);
                body = new Body(properties);
            }

            putRev = new RevisionInternal(docId, newRevID, deleting, body);
            // Ancestor index 0 presumably means nothing new was inserted, so
            // there is nothing to validate or save -- TODO confirm against the
            // c4doc_put contract.
            if((uint)commonAncestorIndex == 0U) {
                return true;
            }

            if(validationBlock != null) {
                // Validate against the parent revision (if one exists).
                var prevRev = default(RevisionInternal);
                if(Native.c4doc_selectParentRevision(doc)) {
                    prevRev = new ForestRevisionInternal(doc, false);
                }

                var status = validationBlock(putRev, prevRev, prevRev == null ? null : prevRev.RevID);
                if(status.IsError) {
                    Log.To.Validation.I(TAG, "{0} ({1}) failed validation", new SecureLogString(docId, LogMessageSensitivity.PotentiallyInsecure), new SecureLogString(newRevID, LogMessageSensitivity.PotentiallyInsecure));
                    throw new CouchbaseLiteException("A document failed validation", status.Code);
                }
            }

            var isWinner = SaveDocument(doc, newRevID, properties);
            putRev.Sequence = (long)doc->sequence;
            change = ChangeWithNewRevision(putRev, isWinner, doc, null);
            return true;
        } finally {
            // Always free the native document, on success and failure alike.
            Native.c4doc_free(doc);
        }
    });

    if(!success) {
        return null;
    }

    if(Delegate != null && change != null) {
        Delegate.DatabaseStorageChanged(change);
    }

    return putRev;
}
// Inserts a revision that already carries its full revision history (e.g. one
// received from a replication source) and notifies the delegate.
// NOTE(review): the validationBlock parameter is never used in this method --
// confirm whether validation was meant to run before the insert.
public void ForceInsert(RevisionInternal inRev, IList<RevisionID> revHistory, StoreValidation validationBlock, Uri source)
{
    if(_config.HasFlag(C4DatabaseFlags.ReadOnly)) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Forbidden, TAG,
            "Attempting to write to a readonly database (ForceInsert)");
    }

    var json = Manager.GetObjectMapper().WriteValueAsString(inRev.GetProperties(), true);
    var change = default(DocumentChange);
    RunInTransaction(() =>
    {
        // First get the CBForest doc:
        WithC4Document(inRev.DocID, null, false, true, doc =>
        {
            // Insert the revision together with its complete ancestry.
            ForestDBBridge.Check(err => Native.c4doc_insertRevisionWithHistory(doc, json, inRev.Deleted,
                inRev.GetAttachments() != null, revHistory.Select(x => x.ToString()).ToArray(), err));

            // Save updated doc back to the database:
            var isWinner = SaveDocument(doc, revHistory[0], inRev.GetProperties());
            inRev.Sequence = (long)doc->sequence;
            Log.To.Database.D(TAG, "Saved {0}", inRev.DocID);
            change = ChangeWithNewRevision(inRev, isWinner, doc, source);
        });

        return true;
    });

    if(change != null && Delegate != null) {
        Delegate.DatabaseStorageChanged(change);
    }
}
// Returns the cached view storage for `name`, creating and caching it on
// first access.  Returns null when the storage cannot be created.
public IViewStore GetViewStorage(string name, bool create)
{
    var cached = _views[name];
    if(cached != null) {
        return cached;
    }

    try {
        cached = new ForestDBViewStore(this, name, create);
        _views[name] = cached;
        return cached;
    } catch(InvalidOperationException) {
        return null;
    } catch(Exception e) {
        Log.To.View.W(TAG, String.Format("Error creating view storage for {0}, returning null...", name), e);
        return null;
    }
}
// Enumerates view names by scanning the database directory for index files.
public IEnumerable<string> GetAllViews()
{
    var pattern = "*." + ForestDBViewStore.VIEW_INDEX_PATH_EXTENSION;
    var indexFiles = System.IO.Directory.GetFiles(Directory, pattern);
    return indexFiles.Select(x => ForestDBViewStore.FileNameToViewName(Path.GetFileName(x)));
}
#endregion
}
#endregion
}
| |
/*
* SHS -- The Scalable Hyperlink Store
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT
* LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR
* A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
*
* See the Apache Version 2.0 License for specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Diagnostics;
using System.IO;
using SHS;
// Computes the strongly connected components (SCCs) of the link graph stored
// in an SHS store.  The shape is Kosaraju-style: a forward DFS records
// vertices in finishing order, then a DFS over the backward (transposed)
// links emits one SCC per tree.  Both passes are iterative (explicit Frame
// stack) so deep graphs cannot overflow the call stack.  Output files:
// "scc-main.bin" (vertex UIDs grouped by SCC) and "scc-index.bin"
// ((size, start position) pairs, one per SCC).
public class SCC2 {
  // One node of the explicit DFS stack: `id` is the local vertex and `ctr`
  // the index of the next outgoing link to visit.  Links of vertex v occupy
  // links[pastLast[v-1] .. pastLast[v]-1] (CSR layout).
  private class Frame {
    internal Frame parent;
    internal int id;
    internal uint ctr;

    internal Frame(Frame parent, int id, uint[] pastLast) {
      this.parent = parent;
      this.id = id;
      // Vertex 0's links start at offset 0; vertex v's start where v-1's end.
      this.ctr = id == 0 ? 0 : pastLast[id - 1];
    }
  }

  // Concatenates a jagged neighbor array into one flat UID array.
  private static long[] Flatten(long[][] nbors) {
    int c = 0;
    for (int i = 0; i < nbors.Length; i++) {
      c += nbors[i].Length;
    }
    long[] res = new long[c];
    int p = 0;
    for (int i = 0; i < nbors.Length; i++) {
      for (int j = 0; j < nbors[i].Length; j++) {
        res[p++] = nbors[i][j];
      }
    }
    return res;
  }

  public static void Main(string[] args) {
    if (args.Length != 2) {
      Console.Error.WriteLine("Usage: SHS.SCC2 <leader> <store>");
    } else {
      var sw = Stopwatch.StartNew();
      var shs = new Service(args[0]).OpenStore(Guid.Parse(args[1]));

      // Pass 1: map each UID that has both in- and out-links to a dense local
      // ID; everything else gets -1 (those are trivially singleton SCCs).
      var map = shs.AllocateUidState<int>(); // Mapping from UID to local ID
      int numVerts = 0; // Number of core vertices
      var batch = new Batch<long>(500000);
      foreach (long u in shs.Uids()) {
        batch.Add(u);
        if (batch.Full || shs.IsLastUid(u)) {
          int[] fwdDegs = shs.BatchedGetDegree(batch, Dir.Fwd);
          int[] bwdDegs = shs.BatchedGetDegree(batch, Dir.Bwd);
          var mapChunk = new int[batch.Count];
          for (int i = 0; i < batch.Count; i++) {
            mapChunk[i] = fwdDegs[i] == 0 || bwdDegs[i] == 0 ? -1 : numVerts++;
          }
          map.SetMany(batch, mapChunk);
          batch.Reset();
        }
      }

      // Pass 2: count edges between core vertices so the CSR arrays can be
      // allocated at their exact size.
      uint numEdges = 0;
      foreach (var up in map.GetAll()) {
        if (up.val != -1) batch.Add(up.uid);
        if (batch.Full || shs.IsLastUid(up.uid)) {
          long[][] nbors = shs.BatchedGetLinks(batch, Dir.Fwd);
          int[] mappedNbors = map.GetMany(Flatten(nbors));
          int q = 0;
          for (int i = 0; i < nbors.Length; i++) {
            for (int j = 0; j < nbors[i].Length; j++) {
              if (mappedNbors[q++] != -1) numEdges++;
            }
          }
          batch.Reset();
        }
      }

      // Pass 3: build the forward graph in CSR form (pastLast + links),
      // keeping only edges between core vertices.
      uint[] pastLast = new uint[numVerts]; // one past last link of that page
      var links = new int[numEdges];
      int p = 0;
      uint r = 0;
      foreach (var up in map.GetAll()) {
        if (up.val != -1) batch.Add(up.uid);
        if (batch.Full || shs.IsLastUid(up.uid)) {
          long[][] nbors = shs.BatchedGetLinks(batch, Dir.Fwd);
          int[] mappedNbors = map.GetMany(Flatten(nbors));
          int q = 0;
          for (int i = 0; i < nbors.Length; i++) {
            for (int j = 0; j < nbors[i].Length; j++) {
              int id = mappedNbors[q++];
              if (id != -1) links[r++] = id;
            }
            pastLast[p++] = r;
          }
          batch.Reset();
        }
      }

      // Forward DFS: fill stk from the end with vertices in completion order,
      // so stk[0] ends up holding the last-finished vertex.
      var bv = new BitVector(numVerts); // All false at creation
      int[] stk = new int[numVerts];
      int stkPtr = stk.Length;
      for (int u = 0; u < numVerts; u++) {
        if (!bv[u]) {
          bv[u] = true;
          Frame frame = new Frame(null, u, pastLast);
          while (frame != null) {
            while (frame.ctr < pastLast[frame.id]) {
              int v = links[frame.ctr++];
              if (!bv[v]) {
                bv[v] = true;
                frame = new Frame(frame, v, pastLast);
              }
            }
            stk[--stkPtr] = frame.id;
            frame = frame.parent;
          }
        }
      }

      // Rebuild the CSR arrays in place with the *backward* links.
      p = 0;
      r = 0;
      foreach (var up in map.GetAll()) {
        if (up.val != -1) batch.Add(up.uid);
        if (batch.Full || shs.IsLastUid(up.uid)) {
          long[][] nbors = shs.BatchedGetLinks(batch, Dir.Bwd);
          int[] mappedNbors = map.GetMany(Flatten(nbors));
          int q = 0;
          for (int i = 0; i < nbors.Length; i++) {
            for (int j = 0; j < nbors[i].Length; j++) {
              int id = mappedNbors[q++];
              if (id != -1) links[r++] = id;
            }
            pastLast[p++] = r;
          }
          batch.Reset();
        }
      }

      // pam is the inverse of `map` for core vertices: local ID -> UID.
      var pam = new long[numVerts];
      p = 0;
      foreach (var up in map.GetAll()) {
        if (up.val != -1) pam[p++] = up.uid;
      }

      // Backward DFS over stk in order: each DFS tree found is one SCC.
      // UIDs go to scc-main.bin; (size, start position) pairs to scc-index.bin.
      using (var sccWr = new BinaryWriter(new BufferedStream(new FileStream("scc-main.bin", FileMode.Create, FileAccess.Write)))) {
        using (var idxWr = new BinaryWriter(new BufferedStream(new FileStream("scc-index.bin", FileMode.Create, FileAccess.Write)))) {
          long sccPos = 0;
          bv.SetAll(false);
          for (int i = 0; i < stk.Length; i++) {
            int u = stk[i];
            if (!bv[u]) {
              long sccSize = 0;
              bv[u] = true;
              Frame frame = new Frame(null, u, pastLast);
              while (frame != null) {
                while (frame.ctr < pastLast[frame.id]) {
                  int v = links[frame.ctr++];
                  if (!bv[v]) {
                    bv[v] = true;
                    frame = new Frame(frame, v, pastLast);
                  }
                }
                sccWr.Write(pam[frame.id]);
                sccSize++;
                frame = frame.parent;
              }
              idxWr.Write(sccSize);
              idxWr.Write(sccPos);
              sccPos += sccSize;
            }
          }
          // Non-core vertices (val == -1) are singleton SCCs.
          foreach (var up in map.GetAll()) {
            if (up.val == -1) {
              sccWr.Write(up.uid);
              idxWr.Write(1L);
              idxWr.Write(sccPos++);
            }
          }
        }
      }

      // Tally SCC sizes from the index file.
      var dict = new System.Collections.Generic.Dictionary<long, long>();
      using (var ib = new BinaryReader(new BufferedStream(new FileStream("scc-index.bin", FileMode.Open, FileAccess.Read)))) {
        while (true) {
          try {
            long size = ib.ReadInt64();
            // The position field is read only to advance the stream.
            long pos = ib.ReadInt64();
            if (!dict.ContainsKey(size)) dict[size] = 0;
            dict[size]++;
          } catch (EndOfStreamException) {
            break;
          }
        }
      }
      long maxSize = 0;
      long numSCCs = 0;
      foreach (var kv in dict) {
        if (kv.Key > maxSize) maxSize = kv.Key;
        numSCCs += kv.Value;
      }
      // NOTE(review): maxSize and numSCCs are computed but never printed --
      // confirm whether they were meant to appear in the summary line below.
      Console.WriteLine("Done. Job took {0} seconds.", 0.001 * sw.ElapsedMilliseconds);
    }
  }
}
| |
/********************************************************
* ADO.NET 2.0 Data Provider for SQLite Version 3.X
* Written by Robert Simpson (robert@blackcastlesoft.com)
*
* Released to the public domain, use at your own risk!
********************************************************/
namespace Mono.Data.Sqlite
{
using System;
using System.Data;
using System.Runtime.InteropServices;
using System.Collections.Generic;
using System.Globalization;
/// <summary>
/// This class implements SQLiteBase completely, and is the guts of the code that interop's SQLite with .NET
/// </summary>
internal class SQLite3 : SQLiteBase
{
/// <summary>
/// The opaque pointer returned to us by the sqlite provider
/// </summary>
    protected SqliteConnectionHandle _sql;
    // File name of the open database; used as the key for the connection pool.
    protected string _fileName;
    // True when this connection participates in the connection pool (see Close()).
    protected bool _usePool;
    // Pool version stamp captured when the connection was taken from the pool.
    protected int _poolVersion = 0;

#if !PLATFORM_COMPACTFRAMEWORK
    // Guards against re-entrant temp-schema construction in Prepare().
    private bool _buildingSchema = false;
#endif
#if MONOTOUCH
    // Keeps this instance reachable while native code may call back into it.
    GCHandle gch;
#endif
    /// <summary>
    /// The user-defined functions registered on this connection
    /// </summary>
    protected SqliteFunction[] _functionsArray;
    // Constructs the interop layer with the given date-serialization format.
    internal SQLite3(SQLiteDateFormats fmt)
      : base(fmt)
    {
#if MONOTOUCH
      // Allocate a GC handle so this instance stays alive for native callbacks;
      // it is freed in Close().
      gch = GCHandle.Alloc (this);
#endif
    }
protected override void Dispose(bool bDisposing)
{
if (bDisposing)
Close();
}
    // It isn't necessary to cleanup any functions we've registered. If the connection
    // goes to the pool and is resurrected later, re-registered functions will overwrite the
    // previous functions. The SqliteFunctionCookieHandle will take care of freeing unmanaged
    // resources belonging to the previously-registered functions.
    internal override void Close()
    {
      if (_sql != null)
      {
        if (_usePool)
        {
          // Pooled connections are reset and handed back rather than disposed.
          SQLiteBase.ResetConnection(_sql);
          SqliteConnectionPool.Add(_fileName, _sql, _poolVersion);
        }
        else
          _sql.Dispose();
      }

      _sql = null;

#if MONOTOUCH
      // Release the GC handle allocated in the constructor.
      if (gch.IsAllocated)
        gch.Free ();
#endif
    }
    // Interrupts any statement currently executing on this connection.
    internal override void Cancel()
    {
      UnsafeNativeMethods.sqlite3_interrupt(_sql);
    }
    // Version string of the loaded native SQLite library.
    internal override string Version
    {
      get
      {
        return SQLite3.SQLiteVersion;
      }
    }
    // Version string reported by sqlite3_libversion(), decoded from UTF-8.
    internal static string SQLiteVersion
    {
      get
      {
        return UTF8ToString(UnsafeNativeMethods.sqlite3_libversion(), -1);
      }
    }
    // Number of rows affected by the most recent statement on this connection
    // (sqlite3_changes).
    internal override int Changes
    {
      get
      {
        return UnsafeNativeMethods.sqlite3_changes(_sql);
      }
    }
    // Opens the database file, first trying to take a pooled connection when
    // pooling is enabled, then (re)binds user-defined functions and sets a
    // zero busy timeout.  A no-op when a connection is already open.
    internal override void Open(string strFilename, SQLiteOpenFlagsEnum flags, int maxPoolSize, bool usePool)
    {
      if (_sql != null) return;

      _usePool = usePool;
      if (usePool)
      {
        _fileName = strFilename;
        _sql = SqliteConnectionPool.Remove(strFilename, maxPoolSize, out _poolVersion);
      }

      if (_sql == null)
      {
        IntPtr db;

#if !SQLITE_STANDARD
        int n = UnsafeNativeMethods.sqlite3_open_interop(ToUTF8(strFilename), (int)flags, out db);
#else
        // Compatibility with versions < 3.5.0
        int n;
        if (UnsafeNativeMethods.use_sqlite3_open_v2) {
          n = UnsafeNativeMethods.sqlite3_open_v2(ToUTF8(strFilename), out db, (int)flags, IntPtr.Zero);
        } else {
          Console.WriteLine ("Your sqlite3 version is old - please upgrade to at least v3.5.0!");
          n = UnsafeNativeMethods.sqlite3_open (ToUTF8 (strFilename), out db);
        }
#endif
        if (n > 0) throw new SqliteException(n, null);

        _sql = db;
      }

      // Bind functions to this connection. If any previous functions of the same name
      // were already bound, then the new bindings replace the old.
      _functionsArray = SqliteFunction.BindFunctions(this);
      SetTimeout(0);
    }
    // Evicts all pooled connections for this database file.
    internal override void ClearPool()
    {
      SqliteConnectionPool.ClearPool(_fileName);
    }
internal override void SetTimeout(int nTimeoutMS)
{
int n = UnsafeNativeMethods.sqlite3_busy_timeout(_sql, nTimeoutMS);
if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
    // Advances the statement one step: returns true when a row is available
    // (result code 100) and false when execution finished (101).  Lock/busy
    // errors reset the statement and retry with a random backoff until the
    // command timeout elapses.
    internal override bool Step(SqliteStatement stmt)
    {
      int n;
      Random rnd = null;
      uint starttick = (uint)Environment.TickCount;
      uint timeout = (uint)(stmt._command._commandTimeout * 1000);

      while (true)
      {
        n = UnsafeNativeMethods.sqlite3_step(stmt._sqlite_stmt);

        if (n == 100) return true;   // a row of data is ready
        if (n == 101) return false;  // execution is complete

        if (n > 0)
        {
          int r;

          // An error occurred, attempt to reset the statement. If the reset worked because the
          // schema has changed, re-try the step again. If it errored out because the database
          // is locked, then keep retrying until the command timeout occurs.
          r = Reset(stmt);

          if (r == 0)
            throw new SqliteException(n, SQLiteLastError());
          else if ((r == 6 || r == 5) && stmt._command != null) // SQLITE_LOCKED || SQLITE_BUSY
          {
            // Keep trying
            if (rnd == null) // First time we've encountered the lock
              rnd = new Random();

            // If we've exceeded the command's timeout, give up and throw an error
            if ((uint)Environment.TickCount - starttick > timeout)
            {
              throw new SqliteException(r, SQLiteLastError());
            }
            else
            {
              // Otherwise sleep for a random amount of time up to 150ms
              System.Threading.Thread.CurrentThread.Join(rnd.Next(1, 150));
            }
          }
        }
      }
    }
    // Resets the statement.  Returns 0 on success, -1 when the statement was
    // re-prepared because the schema changed (code 17), or the raw error code
    // for SQLITE_LOCKED (6) / SQLITE_BUSY (5) so the caller can retry.
    internal override int Reset(SqliteStatement stmt)
    {
      int n;

#if !SQLITE_STANDARD
      n = UnsafeNativeMethods.sqlite3_reset_interop(stmt._sqlite_stmt);
#else
      n = UnsafeNativeMethods.sqlite3_reset(stmt._sqlite_stmt);
#endif

      // If the schema changed, try and re-prepare it
      if (n == 17) // SQLITE_SCHEMA
      {
        // Recreate a dummy statement
        string str;
        using (SqliteStatement tmp = Prepare(null, stmt._sqlStatement, null, (uint)(stmt._command._commandTimeout * 1000), out str))
        {
          // Finalize the existing statement
          stmt._sqlite_stmt.Dispose();

          // Reassign a new statement pointer to the old statement and clear the temporary one
          stmt._sqlite_stmt = tmp._sqlite_stmt;
          tmp._sqlite_stmt = null;

          // Reapply parameters
          stmt.BindParameters();
        }

        return -1; // Reset was OK, with schema change
      }
      else if (n == 6 || n == 5) // SQLITE_LOCKED || SQLITE_BUSY
        return n;

      if (n > 0)
        throw new SqliteException(n, SQLiteLastError());

      return 0; // We reset OK, no schema changes
    }
    // Text of the most recent error reported by this connection.
    internal override string SQLiteLastError()
    {
      return SQLiteBase.SQLiteLastError(_sql);
    }
    // Compiles strSql into a statement, handling three special cases: schema
    // changes (code 17, retried up to 3 times), the nonstandard "TYPES"
    // prefix pseudo-statement, and TEMP schema references that require the
    // temporary schema to be built first.  Lock/busy (6/5) is retried with a
    // random backoff until timeoutMS elapses.  strRemain receives any SQL
    // text left over after the first complete statement.
    internal override SqliteStatement Prepare(SqliteConnection cnn, string strSql, SqliteStatement previous, uint timeoutMS, out string strRemain)
    {
      IntPtr stmt = IntPtr.Zero;
      IntPtr ptr = IntPtr.Zero;
      int len = 0;
      int n = 17;
      int retries = 0;
      byte[] b = ToUTF8(strSql);
      string typedefs = null;
      SqliteStatement cmd = null;
      Random rnd = null;
      uint starttick = (uint)Environment.TickCount;

      // Pin the UTF-8 buffer so the native library can read it in place.
      GCHandle handle = GCHandle.Alloc(b, GCHandleType.Pinned);
      IntPtr psql = handle.AddrOfPinnedObject();
      try
      {
        while ((n == 17 || n == 6 || n == 5) && retries < 3)
        {
#if !SQLITE_STANDARD
          n = UnsafeNativeMethods.sqlite3_prepare_interop(_sql, psql, b.Length - 1, out stmt, out ptr, out len);
#else
          n = UnsafeNativeMethods.sqlite3_prepare(_sql, psql, b.Length - 1, out stmt, out ptr);
          len = -1;
#endif

          if (n == 17)
            retries++;
          else if (n == 1)
          {
            if (String.Compare(SQLiteLastError(), "near \"TYPES\": syntax error", StringComparison.OrdinalIgnoreCase) == 0)
            {
              // "TYPES" pseudo-statement: remember the declared types and
              // prepare the statement that follows them.
              int pos = strSql.IndexOf(';');
              if (pos == -1) pos = strSql.Length - 1;

              typedefs = strSql.Substring(0, pos + 1);
              strSql = strSql.Substring(pos + 1);

              strRemain = "";

              while (cmd == null && strSql.Length > 0)
              {
                cmd = Prepare(cnn, strSql, previous, timeoutMS, out strRemain);
                strSql = strRemain;
              }

              if (cmd != null)
                cmd.SetTypes(typedefs);

              return cmd;
            }
#if !PLATFORM_COMPACTFRAMEWORK
            else if (_buildingSchema == false && String.Compare(SQLiteLastError(), 0, "no such table: TEMP.SCHEMA", 0, 26, StringComparison.OrdinalIgnoreCase) == 0)
            {
              // Build the TEMP schema via the registered extension, then
              // retry; _buildingSchema prevents infinite recursion here.
              strRemain = "";
              _buildingSchema = true;
              try
              {
                ISQLiteSchemaExtensions ext = ((IServiceProvider)SqliteFactory.Instance).GetService(typeof(ISQLiteSchemaExtensions)) as ISQLiteSchemaExtensions;

                if (ext != null)
                  ext.BuildTempSchema(cnn);

                while (cmd == null && strSql.Length > 0)
                {
                  cmd = Prepare(cnn, strSql, previous, timeoutMS, out strRemain);
                  strSql = strRemain;
                }

                return cmd;
              }
              finally
              {
                _buildingSchema = false;
              }
            }
#endif
          }
          else if (n == 6 || n == 5) // Locked -- delay a small amount before retrying
          {
            // Keep trying
            if (rnd == null) // First time we've encountered the lock
              rnd = new Random();

            // If we've exceeded the command's timeout, give up and throw an error
            if ((uint)Environment.TickCount - starttick > timeoutMS)
            {
              throw new SqliteException(n, SQLiteLastError());
            }
            else
            {
              // Otherwise sleep for a random amount of time up to 150ms
              System.Threading.Thread.CurrentThread.Join(rnd.Next(1, 150));
            }
          }
        }

        if (n > 0) throw new SqliteException(n, SQLiteLastError());

        // Whatever the native call did not consume is the remaining SQL text.
        strRemain = UTF8ToString(ptr, len);

        if (stmt != IntPtr.Zero) cmd = new SqliteStatement(this, stmt, strSql.Substring(0, strSql.Length - strRemain.Length), previous);

        return cmd;
      }
      finally
      {
        handle.Free();
      }
    }
    // Binds a double to the given parameter slot; the Compact Framework build
    // uses a by-ref interop variant of the native call.
    internal override void Bind_Double(SqliteStatement stmt, int index, double value)
    {
#if !PLATFORM_COMPACTFRAMEWORK
      int n = UnsafeNativeMethods.sqlite3_bind_double(stmt._sqlite_stmt, index, value);
#else
      int n = UnsafeNativeMethods.sqlite3_bind_double_interop(stmt._sqlite_stmt, index, ref value);
#endif
      if (n > 0) throw new SqliteException(n, SQLiteLastError());
    }
internal override void Bind_Int32(SqliteStatement stmt, int index, int value)
{
int n = UnsafeNativeMethods.sqlite3_bind_int(stmt._sqlite_stmt, index, value);
if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
    // Binds a 64-bit integer; the Compact Framework build uses a by-ref
    // interop variant of the native call.
    internal override void Bind_Int64(SqliteStatement stmt, int index, long value)
    {
#if !PLATFORM_COMPACTFRAMEWORK
      int n = UnsafeNativeMethods.sqlite3_bind_int64(stmt._sqlite_stmt, index, value);
#else
      int n = UnsafeNativeMethods.sqlite3_bind_int64_interop(stmt._sqlite_stmt, index, ref value);
#endif
      if (n > 0) throw new SqliteException(n, SQLiteLastError());
    }
// Binds a string as UTF-8 text. ToUTF8 appends a NUL terminator, so b.Length - 1 is the
// payload byte count; (IntPtr)(-1) is SQLITE_TRANSIENT (SQLite copies the buffer).
internal override void Bind_Text(SqliteStatement stmt, int index, string value)
{
byte[] b = ToUTF8(value);
int n = UnsafeNativeMethods.sqlite3_bind_text(stmt._sqlite_stmt, index, b, b.Length - 1, (IntPtr)(-1));
if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
// Binds a DateTime by converting it to its UTF-8 text representation and storing it as
// text. ToUTF8 appends a NUL terminator, hence the Length - 1 payload size;
// (IntPtr)(-1) tells SQLite to take its own copy of the buffer.
internal override void Bind_DateTime(SqliteStatement stmt, int index, DateTime dt)
{
    byte[] encoded = ToUTF8(dt);
    int rc = UnsafeNativeMethods.sqlite3_bind_text(stmt._sqlite_stmt, index, encoded, encoded.Length - 1, (IntPtr)(-1));
    if (rc > 0)
        throw new SqliteException(rc, SQLiteLastError());
}
// Binds a byte array as a blob; (IntPtr)(-1) makes SQLite copy the data.
internal override void Bind_Blob(SqliteStatement stmt, int index, byte[] blobData)
{
    int rc = UnsafeNativeMethods.sqlite3_bind_blob(stmt._sqlite_stmt, index, blobData, blobData.Length, (IntPtr)(-1));
    if (rc > 0)
        throw new SqliteException(rc, SQLiteLastError());
}
// Binds SQL NULL to the 1-based parameter slot 'index'.
internal override void Bind_Null(SqliteStatement stmt, int index)
{
    int rc = UnsafeNativeMethods.sqlite3_bind_null(stmt._sqlite_stmt, index);
    if (rc > 0)
        throw new SqliteException(rc, SQLiteLastError());
}
// Returns the number of host parameters ('?', ':name', '@name') in the prepared statement.
internal override int Bind_ParamCount(SqliteStatement stmt)
{
return UnsafeNativeMethods.sqlite3_bind_parameter_count(stmt._sqlite_stmt);
}
// Returns the name of the parameter at the given 1-based index. The interop build
// returns the string length explicitly; the standard build relies on NUL termination
// (len = -1 makes UTF8ToString scan for the terminator).
internal override string Bind_ParamName(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_bind_parameter_name_interop(stmt._sqlite_stmt, index, out len), len);
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_bind_parameter_name(stmt._sqlite_stmt, index), -1);
#endif
}
// Returns the 1-based index of the named parameter, or 0 when the name is not found.
internal override int Bind_ParamIndex(SqliteStatement stmt, string paramName)
{
return UnsafeNativeMethods.sqlite3_bind_parameter_index(stmt._sqlite_stmt, ToUTF8(paramName));
}
// Returns the number of columns in the statement's result set.
internal override int ColumnCount(SqliteStatement stmt)
{
return UnsafeNativeMethods.sqlite3_column_count(stmt._sqlite_stmt);
}
// Returns the name of the result column at 'index'. Interop build supplies the byte
// length; the standard build passes -1 so UTF8ToString scans for the NUL terminator.
internal override string ColumnName(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_name_interop(stmt._sqlite_stmt, index, out len), len);
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_name(stmt._sqlite_stmt, index), -1);
#endif
}
// Returns the runtime type affinity (sqlite3_column_type) of the value currently in
// the given result column.
internal override TypeAffinity ColumnAffinity(SqliteStatement stmt, int index)
{
return UnsafeNativeMethods.sqlite3_column_type(stmt._sqlite_stmt, index);
}
// Returns the declared type (e.g. "VARCHAR") of the result column at 'index' and
// reports the value's runtime affinity via 'nAffinity'. When SQLite has no declared
// type (expression columns, etc.) this falls back to a caller-supplied per-statement
// type override, then to the empty string. (Removed a long-dead commented-out
// affinity->name switch that had been left in the body.)
internal override string ColumnType(SqliteStatement stmt, int index, out TypeAffinity nAffinity)
{
    int len;
#if !SQLITE_STANDARD
    IntPtr p = UnsafeNativeMethods.sqlite3_column_decltype_interop(stmt._sqlite_stmt, index, out len);
#else
    len = -1; // UTF8ToString scans to the NUL terminator when len is -1
    IntPtr p = UnsafeNativeMethods.sqlite3_column_decltype(stmt._sqlite_stmt, index);
#endif
    nAffinity = ColumnAffinity(stmt, index);
    if (p != IntPtr.Zero) return UTF8ToString(p, len);

    // No declared type: honor any override registered for this column.
    string[] ar = stmt.TypeDefinitions;
    if (ar != null && index < ar.Length && ar[index] != null)
        return ar[index];
    return String.Empty;
}
// Linear, case-insensitive (invariant culture) lookup of a column's ordinal by name.
// Returns -1 when no column matches.
internal override int ColumnIndex(SqliteStatement stmt, string columnName)
{
    int total = ColumnCount(stmt);
    for (int i = 0; i < total; i++)
    {
        if (String.Compare(columnName, ColumnName(stmt, i), true, CultureInfo.InvariantCulture) == 0)
            return i;
    }
    return -1;
}
// Returns the un-aliased source column name for the result column at 'index'.
// Not available on MonoTouch, where the metadata APIs are not compiled in.
internal override string ColumnOriginalName(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_origin_name_interop(stmt._sqlite_stmt, index, out len), len);
#elif MONOTOUCH
throw new NotImplementedException ();
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_origin_name(stmt._sqlite_stmt, index), -1);
#endif
}
// Returns the database name ("main", "temp", or an attached alias) that the result
// column at 'index' originates from. Not available on MonoTouch.
internal override string ColumnDatabaseName(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_database_name_interop(stmt._sqlite_stmt, index, out len), len);
#elif MONOTOUCH
throw new NotImplementedException ();
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_database_name(stmt._sqlite_stmt, index), -1);
#endif
}
// Returns the source table name of the result column at 'index'. Not available on MonoTouch.
internal override string ColumnTableName(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_table_name_interop(stmt._sqlite_stmt, index, out len), len);
#elif MONOTOUCH
throw new NotImplementedException ();
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_table_name(stmt._sqlite_stmt, index), -1);
#endif
}
// Fetches schema metadata for a single table column via sqlite3_table_column_metadata:
// declared type, collation sequence, NOT NULL flag, primary-key flag and
// autoincrement flag. Throws on any non-zero SQLite result code.
internal override void ColumnMetaData(string dataBase, string table, string column, out string dataType, out string collateSequence, out bool notNull, out bool primaryKey, out bool autoIncrement)
{
IntPtr dataTypePtr;
IntPtr collSeqPtr;
int nnotNull;
int nprimaryKey;
int nautoInc;
int n;
int dtLen;
int csLen;
#if !SQLITE_STANDARD
n = UnsafeNativeMethods.sqlite3_table_column_metadata_interop(_sql, ToUTF8(dataBase), ToUTF8(table), ToUTF8(column), out dataTypePtr, out collSeqPtr, out nnotNull, out nprimaryKey, out nautoInc, out dtLen, out csLen);
#else
// -1 makes UTF8ToString scan for the NUL terminator.
dtLen = -1;
csLen = -1;
n = UnsafeNativeMethods.sqlite3_table_column_metadata(_sql, ToUTF8(dataBase), ToUTF8(table), ToUTF8(column), out dataTypePtr, out collSeqPtr, out nnotNull, out nprimaryKey, out nautoInc);
#endif
if (n > 0) throw new SqliteException(n, SQLiteLastError());
dataType = UTF8ToString(dataTypePtr, dtLen);
collateSequence = UTF8ToString(collSeqPtr, csLen);
notNull = (nnotNull == 1);
primaryKey = (nprimaryKey == 1);
autoIncrement = (nautoInc == 1);
}
// Reads the value of result column 'index' as a double. The Compact Framework build
// returns it through an out parameter via the interop shim.
internal override double GetDouble(SqliteStatement stmt, int index)
{
double value;
#if !PLATFORM_COMPACTFRAMEWORK
value = UnsafeNativeMethods.sqlite3_column_double(stmt._sqlite_stmt, index);
#else
UnsafeNativeMethods.sqlite3_column_double_interop(stmt._sqlite_stmt, index, out value);
#endif
return value;
}
// Reads the value of result column 'index' as a 32-bit integer.
internal override int GetInt32(SqliteStatement stmt, int index)
{
return UnsafeNativeMethods.sqlite3_column_int(stmt._sqlite_stmt, index);
}
// Reads the value of result column 'index' as a 64-bit integer. The Compact Framework
// build returns it through an out parameter via the interop shim.
internal override long GetInt64(SqliteStatement stmt, int index)
{
long value;
#if !PLATFORM_COMPACTFRAMEWORK
value = UnsafeNativeMethods.sqlite3_column_int64(stmt._sqlite_stmt, index);
#else
UnsafeNativeMethods.sqlite3_column_int64_interop(stmt._sqlite_stmt, index, out value);
#endif
return value;
}
// Reads the value of result column 'index' as a UTF-8 string; the standard build
// passes -1 so UTF8ToString scans for the NUL terminator.
internal override string GetText(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_text_interop(stmt._sqlite_stmt, index, out len), len);
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_column_text(stmt._sqlite_stmt, index), -1);
#endif
}
// Reads result column 'index' as text and converts it to a DateTime via ToDateTime.
internal override DateTime GetDateTime(SqliteStatement stmt, int index)
{
#if !SQLITE_STANDARD
int len;
return ToDateTime(UnsafeNativeMethods.sqlite3_column_text_interop(stmt._sqlite_stmt, index, out len), len);
#else
return ToDateTime(UnsafeNativeMethods.sqlite3_column_text(stmt._sqlite_stmt, index), -1);
#endif
}
// Copies up to nLength bytes of the blob in result column 'index', starting at
// nDataOffset within the blob, into bDest at nStart. When bDest is null, returns the
// blob's total length instead (the standard GetBytes probing convention).
internal override long GetBytes(SqliteStatement stmt, int index, int nDataOffset, byte[] bDest, int nStart, int nLength)
{
IntPtr ptr;
int nlen;
int nCopied = nLength;
nlen = UnsafeNativeMethods.sqlite3_column_bytes(stmt._sqlite_stmt, index);
ptr = UnsafeNativeMethods.sqlite3_column_blob(stmt._sqlite_stmt, index);
if (bDest == null) return nlen;
// Clamp the copy size to both the destination's remaining space and the blob's
// remaining bytes; either clamp may drive nCopied negative, handled below.
if (nCopied + nStart > bDest.Length) nCopied = bDest.Length - nStart;
if (nCopied + nDataOffset > nlen) nCopied = nlen - nDataOffset;
unsafe {
if (nCopied > 0)
Marshal.Copy((IntPtr)((byte*)ptr + nDataOffset), bDest, nStart, nCopied);
else nCopied = 0;
}
return nCopied;
}
// Copies up to nLength characters of the column's text, starting at nDataOffset,
// into bDest at nStart. When bDest is null, returns the text's total length instead.
internal override long GetChars(SqliteStatement stmt, int index, int nDataOffset, char[] bDest, int nStart, int nLength)
{
int nlen;
int nCopied = nLength;
string str = GetText(stmt, index);
nlen = str.Length;
if (bDest == null) return nlen;
// Clamp to destination space and to the remaining source characters.
if (nCopied + nStart > bDest.Length) nCopied = bDest.Length - nStart;
if (nCopied + nDataOffset > nlen) nCopied = nlen - nDataOffset;
if (nCopied > 0)
str.CopyTo(nDataOffset, bDest, nStart, nCopied);
else nCopied = 0;
return nCopied;
}
// True when the value in result column 'index' is SQL NULL.
internal override bool IsNull(SqliteStatement stmt, int index)
{
return (ColumnAffinity(stmt, index) == TypeAffinity.Null);
}
// Returns the number of rows seen so far by the aggregate whose context is given.
internal override int AggregateCount(IntPtr context)
{
return UnsafeNativeMethods.sqlite3_aggregate_count(context);
}
#if MONOTOUCH
// Bundles a user function's scalar/step/final callbacks so a single GCHandle can be
// passed through sqlite3_create_function_v2's user-data pointer (see CreateFunction).
class FunctionData {
public SQLiteCallback Func;
public SQLiteCallback FuncStep;
public SQLiteFinalCallback FuncFinal;
}
#endif
// Registers a user-defined scalar/aggregate function under two encodings (enc code 4,
// then enc code 1) so SQLite can invoke it regardless of text encoding. Throws on any
// non-zero SQLite result code.
internal override void CreateFunction(string strFunction, int nArgs, bool needCollSeq, SQLiteCallback func, SQLiteCallback funcstep, SQLiteFinalCallback funcfinal)
{
int n;
#if MONOTOUCH
// MonoTouch (AOT) cannot marshal instance delegates into native code, so the real
// delegates are boxed in a FunctionData and static trampolines are registered instead;
// the native layer gets the FunctionData back through a GCHandle in user_data.
var data = new FunctionData();
data.Func = func;
data.FuncStep = funcstep;
data.FuncFinal = funcfinal;
SQLiteCallback func_callback = func == null ? null : new SQLiteCallback(scalar_callback);
SQLiteCallback funcstep_callback = funcstep == null ? null : new SQLiteCallback(step_callback);
SQLiteFinalCallback funcfinal_callback = funcfinal == null ? null : new SQLiteFinalCallback(final_callback);
IntPtr user_data;
user_data = GCHandle.ToIntPtr(GCHandle.Alloc(data));
n = UnsafeNativeMethods.sqlite3_create_function_v2(_sql, ToUTF8(strFunction), nArgs, 4, user_data, func_callback, funcstep_callback, funcfinal_callback, destroy_callback);
if (n == 0) {
// sqlite3_create_function_v2 will call 'destroy_callback' if it fails, so we need to recreate the gchandle here.
// NOTE(review): this branch actually runs on success (n == 0); presumably the first
// handle is now owned by SQLite and the second registration needs its own -- confirm.
user_data = GCHandle.ToIntPtr(GCHandle.Alloc(data));
n = UnsafeNativeMethods.sqlite3_create_function_v2(_sql, ToUTF8(strFunction), nArgs, 1, user_data, func_callback, funcstep_callback, funcfinal_callback, destroy_callback);
}
#elif !SQLITE_STANDARD
n = UnsafeNativeMethods.sqlite3_create_function_interop(_sql, ToUTF8(strFunction), nArgs, 4, IntPtr.Zero, func, funcstep, funcfinal, (needCollSeq == true) ? 1 : 0);
if (n == 0) n = UnsafeNativeMethods.sqlite3_create_function_interop(_sql, ToUTF8(strFunction), nArgs, 1, IntPtr.Zero, func, funcstep, funcfinal, (needCollSeq == true) ? 1 : 0);
#else
n = UnsafeNativeMethods.sqlite3_create_function(_sql, ToUTF8(strFunction), nArgs, 4, IntPtr.Zero, func, funcstep, funcfinal);
if (n == 0) n = UnsafeNativeMethods.sqlite3_create_function(_sql, ToUTF8(strFunction), nArgs, 1, IntPtr.Zero, func, funcstep, funcfinal);
#endif
if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
// Registers a user collation under both the UTF-16 (enc code 2) and UTF-8 (enc code 1)
// comparers, mirroring CreateFunction's dual registration.
// Bug fix: the original discarded the return value of the second
// sqlite3_create_collation call, so a failure registering the UTF-8 comparer was
// silently swallowed; assign it to n as the sibling CreateFunction does.
internal override void CreateCollation(string strCollation, SQLiteCollation func, SQLiteCollation func16, IntPtr user_data)
{
    int n = UnsafeNativeMethods.sqlite3_create_collation(_sql, ToUTF8(strCollation), 2, user_data, func16);
    if (n == 0) n = UnsafeNativeMethods.sqlite3_create_collation(_sql, ToUTF8(strCollation), 1, user_data, func);
    if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
#if MONOTOUCH
// Static AOT-safe trampolines: each recovers the FunctionData from the GCHandle stored
// in sqlite3_user_data and forwards to the managed delegate it carries.
[MonoTouch.MonoPInvokeCallback(typeof(SQLiteCallback))]
internal static void scalar_callback(IntPtr context, int nArgs, IntPtr argsptr)
{
var handle = GCHandle.FromIntPtr (UnsafeNativeMethods.sqlite3_user_data(context));
var func = (FunctionData)handle.Target;
func.Func(context, nArgs, argsptr);
}
[MonoTouch.MonoPInvokeCallback(typeof(SQLiteCallback))]
internal static void step_callback(IntPtr context, int nArgs, IntPtr argsptr)
{
var handle = GCHandle.FromIntPtr(UnsafeNativeMethods.sqlite3_user_data(context));
var func = (FunctionData)handle.Target;
func.FuncStep(context, nArgs, argsptr);
}
[MonoTouch.MonoPInvokeCallback(typeof(SQLiteFinalCallback))]
internal static void final_callback(IntPtr context)
{
var handle = GCHandle.FromIntPtr(UnsafeNativeMethods.sqlite3_user_data(context));
var func = (FunctionData)handle.Target;
func.FuncFinal(context);
}
// Invoked by SQLite when the function registration is destroyed; frees the GCHandle
// allocated in CreateFunction. Note: here 'context' is the user-data pointer itself.
[MonoTouch.MonoPInvokeCallback(typeof(SQLiteFinalCallback))]
internal static void destroy_callback(IntPtr context)
{
GCHandle.FromIntPtr(context).Free();
}
#endif
// Compares two strings using the collation active in the given function context,
// encoding them per 'enc' first. Interop-only; the standard build has no
// sqlite3_context_collcompare entry point.
// NOTE(review): an unrecognized 'enc' leaves 'converter' null and this will throw
// NullReferenceException -- presumably enc is always one of the three cases.
internal override int ContextCollateCompare(CollationEncodingEnum enc, IntPtr context, string s1, string s2)
{
#if !SQLITE_STANDARD
byte[] b1;
byte[] b2;
System.Text.Encoding converter = null;
switch (enc)
{
case CollationEncodingEnum.UTF8:
converter = System.Text.Encoding.UTF8;
break;
case CollationEncodingEnum.UTF16LE:
converter = System.Text.Encoding.Unicode;
break;
case CollationEncodingEnum.UTF16BE:
converter = System.Text.Encoding.BigEndianUnicode;
break;
}
b1 = converter.GetBytes(s1);
b2 = converter.GetBytes(s2);
return UnsafeNativeMethods.sqlite3_context_collcompare(context, b1, b1.Length, b2, b2.Length);
#else
throw new NotImplementedException();
#endif
}
// char[] overload of the collation comparison above; identical logic, and the same
// caveat about an unrecognized 'enc' leaving 'converter' null.
internal override int ContextCollateCompare(CollationEncodingEnum enc, IntPtr context, char[] c1, char[] c2)
{
#if !SQLITE_STANDARD
byte[] b1;
byte[] b2;
System.Text.Encoding converter = null;
switch (enc)
{
case CollationEncodingEnum.UTF8:
converter = System.Text.Encoding.UTF8;
break;
case CollationEncodingEnum.UTF16LE:
converter = System.Text.Encoding.Unicode;
break;
case CollationEncodingEnum.UTF16BE:
converter = System.Text.Encoding.BigEndianUnicode;
break;
}
b1 = converter.GetBytes(c1);
b2 = converter.GetBytes(c2);
return UnsafeNativeMethods.sqlite3_context_collcompare(context, b1, b1.Length, b2, b2.Length);
#else
throw new NotImplementedException();
#endif
}
// Returns the collating sequence (name, type, encoding) active for the given function
// context. Interop-only; the standard build lacks sqlite3_context_collseq.
// Bug fix: the original tested 'p != null'. For an IntPtr that is a lifted comparison
// against (IntPtr?)null and is ALWAYS true, so UTF8ToString was called even when no
// collation name was returned; the intended test is against IntPtr.Zero.
internal override CollationSequence GetCollationSequence(SqliteFunction func, IntPtr context)
{
#if !SQLITE_STANDARD
    CollationSequence seq = new CollationSequence();
    int len;
    int type;
    int enc;
    IntPtr p = UnsafeNativeMethods.sqlite3_context_collseq(context, out type, out enc, out len);
    if (p != IntPtr.Zero) seq.Name = UTF8ToString(p, len);
    seq.Type = (CollationTypeEnum)type;
    seq._func = func;
    seq.Encoding = (CollationEncodingEnum)enc;
    return seq;
#else
    throw new NotImplementedException();
#endif
}
// Copies up to nLength bytes of a function argument's blob value into bDest, starting
// at nDataOffset within the blob and nStart within the destination. When bDest is
// null, returns the blob's total length instead (same convention as GetBytes).
internal override long GetParamValueBytes(IntPtr p, int nDataOffset, byte[] bDest, int nStart, int nLength)
{
IntPtr ptr;
int nlen;
int nCopied = nLength;
nlen = UnsafeNativeMethods.sqlite3_value_bytes(p);
ptr = UnsafeNativeMethods.sqlite3_value_blob(p);
if (bDest == null) return nlen;
// Clamp to destination space and remaining source bytes; a negative result is
// normalized to 0 below.
if (nCopied + nStart > bDest.Length) nCopied = bDest.Length - nStart;
if (nCopied + nDataOffset > nlen) nCopied = nlen - nDataOffset;
unsafe {
if (nCopied > 0)
Marshal.Copy((IntPtr)((byte*)ptr + nDataOffset), bDest, nStart, nCopied);
else nCopied = 0;
}
return nCopied;
}
// Reads a user-function argument as a double; the CF build uses an out-param shim.
internal override double GetParamValueDouble(IntPtr ptr)
{
double value;
#if !PLATFORM_COMPACTFRAMEWORK
value = UnsafeNativeMethods.sqlite3_value_double(ptr);
#else
UnsafeNativeMethods.sqlite3_value_double_interop(ptr, out value);
#endif
return value;
}
// Reads a user-function argument as a 32-bit integer.
internal override int GetParamValueInt32(IntPtr ptr)
{
return UnsafeNativeMethods.sqlite3_value_int(ptr);
}
// Reads a user-function argument as a 64-bit integer; the CF build uses an out-param shim.
internal override long GetParamValueInt64(IntPtr ptr)
{
Int64 value;
#if !PLATFORM_COMPACTFRAMEWORK
value = UnsafeNativeMethods.sqlite3_value_int64(ptr);
#else
UnsafeNativeMethods.sqlite3_value_int64_interop(ptr, out value);
#endif
return value;
}
// Reads a user-function argument as a UTF-8 string (-1 means scan to the NUL terminator).
internal override string GetParamValueText(IntPtr ptr)
{
#if !SQLITE_STANDARD
int len;
return UTF8ToString(UnsafeNativeMethods.sqlite3_value_text_interop(ptr, out len), len);
#else
return UTF8ToString(UnsafeNativeMethods.sqlite3_value_text(ptr), -1);
#endif
}
// Returns the type affinity of a user-function argument value.
internal override TypeAffinity GetParamValueType(IntPtr ptr)
{
return UnsafeNativeMethods.sqlite3_value_type(ptr);
}
// Sets a user function's result to a blob; (IntPtr)(-1) makes SQLite copy the buffer.
internal override void ReturnBlob(IntPtr context, byte[] value)
{
UnsafeNativeMethods.sqlite3_result_blob(context, value, value.Length, (IntPtr)(-1));
}
// Sets a user function's result to a double; the CF build passes it by ref.
internal override void ReturnDouble(IntPtr context, double value)
{
#if !PLATFORM_COMPACTFRAMEWORK
UnsafeNativeMethods.sqlite3_result_double(context, value);
#else
UnsafeNativeMethods.sqlite3_result_double_interop(context, ref value);
#endif
}
// Sets a user function's result to an error message.
// Bug fix: the original passed value.Length (the .NET char count) as the UTF-8 byte
// length, truncating any message containing multi-byte characters. Use the encoded
// byte count (ToUTF8 appends a NUL terminator, hence Length - 1), matching the
// Bind_Text / ReturnText convention used throughout this class.
internal override void ReturnError(IntPtr context, string value)
{
    byte[] b = ToUTF8(value);
    UnsafeNativeMethods.sqlite3_result_error(context, b, b.Length - 1);
}
// Sets a user function's result to a 32-bit integer.
internal override void ReturnInt32(IntPtr context, int value)
{
UnsafeNativeMethods.sqlite3_result_int(context, value);
}
// Sets a user function's result to a 64-bit integer; the CF build passes it by ref.
internal override void ReturnInt64(IntPtr context, long value)
{
#if !PLATFORM_COMPACTFRAMEWORK
UnsafeNativeMethods.sqlite3_result_int64(context, value);
#else
UnsafeNativeMethods.sqlite3_result_int64_interop(context, ref value);
#endif
}
// Sets a user function's result to SQL NULL.
internal override void ReturnNull(IntPtr context)
{
UnsafeNativeMethods.sqlite3_result_null(context);
}
// Sets a user function's result to UTF-8 text. ToUTF8 appends a NUL terminator, so
// b.Length - 1 is the payload size; (IntPtr)(-1) makes SQLite copy the buffer.
// Fix: the original encoded the string twice (computed 'b' but passed a second,
// freshly-allocated ToUTF8(value) to the native call); encode once and reuse it.
internal override void ReturnText(IntPtr context, string value)
{
    byte[] b = ToUTF8(value);
    UnsafeNativeMethods.sqlite3_result_text(context, b, b.Length - 1, (IntPtr)(-1));
}
// Returns (allocating on first use) the per-aggregate-invocation context pointer.
internal override IntPtr AggregateContext(IntPtr context)
{
return UnsafeNativeMethods.sqlite3_aggregate_context(context, 1);
}
#if MONOTOUCH
// Encryption (SEE/codec) entry points are not compiled into the MonoTouch build.
internal override void SetPassword(byte[] passwordBytes)
{
throw new NotImplementedException ();
}
internal override void ChangePassword(byte[] newPasswordBytes)
{
throw new NotImplementedException ();
}
#else
// Applies the key used to decrypt/encrypt the database (sqlite3_key).
internal override void SetPassword(byte[] passwordBytes)
{
int n = UnsafeNativeMethods.sqlite3_key(_sql, passwordBytes, passwordBytes.Length);
if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
// Re-keys the database; a null password (length 0) removes encryption.
internal override void ChangePassword(byte[] newPasswordBytes)
{
int n = UnsafeNativeMethods.sqlite3_rekey(_sql, newPasswordBytes, (newPasswordBytes == null) ? 0 : newPasswordBytes.Length);
if (n > 0) throw new SqliteException(n, SQLiteLastError());
}
#endif
#if MONOTOUCH
// On MonoTouch the user delegates are stored in fields and static AOT-safe trampolines
// are registered with SQLite; each trampoline recovers this SQLite3 instance from the
// GCHandle passed as user data and forwards to the stored delegate.
// NOTE(review): 'gch' is a handle to this instance declared elsewhere in the class -- confirm.
SQLiteUpdateCallback update_callback;
SQLiteCommitCallback commit_callback;
SQLiteRollbackCallback rollback_callback;
[MonoTouch.MonoPInvokeCallback (typeof (SQLiteUpdateCallback))]
static void update (IntPtr puser, int type, IntPtr database, IntPtr table, Int64 rowid)
{
SQLite3 instance = GCHandle.FromIntPtr (puser).Target as SQLite3;
instance.update_callback (puser, type, database, table, rowid);
}
internal override void SetUpdateHook (SQLiteUpdateCallback func)
{
update_callback = func;
if (func == null)
UnsafeNativeMethods.sqlite3_update_hook (_sql, null, IntPtr.Zero);
else
UnsafeNativeMethods.sqlite3_update_hook (_sql, update, GCHandle.ToIntPtr (gch));
}
[MonoTouch.MonoPInvokeCallback (typeof (SQLiteCommitCallback))]
static int commit (IntPtr puser)
{
SQLite3 instance = GCHandle.FromIntPtr (puser).Target as SQLite3;
return instance.commit_callback (puser);
}
internal override void SetCommitHook (SQLiteCommitCallback func)
{
commit_callback = func;
if (func == null)
UnsafeNativeMethods.sqlite3_commit_hook (_sql, null, IntPtr.Zero);
else
UnsafeNativeMethods.sqlite3_commit_hook (_sql, commit, GCHandle.ToIntPtr (gch));
}
[MonoTouch.MonoPInvokeCallback (typeof (SQLiteRollbackCallback))]
static void rollback (IntPtr puser)
{
SQLite3 instance = GCHandle.FromIntPtr (puser).Target as SQLite3;
instance.rollback_callback (puser);
}
internal override void SetRollbackHook (SQLiteRollbackCallback func)
{
rollback_callback = func;
if (func == null)
UnsafeNativeMethods.sqlite3_rollback_hook (_sql, null, IntPtr.Zero);
else
UnsafeNativeMethods.sqlite3_rollback_hook (_sql, rollback, GCHandle.ToIntPtr (gch));
}
#else
// Non-MonoTouch builds can hand the delegates straight to the native layer.
internal override void SetUpdateHook(SQLiteUpdateCallback func)
{
UnsafeNativeMethods.sqlite3_update_hook(_sql, func, IntPtr.Zero);
}
internal override void SetCommitHook(SQLiteCommitCallback func)
{
UnsafeNativeMethods.sqlite3_commit_hook(_sql, func, IntPtr.Zero);
}
internal override void SetRollbackHook(SQLiteRollbackCallback func)
{
UnsafeNativeMethods.sqlite3_rollback_hook(_sql, func, IntPtr.Zero);
}
#endif
/// <summary>
/// Helper function to retrieve a column of data from an active statement.
/// </summary>
/// <param name="stmt">The statement being step()'d through</param>
/// <param name="index">The column index to retrieve</param>
/// <param name="typ">The type of data contained in the column. If Uninitialized, this function will retrieve the datatype information.</param>
/// <returns>Returns the data in the column</returns>
internal override object GetValue(SqliteStatement stmt, int index, SQLiteType typ)
{
if (IsNull(stmt, index)) return DBNull.Value;
TypeAffinity aff = typ.Affinity;
Type t = null;
if (typ.Type != DbType.Object)
{
// A concrete DbType was supplied: convert through the mapped .NET type rather
// than relying on the raw affinity SQLite reports.
t = SqliteConvert.SQLiteTypeToType(typ);
aff = TypeToAffinity(t);
}
switch (aff)
{
case TypeAffinity.Blob:
// GUIDs may arrive as text or as a 16-byte blob.
if (typ.Type == DbType.Guid && typ.Affinity == TypeAffinity.Text)
return new Guid(GetText(stmt, index));
// GetBytes with a null buffer returns the blob's length (probe), then copy.
int n = (int)GetBytes(stmt, index, 0, null, 0, 0);
byte[] b = new byte[n];
GetBytes(stmt, index, 0, b, 0, n);
if (typ.Type == DbType.Guid && n == 16)
return new Guid(b);
return b;
case TypeAffinity.DateTime:
return GetDateTime(stmt, index);
case TypeAffinity.Double:
if (t == null) return GetDouble(stmt, index);
else
return Convert.ChangeType(GetDouble(stmt, index), t, null);
case TypeAffinity.Int64:
if (t == null) return GetInt64(stmt, index);
else
return Convert.ChangeType(GetInt64(stmt, index), t, null);
default:
// Text affinity (and anything unrecognized) falls back to a string.
return GetText(stmt, index);
}
}
// Interop-only helper: returns the statement's cursor number for the given root page,
// or -1 in the standard build where the API does not exist.
internal override int GetCursorForTable(SqliteStatement stmt, int db, int rootPage)
{
#if !SQLITE_STANDARD
return UnsafeNativeMethods.sqlite3_table_cursor(stmt._sqlite_stmt, db, rootPage);
#else
return -1;
#endif
}
// Interop-only helper: returns the rowid the given cursor is positioned on, or 0 on
// failure / in the standard build where the API does not exist.
internal override long GetRowIdForCursor(SqliteStatement stmt, int cursor)
{
#if !SQLITE_STANDARD
long rowid;
int rc = UnsafeNativeMethods.sqlite3_cursor_rowid(stmt._sqlite_stmt, cursor, out rowid);
if (rc == 0) return rowid;
return 0;
#else
return 0;
#endif
}
// Interop-only helper: fetches sort mode, conflict-resolution mode and collation name
// for an index column. The standard build returns fixed defaults instead.
internal override void GetIndexColumnExtendedInfo(string database, string index, string column, out int sortMode, out int onError, out string collationSequence)
{
#if !SQLITE_STANDARD
IntPtr coll;
int colllen;
int rc;
rc = UnsafeNativeMethods.sqlite3_index_column_info_interop(_sql, ToUTF8(database), ToUTF8(index), ToUTF8(column), out sortMode, out onError, out coll, out colllen);
if (rc != 0) throw new SqliteException(rc, "");
collationSequence = UTF8ToString(coll, colllen);
#else
sortMode = 0;
onError = 2;
collationSequence = "BINARY";
#endif
}
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type GroupEventsCollectionRequest.
/// </summary>
// NOTE: generated by the Microsoft Graph code generator; keep manual changes to
// comments only, as the file is regenerated and code edits would be overwritten.
public partial class GroupEventsCollectionRequest : BaseRequest, IGroupEventsCollectionRequest
{
/// <summary>
/// Constructs a new GroupEventsCollectionRequest.
/// </summary>
/// <param name="requestUrl">The URL for the built request.</param>
/// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
/// <param name="options">Query and header option name value pairs for the request.</param>
public GroupEventsCollectionRequest(
string requestUrl,
IBaseClient client,
IEnumerable<Option> options)
: base(requestUrl, client, options)
{
}
/// <summary>
/// Adds the specified Event to the collection via POST.
/// </summary>
/// <param name="eventsEvent">The Event to add.</param>
/// <returns>The created Event.</returns>
public System.Threading.Tasks.Task<Event> AddAsync(Event eventsEvent)
{
// Convenience overload: delegates with a non-cancellable token.
return this.AddAsync(eventsEvent, CancellationToken.None);
}
/// <summary>
/// Adds the specified Event to the collection via POST.
/// </summary>
/// <param name="eventsEvent">The Event to add.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created Event.</returns>
public System.Threading.Tasks.Task<Event> AddAsync(Event eventsEvent, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "POST";
return this.SendAsync<Event>(eventsEvent, cancellationToken);
}
/// <summary>
/// Gets the collection page.
/// </summary>
/// <returns>The collection page.</returns>
public System.Threading.Tasks.Task<IGroupEventsCollectionPage> GetAsync()
{
return this.GetAsync(CancellationToken.None);
}
/// <summary>
/// Gets the collection page.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The collection page.</returns>
public async System.Threading.Tasks.Task<IGroupEventsCollectionPage> GetAsync(CancellationToken cancellationToken)
{
this.Method = "GET";
var response = await this.SendAsync<GroupEventsCollectionResponse>(null, cancellationToken).ConfigureAwait(false);
if (response != null && response.Value != null && response.Value.CurrentPage != null)
{
if (response.AdditionalData != null)
{
// Wire up the next-page request when the service returned an @odata.nextLink.
object nextPageLink;
response.AdditionalData.TryGetValue("@odata.nextLink", out nextPageLink);
var nextPageLinkString = nextPageLink as string;
if (!string.IsNullOrEmpty(nextPageLinkString))
{
response.Value.InitializeNextPageRequest(
this.Client,
nextPageLinkString);
}
// Copy the additional data collection to the page itself so that information is not lost
response.Value.AdditionalData = response.AdditionalData;
}
return response.Value;
}
return null;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Expand(string value)
{
this.QueryOptions.Add(new QueryOption("$expand", value));
return this;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Expand(Expression<Func<Event, object>> expandExpression)
{
if (expandExpression == null)
{
throw new ArgumentNullException(nameof(expandExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(expandExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$expand", value));
}
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Select(string value)
{
this.QueryOptions.Add(new QueryOption("$select", value));
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Select(Expression<Func<Event, object>> selectExpression)
{
if (selectExpression == null)
{
throw new ArgumentNullException(nameof(selectExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(selectExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$select", value));
}
return this;
}
/// <summary>
/// Adds the specified top value to the request.
/// </summary>
/// <param name="value">The top value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Top(int value)
{
this.QueryOptions.Add(new QueryOption("$top", value.ToString()));
return this;
}
/// <summary>
/// Adds the specified filter value to the request.
/// </summary>
/// <param name="value">The filter value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Filter(string value)
{
this.QueryOptions.Add(new QueryOption("$filter", value));
return this;
}
/// <summary>
/// Adds the specified skip value to the request.
/// </summary>
/// <param name="value">The skip value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest Skip(int value)
{
this.QueryOptions.Add(new QueryOption("$skip", value.ToString()));
return this;
}
/// <summary>
/// Adds the specified orderby value to the request.
/// </summary>
/// <param name="value">The orderby value.</param>
/// <returns>The request object to send.</returns>
public IGroupEventsCollectionRequest OrderBy(string value)
{
this.QueryOptions.Add(new QueryOption("$orderby", value));
return this;
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="WSTrust13ResponseSerializer.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//-----------------------------------------------------------------------
namespace System.IdentityModel.Protocols.WSTrust
{
using System.Xml;
/// <summary>
/// Class for serializing a WS-Trust 1.3 RequestSecurityTokenResponse to an XmlWriter
/// </summary>
public class WSTrust13ResponseSerializer : WSTrustResponseSerializer
{
/// <summary>
/// Deserializes a WS-Trust 1.3 RSTR (optionally wrapped in a
/// RequestSecurityTokenResponseCollection element) into a RequestSecurityTokenResponse.
/// </summary>
/// <param name="reader">Reader over the RSTR.</param>
/// <param name="context">Current Serialization context.</param>
/// <returns>RequestSecurityTokenResponse object if deserialization was successful.</returns>
/// <exception cref="ArgumentNullException">The given reader or context parameter is null</exception>
public override RequestSecurityTokenResponse ReadXml(XmlReader reader, WSTrustSerializationContext context)
{
    if (reader == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
    }

    if (context == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
    }

    // A wrapping RSTRC element marks this response as the final one in the exchange.
    bool wrappedInCollection = reader.IsStartElement(WSTrust13Constants.ElementNames.RequestSecurityTokenResponseCollection, WSTrust13Constants.NamespaceURI);
    if (wrappedInCollection)
    {
        reader.ReadStartElement(WSTrust13Constants.ElementNames.RequestSecurityTokenResponseCollection, WSTrust13Constants.NamespaceURI);
    }

    RequestSecurityTokenResponse response = WSTrustSerializationHelper.CreateResponse(reader, context, this, WSTrustConstantsAdapter.Trust13);
    response.IsFinal = wrappedInCollection;

    if (wrappedInCollection)
    {
        reader.ReadEndElement();
    }

    return response;
}
/// <summary>
/// Override of the base class that Reads a specific child element inside the RSTR.
/// </summary>
/// <param name="reader">Reader pointing at an element to read inside the RSTR.</param>
/// <param name="rstr">The RequestSecurityTokenResponse element that is being populated from the reader.</param>
/// <param name="context">Current Serialization context.</param>
/// <exception cref="ArgumentNullException">Either reader or rstr or context parameter is null.</exception>
/// <exception cref="WSTrustSerializationException">Unable to deserialize the current parameter.</exception>
public override void ReadXmlElement(XmlReader reader, RequestSecurityTokenResponse rstr, WSTrustSerializationContext context)
{
    if (reader == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
    }

    if (rstr == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("rstr");
    }

    if (context == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
    }

    // KeyWrapAlgorithm is the only element specific to WS-Trust 1.3; everything else
    // is delegated to the version-independent helper.
    if (reader.IsStartElement(WSTrust13Constants.ElementNames.KeyWrapAlgorithm, WSTrust13Constants.NamespaceURI))
    {
        rstr.KeyWrapAlgorithm = reader.ReadElementContentAsString();
        return;
    }

    WSTrustSerializationHelper.ReadRSTRXml(reader, rstr, context, WSTrustConstantsAdapter.Trust13);
}
/// <summary>
/// Writes out the supported elements on the response object.
/// </summary>
/// <param name="rstr">The response instance</param>
/// <param name="writer">The writer to write to</param>
/// <param name="context">Current Serialization context.</param>
/// <exception cref="ArgumentNullException">Either rstr or writer or context parameter is null.</exception>
public override void WriteKnownResponseElement(RequestSecurityTokenResponse rstr, XmlWriter writer, WSTrustSerializationContext context)
{
    if (rstr == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("rstr");
    }

    if (writer == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer");
    }

    if (context == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
    }

    // Elements shared by all WS-Trust versions.
    WSTrustSerializationHelper.WriteKnownResponseElement(rstr, writer, context, this, WSTrustConstantsAdapter.Trust13);

    // KeyWrapAlgorithm exists only in WS-Trust 1.3.
    if (!string.IsNullOrEmpty(rstr.KeyWrapAlgorithm))
    {
        this.WriteXmlElement(writer, WSTrust13Constants.ElementNames.KeyWrapAlgorithm, rstr.KeyWrapAlgorithm, rstr, context);
    }
}
/// <summary>
/// Serializes a RequestSecurityTokenResponse object to the given XmlWriter
/// stream.
/// </summary>
/// <param name="response">RequestSecurityTokenResponse object that needs to be serialized to the writer.</param>
/// <param name="writer">XmlWriter into which the object will be serialized</param>
/// <param name="context">Current Serialization context.</param>
/// <exception cref="ArgumentNullException">The given response or writer or context parameter is null</exception>
public override void WriteXml(RequestSecurityTokenResponse response, XmlWriter writer, WSTrustSerializationContext context)
{
    if (response == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("response");
    }

    if (writer == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer");
    }

    if (context == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
    }

    // A final response is wrapped in a RequestSecurityTokenResponseCollection
    // element per WS-Trust 1.3.
    if (response.IsFinal)
    {
        writer.WriteStartElement(WSTrust13Constants.Prefix, WSTrust13Constants.ElementNames.RequestSecurityTokenResponseCollection, WSTrust13Constants.NamespaceURI);
    }

    WSTrustSerializationHelper.WriteResponse(response, writer, context, this, WSTrustConstantsAdapter.Trust13);

    // IsFinal is deliberately re-read here, mirroring the opening check, so the
    // end tag is written exactly when the start tag was.
    if (response.IsFinal)
    {
        writer.WriteEndElement();
    }
}
/// <summary>
/// Override of the Base class method that writes a specific RSTR parameter to the outgoing stream.
/// </summary>
/// <param name="writer">Writer to which the RSTR is serialized</param>
/// <param name="elementName">The Local name of the element to be written.</param>
/// <param name="elementValue">The value of the element.</param>
/// <param name="rstr">The entire RSTR object that is being serialized.</param>
/// <param name="context">Current Serialization context.</param>
/// <exception cref="ArgumentNullException">Either writer or rstr or context is null.</exception>
/// <exception cref="ArgumentException">elementName is null or an empty string.</exception>
public override void WriteXmlElement(XmlWriter writer, string elementName, object elementValue, RequestSecurityTokenResponse rstr, WSTrustSerializationContext context)
{
    if (writer == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer");
    }

    if (string.IsNullOrEmpty(elementName))
    {
        throw DiagnosticUtility.ThrowHelperArgumentNullOrEmptyString("elementName");
    }

    if (rstr == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("rstr");
    }

    if (context == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
    }

    // Anything other than the WS-Trust 1.3 specific KeyWrapAlgorithm element is
    // handled by the shared serialization helper.
    if (!StringComparer.Ordinal.Equals(elementName, WSTrust13Constants.ElementNames.KeyWrapAlgorithm))
    {
        WSTrustSerializationHelper.WriteRSTRXml(writer, elementName, elementValue, context, WSTrustConstantsAdapter.Trust13);
        return;
    }

    writer.WriteElementString(WSTrust13Constants.Prefix, WSTrust13Constants.ElementNames.KeyWrapAlgorithm, WSTrust13Constants.NamespaceURI, (string)elementValue);
}
/// <summary>
/// Checks if the given reader is positioned at a RequestSecurityTokenResponse or
/// RequestSecurityTokenResponseCollection element with namespace 'http://docs.oasis-open.org/ws-sx/ws-trust/200512'
/// </summary>
/// <param name="reader">The reader to read from</param>
/// <returns>
/// 'True' if the reader is positioned at a RequestSecurityTokenResponse or RequestSecurityTokenResponseCollection
/// element with namespace 'http://docs.oasis-open.org/ws-sx/ws-trust/200512'.
/// </returns>
/// <exception cref="ArgumentNullException">The input argument is null.</exception>
public override bool CanRead(XmlReader reader)
{
    if (reader == null)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
    }

    // Short-circuit: the second IsStartElement probe only runs when the first
    // does not match, exactly as the original || expression behaved.
    if (reader.IsStartElement(WSTrust13Constants.ElementNames.RequestSecurityTokenResponseCollection, WSTrust13Constants.NamespaceURI))
    {
        return true;
    }

    return reader.IsStartElement(WSTrust13Constants.ElementNames.RequestSecurityTokenResponse, WSTrust13Constants.NamespaceURI);
}
}
}
| |
using System;
using System.Collections;
using System.Reflection;
namespace Python.Runtime
{
/// <summary>
/// A MethodBinder encapsulates information about a (possibly overloaded)
/// managed method, and is responsible for selecting the right method given
/// a set of Python arguments. This is also used as a base class for the
/// ConstructorBinder, a minor variation used to invoke constructors.
/// </summary>
internal class MethodBinder
{
    // Candidate overloads accumulated via AddMethod before first use.
    public ArrayList list;

    // Precedence-sorted snapshot of `list`, built lazily by GetMethods().
    public MethodBase[] methods;

    // True once `methods` has been built; further AddMethod calls are ignored
    // by GetMethods() after this point.
    public bool init = false;

    // When true, Invoke releases the Python GIL around the managed call.
    public bool allow_threads = true;

    internal MethodBinder()
    {
        list = new ArrayList();
    }

    internal MethodBinder(MethodInfo mi)
    {
        list = new ArrayList { mi };
    }

    /// <summary>
    /// Number of candidate overloads registered with this binder.
    /// </summary>
    public int Count
    {
        get { return list.Count; }
    }

    /// <summary>
    /// Registers an additional overload candidate.
    /// </summary>
    internal void AddMethod(MethodBase m)
    {
        list.Add(m);
    }

    /// <summary>
    /// Given a sequence of MethodInfo and a sequence of types, return the
    /// MethodInfo that matches the signature represented by those types.
    /// Returns null when tp is null or no exact match exists.
    /// </summary>
    internal static MethodInfo MatchSignature(MethodInfo[] mi, Type[] tp)
    {
        if (tp == null)
        {
            return null;
        }
        int count = tp.Length;
        foreach (MethodInfo t in mi)
        {
            ParameterInfo[] pi = t.GetParameters();
            if (pi.Length != count)
            {
                continue;
            }
            for (var n = 0; n < pi.Length; n++)
            {
                // Exact type comparison only — no assignability/conversion.
                if (tp[n] != pi[n].ParameterType)
                {
                    break;
                }
                if (n == pi.Length - 1)
                {
                    return t;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Given a sequence of MethodInfo and a sequence of type parameters,
    /// return the MethodInfo that represents the matching closed generic.
    /// Returns null when tp is null or no generic definition has the right
    /// number of type parameters.
    /// </summary>
    internal static MethodInfo MatchParameters(MethodInfo[] mi, Type[] tp)
    {
        if (tp == null)
        {
            return null;
        }
        int count = tp.Length;
        foreach (MethodInfo t in mi)
        {
            if (!t.IsGenericMethodDefinition)
            {
                continue;
            }
            Type[] args = t.GetGenericArguments();
            if (args.Length != count)
            {
                continue;
            }
            // First definition whose generic arity matches wins.
            return t.MakeGenericMethod(tp);
        }
        return null;
    }

    /// <summary>
    /// Given a sequence of MethodInfo and two sequences of type parameters,
    /// return the MethodInfo that matches the signature and the closed generic.
    /// </summary>
    internal static MethodInfo MatchSignatureAndParameters(MethodInfo[] mi, Type[] genericTp, Type[] sigTp)
    {
        if (genericTp == null || sigTp == null)
        {
            return null;
        }
        int genericCount = genericTp.Length;
        int signatureCount = sigTp.Length;
        foreach (MethodInfo t in mi)
        {
            if (!t.IsGenericMethodDefinition)
            {
                continue;
            }
            Type[] genericArgs = t.GetGenericArguments();
            if (genericArgs.Length != genericCount)
            {
                continue;
            }
            ParameterInfo[] pi = t.GetParameters();
            if (pi.Length != signatureCount)
            {
                continue;
            }
            for (var n = 0; n < pi.Length; n++)
            {
                if (sigTp[n] != pi[n].ParameterType)
                {
                    break;
                }
                if (n == pi.Length - 1)
                {
                    MethodInfo match = t;
                    if (match.IsGenericMethodDefinition)
                    {
                        // Close the generic over the requested type arguments.
                        // (The previously-fetched-but-unused GetGenericArguments()
                        // call flagged by the old FIXME has been removed.)
                        return match.MakeGenericMethod(genericTp);
                    }
                    return match;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Return the array of MethodInfo for this method. The result array
    /// is arranged in order of precedence (done lazily to avoid doing it
    /// at all for methods that are never called).
    /// </summary>
    internal MethodBase[] GetMethods()
    {
        if (!init)
        {
            // I'm sure this could be made more efficient.
            list.Sort(new MethodSorter());
            methods = (MethodBase[])list.ToArray(typeof(MethodBase));
            init = true;
        }
        return methods;
    }

    /// <summary>
    /// Precedence algorithm largely lifted from Jython - the concerns are
    /// generally the same so we'll start with this and tweak as necessary.
    /// Lower values sort earlier and are tried first during binding.
    /// </summary>
    /// <remarks>
    /// Based from Jython `org.python.core.ReflectedArgs.precedence`
    /// See: https://github.com/jythontools/jython/blob/master/src/org/python/core/ReflectedArgs.java#L192
    /// </remarks>
    internal static int GetPrecedence(MethodBase mi)
    {
        ParameterInfo[] pi = mi.GetParameters();
        // Static methods are penalized so instance overloads are preferred.
        int val = mi.IsStatic ? 3000 : 0;
        int num = pi.Length;

        // Generic methods get a slight penalty over non-generic ones.
        val += mi.IsGenericMethod ? 1 : 0;
        for (var i = 0; i < num; i++)
        {
            val += ArgPrecedence(pi[i].ParameterType);
        }
        return val;
    }

    /// <summary>
    /// Return a precedence value for a particular Type object. More specific
    /// types score lower (better); `object` scores worst.
    /// </summary>
    internal static int ArgPrecedence(Type t)
    {
        Type objectType = typeof(object);
        if (t == objectType)
        {
            return 3000;
        }

        TypeCode tc = Type.GetTypeCode(t);
        switch (tc)
        {
            case TypeCode.Object:
                return 1;

            case TypeCode.UInt64:
                return 10;

            case TypeCode.UInt32:
                return 11;

            case TypeCode.UInt16:
                return 12;

            case TypeCode.Int64:
                return 13;

            case TypeCode.Int32:
                return 14;

            case TypeCode.Int16:
                return 15;

            case TypeCode.Char:
                return 16;

            case TypeCode.SByte:
                return 17;

            case TypeCode.Byte:
                return 18;

            case TypeCode.Single:
                return 20;

            case TypeCode.Double:
                return 21;

            case TypeCode.String:
                return 30;

            case TypeCode.Boolean:
                return 40;
        }

        if (t.IsArray)
        {
            Type e = t.GetElementType();
            if (e == objectType)
            {
                return 2500;
            }
            // Arrays inherit their element's precedence with an offset.
            return 100 + ArgPrecedence(e);
        }

        return 2000;
    }

    /// <summary>
    /// Bind the given Python instance and arguments to a particular method
    /// overload and return a structure that contains the converted Python
    /// instance, converted arguments and the correct method to call.
    /// Returns null when no overload can accept the given arguments.
    /// </summary>
    internal Binding Bind(IntPtr inst, IntPtr args, IntPtr kw)
    {
        return Bind(inst, args, kw, null, null);
    }

    internal Binding Bind(IntPtr inst, IntPtr args, IntPtr kw, MethodBase info)
    {
        return Bind(inst, args, kw, info, null);
    }

    internal Binding Bind(IntPtr inst, IntPtr args, IntPtr kw, MethodBase info, MethodInfo[] methodinfo)
    {
        // loop to find match, return invoker w/ or /wo error
        MethodBase[] _methods = null;
        int pynargs = Runtime.PyTuple_Size(args);
        object arg;
        var isGeneric = false;
        ArrayList defaultArgList = null;

        // When an explicit MethodBase was supplied, only that candidate is
        // considered; otherwise use the full precedence-sorted list.
        if (info != null)
        {
            _methods = new MethodBase[1];
            _methods.SetValue(info, 0);
        }
        else
        {
            _methods = GetMethods();
        }

        Type clrtype;
        foreach (MethodBase mi in _methods)
        {
            if (mi.IsGenericMethod)
            {
                isGeneric = true;
            }
            ParameterInfo[] pi = mi.GetParameters();
            int clrnargs = pi.Length;
            var match = false;
            int arrayStart = -1;
            var outs = 0;

            if (pynargs == clrnargs)
            {
                match = true;
            }
            else if (pynargs < clrnargs)
            {
                // Fewer Python args than CLR parameters: the remainder must all
                // have default values for this overload to be viable.
                match = true;
                defaultArgList = new ArrayList();
                for (int v = pynargs; v < clrnargs; v++)
                {
                    if (pi[v].DefaultValue == DBNull.Value)
                    {
                        match = false;
                    }
                    else
                    {
                        defaultArgList.Add(pi[v].DefaultValue);
                    }
                }
            }
            else if (pynargs > clrnargs && clrnargs > 0 &&
                     Attribute.IsDefined(pi[clrnargs - 1], typeof(ParamArrayAttribute)))
            {
                // This is a `foo(params object[] bar)` style method
                match = true;
                arrayStart = clrnargs - 1;
            }

            if (match)
            {
                var margs = new object[clrnargs];

                for (var n = 0; n < clrnargs; n++)
                {
                    IntPtr op;
                    if (n < pynargs)
                    {
                        if (arrayStart == n)
                        {
                            // map remaining Python arguments to a tuple since
                            // the managed function accepts it - hopefully :]
                            op = Runtime.PyTuple_GetSlice(args, arrayStart, pynargs);
                        }
                        else
                        {
                            op = Runtime.PyTuple_GetItem(args, n);
                        }

                        // this logic below handles cases when multiple overloading methods
                        // are ambiguous, hence comparison between Python and CLR types
                        // is necessary
                        clrtype = null;
                        IntPtr pyoptype;
                        if (_methods.Length > 1)
                        {
                            pyoptype = IntPtr.Zero;
                            pyoptype = Runtime.PyObject_Type(op);
                            Exceptions.Clear();
                            if (pyoptype != IntPtr.Zero)
                            {
                                clrtype = Converter.GetTypeByAlias(pyoptype);
                            }
                            Runtime.XDecref(pyoptype);
                        }

                        if (clrtype != null)
                        {
                            var typematch = false;
                            if ((pi[n].ParameterType != typeof(object)) && (pi[n].ParameterType != clrtype))
                            {
                                IntPtr pytype = Converter.GetPythonTypeByAlias(pi[n].ParameterType);
                                pyoptype = Runtime.PyObject_Type(op);
                                Exceptions.Clear();
                                if (pyoptype != IntPtr.Zero)
                                {
                                    if (pytype != pyoptype)
                                    {
                                        typematch = false;
                                    }
                                    else
                                    {
                                        typematch = true;
                                        clrtype = pi[n].ParameterType;
                                    }
                                }
                                if (!typematch)
                                {
                                    // this takes care of enum values
                                    TypeCode argtypecode = Type.GetTypeCode(pi[n].ParameterType);
                                    TypeCode paramtypecode = Type.GetTypeCode(clrtype);
                                    if (argtypecode == paramtypecode)
                                    {
                                        typematch = true;
                                        clrtype = pi[n].ParameterType;
                                    }
                                }
                                Runtime.XDecref(pyoptype);
                                if (!typematch)
                                {
                                    margs = null;
                                    break;
                                }
                            }
                            else
                            {
                                typematch = true;
                                clrtype = pi[n].ParameterType;
                            }
                        }
                        else
                        {
                            clrtype = pi[n].ParameterType;
                        }

                        if (pi[n].IsOut || clrtype.IsByRef)
                        {
                            outs++;
                        }

                        if (!Converter.ToManaged(op, clrtype, out arg, false))
                        {
                            Exceptions.Clear();
                            margs = null;
                            break;
                        }
                        if (arrayStart == n)
                        {
                            // GetSlice() creates a new reference but GetItem()
                            // returns only a borrow reference.
                            Runtime.XDecref(op);
                        }
                        margs[n] = arg;
                    }
                    else
                    {
                        // Fill trailing parameters from their declared defaults.
                        if (defaultArgList != null)
                        {
                            margs[n] = defaultArgList[n - pynargs];
                        }
                    }
                }

                if (margs == null)
                {
                    // Conversion failed for this overload; try the next one.
                    continue;
                }

                object target = null;
                if (!mi.IsStatic && inst != IntPtr.Zero)
                {
                    //CLRObject co = (CLRObject)ManagedType.GetManagedObject(inst);
                    // InvalidCastException: Unable to cast object of type
                    // 'Python.Runtime.ClassObject' to type 'Python.Runtime.CLRObject'
                    var co = ManagedType.GetManagedObject(inst) as CLRObject;

                    // Sanity check: this ensures a graceful exit if someone does
                    // something intentionally wrong like call a non-static method
                    // on the class rather than on an instance of the class.
                    // XXX maybe better to do this before all the other rigmarole.
                    if (co == null)
                    {
                        return null;
                    }
                    target = co.inst;
                }

                return new Binding(mi, target, margs, outs);
            }
        }

        // We weren't able to find a matching method but at least one
        // is a generic method and info is null. That happens when a generic
        // method was not called using the [] syntax. Let's introspect the
        // type of the arguments and use it to construct the correct method.
        if (isGeneric && info == null && methodinfo != null)
        {
            Type[] types = Runtime.PythonArgsToTypeArray(args, true);
            MethodInfo mi = MatchParameters(methodinfo, types);
            return Bind(inst, args, kw, mi, null);
        }
        return null;
    }

    internal virtual IntPtr Invoke(IntPtr inst, IntPtr args, IntPtr kw)
    {
        return Invoke(inst, args, kw, null, null);
    }

    internal virtual IntPtr Invoke(IntPtr inst, IntPtr args, IntPtr kw, MethodBase info)
    {
        return Invoke(inst, args, kw, info, null);
    }

    /// <summary>
    /// Binds the Python arguments to an overload, invokes it, and converts the
    /// result (plus any out/ref parameters) back to Python. Returns IntPtr.Zero
    /// with a Python error set on failure.
    /// </summary>
    internal virtual IntPtr Invoke(IntPtr inst, IntPtr args, IntPtr kw, MethodBase info, MethodInfo[] methodinfo)
    {
        Binding binding = Bind(inst, args, kw, info, methodinfo);
        object result;
        IntPtr ts = IntPtr.Zero;

        if (binding == null)
        {
            Exceptions.SetError(Exceptions.TypeError, "No method matches given arguments");
            return IntPtr.Zero;
        }

        if (allow_threads)
        {
            ts = PythonEngine.BeginAllowThreads();
        }

        try
        {
            result = binding.info.Invoke(binding.inst, BindingFlags.Default, null, binding.args, null);
        }
        catch (Exception e)
        {
            // Surface the inner exception so Python sees the real cause rather
            // than the reflection TargetInvocationException wrapper.
            if (e.InnerException != null)
            {
                e = e.InnerException;
            }
            if (allow_threads)
            {
                PythonEngine.EndAllowThreads(ts);
            }
            Exceptions.SetError(e);
            return IntPtr.Zero;
        }

        if (allow_threads)
        {
            PythonEngine.EndAllowThreads(ts);
        }

        // If there are out parameters, we return a tuple containing
        // the result followed by the out parameters. If there is only
        // one out parameter and the return type of the method is void,
        // we return the out parameter as the result to Python (for
        // code compatibility with ironpython).
        var mi = (MethodInfo)binding.info;

        if (binding.outs > 0)
        {
            ParameterInfo[] pi = mi.GetParameters();
            int c = pi.Length;
            var n = 0;

            IntPtr t = Runtime.PyTuple_New(binding.outs + 1);
            IntPtr v = Converter.ToPython(result, mi.ReturnType);
            Runtime.PyTuple_SetItem(t, n, v);
            n++;

            for (var i = 0; i < c; i++)
            {
                Type pt = pi[i].ParameterType;
                if (pi[i].IsOut || pt.IsByRef)
                {
                    v = Converter.ToPython(binding.args[i], pt);
                    Runtime.PyTuple_SetItem(t, n, v);
                    n++;
                }
            }

            if (binding.outs == 1 && mi.ReturnType == typeof(void))
            {
                v = Runtime.PyTuple_GetItem(t, 1);
                Runtime.XIncref(v);
                Runtime.XDecref(t);
                return v;
            }

            return t;
        }

        return Converter.ToPython(result, mi.ReturnType);
    }
}
/// <summary>
/// Utility class to sort method info by parameter type precedence.
/// </summary>
internal class MethodSorter : IComparer
{
    // Orders candidates ascending by the binder's precedence metric, so the
    // most specific overloads are tried first.
    int IComparer.Compare(object m1, object m2)
    {
        int left = MethodBinder.GetPrecedence((MethodBase)m1);
        int right = MethodBinder.GetPrecedence((MethodBase)m2);

        if (left == right)
        {
            return 0;
        }

        return left < right ? -1 : 1;
    }
}
/// <summary>
/// A Binding is a utility instance that bundles together a MethodInfo
/// representing a method to call, a (possibly null) target instance for
/// the call, and the arguments for the call (all as managed values).
/// </summary>
internal class Binding
{
    public MethodBase info;
    public object[] args;
    public object inst;
    public int outs;

    // Simple data carrier: fields are stored as given, no copies are made.
    internal Binding(MethodBase info, object inst, object[] args, int outs)
    {
        this.info = info;
        this.args = args;
        this.inst = inst;
        this.outs = outs;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    // Driver for the Sse2.CompareScalarLessThanOrEqual (double) test class.
    // Runs every scenario on supported hardware, or verifies the expected
    // PlatformNotSupportedException otherwise, then throws if any failed.
    private static void CompareScalarLessThanOrEqualDouble()
    {
        var test = new SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble();

        if (test.IsSupported)
        {
            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();

            if (Sse2.IsSupported)
            {
                // Validates basic functionality works, using Load
                test.RunBasicScenario_Load();

                // Validates basic functionality works, using LoadAligned
                test.RunBasicScenario_LoadAligned();
            }

            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();

            if (Sse2.IsSupported)
            {
                // Validates calling via reflection works, using Load
                test.RunReflectionScenario_Load();

                // Validates calling via reflection works, using LoadAligned
                test.RunReflectionScenario_LoadAligned();
            }

            // Validates passing a static member works
            test.RunClsVarScenario();

            if (Sse2.IsSupported)
            {
                // Validates passing a static member works, using pinning and Load
                test.RunClsVarScenario_Load();
            }

            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();

            if (Sse2.IsSupported)
            {
                // Validates passing a local works, using Load
                test.RunLclVarScenario_Load();

                // Validates passing a local works, using LoadAligned
                test.RunLclVarScenario_LoadAligned();
            }

            // Validates passing the field of a local class works
            test.RunClassLclFldScenario();

            if (Sse2.IsSupported)
            {
                // Validates passing the field of a local class works, using pinning and Load
                test.RunClassLclFldScenario_Load();
            }

            // Validates passing an instance member of a class works
            test.RunClassFldScenario();

            if (Sse2.IsSupported)
            {
                // Validates passing an instance member of a class works, using pinning and Load
                test.RunClassFldScenario_Load();
            }

            // Validates passing the field of a local struct works
            test.RunStructLclFldScenario();

            if (Sse2.IsSupported)
            {
                // Validates passing the field of a local struct works, using pinning and Load
                test.RunStructLclFldScenario_Load();
            }

            // Validates passing an instance member of a struct works
            test.RunStructFldScenario();

            if (Sse2.IsSupported)
            {
                // Validates passing an instance member of a struct works, using pinning and Load
                test.RunStructFldScenario_Load();
            }
        }
        else
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
public sealed unsafe class SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble
{
// Owns pinned, over-allocated byte buffers so the test can hand the intrinsic
// both aligned and unaligned pointers to its operand/result data.
private struct DataTable
{
    // Backing storage; sized alignment * 2 so the payload can be realigned
    // to `alignment` anywhere inside the buffer.
    private byte[] inArray1;
    private byte[] inArray2;
    private byte[] outArray;

    // Pinned handles keep the pointers below stable across GCs; freed in Dispose.
    private GCHandle inHandle1;
    private GCHandle inHandle2;
    private GCHandle outHandle;

    private ulong alignment;

    public DataTable(Double[] inArray1, Double[] inArray2, Double[] outArray, int alignment)
    {
        int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Double>();
        int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Double>();
        int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Double>();

        // Only 16- or 32-byte alignment is supported, and each payload must
        // fit within the over-allocated buffer.
        if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
        {
            throw new ArgumentException("Invalid value of alignment");
        }

        this.inArray1 = new byte[alignment * 2];
        this.inArray2 = new byte[alignment * 2];
        this.outArray = new byte[alignment * 2];

        this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
        this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
        this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);

        this.alignment = (ulong)alignment;

        // Copy caller data into the aligned input buffers. The output array is
        // not copied: it only serves to size/validate the output buffer.
        Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Double, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
        Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Double, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
    }

    // Pointers to the aligned start of each pinned buffer.
    public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
    public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
    public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);

    public void Dispose()
    {
        inHandle1.Free();
        inHandle2.Free();
        outHandle.Free();
    }

    // Rounds `buffer` up to the next multiple of `expectedAlignment`
    // (which must be a power of two for this bit trick to be valid).
    private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
    {
        return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
    }
}
// Struct-typed operand pair used by the RunStructFldScenario* tests to verify
// the intrinsic works on fields of a struct (direct and via pinned pointers).
private struct TestStruct
{
    public Vector128<Double> _fld1;
    public Vector128<Double> _fld2;

    // Builds a TestStruct with both fields seeded from freshly generated data.
    public static TestStruct Create()
    {
        var testStruct = new TestStruct();

        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());

        return testStruct;
    }

    // Runs the intrinsic directly on the struct fields and validates the result.
    public void RunStructFldScenario(SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble testClass)
    {
        var result = Sse2.CompareScalarLessThanOrEqual(_fld1, _fld2);

        Unsafe.Write(testClass._dataTable.outArrayPtr, result);
        testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
    }

    // Same as above, but loads the operands through pinned pointers to the fields.
    public void RunStructFldScenario_Load(SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble testClass)
    {
        fixed (Vector128<Double>* pFld1 = &_fld1)
        fixed (Vector128<Double>* pFld2 = &_fld2)
        {
            var result = Sse2.CompareScalarLessThanOrEqual(
                Sse2.LoadVector128((Double*)(pFld1)),
                Sse2.LoadVector128((Double*)(pFld2))
            );

            Unsafe.Write(testClass._dataTable.outArrayPtr, result);
            testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
        }
    }
}
private static readonly int LargestVectorSize = 16;

// Element counts derived from the vector width (2 doubles per Vector128).
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);

// Shared scratch arrays used to seed all test inputs.
private static Double[] _data1 = new Double[Op1ElementCount];
private static Double[] _data2 = new Double[Op2ElementCount];

// Static operands for the RunClsVarScenario* tests.
private static Vector128<Double> _clsVar1;
private static Vector128<Double> _clsVar2;

// Instance operands for the RunClassFldScenario* tests.
private Vector128<Double> _fld1;
private Vector128<Double> _fld2;

// Pinned, aligned buffers backing the pointer-based scenarios.
private DataTable _dataTable;

// Seeds the static operands once for the whole test run.
static SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble()
{
    for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
    for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
}

// Seeds the instance fields and the pinned data table for this instance.
public SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble()
{
    Succeeded = true;

    for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
    for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());

    // The data table gets its own fresh values, independent of the fields above.
    for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
    for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
    _dataTable = new DataTable(_data1, _data2, new Double[RetElementCount], LargestVectorSize);
}

// True when SSE2 is available on the current hardware.
public bool IsSupported => Sse2.IsSupported;

// Set to false by any scenario that fails validation.
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

    // Read both operands out of the data table, run the intrinsic, then
    // write the result back and validate it against the inputs.
    var left = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
    var right = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
    var result = Sse2.CompareScalarLessThanOrEqual(left, right);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

    // Same as the UnsafeRead scenario, but loads the operands via LoadVector128.
    var left = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
    var right = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
    var result = Sse2.CompareScalarLessThanOrEqual(left, right);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

    // The data-table pointers are guaranteed aligned, so LoadAligned is safe here.
    var left = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
    var right = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
    var result = Sse2.CompareScalarLessThanOrEqual(left, right);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

    // Resolve the Vector128<double> overload and invoke it via reflection.
    var method = typeof(Sse2).GetMethod(nameof(Sse2.CompareScalarLessThanOrEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) });

    var result = method.Invoke(null, new object[] {
        Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
        Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
    });

    Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

    // Reflection invocation with operands fetched through LoadVector128.
    var method = typeof(Sse2).GetMethod(nameof(Sse2.CompareScalarLessThanOrEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) });

    var result = method.Invoke(null, new object[] {
        Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
        Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
    });

    Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

    // Reflection invocation with operands fetched through LoadAlignedVector128.
    var method = typeof(Sse2).GetMethod(nameof(Sse2.CompareScalarLessThanOrEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) });

    var result = method.Invoke(null, new object[] {
        Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
        Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
    });

    Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

    // Exercise the intrinsic with static (class-level) operands.
    var result = Sse2.CompareScalarLessThanOrEqual(_clsVar1, _clsVar2);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));

    // Pin the static operands so we can load them through raw pointers.
    fixed (Vector128<Double>* pLeft = &_clsVar1)
    fixed (Vector128<Double>* pRight = &_clsVar2)
    {
        var result = Sse2.CompareScalarLessThanOrEqual(
            Sse2.LoadVector128((Double*)(pLeft)),
            Sse2.LoadVector128((Double*)(pRight))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
    }
}
public void RunLclVarScenario_UnsafeRead()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

    // Operands held in locals, read with Unsafe.Read.
    var left = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
    var right = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
    var result = Sse2.CompareScalarLessThanOrEqual(left, right);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

    // Operands held in locals, fetched with LoadVector128.
    var left = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
    var right = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
    var result = Sse2.CompareScalarLessThanOrEqual(left, right);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

    // Operands held in locals, fetched with LoadAlignedVector128.
    var left = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
    var right = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
    var result = Sse2.CompareScalarLessThanOrEqual(left, right);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

    // Use the fields of a freshly constructed local instance as operands.
    var other = new SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble();
    var result = Sse2.CompareScalarLessThanOrEqual(other._fld1, other._fld2);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(other._fld1, other._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

    // Pin the fields of a local instance and load the operands via pointers.
    var other = new SimpleBinaryOpTest__CompareScalarLessThanOrEqualDouble();

    fixed (Vector128<Double>* pLeft = &other._fld1)
    fixed (Vector128<Double>* pRight = &other._fld2)
    {
        var result = Sse2.CompareScalarLessThanOrEqual(
            Sse2.LoadVector128((Double*)(pLeft)),
            Sse2.LoadVector128((Double*)(pRight))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(other._fld1, other._fld2, _dataTable.outArrayPtr);
    }
}
public void RunClassFldScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

    // Use this instance's own fields as the operands.
    var result = Sse2.CompareScalarLessThanOrEqual(_fld1, _fld2);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

    // Pin this instance's fields and load the operands through raw pointers.
    fixed (Vector128<Double>* pLeft = &_fld1)
    fixed (Vector128<Double>* pRight = &_fld2)
    {
        var result = Sse2.CompareScalarLessThanOrEqual(
            Sse2.LoadVector128((Double*)(pLeft)),
            Sse2.LoadVector128((Double*)(pRight))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
    }
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse2.CompareScalarLessThanOrEqual(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Sse2.CompareScalarLessThanOrEqual(
Sse2.LoadVector128((Double*)(&test._fld1)),
Sse2.LoadVector128((Double*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
// Copies the two vector operands and the raw result buffer into managed
// arrays, then defers to the element-wise validator below.
private void ValidateResult(Vector128<Double> op1, Vector128<Double> op2, void* result, [CallerMemberName] string method = "")
{
Double[] inArray1 = new Double[Op1ElementCount];
Double[] inArray2 = new Double[Op2ElementCount];
Double[] outArray = new Double[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Double>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
// Pointer-only overload: all three buffers are copied out of unmanaged
// memory before validation.
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Double[] inArray1 = new Double[Op1ElementCount];
Double[] inArray2 = new Double[Op2ElementCount];
Double[] outArray = new Double[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Double>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Double>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Double>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
// Checks the scalar-compare contract: element 0 of the result must be an
// all-ones bit pattern (-1 as Int64) when left[0] <= right[0], otherwise
// all-zeros; the remaining elements must be copied unchanged from `left`.
// Comparisons are done on raw bit patterns so NaN payloads are preserved.
private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (BitConverter.DoubleToInt64Bits(result[0]) != ((left[0] <= right[0]) ? -1 : 0))
{
succeeded = false;
}
else
{
// Upper elements pass through from the first operand.
for (var i = 1; i < RetElementCount; i++)
{
if (BitConverter.DoubleToInt64Bits(left[i]) != BitConverter.DoubleToInt64Bits(result[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Sse2)}.{nameof(Sse2.CompareScalarLessThanOrEqual)}<Double>(Vector128<Double>, Vector128<Double>): {method} failed:");
TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
using System;
using System.Net;
using System.Text;
using System.IO;
using System.Threading;
namespace Hydna.Net
{
/// <summary>
/// Http API client for pushing data and signals to a Hydna Endpoint.
/// </summary>
/// <example>
/// using Hydna.Net;
///
/// HttpApiClient client;
/// client = HttpApiClient.create("public.hydna.net");
/// client.send("Hello world");
/// </example>
public class HttpApiClient
{
// Hard upper bound for one payload (64 KiB - 1); enforced in BeginRequest.
static int PAYLOAD_MAX_SIZE = 0xFFFF;
// Endpoint every request is POSTed to.
Uri endPointUri;
// State of the single in-flight operation. The client supports only one
// concurrent I/O operation (see BeginRequest), so one slot suffices.
HttpWebRequest currentHttpRequest;
ProgressResult currentProgressResult;
IAsyncResult currentAsyncResult;
// Data requests push channel data; Signal requests add the X-Emit header.
enum RequestType { Data, Signal }
// Selects the Content-Type header: text/plain vs application/octet-stream.
enum ContentType { Text, Binary }
// Private: instances come from the create(...) factories, which validate
// the URL before construction.
private HttpApiClient (Uri endpoint)
{
endPointUri = endpoint;
currentHttpRequest = null;
currentProgressResult = null;
currentAsyncResult = null;
}
/// <summary>
/// Create a new HttpApiClient bound to the specified url. A scheme-less
/// url (e.g. "public.hydna.net") is treated as plain http.
/// </summary>
/// <param name="url">Url of the channel</param>
public static HttpApiClient create(string url)
{
    if (url == null)
    {
        throw new ArgumentNullException("url", "Expected an URL");
    }
    // Prepend a default http scheme when the caller supplied a bare host.
    Uri target = url.Contains("://")
        ? new Uri(url)
        : new UriBuilder("http://" + url).Uri;
    return create(target);
}
/// <summary>
/// Create a new HttpApiClient bound to the specified url.
/// </summary>
/// <param name="url">Url of the channel</param>
/// <exception cref="ArgumentNullException">url is null.</exception>
/// <exception cref="ArgumentException">Scheme is not http or https.</exception>
public static HttpApiClient create(Uri url)
{
    // BUG FIX: guard against null before dereferencing url.Scheme.
    if (url == null)
    {
        throw new ArgumentNullException("url", "Expected an URL");
    }
    if (url.Scheme != "http" && url.Scheme != "https")
    {
        // BUG FIX: a non-null url with a bad scheme is an invalid argument,
        // not a null one (was ArgumentNullException). ArgumentNullException
        // derives from ArgumentException, so callers that caught the base
        // type keep working.
        throw new ArgumentException("Unsupported Scheme", "url");
    }
    return new HttpApiClient(url);
}
/// <summary>
/// Send the specified buffer to the underlying Channel.
/// Blocks the calling thread until the asynchronous operation completes.
/// </summary>
/// <param name="buffer">An array of bytes</param>
public void Send(byte[] buffer) {
IAsyncResult result = BeginSend(buffer, null, null);
result.AsyncWaitHandle.WaitOne();
EndSend(result);
}
/// <summary>
/// Send the specified buffer to the underlying Channel.
/// Blocks the calling thread until the asynchronous operation completes.
/// </summary>
/// <param name="buffer">An array of bytes. This method copies count
/// bytes from buffer to the current stream.</param>
/// <param name="offset">The zero-based byte offset in buffer at which
/// to begin copying bytes to the current stream.</param>
/// <param name="count">The number of bytes to be written to the
/// current stream.</param>
public void Send(byte[] buffer, int offset, int count) {
IAsyncResult result = BeginSend(buffer, offset, count, null, null);
result.AsyncWaitHandle.WaitOne();
EndSend(result);
}
/// <summary>
/// Send the specified message to the underlying Channel.
/// Blocks the calling thread until the asynchronous operation completes.
/// </summary>
/// <param name="message">The message to send</param>
public void Send(String message) {
IAsyncResult result = BeginSend(message, null, null);
result.AsyncWaitHandle.WaitOne();
EndSend(result);
}
/// <summary>
/// Emit (signal) the specified buffer to the underlying Channel.
/// Blocks the calling thread until the asynchronous operation completes.
/// </summary>
/// <param name="buffer">An array of bytes</param>
public void Emit(byte[] buffer) {
IAsyncResult result = BeginEmit(buffer, null, null);
result.AsyncWaitHandle.WaitOne();
EndEmit(result);
}
/// <summary>
/// Emit (signal) the specified buffer to the underlying Channel.
/// Blocks the calling thread until the asynchronous operation completes.
/// </summary>
/// <param name="buffer">An array of bytes. This method copies count
/// bytes from buffer to the current stream.</param>
/// <param name="offset">The zero-based byte offset in buffer at which
/// to begin copying bytes to the current stream.</param>
/// <param name="count">The number of bytes to be written to the
/// current stream.</param>
public void Emit(byte[] buffer, int offset, int count) {
IAsyncResult result = BeginEmit(buffer, offset, count, null, null);
result.AsyncWaitHandle.WaitOne();
EndEmit(result);
}
/// <summary>
/// Emit (signal) the specified message to the underlying Channel.
/// Blocks the calling thread until the asynchronous operation completes.
/// </summary>
/// <param name="message">The message to send</param>
public void Emit(String message) {
IAsyncResult result = BeginEmit(message, null, null);
result.AsyncWaitHandle.WaitOne();
EndEmit(result);
}
/// <summary>
/// Send the specified data to the underlying Channel.
/// Convenience overload: sends the whole array.
/// </summary>
/// <returns>An IAsyncResult that represents the asynchronous write,
/// which could still be pending.</returns>
/// <param name="data">Data.</param>
/// <param name="callback">Callback.</param>
/// <param name="state">State.</param>
public IAsyncResult BeginSend(byte[] data,
AsyncCallback callback,
Object state)
{
if (data == null)
{
throw new ArgumentNullException("data");
}
if (data.Length == 0)
{
throw new ArgumentException("Cannot be zero-length", "data");
}
return BeginSend(data, 0, data.Length, callback, state);
}
/// <summary>
/// Send the specified buffer slice to the underlying Channel.
/// </summary>
/// <returns>An IAsyncResult that represents the asynchronous write,
/// which could still be pending.</returns>
/// <param name="buffer">An array of bytes. This method copies count
/// bytes from buffer to the current stream.</param>
/// <param name="offset">The zero-based byte offset in buffer at which
/// to begin copying bytes to the current stream.</param>
/// <param name="count">The number of bytes to be written to the
/// current stream.</param>
/// <param name="callback">Callback.</param>
/// <param name="state">State.</param>
public IAsyncResult BeginSend(byte[] buffer,
                              int offset,
                              int count,
                              AsyncCallback callback,
                              Object state)
{
    if (buffer == null)
    {
        // BUG FIX: the exception reported the wrong parameter name ("data").
        throw new ArgumentNullException("buffer");
    }
    if (count == 0)
    {
        throw new ArgumentException("Cannot be zero-length", "count");
    }
    // BUG FIX: also reject negative offset/count, which previously slipped
    // past the range check and failed later inside Stream.Write.
    if (offset < 0 || count < 0 || offset + count > buffer.Length)
    {
        throw new IndexOutOfRangeException();
    }
    return BeginRequest(buffer,
                        offset,
                        count,
                        ContentType.Binary,
                        RequestType.Data,
                        callback,
                        state);
}
/// <summary>
/// Begins sending a UTF-8 encoded text message to the specified channel.
/// </summary>
/// <returns>An IAsyncResult that represents the asynchronous write,
/// which could still be pending.</returns>
/// <param name="message">The message to send.</param>
/// <param name="callback">Callback.</param>
/// <param name="state">State.</param>
public IAsyncResult BeginSend(string message,
                              AsyncCallback callback,
                              Object state)
{
    if (message == null)
    {
        // BUG FIX: the parameter is named "message", not "data".
        throw new ArgumentNullException("message");
    }
    if (message.Length == 0)
    {
        throw new ArgumentException("Expected at least one char",
                                    "message");
    }
    byte[] data = Encoding.UTF8.GetBytes(message);
    return BeginRequest(data,
                        0,
                        data.Length,
                        ContentType.Text,
                        RequestType.Data,
                        callback,
                        state);
}
/// <summary>
/// Emit (signal) the specified data to the underlying Channel.
/// Convenience overload: emits the whole array.
/// </summary>
/// <returns>An IAsyncResult that represents the asynchronous write,
/// which could still be pending.</returns>
/// <param name="data">The data to send</param>
/// <param name="callback">Callback.</param>
/// <param name="state">State.</param>
public IAsyncResult BeginEmit(byte[] data,
AsyncCallback callback,
Object state)
{
if (data == null)
{
throw new ArgumentNullException("data");
}
if (data.Length == 0)
{
throw new ArgumentException("Cannot be zero-length", "data");
}
return BeginEmit(data, 0, data.Length, callback, state);
}
/// <summary>
/// Emit (signal) the specified buffer slice to the underlying Channel.
/// </summary>
/// <returns>An IAsyncResult that represents the asynchronous write,
/// which could still be pending.</returns>
/// <param name="buffer">An array of bytes. This method copies count
/// bytes from buffer to the current stream.</param>
/// <param name="offset">The zero-based byte offset in buffer at which
/// to begin copying bytes to the current stream.</param>
/// <param name="count">The number of bytes to be written to the
/// current stream.</param>
/// <param name="callback">Callback.</param>
/// <param name="state">State.</param>
public IAsyncResult BeginEmit(byte[] buffer,
                              int offset,
                              int count,
                              AsyncCallback callback,
                              Object state)
{
    if (buffer == null)
    {
        // BUG FIX: the exception reported the wrong parameter name ("data").
        throw new ArgumentNullException("buffer");
    }
    if (count == 0)
    {
        throw new ArgumentException("Cannot be zero-length", "count");
    }
    // BUG FIX: also reject negative offset/count, which previously slipped
    // past the range check and failed later inside Stream.Write.
    if (offset < 0 || count < 0 || offset + count > buffer.Length)
    {
        throw new IndexOutOfRangeException();
    }
    return BeginRequest(buffer,
                        offset,
                        count,
                        ContentType.Binary,
                        RequestType.Signal,
                        callback,
                        state);
}
/// <summary>
/// Begins emitting (signaling) a UTF-8 encoded text message to the channel.
/// </summary>
/// <returns>An IAsyncResult that represents the asynchronous write,
/// which could still be pending.</returns>
/// <param name="message">The message to emit.</param>
/// <param name="callback">Callback.</param>
/// <param name="state">State.</param>
public IAsyncResult BeginEmit(string message,
                              AsyncCallback callback,
                              Object state)
{
    if (message == null)
    {
        // BUG FIX: the parameter is named "message", not "data".
        throw new ArgumentNullException("message");
    }
    if (message.Length == 0)
    {
        throw new ArgumentException("Expected at least one char",
                                    "message");
    }
    byte[] data = Encoding.UTF8.GetBytes(message);
    return BeginRequest(data,
                        0,
                        data.Length,
                        ContentType.Text,
                        RequestType.Signal,
                        callback,
                        state);
}
/// <summary>
/// Ends an asynchronous Send operation.
/// </summary>
/// <param name="result">A reference to the outstanding asynchronous
/// I/O request.</param>
/// <exception cref="ArgumentException">result did not originate from a
/// BeginSend call on this client.</exception>
public void EndSend(IAsyncResult result)
{
    // BUG FIX: the original used a hard cast, so a foreign IAsyncResult
    // threw InvalidCastException before the null/type check could run,
    // making that check dead code. A soft cast restores the intended
    // ArgumentException path.
    ProgressResult progressResult = result as ProgressResult;
    if (progressResult == null ||
        progressResult.type != RequestType.Data)
    {
        throw new ArgumentException("Bad handler", "result");
    }
    EndRequest(progressResult);
}
/// <summary>
/// Ends an asynchronous Emit operation.
/// </summary>
/// <param name="result">A reference to the outstanding asynchronous
/// I/O request.</param>
/// <exception cref="ArgumentException">result did not originate from a
/// BeginEmit call on this client.</exception>
public void EndEmit(IAsyncResult result)
{
    // BUG FIX: soft cast for the same reason as EndSend above.
    ProgressResult progressResult = result as ProgressResult;
    if (progressResult == null ||
        progressResult.type != RequestType.Signal)
    {
        throw new ArgumentException("Bad handler", "result");
    }
    EndRequest(progressResult);
}
// Common implementation behind BeginSend/BeginEmit: builds the POST
// request, publishes the progress handle, then starts the async pipeline
// (RequestCallback -> ResponseCallback -> CompleteApiCall).
// Throws NotSupportedException when another operation is still pending and
// ArgumentException when count exceeds PAYLOAD_MAX_SIZE.
IAsyncResult BeginRequest(byte[] data,
                          int offset,
                          int count,
                          ContentType contentType,
                          RequestType requestType,
                          AsyncCallback userCallback,
                          object userState)
{
    if (currentProgressResult != null)
    {
        throw new NotSupportedException ("HttpApiClient does not " +
                                         "support concurrent I/O " +
                                         "operations.");
    }
    if (count > PAYLOAD_MAX_SIZE)
    {
        throw new ArgumentException("Payload exceedes max size");
    }
    currentHttpRequest = (HttpWebRequest) WebRequest.Create(endPointUri);
    currentHttpRequest.Method = "POST";
    currentHttpRequest.ContentType = (contentType == ContentType.Binary)
        ? "application/octet-stream"
        : "text/plain";
    if (requestType == RequestType.Signal)
    {
        // Signals are flagged with a custom header instead of a
        // different verb.
        currentHttpRequest.Headers.Add("X-Emit", "yes");
    }
    // BUG FIX: the progress result must be published BEFORE the async
    // request is started. BeginGetRequestStream may invoke
    // RequestCallback (synchronously or very quickly), and that callback
    // dereferences currentProgressResult — previously a race to a
    // NullReferenceException.
    currentProgressResult = new ProgressResult(requestType,
                                               data,
                                               offset,
                                               count,
                                               userCallback,
                                               userState);
    currentAsyncResult = currentHttpRequest.BeginGetRequestStream(
        new AsyncCallback(RequestCallback), null);
    return currentProgressResult;
}
// Completes a pending operation: clears the shared progress slot, waits out
// an aborted request, then rethrows any recorded failure as
// HttpApiException. Returns normally only on success.
void EndRequest(ProgressResult result)
{
ProgressResult progressResult = null;
Exception exception = null;
String denyMessage = null;
// Swap out the shared slot first so a new operation can start even if we
// throw below.
progressResult = currentProgressResult;
currentProgressResult = null;
// NOTE(review): `result` is validated only for null here; the code below
// operates on the slot contents, not on `result` itself — confirm they
// are always the same object (EndSend/EndEmit pass the slot's value).
if (result == null)
{
throw new ArgumentNullException("result");
}
if (progressResult == null)
{
throw new ArgumentException("A handle to the pending " +
"operation is not available.");
}
// Request still in flight: treat as cancelled and record the error on
// the progress result via CompleteApiCall.
if (currentAsyncResult != null &&
currentAsyncResult.IsCompleted == false)
{
exception = new WebException("Aborted",
WebExceptionStatus.RequestCanceled);
CompleteApiCall(exception, null);
}
denyMessage = progressResult.denyMessage;
exception = progressResult.throwedException;
// A deny message from the server takes precedence over the raw exception.
if (denyMessage != null)
{
throw new HttpApiException(denyMessage, true);
}
if (exception is WebException)
{
throw new HttpApiException((WebException) exception);
}
if (exception != null)
{
throw new HttpApiException(exception);
}
}
// Stage 1 of the async pipeline: obtains the request stream, writes the
// payload and starts waiting for the response. Any failure is routed to
// CompleteApiCall so the caller's wait handle is always released.
void RequestCallback(IAsyncResult result)
{
    Stream stream = null;
    try
    {
        stream = currentHttpRequest.EndGetRequestStream(result);
    }
    catch (Exception exception)
    {
        CompleteApiCall(exception, null);
        return;
    }
    try
    {
        // BUG FIX: the write itself can also fail (connection reset, etc.).
        // Previously the exception escaped on a thread-pool thread and the
        // stream leaked; now the failure is reported through CompleteApiCall
        // and the stream is always closed.
        using (stream)
        {
            stream.Write(currentProgressResult.data,
                         currentProgressResult.offset,
                         currentProgressResult.length);
        }
    }
    catch (Exception exception)
    {
        CompleteApiCall(exception, null);
        return;
    }
    currentAsyncResult = currentHttpRequest.BeginGetResponse(
        new AsyncCallback(ResponseCallback), null);
}
// Stage 2 of the async pipeline: finishes the HTTP exchange. A 400 response
// carrying a body is translated into a "deny" message; every path ends in
// CompleteApiCall so the waiting caller is released.
void ResponseCallback(IAsyncResult result)
{
    String denyMessage = null;
    currentAsyncResult = null;
    try
    {
        // BUG FIX: the WebResponse returned by EndGetResponse was never
        // closed, leaking the connection; dispose it immediately (the body
        // is not needed on success).
        using (WebResponse response = currentHttpRequest.EndGetResponse(result))
        {
        }
    }
    catch (WebException exception)
    {
        denyMessage = readOpenDeny((HttpWebResponse) exception.Response);
        CompleteApiCall(exception, denyMessage);
        return;
    }
    catch (Exception exception)
    {
        CompleteApiCall(exception, null);
        return;
    }
    CompleteApiCall(null, null);
}
// Final stage: tears down the HTTP request and signals the waiting caller.
// Safe to call from any pipeline stage; both fields may already be null.
void CompleteApiCall(Exception exception, String denyMessage)
{
if (currentHttpRequest != null)
{
// Abort is harmless on an already-finished request and guarantees a
// failed one releases its connection.
currentHttpRequest.Abort();
currentHttpRequest = null;
}
if (currentProgressResult != null)
{
// "Compelete" is a long-standing typo of Complete on ProgressResult;
// kept as-is since renaming would touch both classes.
currentProgressResult.Compelete(exception, denyMessage);
}
}
// Extracts a server "deny" message: the UTF-8 body of a 400 (Bad Request)
// response. Returns null when there is no such message or reading fails.
String readOpenDeny(HttpWebResponse response)
{
    // BUG FIX: a WebException without a response (e.g. DNS failure) passes
    // null here; the original dereferenced it.
    if (response == null ||
        response.StatusCode != HttpStatusCode.BadRequest ||
        response.ContentLength <= 0)
    {
        return null;
    }
    // TODO: Check that we are dealing with UTF8-data
    try
    {
        // BUG FIX: the response stream was never disposed, Stream.Length
        // can throw on non-seekable network streams, and a single Read()
        // is not guaranteed to fill the buffer. Size from ContentLength
        // and loop until EOF instead.
        using (Stream stream = response.GetResponseStream())
        {
            byte[] data = new byte[response.ContentLength];
            int total = 0;
            while (total < data.Length)
            {
                int read = stream.Read(data, total, data.Length - total);
                if (read <= 0)
                {
                    break;
                }
                total += read;
            }
            return Encoding.UTF8.GetString(data, 0, total);
        }
    }
    catch
    {
        // Best effort: an unreadable body simply yields no deny message.
        return null;
    }
}
// IAsyncResult implementation handed back by BeginSend/BeginEmit. Carries
// the outgoing payload plus the outcome (exception or deny message) that
// EndRequest later inspects.
class ProgressResult : IAsyncResult
{
// Failure recorded by the pipeline, if any ("throwed" sic).
internal Exception throwedException;
// Server deny message from a 400 response, if any.
internal String denyMessage;
internal RequestType type;
// Payload slice to write to the request stream.
internal byte[] data;
internal int offset;
internal int length;
internal Boolean completed;
AsyncCallback callback;
Object state;
// Signalled exactly once, by Compelete().
ManualResetEvent resetEvent;
internal ProgressResult(RequestType requestType,
byte[] userData,
int userOffset,
int userLength,
AsyncCallback userCallback,
Object userState)
{
type = requestType;
data = userData;
offset = userOffset;
length = userLength;
callback = userCallback;
state = userState;
resetEvent = new ManualResetEvent(false);
completed = false;
}
object IAsyncResult.AsyncState {
get {
return state;
}
}
System.Threading.WaitHandle IAsyncResult.AsyncWaitHandle {
get {
return resetEvent;
}
}
// NOTE(review): returns `completed`, i.e. "has completed at all", not
// "completed synchronously" as the IAsyncResult contract specifies —
// confirm no caller depends on the strict meaning.
bool IAsyncResult.CompletedSynchronously {
get {
return completed;
}
}
bool IAsyncResult.IsCompleted {
get {
return completed;
}
}
// Marks the operation finished (idempotent), releases waiters and runs
// the user callback. Name is a long-standing typo of "Complete".
internal void Compelete(Exception exception, String message) {
if (completed == true) {
return;
}
throwedException = exception;
denyMessage = message;
completed = true;
resetEvent.Set();
if (callback != null) {
callback.Invoke(this);
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.ServiceModel;
namespace System.Runtime
{
internal class MruCache<TKey, TValue> : IDisposable
where TKey : class
where TValue : class
{
// MRU order: most recently used key at the head, least recently used at
// the tail. _items maps each key to its value plus its list node.
private LinkedList<TKey> _mruList;
private Dictionary<TKey, CacheEntry> _items;
private readonly int _lowWatermark;
private readonly int _highWatermark;
// One-entry fast-path cache of the most recently touched entry (see
// TryGetValue); invalidated by Clear and Remove.
private CacheEntry _mruEntry;
// Low watermark defaults to 80% of the high watermark.
public MruCache(int watermark)
: this(watermark * 4 / 5, watermark)
{
}
//
// The cache will grow until the high watermark. At which point, the least recently used items
// will be purge until the cache's size is reduced to low watermark
//
public MruCache(int lowWatermark, int highWatermark)
: this(lowWatermark, highWatermark, null)
{
}
// Full constructor: comparer may be null, in which case keys are compared
// with the default equality comparer.
public MruCache(int lowWatermark, int highWatermark, IEqualityComparer<TKey> comparer)
{
    Fx.Assert(lowWatermark < highWatermark, "");
    Fx.Assert(lowWatermark >= 0, "");
    _lowWatermark = lowWatermark;
    _highWatermark = highWatermark;
    _mruList = new LinkedList<TKey>();
    // Dictionary's constructor documents that a null comparer falls back
    // to EqualityComparer<TKey>.Default, so both branches of the original
    // collapse into a single call.
    _items = new Dictionary<TKey, CacheEntry>(comparer);
}
// Number of entries currently cached; throws ObjectDisposedException after
// Dispose.
public int Count
{
get
{
ThrowIfDisposed();
return _items.Count;
}
}
// True once Dispose has run; most members throw afterwards.
public bool IsDisposed { get; private set; }
// Inserts a new entry as the most recently used. When the cache is at the
// high watermark, first ages out LRU entries down to the low watermark
// (invoking both removal hooks for each). Throws on duplicate keys; any
// failure clears the whole cache to keep list and dictionary in sync.
public void Add(TKey key, TValue value)
{
Fx.Assert(null != key, "");
ThrowIfDisposed();
// if anything goes wrong (duplicate entry, etc) we should
// clear our caches so that we don't get out of sync
bool success = false;
try
{
if (_items.Count == _highWatermark)
{
// If the cache is full, purge enough LRU items to shrink the
// cache down to the low watermark
int countToPurge = _highWatermark - _lowWatermark;
for (int i = 0; i < countToPurge; i++)
{
TKey keyRemove = _mruList.Last.Value;
_mruList.RemoveLast();
TValue item = _items[keyRemove].value;
_items.Remove(keyRemove);
OnSingleItemRemoved(item);
OnItemAgedOutOfCache(item);
}
}
// Add the new entry to the cache and make it the MRU element
CacheEntry entry;
entry.node = _mruList.AddFirst(key);
entry.value = value;
_items.Add(key, entry);
_mruEntry = entry;
success = true;
}
finally
{
if (!success)
{
Clear();
}
}
}
// Empties the cache without disposing cached values.
public void Clear()
{
ThrowIfDisposed();
Clear(false);
}
// Core clear. When dispose is true, also disposes every cached value that
// implements IDisposable, swallowing non-fatal exceptions so one bad item
// cannot block teardown of the rest.
private void Clear(bool dispose)
{
_mruList.Clear();
if (dispose)
{
foreach (CacheEntry cacheEntry in _items.Values)
{
var item = cacheEntry.value as IDisposable;
if (item != null)
{
try
{
item.Dispose();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
}
}
}
}
_items.Clear();
// Invalidate the one-entry fast-path cache.
_mruEntry.value = null;
_mruEntry.node = null;
}
// Removes the entry for key, firing OnSingleItemRemoved. Returns false when
// the key is not cached. Also invalidates the fast-path cache if it pointed
// at the removed node.
public bool Remove(TKey key)
{
Fx.Assert(null != key, "");
ThrowIfDisposed();
CacheEntry entry;
if (_items.TryGetValue(key, out entry))
{
_items.Remove(key);
OnSingleItemRemoved(entry.value);
_mruList.Remove(entry.node);
if (object.ReferenceEquals(_mruEntry.node, entry.node))
{
_mruEntry.value = null;
_mruEntry.node = null;
}
return true;
}
return false;
}
// Hook: called for every item leaving the cache (Remove or age-out).
protected virtual void OnSingleItemRemoved(TValue item)
{
ThrowIfDisposed();
}
// Hook: called only when an item is purged by the watermark logic in Add.
protected virtual void OnItemAgedOutOfCache(TValue item)
{
ThrowIfDisposed();
}
//
// If found, make the entry most recently used
//
public bool TryGetValue(TKey key, out TValue value)
{
    // Fast path: a repeat hit on the most recently touched entry avoids
    // the dictionary lookup entirely.
    if (_mruEntry.node != null && key != null && key.Equals(_mruEntry.node.Value))
    {
        value = _mruEntry.value;
        return true;
    }
    CacheEntry entry;
    bool found = _items.TryGetValue(key, out entry);
    value = entry.value;
    if (found)
    {
        // Move the node to the head of the MRU list if it's not already there
        if (_mruList.Count > 1
            && !object.ReferenceEquals(_mruList.First, entry.node))
        {
            _mruList.Remove(entry.node);
            _mruList.AddFirst(entry.node);
        }
        // PERF FIX: refresh the fast-path cache on every hit. The original
        // only did so after a move, so repeated lookups of an entry that
        // was already at the head never populated the fast path.
        _mruEntry = entry;
    }
    return found;
}
// Disposes the cache and every cached IDisposable value.
public void Dispose()
{
    Dispose(true);
    // Standard dispose pattern (CA1816): stops a derived type's finalizer
    // from running after an explicit Dispose.
    GC.SuppressFinalize(this);
}
// Idempotent: subsequent calls are no-ops once IsDisposed is set.
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        if (!IsDisposed)
        {
            IsDisposed = true;
            // Clear(true) disposes cached values, swallowing non-fatal
            // exceptions.
            Clear(true);
        }
    }
}
// Guard used by every public member after disposal.
private void ThrowIfDisposed()
{
    if (IsDisposed)
    {
        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ObjectDisposedException(GetType().FullName));
    }
}
// Pairs a cached value with its node in _mruList so Remove/TryGetValue can
// splice the list without an extra lookup.
private struct CacheEntry
{
    internal TValue value;
    internal LinkedListNode<TKey> node;
}
}
}
| |
using System;
using System.Collections.Generic;
using UnityEngine;
using DarkMultiPlayerCommon;
using MessageStream2;
namespace DarkMultiPlayer
{
public class TimeSyncer
{
// True once this client is locked to a subspace (see LockSubspace).
public bool locked
{
get;
private set;
}
// True once enough clock samples have been collected (see HandleSyncTime).
public bool synced
{
get;
private set;
}
// Toggled externally; gates all work in FixedUpdate.
public bool workerEnabled;
// Current subspace id, or -1 when not in any subspace.
public int currentSubspace
{
get;
private set;
}
// Averaged client->server clock offset, in ticks (100 ns units).
public long clockOffsetAverage
{
get;
private set;
}
// Averaged round-trip latency minus server processing time, in ticks.
public long networkLatencyAverage
{
get;
private set;
}
// Server-side processing time of the last sync message, in ticks.
public long serverLag
{
get;
private set;
}
// Averaged rate we believe we can run at (see SkewClock).
public float requestedRate
{
get;
private set;
}
// Error (seconds) beyond which the clock is stepped instead of skewed.
private const float MAX_CLOCK_SKEW = 5f;
private const float MAX_SUBSPACE_RATE = 1f;
private const float MIN_SUBSPACE_RATE = 0.3f;
// Clamp range for Time.timeScale when skewing.
private const float MIN_CLOCK_RATE = 0.3f;
private const float MAX_CLOCK_RATE = 1.5f;
// Seconds between periodic server time syncs.
private const float SYNC_TIME_INTERVAL = 30f;
// Seconds between clock skew/step adjustments.
private const float CLOCK_SET_INTERVAL = .1f;
// Samples needed before declaring the clock synced / max samples kept.
private const int SYNC_TIME_VALID = 4;
private const int SYNC_TIME_MAX = 10;
private float lastSyncTime = 0f;
private float lastClockSkew = 0f;
private List<long> clockOffset = new List<long>();
private List<long> networkLatency = new List<long>();
private List<float> requestedRatesList = new List<float>();
private Dictionary<int, Subspace> subspaces = new Dictionary<int, Subspace>();
public static TimeSyncer singleton;
public TimeSyncer()
{
currentSubspace = -1;
requestedRate = 1f;
}
// Singleton accessor; instance is (re)created by Reset().
public static TimeSyncer fetch
{
get
{
return singleton;
}
}
// Driven from Client.fixedUpdateEvent (see Reset). While enabled and time-
// synced: periodically requests a server time sync, locks to the pending
// subspace once one is selected, and keeps the local clock in step.
public void FixedUpdate()
{
if (workerEnabled)
{
if (synced)
{
// Re-sync with the server every SYNC_TIME_INTERVAL seconds.
if ((UnityEngine.Time.realtimeSinceStartup - lastSyncTime) > SYNC_TIME_INTERVAL)
{
lastSyncTime = UnityEngine.Time.realtimeSinceStartup;
NetworkWorker.fetch.SendTimeSync();
}
// A subspace was selected but not yet locked; lock it now.
if (!locked && currentSubspace != -1)
{
LockSubspace(currentSubspace);
}
if (locked)
{
if (WarpWorker.fetch.warpMode == WarpMode.SUBSPACE)
{
VesselWorker.fetch.DetectReverting();
}
//Set the universe time here
SyncTime();
}
}
}
}
//Skew or set the clock
// Runs every CLOCK_SET_INTERVAL: steps the clock when the error exceeds
// MAX_CLOCK_SKEW, otherwise gently skews Time.timeScale toward the
// subspace time. Bails out while a scene/vessel is still loading.
private void SyncTime()
{
// Requested-rate feedback only applies in flight; reset it elsewhere.
if (HighLogic.LoadedScene != GameScenes.FLIGHT)
{
if (requestedRatesList.Count > 0)
{
requestedRatesList.Clear();
requestedRate = 1f;
}
}
// Give a freshly loaded scene a second to settle.
if (Time.timeSinceLevelLoad < 1f)
{
return;
}
if (HighLogic.LoadedScene == GameScenes.FLIGHT)
{
if (!FlightGlobals.ready)
{
return;
}
if (FlightGlobals.fetch.activeVessel == null)
{
return;
}
}
if ((UnityEngine.Time.realtimeSinceStartup - lastClockSkew) > CLOCK_SET_INTERVAL)
{
lastClockSkew = UnityEngine.Time.realtimeSinceStartup;
if (CanSyncTime())
{
double targetTime = GetUniverseTime();
double currentError = GetCurrentError();
// Large error: hard step. Small error: smooth skew.
if (Math.Abs(currentError) > MAX_CLOCK_SKEW)
{
StepClock(targetTime);
}
else
{
SkewClock(currentError);
}
}
else
{
Time.timeScale = 1f;
}
}
}
// Hard-sets the universal time. In flight this first forces vessels onto
// rails (packed) so the time jump does not tear physics apart; the active
// vessel is only packed when SafeToStepClock says the jump is safe for it.
private void StepClock(double targetTick)
{
if (HighLogic.LoadedScene == GameScenes.LOADING)
{
DarkLog.Debug("Skipping StepClock in loading screen");
return;
}
if (HighLogic.LoadedSceneIsFlight)
{
if (FlightGlobals.fetch.activeVessel == null || !FlightGlobals.ready)
{
DarkLog.Debug("Skipping StepClock (active vessel is null or not ready)");
return;
}
try
{
// Keep nearby vessels packed for a few frames around the jump.
OrbitPhysicsManager.HoldVesselUnpack(5);
}
catch
{
DarkLog.Debug("Failed to hold vessel unpack");
return;
}
foreach (Vessel v in FlightGlobals.fetch.vessels)
{
if (!v.packed)
{
if (v != FlightGlobals.fetch.activeVessel)
{
try
{
v.GoOnRails();
}
catch
{
DarkLog.Debug("Error packing vessel " + v.id.ToString());
}
}
if (v == FlightGlobals.fetch.activeVessel)
{
if (SafeToStepClock(v, targetTick))
{
try
{
v.GoOnRails();
}
catch
{
DarkLog.Debug("Error packing active vessel " + v.id.ToString());
}
}
}
}
}
}
Planetarium.SetUniversalTime(targetTick);
}
// A clock step is safe for a vessel when it is (nearly) stationary on the
// ground, on rails in a stable orbit, or sub-orbital but comfortably above
// the surface at both the current and target time.
private bool SafeToStepClock(Vessel checkVessel, double targetTick)
{
switch (checkVessel.situation)
{
case Vessel.Situations.LANDED:
case Vessel.Situations.PRELAUNCH:
case Vessel.Situations.SPLASHED:
return (checkVessel.srf_velocity.magnitude < 2);
case Vessel.Situations.ORBITING:
case Vessel.Situations.ESCAPING:
return true;
case Vessel.Situations.SUB_ORBITAL:
// NOTE(review): getRelativePositionAtUT magnitude is a radial
// distance from the body centre, compared against Radius+10km —
// confirm the intended altitude margin.
double altitudeAtUT = checkVessel.orbit.getRelativePositionAtUT(targetTick).magnitude;
return (altitudeAtUT > checkVessel.mainBody.Radius + 10000 && checkVessel.altitude > 10000);
default :
return false;
}
}
// Smoothly corrects small clock error by scaling Time.timeScale: positive
// error (ahead of the subspace) slows the game, negative speeds it up, via
// 2^-error clamped to [MIN_CLOCK_RATE, MAX_CLOCK_RATE]. Also feeds the
// averaged "requested rate" reported back to the server.
private void SkewClock(double currentError)
{
float timeWarpRate = (float)Math.Pow(2, -currentError);
if (timeWarpRate > MAX_CLOCK_RATE)
{
timeWarpRate = MAX_CLOCK_RATE;
}
if (timeWarpRate < MIN_CLOCK_RATE)
{
timeWarpRate = MIN_CLOCK_RATE;
}
//Request how fast we *think* we can run (The reciporical of the current warp rate)
float tempRequestedRate = subspaces[currentSubspace].subspaceSpeed * (1 / timeWarpRate);
if (tempRequestedRate > MAX_SUBSPACE_RATE)
{
tempRequestedRate = MAX_SUBSPACE_RATE;
}
requestedRatesList.Add(tempRequestedRate);
//Delete entries if there are too many
while (requestedRatesList.Count > 50)
{
requestedRatesList.RemoveAt(0);
}
//Set the average requested rate
float requestedRateTotal = 0f;
foreach (float requestedRateEntry in requestedRatesList)
{
requestedRateTotal += requestedRateEntry;
}
requestedRate = requestedRateTotal / requestedRatesList.Count;
//Set the physwarp rate
Time.timeScale = timeWarpRate;
}
// NOTE(review): not referenced anywhere within this class — possibly dead
// code; confirm before removing.
private bool SituationIsGrounded(Vessel.Situations situation)
{
switch (situation)
{
case Vessel.Situations.LANDED:
case Vessel.Situations.PRELAUNCH:
case Vessel.Situations.SPLASHED:
return true;
}
return false;
}
// Registers a new subspace under the next free id (highest existing + 1)
// and returns that id.
public int LockNewSubspace(long serverTime, double planetariumTime, float subspaceSpeed)
{
int highestSubpaceID = 0;
foreach (int subspaceID in subspaces.Keys)
{
if (subspaceID > highestSubpaceID)
{
highestSubpaceID = subspaceID;
}
}
LockNewSubspace((highestSubpaceID + 1), serverTime, planetariumTime, subspaceSpeed);
return (highestSubpaceID + 1);
}
// Registers a subspace under an explicit id; a duplicate id is ignored
// (existing definition wins).
public void LockNewSubspace(int subspaceID, long serverTime, double planetariumTime, float subspaceSpeed)
{
if (!subspaces.ContainsKey(subspaceID))
{
Subspace newSubspace = new Subspace();
newSubspace.serverClock = serverTime;
newSubspace.planetTime = planetariumTime;
newSubspace.subspaceSpeed = subspaceSpeed;
subspaces.Add(subspaceID, newSubspace);
}
DarkLog.Debug("Subspace " + subspaceID + " locked to server, time: " + planetariumTime);
}
// Creates a subspace anchored at the current universe time, announces it to
// the server (NEW_SUBSPACE), then locks to it.
public void LockNewSubspaceToCurrentTime()
{
TimeSyncer.fetch.UnlockSubspace();
long serverClock = TimeSyncer.fetch.GetServerClock();
double universeTime = Planetarium.GetUniversalTime();
int newSubspace = TimeSyncer.fetch.LockNewSubspace(serverClock, universeTime, requestedRate);
using (MessageWriter mw = new MessageWriter())
{
mw.Write<int>((int)WarpMessageType.NEW_SUBSPACE);
mw.Write<string>(Settings.fetch.playerName);
mw.Write<int>(newSubspace);
mw.Write<long>(serverClock);
mw.Write<double>(universeTime);
mw.Write<float>(requestedRate);
NetworkWorker.fetch.SendWarpMessage(mw.GetMessageBytes());
}
TimeSyncer.fetch.LockSubspace(newSubspace);
}
// Locks to a known subspace (stops warp, announces CHANGE_SUBSPACE).
// NOTE(review): currentSubspace is set even for an unknown id while
// `locked` keeps its previous value — if it was true, SkewClock can later
// index a missing subspace. Confirm unknown ids cannot reach this path.
public void LockSubspace(int subspaceID)
{
if (subspaces.ContainsKey(subspaceID))
{
TimeWarp.SetRate(0, true);
locked = true;
DarkLog.Debug("Locked to subspace " + subspaceID + ", time: " + GetUniverseTime());
using (MessageWriter mw = new MessageWriter())
{
mw.Write<int>((int)WarpMessageType.CHANGE_SUBSPACE);
mw.Write<string>(Settings.fetch.playerName);
mw.Write<int>(subspaceID);
NetworkWorker.fetch.SendWarpMessage(mw.GetMessageBytes());
}
}
currentSubspace = subspaceID;
}
// Leaves the current subspace (-1), restores normal time scale and
// announces the change to the server.
public void UnlockSubspace()
{
currentSubspace = -1;
locked = false;
Time.timeScale = 1f;
using (MessageWriter mw = new MessageWriter())
{
mw.Write<int>((int)WarpMessageType.CHANGE_SUBSPACE);
mw.Write<string>(Settings.fetch.playerName);
mw.Write<int>(currentSubspace);
NetworkWorker.fetch.SendWarpMessage(mw.GetMessageBytes());
}
}
// Overwrites the anchor of an existing subspace (server-driven update);
// logs and ignores unknown ids.
public void RelockSubspace(int subspaceID, long serverClock, double planetTime, float subspaceSpeed)
{
if (subspaces.ContainsKey(subspaceID))
{
subspaces[subspaceID].serverClock = serverClock;
subspaces[subspaceID].planetTime = planetTime;
subspaces[subspaceID].subspaceSpeed = subspaceSpeed;
}
else
{
DarkLog.Debug("Failed to relock non-existant subspace!");
}
}
// Estimated current server clock in ticks (local UTC + averaged offset);
// 0 until synced.
public long GetServerClock()
{
if (synced)
{
return DateTime.UtcNow.Ticks + clockOffsetAverage;
}
return 0;
}
// Target universe time of the current subspace; 0 unless synced and locked.
public double GetUniverseTime()
{
if (synced && locked && (currentSubspace != -1))
{
return GetUniverseTime(currentSubspace);
}
return 0;
}
// Target universe time of a specific subspace: anchor planetTime plus the
// real time elapsed since the anchor, scaled by the subspace speed.
public double GetUniverseTime(int subspace)
{
if (subspaces.ContainsKey(subspace))
{
long realTimeSinceLock = GetServerClock() - subspaces[subspace].serverClock;
// Ticks are 100 ns, so divide by 1e7 for seconds.
double realTimeSinceLockSeconds = realTimeSinceLock / 10000000d;
double adjustedTimeSinceLockSeconds = realTimeSinceLockSeconds * subspaces[subspace].subspaceSpeed;
return subspaces[subspace].planetTime + adjustedTimeSinceLockSeconds;
}
else
{
return 0;
}
}
// Signed error in seconds between the local universe clock and the
// subspace target (positive = local clock is ahead).
public double GetCurrentError()
{
if (synced && locked)
{
double currentTime = Planetarium.GetUniversalTime();
double targetTime = GetUniverseTime();
return (currentTime - targetTime);
}
return 0;
}
// Returns a defensive copy of a subspace definition (zeroed when unknown).
public Subspace GetSubspace(int subspaceID)
{
Subspace ss = new Subspace();
if (subspaces.ContainsKey(subspaceID))
{
ss.serverClock = subspaces[subspaceID].serverClock;
ss.planetTime = subspaces[subspaceID].planetTime;
ss.subspaceSpeed = subspaces[subspaceID].subspaceSpeed;
}
return ss;
}
// Clock adjustment is only meaningful in these scenes.
private bool CanSyncTime()
{
bool canSync;
switch (HighLogic.LoadedScene)
{
case GameScenes.TRACKSTATION:
case GameScenes.FLIGHT:
case GameScenes.SPACECENTER:
canSync = true;
break;
default :
canSync = false;
break;
}
return canSync;
}
// Processes one NTP-style time sample. All four timestamps are ticks:
// latency = round trip minus server processing; offset = average of the
// two one-way deltas. Keeps rolling windows of SYNC_TIME_MAX samples and
// flags `synced` after SYNC_TIME_VALID samples have accumulated.
public void HandleSyncTime(long clientSend, long serverReceive, long serverSend)
{
long clientReceive = DateTime.UtcNow.Ticks;
long clientLatency = (clientReceive - clientSend) - (serverSend - serverReceive);
long clientOffset = ((serverReceive - clientSend) + (serverSend - clientReceive)) / 2;
clockOffset.Add(clientOffset);
networkLatency.Add(clientLatency);
serverLag = serverSend - serverReceive;
if (clockOffset.Count > SYNC_TIME_MAX)
{
clockOffset.RemoveAt(0);
}
if (networkLatency.Count > SYNC_TIME_MAX)
{
networkLatency.RemoveAt(0);
}
long clockOffsetTotal = 0;
//Calculate the average for the offset and latency.
foreach (long currentOffset in clockOffset)
{
clockOffsetTotal += currentOffset;
}
clockOffsetAverage = clockOffsetTotal / clockOffset.Count;
long networkLatencyTotal = 0;
foreach (long currentLatency in networkLatency)
{
networkLatencyTotal += currentLatency;
}
networkLatencyAverage = networkLatencyTotal / networkLatency.Count;
//Check if we are now synced
if ((clockOffset.Count > SYNC_TIME_VALID) && !synced)
{
synced = true;
// 10000 ticks per millisecond.
float clockOffsetAverageMs = clockOffsetAverage / 10000f;
float networkLatencyMs = networkLatencyAverage / 10000f;
DarkLog.Debug("Initial clock syncronized, offset " + clockOffsetAverageMs + "ms, latency " + networkLatencyMs + "ms");
}
//Ask for another time sync if we aren't synced yet.
if (!synced)
{
lastSyncTime = UnityEngine.Time.realtimeSinceStartup;
NetworkWorker.fetch.SendTimeSync();
}
}
// Tears down the current singleton (if any) and installs a fresh one,
// re-registering its FixedUpdate with the client loop.
public static void Reset()
{
lock (Client.eventLock)
{
if (singleton != null)
{
singleton.workerEnabled = false;
Client.fixedUpdateEvent.Remove(singleton.FixedUpdate);
Time.timeScale = 1f;
}
singleton = new TimeSyncer();
Client.fixedUpdateEvent.Add(singleton.FixedUpdate);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Runtime.Remoting;
using System.Runtime.Remoting.Lifetime;
using System.Security.Permissions;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace LifeTimeService
{
    // Demo harness: repeatedly creates remoted objects in a second AppDomain and
    // deliberately "forgets" to dispose them, to show how ProxyHolder's finalizer
    // (or lease expiry with a sponsor, in the disabled branch) cleans them up.
    internal class Program
    {
        static void Main(string[] args)
        {
            AppDomain ad = AppDomain.CreateDomain("OtherDomain");
            // Loop forever so the instance-count debug output can be observed.
            while (true)
            {
                using (var sponsor = new MySponsor())
                {
                    for (int i = 0; i < 30; i++)
                    {
#if true
                        // Branch 1: infinite-lease RemoteObject wrapped in a ProxyHolder,
                        // whose finalizer eventually disposes (disconnects) the remote side.
                        var ro = ProxyHolder.Create((RemoteObject)ad.CreateInstanceAndUnwrap(
                            Assembly.GetExecutingAssembly().FullName,
                            typeof(RemoteObject).FullName));
#else
                        // Branch 2: short-lease object kept alive only by the sponsor.
                        var ro = (LeaseManagedRemoteObject)ad.CreateInstanceAndUnwrap(
                            Assembly.GetExecutingAssembly().FullName,
                            typeof(LeaseManagedRemoteObject).FullName);
                        sponsor.Register((ILease)ro.GetLifetimeService());
#endif
                        // Oops, forgot to dispose it!
                        // ro.Dispose();
                        Thread.Sleep(100);
                    }
                }
            }
        }
    }
internal static class ProxyHolder
{
public static ProxyHolder<T> Create<T>(T proxy)
where T : IDisposable
{
return new ProxyHolder<T>(proxy);
}
}
// Disposes the holding proxy object in its finalizer.
internal class ProxyHolder<T> : IDisposable
where T : IDisposable
{
public T Proxy { get; }
public ProxyHolder(T proxy)
{
this.Proxy = proxy;
}
#region IDisposable Support
protected virtual void Dispose(bool disposing)
{
Proxy.Dispose();
}
~ProxyHolder()
{
Dispose(false);
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
#endregion
}
    // MBRO with an infinite lease: InitializeLifetimeService returns null, so the
    // remote instance stays alive until Dispose explicitly disconnects it.
    internal class RemoteObject : MarshalByRefObject, IDisposable
    {
        // Count of live (not yet disposed) instances, for the debug output.
        private static int InstanceCount;
        // Add some memory pressure.
        private readonly List<int>[] _bigArray = Enumerable.Range(0, 1024).Select(_ => new List<int>(1024)).ToArray();
        private bool _disposed = false;
        public RemoteObject()
        {
            Interlocked.Increment(ref InstanceCount);
            Debug.WriteLine($"RemoteObject.ctor: {InstanceCount} instances remain.");
        }
        #region IDisposable Support
        protected virtual void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                // Disconnecting removes the remoting infrastructure's reference,
                // making the instance collectible again.
                RemotingServices.Disconnect(this);
                _disposed = true;
                Interlocked.Decrement(ref InstanceCount);
                Debug.WriteLine($"RemoteObject.Dispose: {InstanceCount} instances remain.");
            }
        }
        public void Dispose()
        {
            // No finalizer on this class, so no GC.SuppressFinalize needed here.
            Dispose(true);
        }
        #endregion
        [SecurityPermission(SecurityAction.Demand, Flags = SecurityPermissionFlag.Infrastructure)]
        public override object InitializeLifetimeService()
        {
            // Infinite lifetime.
            // This instance will never be GC'ed unless RemotingServices.Disconnect(this) explicitly called.
            return null;
        }
    }
internal class LeaseManagedRemoteObject : MarshalByRefObject, IDisposable
{
private static int InstanceCount;
// Add some memory pressure.
private readonly List<int>[] _bigArray = Enumerable.Range(0, 1024).Select(_ => new List<int>(1024)).ToArray();
private bool _disposed = false;
public LeaseManagedRemoteObject()
{
Interlocked.Increment(ref InstanceCount);
Debug.WriteLine($"RemoteObject.ctor: {InstanceCount} instances remain.");
}
#region IDisposable Support
protected virtual void Dispose(bool disposing)
{
if (!_disposed)
{
RemotingServices.Disconnect(this);
_disposed = true;
Interlocked.Decrement(ref InstanceCount);
Debug.WriteLine($"RemoteObject.Dispose: {InstanceCount} instances remain.");
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
#endregion
// Just for logging the instance count.
~LeaseManagedRemoteObject()
{
Interlocked.Decrement(ref InstanceCount);
Debug.WriteLine($"RemoteObject.Dispose: {InstanceCount} instances remain.");
}
[SecurityPermission(SecurityAction.Demand, Flags = SecurityPermissionFlag.Infrastructure)]
public override object InitializeLifetimeService()
{
// This MBRO instance will not be GC'ed until this lease expires because this lease holds a reference to this MBRO instance.
var lease = (ILease)base.InitializeLifetimeService();
if (lease.CurrentState == LeaseState.Initial)
{
lease.InitialLeaseTime = TimeSpan.FromSeconds(1);
lease.SponsorshipTimeout = TimeSpan.FromSeconds(30);
lease.RenewOnCallTime = TimeSpan.FromSeconds(1);
}
return lease;
}
}
    // Sponsor that indefinitely renews every lease registered with it until it is
    // disposed; disposing unregisters the leases so the remote objects can lapse.
    internal class MySponsor : MarshalByRefObject, IDisposable, ISponsor
    {
        private static int CurrentId;
        private bool _disposed = false;
        // Per-instance ID, used only in the debug output.
        private int _id;
        // Leases currently kept alive; guarded by lock(_registeredLeases).
        private List<ILease> _registeredLeases = new List<ILease>();
        public MySponsor()
        {
            this._id = Interlocked.Increment(ref CurrentId);
        }
        #region IDisposable Support
        protected virtual void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                // Stop being reachable via remoting, then stop renewing leases.
                RemotingServices.Disconnect(this);
                UnregisterAllLeases();
                _disposed = true;
            }
        }
        ~MySponsor()
        {
            Dispose(false);
        }
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }
        #endregion
        [SecurityPermission(SecurityAction.Demand, Flags = SecurityPermissionFlag.Infrastructure)]
        public override object InitializeLifetimeService()
        {
            // The sponsor itself gets an infinite lifetime.
            return null;
        }
        public TimeSpan Renewal(ILease lease)
        {
            Debug.WriteLine($"MySponsor[{_id}].Renewal");
            // Always renew by the lease's initial time, i.e. keep it alive indefinitely.
            return lease.InitialLeaseTime;
        }
        public void Register(ILease lease)
        {
            lease.Register(this);
            lock (_registeredLeases)
            {
                _registeredLeases.Add(lease);
            }
        }
        private void UnregisterAllLeases()
        {
            lock (_registeredLeases)
            {
                foreach (var lease in _registeredLeases)
                {
                    try
                    {
                        lease.Unregister(this);
                    }
                    catch (RemotingException)
                    {
                        // Okay. Already disconnected.
                    }
                }
                _registeredLeases.Clear();
            }
        }
    }
}
| |
/*
* Copyright (c) 2006-2011 Erin Catto http://www.box2d.org
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
// Linear constraint (point-to-line)
// d = p2 - p1 = x2 + r2 - x1 - r1
// C = dot(perp, d)
// Cdot = dot(d, cross(w1, perp)) + dot(perp, v2 + cross(w2, r2) - v1 - cross(w1, r1))
// = -dot(perp, v1) - dot(cross(d + r1, perp), w1) + dot(perp, v2) + dot(cross(r2, perp), v2)
// J = [-perp, -cross(d + r1, perp), perp, cross(r2,perp)]
//
// Angular constraint
// C = a2 - a1 + a_initial
// Cdot = w2 - w1
// J = [0 0 -1 0 0 1]
//
// K = J * invM * JT
//
// J = [-a -s1 a s2]
// [0 -1 0 1]
// a = perp
// s1 = cross(d + r1, a) = cross(p2 - x1, a)
// s2 = cross(r2, a) = cross(p2 - x2, a)
// Motor/Limit linear constraint
// C = dot(ax1, d)
// Cdot = -dot(ax1, v1) - dot(cross(d + r1, ax1), w1) + dot(ax1, v2) + dot(cross(r2, ax1), v2)
// J = [-ax1 -cross(d+r1,ax1) ax1 cross(r2,ax1)]
// Block Solver
// We develop a block solver that includes the joint limit. This makes the limit stiff (inelastic) even
// when the mass has poor distribution (leading to large torques about the joint anchor points).
//
// The Jacobian has 3 rows:
// J = [-uT -s1 uT s2] // linear
// [0 -1 0 1] // angular
// [-vT -a1 vT a2] // limit
//
// u = perp
// v = axis
// s1 = cross(d + r1, u), s2 = cross(r2, u)
// a1 = cross(d + r1, v), a2 = cross(r2, v)
// M * (v2 - v1) = JT * df
// J * v2 = bias
//
// v2 = v1 + invM * JT * df
// J * (v1 + invM * JT * df) = bias
// K * df = bias - J * v1 = -Cdot
// K = J * invM * JT
// Cdot = J * v1 - bias
//
// Now solve for f2.
// df = f2 - f1
// K * (f2 - f1) = -Cdot
// f2 = invK * (-Cdot) + f1
//
// Clamp accumulated limit impulse.
// lower: f2(3) = max(f2(3), 0)
// upper: f2(3) = min(f2(3), 0)
//
// Solve for correct f2(1:2)
// K(1:2, 1:2) * f2(1:2) = -Cdot(1:2) - K(1:2,3) * f2(3) + K(1:2,1:3) * f1
// = -Cdot(1:2) - K(1:2,3) * f2(3) + K(1:2,1:2) * f1(1:2) + K(1:2,3) * f1(3)
// K(1:2, 1:2) * f2(1:2) = -Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3)) + K(1:2,1:2) * f1(1:2)
// f2(1:2) = invK(1:2,1:2) * (-Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3))) + f1(1:2)
//
// Now compute impulse to be applied:
// df = f2 - f1
using System;
using Box2D.Common;
namespace Box2D.Dynamics.Joints
{
public class b2PrismaticJoint : b2Joint
{
        // Solver shared
        protected b2Vec2 m_localAnchorA;
        protected b2Vec2 m_localAnchorB;
        // Joint axis and its perpendicular, in bodyA's local frame.
        protected b2Vec2 m_localXAxisA;
        protected b2Vec2 m_localYAxisA;
        protected float m_referenceAngle;
        // Accumulated impulses: x = perpendicular, y = angular, z = limit.
        protected b2Vec3 m_impulse;
        protected float m_motorImpulse;
        protected float m_lowerTranslation;
        protected float m_upperTranslation;
        protected float m_maxMotorForce;
        protected float m_motorSpeed;
        protected bool m_enableLimit;
        protected bool m_enableMotor;
        protected b2LimitState m_limitState;
        // Solver temp (valid between InitVelocityConstraints and the solve passes)
        protected int m_indexA;
        protected int m_indexB;
        protected b2Vec2 m_localCenterA;
        protected b2Vec2 m_localCenterB;
        protected float InvertedMassA;
        protected float InvertedMassB;
        protected float InvertedIA;
        protected float InvertedIB;
        // World-space axis/perpendicular and their Jacobian cross terms.
        protected b2Vec2 m_axis, m_perp;
        protected float m_s1, m_s2;
        protected float m_a1, m_a2;
        // Effective-mass matrix K = J * invM * JT (see header derivation).
        protected b2Mat33 m_K;
        protected float m_motorMass;
        /// <summary>
        /// Builds the joint from its definition. The local axis is normalized
        /// before its perpendicular is derived from it.
        /// </summary>
        public b2PrismaticJoint(b2PrismaticJointDef def)
            : base(def)
        {
            m_localAnchorA = def.localAnchorA;
            m_localAnchorB = def.localAnchorB;
            m_localXAxisA = def.localAxisA;
            m_localXAxisA.Normalize();
            m_localYAxisA = m_localXAxisA.NegUnitCross(); // b2Math.b2Cross(1.0f, m_localXAxisA);
            m_referenceAngle = def.referenceAngle;
            m_impulse.SetZero();
            m_motorMass = 0.0f;
            m_motorImpulse = 0.0f;
            m_lowerTranslation = def.lowerTranslation;
            m_upperTranslation = def.upperTranslation;
            m_maxMotorForce = def.maxMotorForce;
            m_motorSpeed = def.motorSpeed;
            m_enableLimit = def.enableLimit;
            m_enableMotor = def.enableMotor;
            // The limit starts inactive; InitVelocityConstraints reclassifies it each step.
            m_limitState = b2LimitState.e_inactiveLimit;
            m_axis.SetZero();
            m_perp.SetZero();
        }
        /// The local anchor point relative to bodyA's origin.
        public virtual b2Vec2 GetLocalAnchorA() { return m_localAnchorA; }
        /// The local anchor point relative to bodyB's origin.
        public virtual b2Vec2 GetLocalAnchorB() { return m_localAnchorB; }
        /// The local joint axis relative to bodyA.
        public virtual b2Vec2 GetLocalAxisA() { return m_localXAxisA; }
        /// Get the reference angle.
        public virtual float GetReferenceAngle() { return m_referenceAngle; }
        /// The maximum motor force configured for this joint.
        public virtual float GetMaxMotorForce() { return m_maxMotorForce; }
        /// The configured motor speed.
        public virtual float GetMotorSpeed()
        {
            return m_motorSpeed;
        }
        /// <summary>
        /// Per-step setup: caches body state, builds the joint Jacobian and the
        /// effective-mass matrix, classifies the limit state, and warm-starts
        /// the accumulated impulses. Runs before the velocity iterations.
        /// </summary>
        public override void InitVelocityConstraints(b2SolverData data)
        {
            m_indexA = m_bodyA.IslandIndex;
            m_indexB = m_bodyB.IslandIndex;
            m_localCenterA = m_bodyA.Sweep.localCenter;
            m_localCenterB = m_bodyB.Sweep.localCenter;
            InvertedMassA = m_bodyA.InvertedMass;
            InvertedMassB = m_bodyB.InvertedMass;
            InvertedIA = m_bodyA.InvertedI;
            InvertedIB = m_bodyB.InvertedI;
            b2Vec2 cA = m_bodyA.InternalPosition.c;
            float aA = m_bodyA.InternalPosition.a;
            b2Vec2 vA = m_bodyA.InternalVelocity.v;
            float wA = m_bodyA.InternalVelocity.w;
            b2Vec2 cB = m_bodyB.InternalPosition.c;
            float aB = m_bodyB.InternalPosition.a;
            b2Vec2 vB = m_bodyB.InternalVelocity.v;
            float wB = m_bodyB.InternalVelocity.w;
            b2Rot qA = new b2Rot(aA);
            b2Rot qB = new b2Rot(aB);
            // Compute the effective masses.
            b2Vec2 rA = b2Math.b2Mul(qA, m_localAnchorA - m_localCenterA);
            b2Vec2 rB = b2Math.b2Mul(qB, m_localAnchorB - m_localCenterB);
            // d = separation of the two anchor points in world space.
            b2Vec2 d = (cB - cA) + rB - rA;
            float mA = InvertedMassA, mB = InvertedMassB;
            float iA = InvertedIA, iB = InvertedIB;
            // Compute motor Jacobian and effective mass.
            {
                m_axis = b2Math.b2Mul(qA, m_localXAxisA);
                m_a1 = b2Math.b2Cross(d + rA, m_axis);
                m_a2 = b2Math.b2Cross(rB, m_axis);
                m_motorMass = mA + mB + iA * m_a1 * m_a1 + iB * m_a2 * m_a2;
                // Invert only when non-zero (zero when both bodies are immovable).
                if (m_motorMass > 0.0f)
                {
                    m_motorMass = 1.0f / m_motorMass;
                }
            }
            // Prismatic constraint.
            {
                m_perp = b2Math.b2Mul(qA, m_localYAxisA);
                m_s1 = b2Math.b2Cross(d + rA, m_perp);
                m_s2 = b2Math.b2Cross(rB, m_perp);
                // K = J * invM * JT over the [perpendicular, angular, axis] rows.
                float k11 = mA + mB + iA * m_s1 * m_s1 + iB * m_s2 * m_s2;
                float k12 = iA * m_s1 + iB * m_s2;
                float k13 = iA * m_s1 * m_a1 + iB * m_s2 * m_a2;
                float k22 = iA + iB;
                if (k22 == 0.0f)
                {
                    // For bodies with fixed rotation.
                    k22 = 1.0f;
                }
                float k23 = iA * m_a1 + iB * m_a2;
                float k33 = mA + mB + iA * m_a1 * m_a1 + iB * m_a2 * m_a2;
                m_K.ex.Set(k11, k12, k13);
                m_K.ey.Set(k12, k22, k23);
                m_K.ez.Set(k13, k23, k33);
            }
            // Compute motor and limit terms.
            if (m_enableLimit)
            {
                float jointTranslation = b2Math.b2Dot(m_axis, d);
                if (b2Math.b2Abs(m_upperTranslation - m_lowerTranslation) < 2.0f * b2Settings.b2_linearSlop)
                {
                    m_limitState = b2LimitState.e_equalLimits;
                }
                else if (jointTranslation <= m_lowerTranslation)
                {
                    // On entering a limit, discard the stale accumulated limit impulse.
                    if (m_limitState != b2LimitState.e_atLowerLimit)
                    {
                        m_limitState = b2LimitState.e_atLowerLimit;
                        m_impulse.z = 0.0f;
                    }
                }
                else if (jointTranslation >= m_upperTranslation)
                {
                    if (m_limitState != b2LimitState.e_atUpperLimit)
                    {
                        m_limitState = b2LimitState.e_atUpperLimit;
                        m_impulse.z = 0.0f;
                    }
                }
                else
                {
                    m_limitState = b2LimitState.e_inactiveLimit;
                    m_impulse.z = 0.0f;
                }
            }
            else
            {
                m_limitState = b2LimitState.e_inactiveLimit;
                m_impulse.z = 0.0f;
            }
            if (m_enableMotor == false)
            {
                m_motorImpulse = 0.0f;
            }
            if (data.step.warmStarting)
            {
                // Account for variable time step.
                m_impulse *= data.step.dtRatio;
                m_motorImpulse *= data.step.dtRatio;
                // Re-apply last step's impulses so the solver starts near the solution.
                b2Vec2 P = m_impulse.x * m_perp + (m_motorImpulse + m_impulse.z) * m_axis;
                float LA = m_impulse.x * m_s1 + m_impulse.y + (m_motorImpulse + m_impulse.z) * m_a1;
                float LB = m_impulse.x * m_s2 + m_impulse.y + (m_motorImpulse + m_impulse.z) * m_a2;
                vA -= mA * P;
                wA -= iA * LA;
                vB += mB * P;
                wB += iB * LB;
            }
            else
            {
                m_impulse.SetZero();
                m_motorImpulse = 0.0f;
            }
            m_bodyA.InternalVelocity.v = vA;
            m_bodyA.InternalVelocity.w = wA;
            m_bodyB.InternalVelocity.v = vB;
            m_bodyB.InternalVelocity.w = wB;
        }
public override void SolveVelocityConstraints(b2SolverData data)
{
b2Vec2 vA = m_bodyA.InternalVelocity.v;
float wA = m_bodyA.InternalVelocity.w;
b2Vec2 vB = m_bodyB.InternalVelocity.v;
float wB = m_bodyB.InternalVelocity.w;
float mA = InvertedMassA, mB = InvertedMassB;
float iA = InvertedIA, iB = InvertedIB;
// Solve linear motor constraint.
if (m_enableMotor && m_limitState != b2LimitState.e_equalLimits)
{
float Cdot = b2Math.b2Dot(m_axis, vB - vA) + m_a2 * wB - m_a1 * wA;
float impulse = m_motorMass * (m_motorSpeed - Cdot);
float oldImpulse = m_motorImpulse;
float maxImpulse = data.step.dt * m_maxMotorForce;
m_motorImpulse = b2Math.b2Clamp(m_motorImpulse + impulse, -maxImpulse, maxImpulse);
impulse = m_motorImpulse - oldImpulse;
b2Vec2 P = impulse * m_axis;
float LA = impulse * m_a1;
float LB = impulse * m_a2;
vA -= mA * P;
wA -= iA * LA;
vB += mB * P;
wB += iB * LB;
}
b2Vec2 Cdot1 = new b2Vec2();
Cdot1.x = b2Math.b2Dot(m_perp, vB - vA) + m_s2 * wB - m_s1 * wA;
Cdot1.y = wB - wA;
if (m_enableLimit && m_limitState != b2LimitState.e_inactiveLimit)
{
// Solve prismatic and limit constraint in block form.
float Cdot2;
Cdot2 = b2Math.b2Dot(m_axis, vB - vA) + m_a2 * wB - m_a1 * wA;
b2Vec3 Cdot = new b2Vec3(Cdot1.x, Cdot1.y, Cdot2);
b2Vec3 f1 = m_impulse;
b2Vec3 df = m_K.Solve33(-Cdot);
m_impulse += df;
if (m_limitState == b2LimitState.e_atLowerLimit)
{
m_impulse.z = Math.Max(m_impulse.z, 0.0f);
}
else if (m_limitState == b2LimitState.e_atUpperLimit)
{
m_impulse.z = Math.Min(m_impulse.z, 0.0f);
}
// f2(1:2) = invK(1:2,1:2) * (-Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3))) + f1(1:2)
b2Vec2 b = -Cdot1 - (m_impulse.z - f1.z) * (new b2Vec2(m_K.ez.x, m_K.ez.y));
b2Vec2 f2r = m_K.Solve22(b) + (new b2Vec2(f1.x, f1.y));
m_impulse.x = f2r.x;
m_impulse.y = f2r.y;
df = m_impulse - f1;
b2Vec2 P = df.x * m_perp + df.z * m_axis;
float LA = df.x * m_s1 + df.y + df.z * m_a1;
float LB = df.x * m_s2 + df.y + df.z * m_a2;
vA -= mA * P;
wA -= iA * LA;
vB += mB * P;
wB += iB * LB;
}
else
{
// Limit is inactive, just solve the prismatic constraint in block form.
b2Vec2 df = m_K.Solve22(-Cdot1);
m_impulse.x += df.x;
m_impulse.y += df.y;
b2Vec2 P = df.x * m_perp;
float LA = df.x * m_s1 + df.y;
float LB = df.x * m_s2 + df.y;
vA -= mA * P;
wA -= iA * LA;
vB += mB * P;
wB += iB * LB;
b2Vec2 Cdot10 = Cdot1;
Cdot1.x = b2Math.b2Dot(m_perp, vB - vA) + m_s2 * wB - m_s1 * wA;
Cdot1.y = wB - wA;
if (b2Math.b2Abs(Cdot1.x) > 0.01f || b2Math.b2Abs(Cdot1.y) > 0.01f)
{
b2Vec2 test = b2Math.b2Mul22(m_K, df);
Cdot1.x += 0.0f;
}
}
m_bodyA.InternalVelocity.v = vA;
m_bodyA.InternalVelocity.w = wA;
m_bodyB.InternalVelocity.v = vB;
m_bodyB.InternalVelocity.w = wB;
}
        /// <summary>
        /// Position correction pass: recomputes the Jacobians from the current
        /// positions and solves the positional error (2x2 normally, 3x3 when the
        /// limit is active). Returns true when the remaining error is within slop.
        /// </summary>
        public override bool SolvePositionConstraints(b2SolverData data)
        {
            b2Vec2 cA = m_bodyA.InternalPosition.c;
            float aA = m_bodyA.InternalPosition.a;
            b2Vec2 cB = m_bodyB.InternalPosition.c;
            float aB = m_bodyB.InternalPosition.a;
            b2Rot qA = new b2Rot(aA);
            b2Rot qB = new b2Rot(aB);
            float mA = InvertedMassA, mB = InvertedMassB;
            float iA = InvertedIA, iB = InvertedIB;
            // Compute fresh Jacobians
            b2Vec2 rA = b2Math.b2Mul(qA, m_localAnchorA - m_localCenterA);
            b2Vec2 rB = b2Math.b2Mul(qB, m_localAnchorB - m_localCenterB);
            b2Vec2 d = cB + rB - cA - rA;
            b2Vec2 axis = b2Math.b2Mul(qA, m_localXAxisA);
            float a1 = b2Math.b2Cross(d + rA, axis);
            float a2 = b2Math.b2Cross(rB, axis);
            b2Vec2 perp = b2Math.b2Mul(qA, m_localYAxisA);
            float s1 = b2Math.b2Cross(d + rA, perp);
            float s2 = b2Math.b2Cross(rB, perp);
            b2Vec3 impulse;
            // C1 = (perpendicular positional error, angular error).
            b2Vec2 C1 = new b2Vec2();
            C1.x = b2Math.b2Dot(perp, d);
            C1.y = aB - aA - m_referenceAngle;
            float linearError = b2Math.b2Abs(C1.x);
            float angularError = b2Math.b2Abs(C1.y);
            bool active = false;
            float C2 = 0.0f;
            if (m_enableLimit)
            {
                float translation = b2Math.b2Dot(axis, d);
                if (b2Math.b2Abs(m_upperTranslation - m_lowerTranslation) < 2.0f * b2Settings.b2_linearSlop)
                {
                    // Prevent large angular corrections
                    C2 = b2Math.b2Clamp(translation, -b2Settings.b2_maxLinearCorrection, b2Settings.b2_maxLinearCorrection);
                    linearError = Math.Max(linearError, b2Math.b2Abs(translation));
                    active = true;
                }
                else if (translation <= m_lowerTranslation)
                {
                    // Prevent large linear corrections and allow some slop.
                    C2 = b2Math.b2Clamp(translation - m_lowerTranslation + b2Settings.b2_linearSlop, -b2Settings.b2_maxLinearCorrection, 0.0f);
                    linearError = Math.Max(linearError, m_lowerTranslation - translation);
                    active = true;
                }
                else if (translation >= m_upperTranslation)
                {
                    // Prevent large linear corrections and allow some slop.
                    C2 = b2Math.b2Clamp(translation - m_upperTranslation - b2Settings.b2_linearSlop, 0.0f, b2Settings.b2_maxLinearCorrection);
                    linearError = Math.Max(linearError, translation - m_upperTranslation);
                    active = true;
                }
            }
            if (active)
            {
                // Limit active: solve the full 3x3 system including the axis row.
                float k11 = mA + mB + iA * s1 * s1 + iB * s2 * s2;
                float k12 = iA * s1 + iB * s2;
                float k13 = iA * s1 * a1 + iB * s2 * a2;
                float k22 = iA + iB;
                if (k22 == 0.0f)
                {
                    // For fixed rotation
                    k22 = 1.0f;
                }
                float k23 = iA * a1 + iB * a2;
                float k33 = mA + mB + iA * a1 * a1 + iB * a2 * a2;
                b2Mat33 K = new b2Mat33(
                    new b2Vec3(k11, k12, k13),
                    new b2Vec3(k12, k22, k23),
                    new b2Vec3(k13, k23, k33));
                b2Vec3 C = new b2Vec3(C1.x, C1.y, C2);
                impulse = K.Solve33(-C);
            }
            else
            {
                // Limit inactive: 2x2 solve for the perpendicular/angular rows only.
                float k11 = mA + mB + iA * s1 * s1 + iB * s2 * s2;
                float k12 = iA * s1 + iB * s2;
                float k22 = iA + iB;
                if (k22 == 0.0f)
                {
                    k22 = 1.0f;
                }
                b2Mat22 K = new b2Mat22();
                K.ex.Set(k11, k12);
                K.ey.Set(k12, k22);
                b2Vec2 impulse1 = K.Solve(-C1);
                impulse = new b2Vec3();
                impulse.x = impulse1.x;
                impulse.y = impulse1.y;
                impulse.z = 0.0f;
            }
            // Apply the positional correction to both bodies.
            b2Vec2 P = impulse.x * perp + impulse.z * axis;
            float LA = impulse.x * s1 + impulse.y + impulse.z * a1;
            float LB = impulse.x * s2 + impulse.y + impulse.z * a2;
            cA -= mA * P;
            aA -= iA * LA;
            cB += mB * P;
            aB += iB * LB;
            m_bodyA.InternalPosition.c = cA;
            m_bodyA.InternalPosition.a = aA;
            m_bodyB.InternalPosition.c = cB;
            m_bodyB.InternalPosition.a = aB;
            return linearError <= b2Settings.b2_linearSlop && angularError <= b2Settings.b2_angularSlop;
        }
public virtual b2Vec2 GetLocalXAxisA() {
return (m_localXAxisA);
}
public override b2Vec2 GetAnchorA()
{
return m_bodyA.GetWorldPoint(m_localAnchorA);
}
public override b2Vec2 GetAnchorB()
{
return m_bodyB.GetWorldPoint(m_localAnchorB);
}
        /// Reaction force at the anchor: the accumulated perpendicular, motor and
        /// limit impulses scaled by inv_dt.
        /// NOTE(review): declared 'virtual' rather than 'override' — confirm whether
        /// b2Joint declares these members and member hiding is intended here.
        public virtual b2Vec2 GetReactionForce(float inv_dt)
        {
            return inv_dt * (m_impulse.x * m_perp + (m_motorImpulse + m_impulse.z) * m_axis);
        }
        /// Reaction torque: the accumulated angular impulse scaled by inv_dt.
        public virtual float GetReactionTorque(float inv_dt)
        {
            return inv_dt * m_impulse.y;
        }
public virtual float GetJointTranslation()
{
b2Vec2 pA = m_bodyA.GetWorldPoint(m_localAnchorA);
b2Vec2 pB = m_bodyB.GetWorldPoint(m_localAnchorB);
b2Vec2 d = pB - pA;
b2Vec2 axis = m_bodyA.GetWorldVector(m_localXAxisA);
float translation = b2Math.b2Dot(d, axis);
return translation;
}
        /// <summary>
        /// Current relative speed along the joint axis, computed from the bodies'
        /// live transforms and velocities (not the solver's cached state).
        /// </summary>
        public virtual float GetJointSpeed()
        {
            b2Body bA = m_bodyA;
            b2Body bB = m_bodyB;
            b2Vec2 rA = b2Math.b2Mul(bA.XF.q, m_localAnchorA - bA.Sweep.localCenter);
            b2Vec2 rB = b2Math.b2Mul(bB.XF.q, m_localAnchorB - bB.Sweep.localCenter);
            b2Vec2 p1 = bA.Sweep.c + rA;
            b2Vec2 p2 = bB.Sweep.c + rB;
            b2Vec2 d = p2 - p1;
            b2Vec2 axis = b2Math.b2Mul(bA.XF.q, m_localXAxisA);
            b2Vec2 vA = bA.LinearVelocity;
            b2Vec2 vB = bB.LinearVelocity;
            float wA = bA.AngularVelocity;
            float wB = bB.AngularVelocity;
            // d/dt of dot(axis, d): product rule over the rotating axis and moving anchors.
            float speed = b2Math.b2Dot(d, b2Math.b2Cross(wA, ref axis)) + b2Math.b2Dot(axis, vB + b2Math.b2Cross(wB, ref rB) - vA - b2Math.b2Cross(wA, ref rA));
            return speed;
        }
public virtual bool IsLimitEnabled()
{
return m_enableLimit;
}
public virtual void EnableLimit(bool flag)
{
if (flag != m_enableLimit)
{
m_bodyA.SetAwake(true);
m_bodyB.SetAwake(true);
m_enableLimit = flag;
m_impulse.z = 0.0f;
}
}
        /// The lower joint limit along the joint axis.
        public virtual float GetLowerLimit()
        {
            return m_lowerTranslation;
        }
        /// The upper joint limit along the joint axis.
        public virtual float GetUpperLimit()
        {
            return m_upperTranslation;
        }
public virtual void SetLimits(float lower, float upper)
{
System.Diagnostics.Debug.Assert(lower <= upper);
if (lower != m_lowerTranslation || upper != m_upperTranslation)
{
m_bodyA.SetAwake(true);
m_bodyB.SetAwake(true);
m_lowerTranslation = lower;
m_upperTranslation = upper;
m_impulse.z = 0.0f;
}
}
        /// Whether the joint motor is enabled.
        public virtual bool IsMotorEnabled()
        {
            return m_enableMotor;
        }
        /// Enable/disable the joint motor. Always wakes both bodies.
        public virtual void EnableMotor(bool flag)
        {
            m_bodyA.SetAwake(true);
            m_bodyB.SetAwake(true);
            m_enableMotor = flag;
        }
        /// Set the motor speed. Always wakes both bodies.
        public virtual void SetMotorSpeed(float speed)
        {
            m_bodyA.SetAwake(true);
            m_bodyB.SetAwake(true);
            m_motorSpeed = speed;
        }
        /// Set the maximum motor force. Always wakes both bodies.
        public virtual void SetMaxMotorForce(float force)
        {
            m_bodyA.SetAwake(true);
            m_bodyB.SetAwake(true);
            m_maxMotorForce = force;
        }
        /// Motor force applied during the last step, given the inverse time step.
        public virtual float GetMotorForce(float inv_dt)
        {
            return inv_dt * m_motorImpulse;
        }
public override void Dump()
{
int indexA = m_bodyA.IslandIndex;
int indexB = m_bodyB.IslandIndex;
b2Settings.b2Log(" b2PrismaticJointDef jd;\n");
b2Settings.b2Log(" jd.bodyA = bodies[{0}];\n", indexA);
b2Settings.b2Log(" jd.bodyB = bodies[{0}];\n", indexB);
b2Settings.b2Log(" jd.collideConnected = bool(%d);\n", m_collideConnected);
b2Settings.b2Log(" jd.localAnchorA.Set({0:F5}, {1:F5});\n", m_localAnchorA.x, m_localAnchorA.y);
b2Settings.b2Log(" jd.localAnchorB.Set({0:F5}, {1:F5});\n", m_localAnchorB.x, m_localAnchorB.y);
b2Settings.b2Log(" jd.localAxisA.Set({0:F5}, {1:F5});\n", m_localXAxisA.x, m_localXAxisA.y);
b2Settings.b2Log(" jd.referenceAngle = {0:F5};\n", m_referenceAngle);
b2Settings.b2Log(" jd.enableLimit = bool({0});\n", m_enableLimit);
b2Settings.b2Log(" jd.lowerTranslation = {0:F5};\n", m_lowerTranslation);
b2Settings.b2Log(" jd.upperTranslation = {0:F5};\n", m_upperTranslation);
b2Settings.b2Log(" jd.enableMotor = bool({0});\n", m_enableMotor);
b2Settings.b2Log(" jd.motorSpeed = {0:F5};\n", m_motorSpeed);
b2Settings.b2Log(" jd.maxMotorForce = {0:F5};\n", m_maxMotorForce);
b2Settings.b2Log(" joints[{0}] = m_world.CreateJoint(&jd);\n", m_index);
}
}
}
| |
#region License
/*
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
using System;
using System.Collections;
using System.Data;
using System.Globalization;
using System.Threading;
using Spring.Core;
using Spring.Threading;
using Spring.Util;
namespace Spring.Transaction.Support
{
/// <summary>
/// Internal class that manages resources and transaction synchronizations per thread.
/// </summary>
/// <remarks>
/// Supports one resource per key without overwriting, i.e. a resource needs to
/// be removed before a new one can be set for the same key.
/// Supports a list of transaction synchronizations if synchronization is active.
/// <p>
/// Resource management code should check for thread-bound resources via GetResource().
/// It is normally not supposed
    /// to bind resources to threads, as this is the responsibility of transaction managers.
/// A further option is to lazily bind on first use if transaction synchronization
/// is active, for performing transactions that span an arbitrary number of resources.
/// </p>
/// <p>
/// Transaction synchronization must be activated and deactivated by a transaction
/// manager via
/// <see cref="Spring.Transaction.Support.TransactionSynchronizationManager.InitSynchronization">InitSynchronization</see>
/// and
/// <see cref="Spring.Transaction.Support.TransactionSynchronizationManager.ClearSynchronization">ClearSynchronization</see>.
/// This is automatically supported by
/// <see cref="Spring.Transaction.Support.AbstractPlatformTransactionManager"/>.
/// </p>
/// <p>
/// Resource management code should only register synchronizations when this
/// manager is active, and perform resource cleanup immediately else.
/// If transaction synchronization isn't active, there is either no current
/// transaction, or the transaction manager doesn't support synchronizations.
/// </p>
/// Note that this class uses following naming convention for the
/// named 'data slots' for storage of thread local data, 'Spring.Transaction:Name'
/// where Name is either
/// </remarks>
/// <author>Juergen Hoeller</author>
/// <author>Griffin Caprio (.NET)</author>
/// <author>Mark Pollack (.NET)</author>
public sealed class TransactionSynchronizationManager
{
        #region Logging
        private static readonly Common.Logging.ILog LOG = Common.Logging.LogManager.GetLogger(typeof (TransactionSynchronizationManager));
        #endregion
        #region Fields
        // Named thread-local data slots (see the class remarks for the naming convention).
        // Slot for this thread's list of registered synchronizations.
        private static readonly string syncsDataSlotName = "Spring.Transactions:syncList";
        // Slot for the map of thread-bound resources, keyed by resource key.
        private static readonly string resourcesDataSlotName = "Spring.Transactions:resources";
        // Slots for the current transaction's characteristics.
        private static readonly string currentTxReadOnlyDataSlotName = "Spring.Transactions:currentTxReadOnly";
        private static readonly string currentTxNameDataSlotName = "Spring.Transactions:currentTxName";
        private static readonly string currentTxIsolationLevelDataSlotName = "Spring.Transactions:currentTxIsolationLevel";
        private static readonly string actualTxActiveDataSlotName = "Spring.Transactions:actualTxActive";
        // Comparator used to order synchronizations (OrderComparator semantics).
        private static IComparer syncComparer = new OrderComparator();
        #endregion
#region Management of transaction-associated resource handles
/// <summary>
/// Return all resources that are bound to the current thread.
/// </summary>
        /// <remarks>Mainly for debugging purposes. Resource managers should always
/// invoke HasResource for a specific resource key that they are interested in.
/// </remarks>
/// <returns>IDictionary with resource keys and resource objects or empty
/// dictionary if none is bound.</returns>
public static IDictionary ResourceDictionary
{
get
{
IDictionary resources = LogicalThreadContext.GetData(resourcesDataSlotName) as IDictionary;
if (resources != null)
{
//TODO add readonly wrapper in Spring.Collections.
return resources;
}
else
{
return new Hashtable();
}
}
}
/// <summary>
/// Check if there is a resource for the given key bound to the current thread.
/// </summary>
/// <param name="key">key to check</param>
/// <returns>if there is a value bound to the current thread</returns>
public static bool HasResource(Object key)
{
AssertUtils.ArgumentNotNull(key, "Key must not be null");
return ResourceDictionary.Contains(key);
}
/// <summary>
/// Retrieve a resource for the given key that is bound to the current thread.
/// </summary>
/// <param name="key">key to check</param>
/// <returns>a value bound to the current thread, or null if none.</returns>
public static object GetResource(Object key)
{
AssertUtils.ArgumentNotNull(key, "Key must not be null");
IDictionary resources = LogicalThreadContext.GetData(resourcesDataSlotName) as IDictionary;
if (resources == null)
{
return null;
}
//Check for contains since indexer returning null behavior changes in 2.0
if (!resources.Contains(key))
{
return null;
}
object val = resources[key];
if (val != null && LOG.IsDebugEnabled)
{
LOG.Debug("Retrieved value [" + Describe(val) + "] for key [" + Describe(key) + "] bound to thread [" +
SystemUtils.ThreadId + "]");
}
return val;
}
        /// <summary>
        /// Bind the given resource for the given key to the current thread
        /// </summary>
        /// <param name="key">key to bind the value to</param>
        /// <param name="value">value to bind</param>
public static void BindResource(Object key, Object value)
{
AssertUtils.ArgumentNotNull(key, "Key value for thread local storage of transactional resources must not be null");
AssertUtils.ArgumentNotNull(value, "Transactional resource to bind to thread local storage must not be null" );
IDictionary resources = LogicalThreadContext.GetData(resourcesDataSlotName) as IDictionary;
//Set thread local resource storage if not found
if (resources == null)
{
resources = new Hashtable();
LogicalThreadContext.SetData(resourcesDataSlotName, resources);
}
if (resources.Contains(key))
{
throw new InvalidOperationException("Already value [" + resources[key] + "] for key [" + key +
"] bound to thread [" + SystemUtils.ThreadId + "]");
}
resources.Add(key, value);
if (LOG.IsDebugEnabled)
{
LOG.Debug("Bound value [" + Describe(value) + "] for key [" + Describe(key) + "] to thread [" +
SystemUtils.ThreadId + "]");
}
}
/// <summary>
/// Unbind a resource for the given key from the current thread
/// </summary>
/// <param name="key">key to check</param>
/// <returns>the previously bound value</returns>
/// <exception cref="InvalidOperationException">if there is no value bound to the thread</exception>
public static object UnbindResource(Object key)
{
AssertUtils.ArgumentNotNull(key, "Key must not be null");
IDictionary resources = LogicalThreadContext.GetData(resourcesDataSlotName) as IDictionary;
if (resources == null || !resources.Contains(key))
{
throw new InvalidOperationException("No value for key [" + key + "] bound to thread [" +
SystemUtils.ThreadId + "]");
}
Object val = resources[key];
resources.Remove(key);
if (resources.Count == 0)
{
LogicalThreadContext.FreeNamedDataSlot(resourcesDataSlotName);
}
if (LOG.IsDebugEnabled)
{
LOG.Debug("Removed value [" + Describe(val) + "] for key [" + Describe(key) + "] from thread [" +
SystemUtils.ThreadId + "]");
}
return val;
}
#endregion
/// <summary>
/// Activate transaction synchronization for the current thread.
/// </summary>
/// <remarks>
/// Called by the transaction manager at the beginning of a transaction.
/// </remarks>
/// <exception cref="System.InvalidOperationException">
/// If synchronization is already active.
/// </exception>
public static void InitSynchronization()
{
    if (SynchronizationActive)
    {
        throw new InvalidOperationException( "Cannot activate transaction synchronization - already active" );
    }
    if (LOG.IsDebugEnabled)
    {
        LOG.Debug("Initializing transaction synchronization");
    }
    // An (initially empty) list stored in the thread-local slot is what marks
    // synchronization as active for this thread.
    LogicalThreadContext.SetData(syncsDataSlotName, new ArrayList());
}
/// <summary>
/// Deactivate transaction synchronization for the current thread.
/// </summary>
/// <remarks>
/// Called by the transaction manager on transaction cleanup.
/// </remarks>
/// <exception cref="System.InvalidOperationException">
/// If synchronization is not active.
/// </exception>
public static void ClearSynchronization()
{
    if (!SynchronizationActive)
    {
        throw new InvalidOperationException( "Cannot deactivate transaction synchronization - not active" );
    }
    if (LOG.IsDebugEnabled)
    {
        LOG.Debug("Clearing transaction synchronization");
    }
    // Dropping the slot is what deactivates synchronization for this thread.
    LogicalThreadContext.FreeNamedDataSlot(syncsDataSlotName);
}
/// <summary>
/// Clears the entire transaction synchronization state for the current thread, registered
/// synchronizations as well as the various transaction characteristics.
/// </summary>
/// <exception cref="System.InvalidOperationException">
/// If synchronization is not active (propagated from <see cref="ClearSynchronization"/>).
/// </exception>
public static void Clear()
{
    ClearSynchronization();
    // Reset every per-thread transaction characteristic back to its default.
    CurrentTransactionName = null;
    CurrentTransactionReadOnly = false;
    CurrentTransactionIsolationLevel = IsolationLevel.Unspecified;
    ActualTransactionActive = false;
}
/// <summary>
/// Register a new transaction synchronization for the current thread.
/// </summary>
/// <remarks>
/// Typically called by resource management code.
/// </remarks>
/// <exception cref="System.InvalidOperationException">
/// If synchronization is not active.
/// </exception>
public static void RegisterSynchronization( ITransactionSynchronization synchronization )
{
    AssertUtils.ArgumentNotNull(synchronization, "TransactionSynchronization must not be null");
    if (!SynchronizationActive)
    {
        throw new InvalidOperationException( "Transaction synchronization is not active" );
    }
    ArrayList registered = LogicalThreadContext.GetData(syncsDataSlotName) as ArrayList;
    if (registered == null)
    {
        // Slot vanished between the active check and the read; nothing to register on.
        return;
    }
    lock (registered.SyncRoot)
    {
        registered.Add(synchronization);
    }
}
// Renders an object for log output as "value@HASH" (hex hash code); empty string for null.
private static string Describe(object obj)
{
    if (obj == null)
    {
        return "";
    }
    return obj + "@" + obj.GetHashCode().ToString("X");
}
#region Properties
/// <summary>
/// Return an unmodifiable list of all registered synchronizations
/// for the current thread.
/// </summary>
/// <returns>
/// A list of <see cref="Spring.Transaction.Support.ITransactionSynchronization"/>
/// instances.
/// </returns>
/// <exception cref="System.InvalidOperationException">
/// If synchronization is not active.
/// </exception>
public static IList Synchronizations
{
    get
    {
        if ( ! SynchronizationActive )
        {
            throw new InvalidOperationException( "Transaction synchronization is not active" );
        }
        ArrayList syncs = LogicalThreadContext.GetData(syncsDataSlotName) as ArrayList;
        if (syncs != null)
        {
            // Sort lazily here, not in RegisterSynchronization, so registration stays cheap.
            object root = syncs.SyncRoot;
            lock (root)
            {
                // #SPRNET-1160, tx Ben Rowlands
                CollectionUtils.StableSortInPlace(syncs, syncComparer);
            }
            // Return a read-only *wrapper* over the live list (not a copy): callers
            // cannot mutate it while iterating and invoking synchronization callbacks
            // that in turn might register further synchronizations, but the wrapper
            // does reflect later registrations.
            return ArrayList.ReadOnly(syncs);
        }
        else
        {
            // Active but slot already freed: expose an empty read-only list.
            return ArrayList.ReadOnly(new ArrayList());
        }
    }
}
/// <summary>
/// Return if transaction synchronization is active for the current thread.
/// </summary>
/// <remarks>
/// Can be called before
/// <see cref="Spring.Transaction.Support.TransactionSynchronizationManager.InitSynchronization">InitSynchronization</see>
/// to avoid unnecessary instance creation.
/// </remarks>
public static bool SynchronizationActive
{
    get
    {
        // Synchronization is active exactly when the thread-local slot holds the list.
        return (LogicalThreadContext.GetData(syncsDataSlotName) as IList) != null;
    }
}
/// <summary>
/// Gets or sets a value indicating whether the current transaction is read only.
/// </summary>
/// <remarks>
/// Called by transaction manager on transaction begin and on cleanup.
/// Return whether the current transaction is marked as read-only.
/// To be called by resource management code when preparing a newly
/// created resource (for example, a Hibernate Session).
/// <p>Note that transaction synchronizations receive the read-only flag
/// as argument for the <code>beforeCommit</code> callback, to be able
/// to suppress change detection on commit. The present method is meant
/// to be used for earlier read-only checks, for example to set the
/// flush mode of a Hibernate Session to FlushMode.Never upfront.
/// </p>
/// </remarks>
/// <value>
/// <c>true</c> if current transaction read only; otherwise, <c>false</c>.
/// </value>
public static bool CurrentTransactionReadOnly
{
    get
    {
        // Presence of the slot value encodes "true"; absence encodes "false".
        return LogicalThreadContext.GetData(currentTxReadOnlyDataSlotName) != null;
    }
    set
    {
        if (!value)
        {
            LogicalThreadContext.FreeNamedDataSlot(currentTxReadOnlyDataSlotName);
        }
        else
        {
            LogicalThreadContext.SetData(currentTxReadOnlyDataSlotName, true);
        }
    }
}
/// <summary>
/// Gets or sets the name of the current transaction, if any.
/// </summary>
/// <remarks>Called by the transaction manager on transaction begin and on cleanup.
/// To be called by resource management code for optimizations per use case, for
/// example to optimize fetch strategies for specific named transactions.</remarks>
/// <value>The name of the current transaction, or null if none set.</value>
public static string CurrentTransactionName
{
    get { return LogicalThreadContext.GetData(currentTxNameDataSlotName) as string; }
    set { LogicalThreadContext.SetData(currentTxNameDataSlotName, value); }
}
/// <summary>
/// Gets or sets a value indicating whether there currently is an actual transaction
/// active.
/// </summary>
/// <remarks>This indicates whether the current thread is associated with an actual
/// transaction rather than just with active transaction synchronization.
/// <para>Called by the transaction manager on transaction begin and on cleanup.</para>
/// <para>To be called by resource management code that wants to discriminate between
/// active transaction synchronization (with or without backing resource transaction;
/// also on PROPAGATION_SUPPORTS) and an actual transaction being active; on
/// PROPAGATION_REQUIRES, PROPAGATION_REQUIRES_NEW, etc.)</para></remarks>
/// <value>
/// <c>true</c> if [actual transaction active]; otherwise, <c>false</c>.
/// </value>
public static bool ActualTransactionActive
{
    get
    {
        // Presence of the slot value encodes "true"; absence encodes "false".
        return LogicalThreadContext.GetData(actualTxActiveDataSlotName) != null;
    }
    set
    {
        if (!value)
        {
            LogicalThreadContext.FreeNamedDataSlot(actualTxActiveDataSlotName);
        }
        else
        {
            LogicalThreadContext.SetData(actualTxActiveDataSlotName, value);
        }
    }
}
/// <summary>
/// Gets or sets the current transaction isolation level, if any.
/// </summary>
/// <remarks>Called by the transaction manager on transaction begin and on cleanup.</remarks>
/// <value>The current transaction isolation level. If no current transaction is
/// active, returns IsolationLevel.Unspecified.</value>
public static IsolationLevel CurrentTransactionIsolationLevel
{
    get
    {
        object stored = LogicalThreadContext.GetData(currentTxIsolationLevelDataSlotName);
        // Absent slot means no transaction-specific level was recorded.
        return stored == null ? IsolationLevel.Unspecified : (IsolationLevel) stored;
    }
    set
    {
        LogicalThreadContext.SetData(currentTxIsolationLevelDataSlotName, value);
    }
}
#endregion
}
}
| |
using System;
using GitTools.Testing;
using GitVersion;
using GitVersion.Extensions;
using GitVersion.Logging;
using GitVersionCore.Tests.Helpers;
using GitVersionCore.Tests.IntegrationTests;
using Microsoft.Extensions.DependencyInjection;
using NUnit.Framework;
using Shouldly;
namespace GitVersionCore.Tests
{
[TestFixture]
public class RepositoryMetadataProviderTests : TestBase
{
    // Logger resolved from the test DI container; handed to the provider under test.
    private readonly ILog log;

    public RepositoryMetadataProviderTests()
    {
        var sp = ConfigureServices();
        log = sp.GetService<ILog>();
    }

    // Verifies FindMergeBase after a single forward merge of release into develop:
    // the merge base of release relative to develop must stay at the branch point,
    // while develop's merge base relative to release is the last release commit merged.
    [Test]
    public void FindsCorrectMergeBaseForForwardMerge()
    {
        // Intended history (newest first):
        //*9dfb8b4 49 minutes ago(develop)
        //*54f21b2 53 minutes ago
        // |\
        // | | *a219831 51 minutes ago(HEAD -> release-2.0.0)
        // | |/
        // | *4441531 54 minutes ago
        // | *89840df 56 minutes ago
        // |/
        //*91bf945 58 minutes ago(master)
        using var fixture = new EmptyRepositoryFixture();
        fixture.MakeACommit("initial");
        fixture.BranchTo("develop");
        // release-2.0.0 is branched from here, so this commit is the expected
        // merge base of release relative to develop.
        var expectedReleaseMergeBase = fixture.Repository.Head.Tip;
        // Create release from develop
        fixture.BranchTo("release-2.0.0");
        // Make some commits on release
        fixture.MakeACommit("release 1");
        fixture.MakeACommit("release 2");
        // Last release commit that gets forward-merged into develop below.
        var expectedDevelopMergeBase = fixture.Repository.Head.Tip;
        // First forward merge release to develop
        fixture.Checkout("develop");
        fixture.MergeNoFF("release-2.0.0");
        // Make some new commit on release
        fixture.Checkout("release-2.0.0");
        fixture.MakeACommit("release 3 - after first merge");
        // Make new commit on develop
        fixture.Checkout("develop");
        // Checkout to release (no new commits)
        fixture.Checkout("release-2.0.0");
        var develop = fixture.Repository.FindBranch("develop");
        var release = fixture.Repository.FindBranch("release-2.0.0");

        var gitRepoMetadataProvider = new RepositoryMetadataProvider(log, fixture.Repository);
        var releaseBranchMergeBase = gitRepoMetadataProvider.FindMergeBase(release, develop);
        var developMergeBase = gitRepoMetadataProvider.FindMergeBase(develop, release);

        // Dump the graph for diagnostics when the assertions below fail.
        fixture.Repository.DumpGraph(Console.WriteLine);
        releaseBranchMergeBase.ShouldBe(expectedReleaseMergeBase);
        developMergeBase.ShouldBe(expectedDevelopMergeBase);
    }

    // Same scenario as above, but develop receives an additional commit after the
    // forward merge; the expected merge bases must not move because of it.
    [Test]
    public void FindsCorrectMergeBaseForForwardMergeMovesOn()
    {
        // Intended history is as in FindsCorrectMergeBaseForForwardMerge, plus one
        // extra commit on develop after the merge.
        using var fixture = new EmptyRepositoryFixture();
        fixture.MakeACommit("initial");
        fixture.BranchTo("develop");
        // Branch point of release-2.0.0: expected merge base of release vs develop.
        var expectedReleaseMergeBase = fixture.Repository.Head.Tip;
        // Create release from develop
        fixture.BranchTo("release-2.0.0");
        // Make some commits on release
        fixture.MakeACommit("release 1");
        fixture.MakeACommit("release 2");
        // Last release commit forward-merged into develop below.
        var expectedDevelopMergeBase = fixture.Repository.Head.Tip;
        // First forward merge release to develop
        fixture.Checkout("develop");
        fixture.MergeNoFF("release-2.0.0");
        // Make some new commit on release
        fixture.Checkout("release-2.0.0");
        fixture.MakeACommit("release 3 - after first merge");
        // Make new commit on develop
        fixture.Checkout("develop");
        // Develop moves on after the merge
        fixture.MakeACommit("develop after merge");
        // Checkout to release (no new commits)
        fixture.Checkout("release-2.0.0");
        var develop = fixture.Repository.FindBranch("develop");
        var release = fixture.Repository.FindBranch("release-2.0.0");

        var gitRepoMetadataProvider = new RepositoryMetadataProvider(log, fixture.Repository);
        var releaseBranchMergeBase = gitRepoMetadataProvider.FindMergeBase(release, develop);
        var developMergeBase = gitRepoMetadataProvider.FindMergeBase(develop, release);

        fixture.Repository.DumpGraph(Console.WriteLine);
        releaseBranchMergeBase.ShouldBe(expectedReleaseMergeBase);
        developMergeBase.ShouldBe(expectedDevelopMergeBase);
    }

    // Verifies FindMergeBase after release is forward-merged into develop twice:
    // develop's merge base relative to release must track the latest merged
    // release commit, while release's merge base stays at the original branch point.
    [Test]
    public void FindsCorrectMergeBaseForMultipleForwardMerges()
    {
        // Intended history (newest first):
        //*403b294 44 minutes ago(develop)
        //|\
        //| *306b243 45 minutes ago(HEAD -> release-2.0.0)
        //| *4cf5969 47 minutes ago
        //| *4814083 51 minutes ago
        //* | cddd3cc 49 minutes ago
        //* | 2b2b52a 53 minutes ago
        //|\ \
        //| |/
        //| *8113776 54 minutes ago
        //| *3c0235e 56 minutes ago
        //|/
        //*f6f1283 58 minutes ago(master)
        using var fixture = new EmptyRepositoryFixture();
        fixture.MakeACommit("initial");
        fixture.BranchTo("develop");
        // Branch point of release-2.0.0: expected merge base of release vs develop.
        var expectedReleaseMergeBase = fixture.Repository.Head.Tip;
        // Create release from develop
        fixture.BranchTo("release-2.0.0");
        // Make some commits on release
        fixture.MakeACommit("release 1");
        fixture.MakeACommit("release 2");
        // First forward merge release to develop
        fixture.Checkout("develop");
        fixture.MergeNoFF("release-2.0.0");
        // Make some new commit on release
        fixture.Checkout("release-2.0.0");
        fixture.Repository.MakeACommit("release 3 - after first merge");
        // Make new commit on develop
        fixture.Checkout("develop");
        // Checkout to release (no new commits)
        fixture.Checkout("release-2.0.0");
        fixture.Checkout("develop");
        fixture.Repository.MakeACommit("develop after merge");
        // Checkout to release (no new commits)
        fixture.Checkout("release-2.0.0");
        // Make some new commit on release
        fixture.Repository.MakeACommit("release 4");
        fixture.Repository.MakeACommit("release 5");
        // Tip of release just before the second merge: expected merge base of develop vs release.
        var expectedDevelopMergeBase = fixture.Repository.Head.Tip;
        // Second merge release to develop
        fixture.Checkout("develop");
        fixture.MergeNoFF("release-2.0.0");
        // Checkout to release (no new commits)
        fixture.Checkout("release-2.0.0");
        var develop = fixture.Repository.FindBranch("develop");
        var release = fixture.Repository.FindBranch("release-2.0.0");

        var gitRepoMetadataProvider = new RepositoryMetadataProvider(log, fixture.Repository);
        var releaseBranchMergeBase = gitRepoMetadataProvider.FindMergeBase(release, develop);
        var developMergeBase = gitRepoMetadataProvider.FindMergeBase(develop, release);

        fixture.Repository.DumpGraph(Console.WriteLine);
        releaseBranchMergeBase.ShouldBe(expectedReleaseMergeBase);
        developMergeBase.ShouldBe(expectedDevelopMergeBase);
    }
}
}
| |
using OpenKh.Common.Exceptions;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
namespace OpenKh.Kh2.Messages
{
public partial class MsgSerializer
{
// Describes how one message command maps between its binary form and its
// textual representation.
private class SerializerModel
{
    // Tag name used in the serialized text form (also the deserialization lookup key).
    public string Name { get; set; }
    // The binary message command this entry handles.
    public MessageCommand Command { get; set; }
    // Converts a parsed command into its textual payload; null when the
    // command carries no serializable payload (e.g. "end").
    public Func<MessageCommandModel, string> Serializer { get; set; }
    // Converts the textual payload back into raw bytes; null where round-tripping
    // is not implemented (e.g. "text", "complex").
    public Func<string, byte[]> Deserializer { get; set; }
}
// Master table mapping every supported MessageCommand to its text tag and its
// (de)serialization logic. The _serializer/_deserializer dictionaries below are
// both projections of this list, so Command and Name must each be unique here.
private static List<SerializerModel> _serializeModel = new List<SerializerModel>
{
    new SerializerModel
    {
        Name = "end",
        Command = MessageCommand.End
    },
    new SerializerModel
    {
        Name = "text",
        Command = MessageCommand.PrintText,
        Serializer = x => x.Text,
        Deserializer = null
    },
    new SerializerModel
    {
        Name = "complex",
        Command = MessageCommand.PrintComplex,
        Serializer = x => x.Text,
        Deserializer = null
    },
    new SerializerModel
    {
        Name = "tabulation",
        Command = MessageCommand.Tabulation,
    },
    new SerializerModel
    {
        Name = "reset",
        Command = MessageCommand.Reset,
    },
    new SerializerModel
    {
        Name = "theme",
        Command = MessageCommand.Theme,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "unk05",
        Command = MessageCommand.Unknown05,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "unk06",
        Command = MessageCommand.Unknown06,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        // RGBA color rendered as "#RRGGBBAA".
        Name = "color",
        Command = MessageCommand.Color,
        Serializer = x => $"#{x.Data[0]:X02}{x.Data[1]:X02}{x.Data[2]:X02}{x.Data[3]:X02}",
        Deserializer = x => DeserializeColor(x)
    },
    new SerializerModel
    {
        Name = "unk08",
        Command = MessageCommand.Unknown08,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        // Icon byte is rendered via the symbolic names in _icons.
        Name = "icon",
        Command = MessageCommand.PrintIcon,
        Serializer = x => _icons[x.Data[0]],
        Deserializer = x => DeserializeIcon(x)
    },
    new SerializerModel
    {
        Name = "scale",
        Command = MessageCommand.TextScale,
        Serializer = x => x.Data[0].ToString(),
        Deserializer = x => DeserializeScale(x)
    },
    new SerializerModel
    {
        Name = "width",
        Command = MessageCommand.TextWidth,
        Serializer = x => x.Data[0].ToString(),
        Deserializer = x => DeserializeWidth(x)
    },
    new SerializerModel
    {
        Name = "linespacing",
        Command = MessageCommand.LineSpacing,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        // NOTE(review): deserializer emits no bytes, so any textual payload is
        // dropped on round-trip — confirm this is intended for unk0d.
        Name = "unk0d",
        Command = MessageCommand.Unknown0d,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => new byte[0]
    },
    new SerializerModel
    {
        Name = "unk0e",
        Command = MessageCommand.Unknown0e,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "unk0f",
        Command = MessageCommand.Unknown0f,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        // NOTE(review): deserializer emits no bytes (payload dropped) — see unk0d.
        Name = "clear",
        Command = MessageCommand.Clear,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => new byte[0]
    },
    new SerializerModel
    {
        // Two little-endian shorts rendered as "x,y".
        Name = "position",
        Command = MessageCommand.Position,
        Serializer = x => ToPosition(x),
        Deserializer = x => FromPosition(x)
    },
    new SerializerModel
    {
        Name = "unk12",
        Command = MessageCommand.Unknown12,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "unk13",
        Command = MessageCommand.Unknown13,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "delay",
        Command = MessageCommand.Delay,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "chardelay",
        Command = MessageCommand.CharDelay,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "unk16",
        Command = MessageCommand.Unknown16,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "delayandfade",
        Command = MessageCommand.DelayAndFade,
        Serializer = x => ToDelayAndFade(x.Data),
        Deserializer = x => FromDelayAndFade(x)
    },
    new SerializerModel
    {
        Name = "unk18",
        Command = MessageCommand.Unknown18,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t2",
        Command = MessageCommand.Table2,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t3",
        Command = MessageCommand.Table3,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t4",
        Command = MessageCommand.Table4,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t5",
        Command = MessageCommand.Table5,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t6",
        Command = MessageCommand.Table6,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t7",
        Command = MessageCommand.Table7,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        Name = "t8",
        Command = MessageCommand.Table8,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
    new SerializerModel
    {
        // Fallback entry for commands with no dedicated handler.
        Name = "unk",
        Command = MessageCommand.Unsupported,
        Serializer = x => ToStringRawData(x.Data),
        Deserializer = x => FromStringToByte(x)
    },
};
// Fast lookups projected from _serializeModel:
// by binary command when serializing, by text tag when deserializing.
private static Dictionary<MessageCommand, SerializerModel> _serializer =
    _serializeModel.ToDictionary(x => x.Command, x => x);
private static Dictionary<string, SerializerModel> _deserializer =
    _serializeModel.ToDictionary(x => x.Name, x => x);
// Symbolic names for the icon command's single data byte; used both for
// serialization (by id) and, inverted in _iconsDeserialize, for parsing.
private static Dictionary<byte, string> _icons =
    new Dictionary<byte, string>()
    {
        [0] = "item-consumable",
        [1] = "item-tent",
        [2] = "item-key",
        [3] = "ability-unequip",
        [4] = "weapon-keyblade",
        [5] = "weapon-staff",
        [6] = "weapon-shield",
        [7] = "armor",
        [8] = "magic",
        [9] = "material",
        [10] = "exclamation-mark",
        [11] = "question-mark",
        [12] = "auto-equip",
        [13] = "ability-equip",
        [14] = "weapon-keyblade-equip",
        [15] = "weapon-staff-equip",
        [16] = "weapon-shield-equip",
        [17] = "accessory",
        [18] = "magic-nocharge",
        [19] = "party",
        [20] = "button-select",
        [21] = "button-start",
        [22] = "button-dpad",
        [23] = "tranquil",
        [24] = "remembrance",
        [25] = "form",
        [26] = "ai-mode-frequent",
        [27] = "ai-mode-moderate",
        [28] = "ai-mode-rare",
        [29] = "ai-settings",
        [30] = "button-r1",
        [31] = "button-r2",
        [32] = "button-l1",
        [33] = "button-l2",
        [34] = "button-triangle",
        [35] = "button-cross",
        [36] = "button-square",
        [37] = "button-circle",
        [38] = "gem-dark",
        [39] = "gem-blaze",
        [40] = "gem-frost",
        [41] = "gem-lightning",
        [42] = "gem-power",
        [43] = "gem-lucid",
        [44] = "gem-dense",
        [45] = "gem-twilight",
        [46] = "gem-mythril",
        [47] = "gem-bright",
        [48] = "gem-energy",
        [49] = "gem-serenity",
        [50] = "gem-orichalcum",
        [51] = "rank-s",
        [52] = "rank-a",
        [53] = "rank-b",
        [54] = "rank-c",
        [55] = "gumi-brush",
        [56] = "gumi-blueprint",
        [57] = "gumi-ship",
        [58] = "gumi-block",
        [59] = "gumi-gear",
    };
// Reverse map of _icons (name -> id), used by DeserializeIcon.
private static Dictionary<string, byte> _iconsDeserialize =
    _icons.ToDictionary(x => x.Value, x => x.Key);
// Parses the decimal byte parameter of the "scale" tag.
private static byte[] DeserializeScale(string parameter)
{
    return new byte[] { byte.Parse(parameter) };
}

// Parses the decimal byte parameter of the "width" tag.
private static byte[] DeserializeWidth(string parameter)
{
    return new byte[] { byte.Parse(parameter) };
}

// Parses a space-separated list of two-digit hex values into raw bytes.
private static byte[] FromStringToByte(string parameter)
{
    var bytes = new List<byte>();
    foreach (var token in parameter.Split(' '))
    {
        bytes.Add(byte.Parse(token, NumberStyles.HexNumber));
    }
    return bytes.ToArray();
}
// Parses an "RRGGBBAA" hex color, with an optional leading '#', into four bytes.
private static byte[] DeserializeColor(string value)
{
    if (value[0] == '#')
        value = value.Substring(1);

    var components = new byte[4];
    for (var i = 0; i < components.Length; i++)
    {
        components[i] = byte.Parse(value.Substring(i * 2, 2), NumberStyles.HexNumber);
    }
    return components;
}
// Maps a symbolic icon name back to its single-byte id; unknown names are rejected.
private static byte[] DeserializeIcon(string value)
{
    byte iconId;
    if (_iconsDeserialize.TryGetValue(value, out iconId))
        return new byte[] { iconId };

    throw new ParseException(value, 0, "Icon not supported");
}
// Formats a position command's coordinates as "x,y" (inverse of FromPosition).
private static string ToPosition(MessageCommandModel command)
{
    return string.Format("{0},{1}", command.PositionX, command.PositionY);
}
// Parses "x,y" into two little-endian signed shorts (4 bytes total).
// Missing or unparsable coordinates fall back to 0.
private static byte[] FromPosition(string text)
{
    short xCoord = 0;
    short yCoord = 0;

    var parts = text.Split(',');
    if (parts.Length > 0)
        short.TryParse(parts[0].Trim(), out xCoord);
    if (parts.Length > 1)
        short.TryParse(parts[1].Trim(), out yCoord);

    return new byte[4]
    {
        (byte)((ushort)xCoord & 0xFF),
        (byte)(((ushort)xCoord >> 8) & 0xFF),
        (byte)((ushort)yCoord & 0xFF),
        (byte)(((ushort)yCoord >> 8) & 0xFF),
    };
}
// Delay-and-fade payloads currently round-trip as raw hex bytes
// (no dedicated textual form); these simply delegate to the raw helpers.
private static string ToDelayAndFade(byte[] data) => ToStringRawData(data);
private static byte[] FromDelayAndFade(string text) => FromStringToByte(text);
// Renders raw bytes as upper-case, space-separated hex pairs (e.g. "01 FF").
private static string ToStringRawData(byte[] data)
{
    // BitConverter yields "01-FF"; swap the separators for spaces.
    return BitConverter.ToString(data).Replace('-', ' ');
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Snippets;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem;
using Microsoft.VisualStudio.TextManager.Interop;
using Roslyn.Utilities;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.Snippets
{
internal abstract class AbstractSnippetInfoService : ForegroundThreadAffinitizedObject, ISnippetInfoService, IVsExpansionEvents
{
    // Language whose snippets are enumerated from the expansion manager.
    private readonly Guid _languageGuidForSnippets;
    // May stay null if the service provider or expansion manager is unavailable.
    private readonly IVsExpansionManager _expansionManager;

    /// <summary>
    /// This service is created on the UI thread during package initialization, but it must not
    /// block the initialization process. Getting snippet information from the <see cref="IVsExpansionManager"/>
    /// must be done on the UI thread, so do this work in a task that will run on the UI thread
    /// with lower priority.
    /// </summary>
    protected readonly Task InitialCachePopulationTask;

    // Guards reads/writes of the two cache fields below.
    protected object cacheGuard = new object();

    // Initialize these to empty values. When returning from GetSnippetsIfAvailable and
    // SnippetShortcutExists_NonBlocking, we can return without checking the status
    // of InitialCachePopulationTask.
    protected IList<SnippetInfo> snippets = SpecializedCollections.EmptyList<SnippetInfo>();
    protected ISet<string> snippetShortcuts = SpecializedCollections.EmptySet<string>();

    // Tracks whether the "insert snippet" command currently has a key binding
    // (updated via IVsExpansionEvents).
    public bool InsertSnippetCommandBound { get; private set; }

    public AbstractSnippetInfoService(
        Shell.SVsServiceProvider serviceProvider,
        Guid languageGuidForSnippets,
        IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners)
    {
        _languageGuidForSnippets = languageGuidForSnippets;

        if (serviceProvider != null)
        {
            var textManager = (IVsTextManager2)serviceProvider.GetService(typeof(SVsTextManager));
            if (textManager.GetExpansionManager(out _expansionManager) == VSConstants.S_OK)
            {
                // Subscribe to snippet-update and key-binding-change notifications.
                ComEventSink.Advise<IVsExpansionEvents>(_expansionManager, this);
            }
        }

        // Populate the cache on the UI thread (required by the expansion manager)
        // but at lower priority, tracked by the async-operation listener for tests.
        IAsynchronousOperationListener waiter = new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.Snippets);
        var token = waiter.BeginAsyncOperation(GetType().Name + ".Start");
        InitialCachePopulationTask = Task.Factory.StartNew(() => PopulateSnippetCaches(),
            CancellationToken.None,
            TaskCreationOptions.None,
            ForegroundTaskScheduler).CompletesAsyncOperation(token);
    }

    // IVsExpansionEvents: the snippet set changed on disk/in VS; rebuild the caches.
    public int OnAfterSnippetsUpdate()
    {
        PopulateSnippetCaches();
        return VSConstants.S_OK;
    }

    // IVsExpansionEvents: records whether the insert-snippet command is key-bound.
    public int OnAfterSnippetsKeyBindingChange([ComAliasName("Microsoft.VisualStudio.OLE.Interop.DWORD")]uint dwCmdGuid, [ComAliasName("Microsoft.VisualStudio.OLE.Interop.DWORD")]uint dwCmdId, [ComAliasName("Microsoft.VisualStudio.OLE.Interop.BOOL")]int fBound)
    {
        InsertSnippetCommandBound = fBound != 0;
        return VSConstants.S_OK;
    }

    public IEnumerable<SnippetInfo> GetSnippetsIfAvailable()
    {
        // This function used to be async and wait for the cache population task.
        // Since the cache population task must run on the UI thread, this could
        // deadlock if completion blocked on the UI thread before the
        // population task started. We now simply return whatever snippets were
        // there. When we're told to update snippets, we'll return stale data
        // until that process is complete, which is fine.
        lock (cacheGuard)
        {
            return snippets;
        }
    }

    public bool SnippetShortcutExists_NonBlocking(string shortcut)
    {
        // This function used to be async and wait for the cache population task.
        // Since the cache population task must run on the UI thread, this could
        // deadlock if completion blocked on the UI thread before the
        // population task started. We now simply return whatever snippets were
        // there. When we're told to update snippets, we'll return stale data
        // until that process is complete, which is fine.
        lock (cacheGuard)
        {
            return snippetShortcuts.Contains(shortcut);
        }
    }

    // Overridden by language-specific services that want formatting after insertion.
    public virtual bool ShouldFormatSnippet(SnippetInfo snippetInfo)
    {
        return false;
    }

    // Recomputes both caches outside the lock, then swaps them in atomically.
    private void PopulateSnippetCaches()
    {
        var updatedSnippets = GetSnippetInfoList();
        var updatedSnippetShortcuts = GetShortcutsHashFromSnippets(updatedSnippets);

        lock (cacheGuard)
        {
            snippets = updatedSnippets;
            snippetShortcuts = updatedSnippetShortcuts;
        }
    }

    // Shortcut lookup is case-insensitive (matches VS snippet shortcut behavior).
    protected static HashSet<string> GetShortcutsHashFromSnippets(IList<SnippetInfo> updatedSnippets)
    {
        return new HashSet<string>(updatedSnippets.Select(s => s.Shortcut), StringComparer.OrdinalIgnoreCase);
    }

    private IList<SnippetInfo> GetSnippetInfoList()
    {
        IVsExpansionEnumeration expansionEnumerator;
        if (TryGetVsSnippets(out expansionEnumerator))
        {
            return ExtractSnippetInfo(expansionEnumerator);
        }

        return SpecializedCollections.EmptyList<SnippetInfo>();
    }

    private bool TryGetVsSnippets(out IVsExpansionEnumeration expansionEnumerator)
    {
        expansionEnumerator = null;
        if (_expansionManager != null)
        {
            _expansionManager.EnumerateExpansions(
                _languageGuidForSnippets,
                fShortCutOnly: 0,
                bstrTypes: null,
                iCountTypes: 0,
                fIncludeNULLType: 1,
                fIncludeDuplicates: 1, // Allows snippets with the same title but different shortcuts
                pEnum: out expansionEnumerator);
        }

        return expansionEnumerator != null;
    }

    // Walks the COM enumerator, marshalling each native VsExpansion into managed data.
    private static IList<SnippetInfo> ExtractSnippetInfo(IVsExpansionEnumeration expansionEnumerator)
    {
        IList<SnippetInfo> snippetList = new List<SnippetInfo>();

        uint count = 0;
        uint fetched = 0;
        VsExpansion snippetInfo = new VsExpansion();
        IntPtr[] pSnippetInfo = new IntPtr[1];

        try
        {
            // Allocate enough memory for one VSExpansion structure. This memory is filled in by the Next method.
            pSnippetInfo[0] = Marshal.AllocCoTaskMem(Marshal.SizeOf(snippetInfo));

            expansionEnumerator.GetCount(out count);
            for (uint i = 0; i < count; i++)
            {
                expansionEnumerator.Next(1, pSnippetInfo, out fetched);
                if (fetched > 0)
                {
                    // Convert the returned blob of data into a structure that can be read in managed code.
                    snippetInfo = ConvertToVsExpansionAndFree(pSnippetInfo[0]);
                    // Snippets without a shortcut cannot be invoked by typing; skip them.
                    if (!string.IsNullOrEmpty(snippetInfo.shortcut))
                    {
                        snippetList.Add(new SnippetInfo(snippetInfo.shortcut, snippetInfo.title, snippetInfo.description, snippetInfo.path));
                    }
                }
            }
        }
        finally
        {
            Marshal.FreeCoTaskMem(pSnippetInfo[0]);
        }

        return snippetList;
    }

    // Copies the native struct's BSTR fields into managed strings, freeing each BSTR.
    private static VsExpansion ConvertToVsExpansionAndFree(IntPtr expansionPtr)
    {
        var buffer = (VsExpansionWithIntPtrs)Marshal.PtrToStructure(expansionPtr, typeof(VsExpansionWithIntPtrs));
        var expansion = new VsExpansion();

        ConvertToStringAndFree(ref buffer.DescriptionPtr, ref expansion.description);
        ConvertToStringAndFree(ref buffer.PathPtr, ref expansion.path);
        ConvertToStringAndFree(ref buffer.ShortcutPtr, ref expansion.shortcut);
        ConvertToStringAndFree(ref buffer.TitlePtr, ref expansion.title);

        return expansion;
    }

    private static void ConvertToStringAndFree(ref IntPtr ptr, ref string str)
    {
        if (ptr != IntPtr.Zero)
        {
            str = Marshal.PtrToStringBSTR(ptr);
            Marshal.FreeBSTR(ptr);
            // Zero the pointer so a second call cannot double-free.
            ptr = IntPtr.Zero;
        }
    }

    /// <summary>
    /// This structure is used to facilitate the interop calls with IVsExpansionEnumeration.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    private struct VsExpansionWithIntPtrs
    {
        // Field order mirrors the native VsExpansion layout (BSTRs as raw pointers).
        public IntPtr PathPtr;
        public IntPtr TitlePtr;
        public IntPtr ShortcutPtr;
        public IntPtr DescriptionPtr;
    }
}
}
| |
using Interviewer.Common;
using Interviewer.Data;
using Interviewer.Services;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Runtime.Serialization.Json;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Input;
using Windows.Storage;
using Windows.UI.Xaml;
namespace Interviewer
{
public class MainViewModel : BaseModel
{
// Command that starts a question run; the command parameter carries the
// number of questions as text and is parsed before being handed to the view model.
private class RunTestCommand : ICommand
{
    // Required by ICommand; never raised here, so bound controls will not
    // automatically re-query CanExecute when IsRunning changes.
    public event EventHandler CanExecuteChanged;

    public bool CanExecute(object parameter)
    {
        // Only one run may be active at a time.
        return !MainViewModel.ViewModel.IsRunning;
    }

    public void Execute(object parameter)
    {
        // NOTE(review): throws if parameter is null or non-numeric — confirm
        // all XAML bindings pass a valid number.
        MainViewModel.ViewModel.RunQuestions(int.Parse(parameter.ToString()));
    }
}
// Command that records an answer for the current question with a fixed rating.
private class RunQuestionCommand : ICommand
{
    // Rating this command instance reports (set once at construction).
    private readonly int _rating;

    // Required by ICommand; never raised here.
    public event EventHandler CanExecuteChanged;

    public RunQuestionCommand(int rating)
    {
        _rating = rating;
    }

    public bool CanExecute(object parameter)
    {
        // Only meaningful while a question is selected.
        return MainViewModel.ViewModel.SelectedQuestion != null;
    }

    public void Execute(object parameter)
    {
        // Parameter is expected to be the answered Question (null tolerated via "as").
        MainViewModel.ViewModel.QuestionAnswered(_rating, parameter as Question);
    }
}
// Command intended to open the "add questions" dialog for the selected area.
private class RunShowAddQuestionsCommand : ICommand
{
    // Required by ICommand; never raised here.
    public event EventHandler CanExecuteChanged;

    public RunShowAddQuestionsCommand()
    {
    }

    public bool CanExecute(object parameter)
    {
        return null != MainViewModel.ViewModel.SelectedArea;
    }

    public void Execute(object parameter)
    {
        // NOTE(review): dialog launch is commented out, so executing this
        // command is currently a no-op — confirm whether this is intentional.
        //new AddQuestions().ShowDialog();
    }
}
// Backing field for the ViewModel singleton.
private static MainViewModel _viewModel;

// Currently loaded configuration tree and the user's selections within it.
private configuration _selectedConfiguration;
private Profile _selectedProfile;
private Platform _selectedPlatform;
private KnowledgeArea _selectedKnowledgeArea;
private Area _selectedArea;
private Question _selectedQuestion;

// Question-run parameters and state.
private int _maxQuestionsCount = 10;
private int _questionsCount = 5;
private bool _isLoaded = false;   // configuration fetched from the service
private bool _isRunning = false;  // a question run is in progress

// Running tallies for the current interview session.
private int _passedCount = 0;
private int _failedCount = 0;
private int _undefinedCount = 0;
private int _totalQuestions = 0;

private string _interviewedPerson;

// Base URL of the backing REST API; persisted in roaming settings (see ctor).
private string _apiBaseUrl = "http://localhost:52485/api/";
public string ServicesUrl
{
get { return _apiBaseUrl; }
set
{
if (!string.IsNullOrWhiteSpace(value))
{
Uri uri = null;
if (Uri.TryCreate(value, UriKind.Absolute, out uri))
{
_apiBaseUrl = value;
OnPropertyChanged();
SetServicesSetting(value);
}
}
}
}
private void SetServicesSetting(string url)
{
var settings = ApplicationData.Current.RoamingSettings;
if (null == settings.Values["Services.Url"])
{
settings.Values.Add("Services.Url", url);
}
else
{
settings.Values["Services.Url"] = url;
}
}
        // Seeds the roaming "Services.Url" setting with the default API URL on
        // first run, then adopts whatever value is currently persisted.
        public MainViewModel()
        {
            var settings = ApplicationData.Current.RoamingSettings;
            if(null == settings.Values["Services.Url"])
            {
                settings.Values.Add("Services.Url", _apiBaseUrl);
            }
            _apiBaseUrl = (string)settings.Values["Services.Url"];
        }
        // Lazily-created singleton instance used by the nested command classes
        // and by XAML bindings.
        // NOTE(review): not thread-safe; assumes first access happens on the UI
        // thread — confirm no background thread touches this first.
        public static MainViewModel ViewModel
        {
            get
            {
                if (null == _viewModel)
                {
                    _viewModel = new MainViewModel();
                }
                return _viewModel;
            }
        }
public override bool IsValid()
{
return SelectedConfiguration != null && SelectedConfiguration.IsValid();
}
        // Fetches the configuration from the API the first time it is called,
        // then kicks off a question run (mode 1) and caches the result.
        // NOTE(review): not guarded against overlapping calls — two awaits in
        // flight before _isLoaded is set would both hit the API; confirm callers
        // are UI-thread only.
        public async Task<configuration> LoadConfiguration()
        {
            if (!_isLoaded)
            {
                using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
                {
                    SelectedConfiguration = await client.GetConfiguration();
                }
                // Boxed int 1 → RunTestCommand.Execute → RunQuestions(1).
                RunQuestionsCommand.Execute(1);
                _isLoaded = true;
            }
            return SelectedConfiguration;
        }
public async void SavePendingChanges()
{
if (SelectedConfiguration == null) return;
var result = 0;
foreach(var p in this.Platforms)
{
if(p.IsDirty && p.IsValid())
{
if(p.Id == 0)
{
result = await InsertPlatform(p);
p.Id = result;
}
else
{
result = await UpdatePlatform(p);
}
p.IsDirty = result == 0;
}
foreach (var ka in p.KnowledgeArea)
{
if(ka.IsDirty && ka.IsValid())
{
if(ka.Id == 0)
{
result = await InsertKnowledgeArea(ka);
ka.Id = result;
}
else
{
result = await UpdateKnowledgeArea(ka);
}
ka.IsDirty = result == 0;
}
foreach(var a in ka.Area)
{
if(a.IsDirty && a.IsValid())
{
if(a.Id == 0)
{
result = await InsertArea(a);
a.Id = result;
}
else
{
result = await UpdateArea(a);
}
a.IsDirty = result == 0;
}
foreach (var q in a.Question)
{
if(q.IsDirty && q.IsValid())
{
if(q.Id == 0)
{
result = await InsertQuestion(q);
q.Id = result;
}
else
{
result = await UpdateQuestion(q);
}
q.IsDirty = result == 0;
}
}
}
}
}
}
        /// <summary>Inserts a platform via the API; returns the new server id (see SavePendingChanges).</summary>
        public async Task<int> InsertPlatform(Platform item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.AddItem(item);
            }
        }
        /// <summary>Updates an existing platform via the API; returns the API result code.</summary>
        public async Task<int> UpdatePlatform(Platform item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.UpdateItem(item);
            }
        }
public async Task<int> DeletePlatform(Platform item)
{
using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
{
return await client.AddItem(item);
}
}
        /// <summary>Inserts a knowledge area via the API; returns the new server id.</summary>
        public async Task<int> InsertKnowledgeArea(KnowledgeArea item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.AddItem(item);
            }
        }
        /// <summary>Updates an existing knowledge area via the API; returns the API result code.</summary>
        public async Task<int> UpdateKnowledgeArea(KnowledgeArea item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.UpdateItem(item);
            }
        }
public async Task<int> DeleteKnowledgeArea(KnowledgeArea item)
{
using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
{
return await client.AddItem(item);
}
}
        /// <summary>Inserts an area via the API; returns the new server id.</summary>
        public async Task<int> InsertArea(Area item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.AddItem(item);
            }
        }
        /// <summary>Updates an existing area via the API; returns the API result code.</summary>
        public async Task<int> UpdateArea(Area item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.UpdateItem(item);
            }
        }
public async Task<int> DeleteArea(Area item)
{
using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
{
return await client.AddItem(item);
}
}
        /// <summary>Inserts a question via the API; returns the new server id.</summary>
        public async Task<int> InsertQuestion(Question item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.AddItem(item);
            }
        }
        /// <summary>Updates an existing question via the API; returns the API result code.</summary>
        public async Task<int> UpdateQuestion(Question item)
        {
            using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
            {
                return await client.UpdateItem(item);
            }
        }
public async Task<int> DeleteQuestion(Question item)
{
using (var client = ApiServiceFactory.CreateService(_apiBaseUrl))
{
return await client.AddItem(item);
}
}
        // Currently loaded configuration; setting it refreshes all dependent
        // collection bindings.
        public configuration SelectedConfiguration
        {
            get
            {
                return _selectedConfiguration;
            }
            set
            {
                _selectedConfiguration = value;
                OnPropertyChanged("SelectedConfiguration");
                OnPropertyChanged("Platforms");
                OnPropertyChanged("Profiles");
                OnPropertyChanged("QuestionsCountRange");
            }
        }
        // Profiles to show: the configuration's own profiles, or — when none —
        // profiles gathered from the platforms via their requirements.
        // NOTE(review): this getter has side effects (it resets SelectedProfile
        // on every read) and throws NullReferenceException if read before a
        // configuration is loaded — confirm bindings never fire that early.
        public IEnumerable<Profile> Profiles
        {
            get
            {
                var profiles = SelectedConfiguration.Profile.Any()
                    ? SelectedConfiguration.Profile
                    : from p in SelectedConfiguration.Platform
                      from prof in p.Profile
                      from req in prof.Requirement
                      where req.PlatformId == p.Id
                      select prof;
                SelectedProfile = profiles.FirstOrDefault();
                return profiles.Distinct();
            }
        }
        public Profile SelectedProfile
        {
            get
            {
                return _selectedProfile;
            }
            set
            {
                _selectedProfile = value;
                OnPropertyChanged("SelectedProfile");
            }
        }
        // NOTE(review): also a side-effecting getter — reading it resets
        // SelectedPlatform (and, via the setter cascade below, the selected
        // knowledge area, area and question).
        public IEnumerable<Platform> Platforms
        {
            get
            {
                SelectedPlatform = SelectedConfiguration.Platform.FirstOrDefault();
                return SelectedConfiguration.Platform;
            }
        }
        // Selection setters cascade: platform → knowledge area → area → question,
        // each auto-selecting the first child of the newly selected parent.
        public Platform SelectedPlatform
        {
            get
            {
                return _selectedPlatform;
            }
            set
            {
                _selectedPlatform = value;
                OnPropertyChanged("SelectedPlatform");
                if (value == null) return;
                SelectedKnowledgeArea = value.KnowledgeArea.FirstOrDefault();
            }
        }
        public KnowledgeArea SelectedKnowledgeArea
        {
            get
            {
                return _selectedKnowledgeArea;
            }
            set
            {
                _selectedKnowledgeArea = value;
                OnPropertyChanged("SelectedKnowledgeArea");
                if (value == null) return;
                SelectedArea = value.Area.FirstOrDefault();
            }
        }
        public Area SelectedArea
        {
            get
            {
                return _selectedArea;
            }
            set
            {
                _selectedArea = value;
                OnPropertyChanged("SelectedArea");
                if (value == null) return;
                SelectedQuestion = value.Question.FirstOrDefault();
            }
        }
        public Question SelectedQuestion
        {
            get
            {
                return _selectedQuestion;
            }
            set
            {
                _selectedQuestion = value;
                OnPropertyChanged("SelectedQuestion");
            }
        }
#region get items
public async Task<Platform> GetPlatform(int id)
{
return (await _viewModel.LoadConfiguration())
.Platform.FirstOrDefault(x => x.Id == id);
}
public async Task<Profile> GetProfile(int id)
{
return (await _viewModel.LoadConfiguration())
.Profile.FirstOrDefault(x => x.Id == id);
}
public async Task<KnowledgeArea> GetKnowledgeArea(int id)
{
return (from p in (await _viewModel.LoadConfiguration()).Platform
from ka in p.KnowledgeArea
where ka.Id == id
select ka).FirstOrDefault();
}
public async Task<Area> GetArea(int id)
{
return (from p in (await _viewModel.LoadConfiguration()).Platform
from ka in p.KnowledgeArea
from a in ka.Area
where a.Id == id
select a).FirstOrDefault();
}
public async Task<Question> GetQuestion(int id)
{
return (from p in (await _viewModel.LoadConfiguration()).Platform
from ka in p.KnowledgeArea
from a in ka.Area
from q in a.Question
where q.Id == id
select q).FirstOrDefault();
}
#endregion
        // Number of questions to draw per area in a random run (default 5,
        // user-selectable from QuestionsCountRange).
        public int QuestionsCount
        {
            get
            {
                return _questionsCount;
            }
            set
            {
                _questionsCount = value;
                OnPropertyChanged("QuestionsCount");
            }
        }
public IEnumerable<int> QuestionsCountRange
{
get
{
var list = new List<int>();
for (var i = 1; i <= _maxQuestionsCount; i++)
{
list.Add(i);
}
return list;
}
}
        // True while a question run is in progress (gates RunTestCommand).
        public bool IsRunning
        {
            get
            {
                return _isRunning;
            }
            set
            {
                _isRunning = value;
                OnPropertyChanged("IsRunning");
            }
        }
        private ICommand _runQuestionsCommand;
        // Starts a question run; lazily created on first access
        // (UI-thread only — the lazy init is not thread-safe).
        public ICommand RunQuestionsCommand
        {
            get
            {
                return _runQuestionsCommand ?? (_runQuestionsCommand = new MainViewModel.RunTestCommand());
            }
        }
        private ICommand _questionUpCommand;
        // Rates the current question as passed (rating 1).
        public ICommand QuestionUpCommand
        {
            get
            {
                return _questionUpCommand ?? (_questionUpCommand = new MainViewModel.RunQuestionCommand(1));
            }
        }
        private ICommand _questionDownCommand;
        // Rates the current question as failed (rating 0).
        public ICommand QuestionDownCommand
        {
            get
            {
                return _questionDownCommand ?? (_questionDownCommand = new MainViewModel.RunQuestionCommand(0));
            }
        }
        private ICommand _questionUndefCommand;
        // Rates the current question as undefined (rating -1).
        public ICommand QuestionUndefCommand
        {
            get
            {
                return _questionUndefCommand ?? (_questionUndefCommand = new MainViewModel.RunQuestionCommand(-1));
            }
        }
        private ICommand _showAddQuestionsCommand;
        // Opens the add-questions dialog (currently a no-op — see RunShowAddQuestionsCommand).
        public ICommand ShowAddQuestionsCommand
        {
            get
            {
                return _showAddQuestionsCommand ?? (_showAddQuestionsCommand = new MainViewModel.RunShowAddQuestionsCommand());
            }
        }
        private RelayCommand _addPlatform;
        // Adds a new, dirty placeholder platform to the loaded configuration.
        public RelayCommand AddPlatform
        {
            get
            {
                return _addPlatform ?? (_addPlatform = new RelayCommand(
                    (object p) => {
                        MainViewModel.ViewModel.SelectedConfiguration.Platform
                            .Add(new Platform
                            {
                                Id = 0,
                                Name = "Undefined",
                                KnowledgeArea = new ObservableCollection<KnowledgeArea>(),
                                Profile = new ObservableCollection<Profile>(),
                                IsDirty = true
                            });
                    },
                    (object p) => MainViewModel.ViewModel.SelectedConfiguration != null
                ));
            }
        }
        private RelayCommand _addKnowdlegeArea;
        // Adds a placeholder knowledge area to the platform whose id is passed
        // as the command parameter.
        // NOTE(review): '(int)p' unboxes the command parameter — throws
        // InvalidCastException if the binding supplies anything but a boxed int.
        public RelayCommand AddKnowledgeArea
        {
            get
            {
                return _addKnowdlegeArea ?? (_addKnowdlegeArea = new RelayCommand(
                    (object p) => {
                        MainViewModel.ViewModel.Platforms.First(x => x.Id == (int)p)
                            .KnowledgeArea
                            .Add(new KnowledgeArea {
                                PlatformId = MainViewModel.ViewModel.SelectedPlatform.Id,
                                Id = 0,
                                Name = "Undefined",
                                Area = new ObservableCollection<Area>(),
                                IsDirty = true
                            });
                    },
                    (object p) => MainViewModel.ViewModel.Platforms.Any(x => x.Id == (int)p)
                ));
            }
        }
        private RelayCommand _addArea;
        // Adds a placeholder area to the knowledge area whose id is the parameter.
        public RelayCommand AddArea
        {
            get
            {
                return _addArea ?? (_addArea = new RelayCommand(
                    (object p) => {
                        (from pf in MainViewModel.ViewModel.Platforms
                         from ka in pf.KnowledgeArea
                         where ka.Id == (int)p
                         select ka)
                            .First().Area
                            .Add(new Area
                            {
                                KnowledgeAreaId = MainViewModel.ViewModel.SelectedKnowledgeArea.Id,
                                Id = 0,
                                Name = "Undefined",
                                Question = new ObservableCollection<Question>(),
                                IsDirty = true
                            });
                    },
                    (object p) => (int)p > 0
                ));
            }
        }
        private RelayCommand _addQuestion;
        // Adds a placeholder question to the area whose id is the parameter.
        public RelayCommand AddQuestion
        {
            get
            {
                return _addQuestion ?? (_addQuestion = new RelayCommand(
                    (object p) => {
                        (from pf in MainViewModel.ViewModel.Platforms
                         from ka in pf.KnowledgeArea
                         from a in ka.Area
                         where a.Id == (int)p
                         select a)
                            .First().Question
                            .Add(new Question
                            {
                                AreaId = MainViewModel.ViewModel.SelectedArea.Id,
                                Id = 0,
                                Name = "Undefined",
                                Value = "Undefined",
                                Level = 1,
                                Weight = 1,
                                IsDirty = true
                            });
                    },
                    (object p) => (int)p > 0
                ));
            }
        }
        // Four identical edit-mode triples (flag, derived Visibility, toggle
        // command) — one per hierarchy level: platform, knowledge area, area,
        // question.
        private bool _isEditingPlatformProps = false;
        public bool IsEditingPlatformProps
        {
            get { return _isEditingPlatformProps; }
            set
            {
                _isEditingPlatformProps = value;
                OnPropertyChanged();
                OnPropertyChanged("PlatformEditVisibility");
            }
        }
        // Visibility of the platform edit panel, derived from the flag above.
        public Visibility PlatformEditVisibility
        {
            get { return _isEditingPlatformProps ? Visibility.Visible : Visibility.Collapsed; }
        }
        // Toggles platform edit mode; enabled only when a platform is selected.
        private RelayCommand _editingPlatformProps;
        public RelayCommand EditingPlatformProps
        {
            get
            {
                return _editingPlatformProps ?? (_editingPlatformProps = new RelayCommand(
                    (object p) => {
                        MainViewModel.ViewModel.IsEditingPlatformProps = !MainViewModel.ViewModel.IsEditingPlatformProps;
                    },
                    (object p) => MainViewModel.ViewModel.SelectedPlatform != null
                ));
            }
        }
        private bool _isEditingKnowledgeAreaProps = false;
        public bool IsEditingKnowledgeAreaProps
        {
            get { return _isEditingKnowledgeAreaProps; }
            set
            {
                _isEditingKnowledgeAreaProps = value;
                OnPropertyChanged();
                OnPropertyChanged("KnowledgeAreaEditVisibility");
            }
        }
        public Visibility KnowledgeAreaEditVisibility
        {
            get { return _isEditingKnowledgeAreaProps ? Visibility.Visible : Visibility.Collapsed; }
        }
        private RelayCommand _editingKnowledgeAreaProps;
        public RelayCommand EditingKnowledgeAreaProps
        {
            get
            {
                return _editingKnowledgeAreaProps ?? (_editingKnowledgeAreaProps = new RelayCommand(
                    (object p) => {
                        MainViewModel.ViewModel.IsEditingKnowledgeAreaProps = !MainViewModel.ViewModel.IsEditingKnowledgeAreaProps;
                    },
                    (object p) => MainViewModel.ViewModel.SelectedKnowledgeArea != null
                ));
            }
        }
        private bool _isEditingAreaProps = false;
        public bool IsEditingAreaProps
        {
            get { return _isEditingAreaProps; }
            set
            {
                _isEditingAreaProps = value;
                OnPropertyChanged();
                OnPropertyChanged("AreaEditVisibility");
            }
        }
        public Visibility AreaEditVisibility
        {
            get { return _isEditingAreaProps ? Visibility.Visible : Visibility.Collapsed; }
        }
        private RelayCommand _editingAreaProps;
        public RelayCommand EditingAreaProps
        {
            get
            {
                return _editingAreaProps ?? (_editingAreaProps = new RelayCommand(
                    (object p) => {
                        MainViewModel.ViewModel.IsEditingAreaProps = !MainViewModel.ViewModel.IsEditingAreaProps;
                    },
                    (object p) => MainViewModel.ViewModel.SelectedArea != null
                ));
            }
        }
        private bool _isEditingQuestionProps = false;
        public bool IsEditingQuestionProps
        {
            get { return _isEditingQuestionProps; }
            set
            {
                _isEditingQuestionProps = value;
                OnPropertyChanged();
                OnPropertyChanged("QuestionEditVisibility");
            }
        }
        public Visibility QuestionEditVisibility
        {
            get { return _isEditingQuestionProps ? Visibility.Visible : Visibility.Collapsed; }
        }
        private RelayCommand _editingQuestionProps;
        public RelayCommand EditingQuestionProps
        {
            get
            {
                return _editingQuestionProps ?? (_editingQuestionProps = new RelayCommand(
                    (object p) => {
                        MainViewModel.ViewModel.IsEditingQuestionProps = !MainViewModel.ViewModel.IsEditingQuestionProps;
                    },
                    (object p) => MainViewModel.ViewModel.SelectedQuestion != null
                ));
            }
        }
        // Per-run answer counters; each also refreshes AppliedQuestions.
        public int PassedCount
        {
            get
            {
                return _passedCount;
            }
            set
            {
                _passedCount = value;
                OnPropertyChanged("PassedCount");
                OnPropertyChanged("AppliedQuestions");
            }
        }
        public int FailedCount
        {
            get
            {
                return _failedCount;
            }
            set
            {
                _failedCount = value;
                OnPropertyChanged("FailedCount");
                OnPropertyChanged("AppliedQuestions");
            }
        }
        public int UndefinedCount
        {
            get
            {
                return _undefinedCount;
            }
            set
            {
                _undefinedCount = value;
                OnPropertyChanged("UndefinedCount");
                OnPropertyChanged("AppliedQuestions");
            }
        }
        // Total questions selected for the current run; assigning it resets all
        // three answer counters (set at run start — see RunQuestions).
        public int TotalQuestions
        {
            get
            {
                return _totalQuestions;
            }
            set
            {
                _totalQuestions = value;
                PassedCount = 0;
                FailedCount = 0;
                UndefinedCount = 0;
                OnPropertyChanged("TotalQuestions");
                OnPropertyChanged("AppliedQuestions");
            }
        }
        // 1-based number of the question currently being asked (answered so far
        // plus one), capped at the total.
        public int AppliedQuestions
        {
            get
            {
                return Math.Min(TotalQuestions, PassedCount + FailedCount + UndefinedCount + 1);
            }
        }
        // Name of the person being interviewed (display/reporting).
        public string InterviewedPerson
        {
            get
            {
                return _interviewedPerson;
            }
            set
            {
                _interviewedPerson = value;
                OnPropertyChanged("InterviewedPerson");
            }
        }
        // Records a rating (1 = passed, 0 = failed, -1 = undefined) for the
        // given question, bumps the matching counter, and advances the
        // selection to the next unanswered question (null when none remain).
        // NOTE(review): dereferences 'question' without a null check — the
        // RunQuestionCommand binding must always supply a Question.
        private void QuestionAnswered(int rating, Question question)
        {
            SelectedQuestion = question;
            SelectedQuestion.AlreadyAnswered = true;
            SelectedQuestion.rating = rating;
            switch (rating)
            {
                case -1:
                    UndefinedCount++;
                    break;
                case 0:
                    FailedCount++;
                    break;
                case 1:
                    PassedCount++;
                    break;
            }
            SelectedQuestion = GetNextQuestion();
        }
        // Starts (mode == 1) or clears (any other mode) a question run.
        // mode 1: mark every question answered, then un-mark a random subset
        // per area — the un-marked ones are the run's pending questions.
        // Other modes: mark everything unanswered.
        private void RunQuestions(int mode)
        {
            IsRunning = true;
            try
            {
                if (mode == 1) //get random questions
                {
                    // Flag order matters: set all answered first, then clear the
                    // flag only on the randomly chosen indexes.
                    ToogleAnsweredFlag(true);
                    var questions = from p in Platforms
                                    from ka in p.KnowledgeArea
                                    from a in ka.Area
                                    select a.Question;
                    foreach (var q in questions.Where(x => x.Count() > 0))
                    {
                        var randomIndexes = GetRandomIndexes(q.Count());
                        foreach (var i in randomIndexes)
                        {
                            q.ElementAt(i).AlreadyAnswered = false;
                        }
                    }
                    // Setting TotalQuestions also resets the pass/fail counters.
                    TotalQuestions = GetPendingQuestions();
                }
                else
                {
                    ToogleAnsweredFlag(false);
                }
                OnPropertyChanged("Platforms");
                if(mode == 1)
                {
                    //var window = new AskQuestions();
                    //window.ShowDialog();
                }
            }
            finally
            {
                IsRunning = false;
            }
        }
private IEnumerable<int> GetRandomIndexes(int itemsCount)
{
var rand = new Random(100);
var list = new List<int>();
if (itemsCount <= QuestionsCount)
{
for(var i=0;i<itemsCount;i++)
list.Add(i);
}
else
{
if (Math.Floor(Convert.ToDecimal(itemsCount)/Convert.ToDecimal(QuestionsCount)) >= 2M)
{
while (list.Count < Math.Min(QuestionsCount, itemsCount))
{
var randIndex = rand.Next(0, itemsCount - 1);
if (!list.Contains(randIndex))
list.Add(randIndex);
}
}
else
{
var dismissList = new List<int>();
for (var i = 0; i < itemsCount - QuestionsCount; i++)
{
var randIndex = rand.Next(0, itemsCount - 1);
if (!dismissList.Contains(randIndex))
dismissList.Add(randIndex);
}
for (var i = 0; i < itemsCount; i++)
if(!dismissList.Contains(i))
list.Add(i);
}
}
return list.ToArray();
}
        // Sets AlreadyAnswered on every question in the hierarchy.
        // NOTE(review): method name has a typo ("Toogle" — and it sets rather
        // than toggles); renaming requires updating the call sites in RunQuestions.
        private void ToogleAnsweredFlag(bool answered)
        {
            foreach (var q2 in from p in Platforms
                               from ka in p.KnowledgeArea
                               from a in ka.Area
                               from q in a.Question
                               select q)
            {
                q2.AlreadyAnswered = answered;
            }
        }
private int GetPendingQuestions()
{
var pendingQuestions = 0;
foreach (var q2 in from p in Platforms
from ka in p.KnowledgeArea
from a in ka.Area
from q in a.Question
select q)
{
pendingQuestions += q2.AlreadyAnswered ? 0 : 1;
}
return pendingQuestions;
}
        // Walks the hierarchy in order and returns the first unanswered
        // question, moving the platform/knowledge-area/area selection to match
        // it (so the UI follows along). Returns null when the run is complete.
        private Question GetNextQuestion()
        {
            foreach(var p in Platforms)
            {
                foreach(var ka in p.KnowledgeArea)
                {
                    foreach(var a in ka.Area)
                    {
                        foreach(var q in a.Question)
                        {
                            if (!q.AlreadyAnswered)
                            {
                                // Only reassign when different, to avoid the
                                // setters' cascading re-selection side effects.
                                if (p != SelectedPlatform)
                                    SelectedPlatform = p;
                                if (ka != SelectedKnowledgeArea)
                                    SelectedKnowledgeArea = ka;
                                if (a != SelectedArea)
                                    SelectedArea = a;
                                return q;
                            }
                        }
                    }
                }
            }
            return null;
        }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Win32.SafeHandles;
namespace System.IO
{
/// <summary>Provides an implementation of a file stream for Unix files.</summary>
internal sealed class UnixFileStream : FileStreamBase
{
/// <summary>The file descriptor wrapped in a file handle.</summary>
private readonly SafeFileHandle _fileHandle;
/// <summary>The path to the opened file.</summary>
private readonly string _path;
/// <summary>File mode.</summary>
private readonly FileMode _mode;
/// <summary>Whether the file is opened for reading, writing, or both.</summary>
private readonly FileAccess _access;
/// <summary>Advanced options requested when opening the file.</summary>
private readonly FileOptions _options;
/// <summary>If the file was opened with FileMode.Append, the length of the file when opened; otherwise, -1.</summary>
private readonly long _appendStart = -1;
/// <summary>Whether asynchronous read/write/flush operations should be performed using async I/O.</summary>
private readonly bool _useAsyncIO;
/// <summary>The length of the _buffer.</summary>
private readonly int _bufferLength;
/// <summary>Lazily-initialized buffer data from Write waiting to be written to the underlying handle, or data read from the underlying handle and waiting to be Read.</summary>
private byte[] _buffer;
/// <summary>The number of valid bytes in _buffer.</summary>
private int _readLength;
/// <summary>The next available byte to be read from the _buffer.</summary>
private int _readPos;
/// <summary>The next location in which a write should occur to the buffer.</summary>
private int _writePos;
/// <summary>Lazily-initialized value for whether the file supports seeking.</summary>
private bool? _canSeek;
/// <summary>Whether the file stream's handle has been exposed.</summary>
private bool _exposedHandle;
/// <summary>
/// Currently cached position in the stream. This should always mirror the underlying file descriptor's actual position,
/// and should only ever be out of sync if another stream with access to this same file descriptor manipulates it, at which
/// point we attempt to error out.
/// </summary>
private long _filePosition;
/// <summary>Initializes a stream for reading or writing a Unix file.</summary>
/// <param name="path">The path to the file.</param>
/// <param name="mode">How the file should be opened.</param>
/// <param name="access">Whether the file will be read, written, or both.</param>
/// <param name="share">What other access to the file should be allowed. This is currently ignored.</param>
/// <param name="bufferSize">The size of the buffer to use when buffering.</param>
/// <param name="options">Additional options for working with the file.</param>
        /// <summary>Initializes a stream for reading or writing a Unix file.</summary>
        /// <param name="path">The path to the file.</param>
        /// <param name="mode">How the file should be opened.</param>
        /// <param name="access">Whether the file will be read, written, or both.</param>
        /// <param name="share">What other access to the file should be allowed. This is currently ignored.</param>
        /// <param name="bufferSize">The size of the buffer to use when buffering.</param>
        /// <param name="options">Additional options for working with the file.</param>
        internal UnixFileStream(String path, FileMode mode, FileAccess access, FileShare share, int bufferSize, FileOptions options, FileStream parent)
            : base(parent)
        {
            // FileStream performs most of the general argument validation. We can assume here that the arguments
            // are all checked and consistent (e.g. non-null-or-empty path; valid enums in mode, access, share, and options; etc.)
            // Store the arguments
            _path = path;
            _access = access;
            _mode = mode;
            _options = options;
            _bufferLength = bufferSize;
            _useAsyncIO = (options & FileOptions.Asynchronous) != 0;
            // Translate the arguments into arguments for an open call
            Interop.libc.OpenFlags openFlags = PreOpenConfigurationFromOptions(mode, access, options); // FileShare currently ignored
            // NOTE(review): S_IRWXU grants the creator *execute* permission on
            // newly created files, which is unusual for data files — confirm intended.
            Interop.libc.Permissions openPermissions = Interop.libc.Permissions.S_IRWXU; // creator has read/write/execute permissions; no permissions for anyone else
            // Open the file and store the safe handle. Subsequent code in this method expects the safe handle to be initialized.
            _fileHandle = SafeFileHandle.Open(path, openFlags, (int)openPermissions);
            _fileHandle.IsAsync = _useAsyncIO;
            // Lock the file if requested via FileShare. This is only advisory locking. FileShare.None implies an exclusive
            // lock on the file and all other modes use a shared lock. While this is not as granular as Windows, not mandatory,
            // and not atomic with file opening, it's better than nothing.
            try
            {
                Interop.libc.LockOperations lockOperation = (share == FileShare.None) ? Interop.libc.LockOperations.LOCK_EX : Interop.libc.LockOperations.LOCK_SH;
                SysCall<Interop.libc.LockOperations, int>((fd, op, _) => Interop.libc.flock(fd, op), lockOperation | Interop.libc.LockOperations.LOCK_NB);
            }
            catch
            {
                // flock failed (e.g. already locked, or unsupported FS): don't
                // leak the descriptor we just opened.
                _fileHandle.Dispose();
                throw;
            }
            // Support additional options after the file has been opened.
            // These provide hints around how the file will be accessed.
            Interop.libc.Advice fadv =
                _options == FileOptions.RandomAccess ? Interop.libc.Advice.POSIX_FADV_RANDOM :
                _options == FileOptions.SequentialScan ? Interop.libc.Advice.POSIX_FADV_SEQUENTIAL :
                0;
            if (fadv != 0)
            {
                SysCall<Interop.libc.Advice, int>((fd, advice, _) => Interop.libc.posix_fadvise(fd, 0, 0, advice), fadv);
            }
            // Jump to the end of the file if opened as Append.
            if (_mode == FileMode.Append)
            {
                _appendStart = SeekCore(0, SeekOrigin.End);
            }
        }
/// <summary>Initializes a stream from an already open file handle (file descriptor).</summary>
/// <param name="handle">The handle to the file.</param>
/// <param name="access">Whether the file will be read, written, or both.</param>
/// <param name="bufferSize">The size of the buffer to use when buffering.</param>
/// <param name="useAsyncIO">Whether access to the stream is performed asynchronously.</param>
        /// <summary>Initializes a stream from an already open file handle (file descriptor).</summary>
        /// <param name="handle">The handle to the file.</param>
        /// <param name="access">Whether the file will be read, written, or both.</param>
        /// <param name="bufferSize">The size of the buffer to use when buffering.</param>
        /// <param name="useAsyncIO">Whether access to the stream is performed asynchronously.</param>
        /// <remarks>
        /// _path, _mode and _options keep their defaults on this path, so Name
        /// returns null and no DeleteOnClose/fadvise handling applies.
        /// </remarks>
        internal UnixFileStream(SafeFileHandle handle, FileAccess access, int bufferSize, bool useAsyncIO, FileStream parent)
            : base(parent)
        {
            // Make sure the handle is open
            if (handle.IsInvalid)
                throw new ArgumentException(SR.Arg_InvalidHandle, "handle");
            if (access < FileAccess.Read || access > FileAccess.ReadWrite)
                throw new ArgumentOutOfRangeException("access", SR.ArgumentOutOfRange_Enum);
            if (bufferSize < 0) // allow bufferSize == 0 for no buffering
                throw new ArgumentOutOfRangeException("bufferSize", SR.ArgumentOutOfRange_NeedNonNegNum);
            _fileHandle = handle;
            _access = access;
            // The caller already holds the handle, so treat it as exposed:
            // position verification stays active (see VerifyOSHandlePosition).
            _exposedHandle = true;
            _bufferLength = bufferSize;
            _useAsyncIO = useAsyncIO;
            if (_parent.CanSeek)
            {
                // Sync our cached _filePosition with the descriptor's position.
                SeekCore(0, SeekOrigin.Current);
            }
        }
/// <summary>Gets the array used for buffering reading and writing. If the array hasn't been allocated, this will lazily allocate it.</summary>
/// <returns>The buffer.</returns>
        /// <summary>Gets the array used for buffering reading and writing. If the array hasn't been allocated, this will lazily allocate it.</summary>
        /// <returns>The buffer.</returns>
        private byte[] GetBuffer()
        {
            Debug.Assert(_buffer == null || _buffer.Length == _bufferLength);
            // Lazily allocated so streams that are never buffered pay nothing.
            return _buffer ?? (_buffer = new byte[_bufferLength]);
        }
/// <summary>Translates the FileMode, FileAccess, and FileOptions values into flags to be passed when opening the file.</summary>
/// <param name="mode">The FileMode provided to the stream's constructor.</param>
/// <param name="access">The FileAccess provided to the stream's constructor</param>
/// <param name="options">The FileOptions provided to the stream's constructor</param>
/// <returns>The flags value to be passed to the open system call.</returns>
private static Interop.libc.OpenFlags PreOpenConfigurationFromOptions(FileMode mode, FileAccess access, FileOptions options)
{
// Translate FileMode. Most of the values map cleanly to one or more options for open.
Interop.libc.OpenFlags flags = Interop.libc.OpenFlags.O_LARGEFILE;
switch (mode)
{
default:
case FileMode.Open: // Open maps to the default behavior for open(...). No flags needed.
break;
case FileMode.Append: // Append is the same as OpenOrCreate, except that we'll also separately jump to the end later
case FileMode.OpenOrCreate:
flags |= Interop.libc.OpenFlags.O_CREAT;
break;
case FileMode.Create:
flags |= (Interop.libc.OpenFlags.O_CREAT | Interop.libc.OpenFlags.O_TRUNC);
break;
case FileMode.CreateNew:
flags |= (Interop.libc.OpenFlags.O_CREAT | Interop.libc.OpenFlags.O_EXCL);
break;
case FileMode.Truncate:
flags |= Interop.libc.OpenFlags.O_TRUNC;
break;
}
// Translate FileAccess. All possible values map cleanly to corresponding values for open.
switch (access)
{
case FileAccess.Read:
flags |= Interop.libc.OpenFlags.O_RDONLY;
break;
case FileAccess.ReadWrite:
flags |= Interop.libc.OpenFlags.O_RDWR;
break;
case FileAccess.Write:
flags |= Interop.libc.OpenFlags.O_WRONLY;
break;
}
// Translate some FileOptions; some just aren't supported, and others will be handled after calling open.
switch (options)
{
case FileOptions.Asynchronous: // Handled in ctor, setting _useAsync and SafeFileHandle.IsAsync to true
case FileOptions.DeleteOnClose: // DeleteOnClose doesn't have a Unix equivalent, but we approximate it in Dispose
case FileOptions.Encrypted: // Encrypted does not have an equivalent on Unix and is ignored.
case FileOptions.RandomAccess: // Implemented after open via posix_fadvise
case FileOptions.SequentialScan: // Implemented after open via posix_fadvise
break;
case FileOptions.WriteThrough:
flags |= Interop.libc.OpenFlags.O_SYNC;
break;
}
return flags;
}
/// <summary>Gets a value indicating whether the current stream supports reading.</summary>
        /// <summary>Gets a value indicating whether the current stream supports reading.</summary>
        /// <remarks>False once the handle is closed, regardless of access mode.</remarks>
        public override bool CanRead
        {
            [Pure]
            get { return !_fileHandle.IsClosed && (_access & FileAccess.Read) != 0; }
        }
/// <summary>Gets a value indicating whether the current stream supports writing.</summary>
        /// <summary>Gets a value indicating whether the current stream supports writing.</summary>
        /// <remarks>False once the handle is closed, regardless of access mode.</remarks>
        public override bool CanWrite
        {
            [Pure]
            get { return !_fileHandle.IsClosed && (_access & FileAccess.Write) != 0; }
        }
/// <summary>Gets a value indicating whether the current stream supports seeking.</summary>
        /// <summary>Gets a value indicating whether the current stream supports seeking.</summary>
        /// <remarks>
        /// Determined once by probing lseek(fd, 0, SEEK_CUR) and cached in
        /// _canSeek; a closed handle short-circuits without caching.
        /// </remarks>
        public override bool CanSeek
        {
            get
            {
                if (_fileHandle.IsClosed)
                {
                    return false;
                }
                if (!_canSeek.HasValue)
                {
                    // Lazily-initialize whether we're able to seek, tested by seeking to our current location.
                    _canSeek = SysCall<int, int>((fd, _, __) => Interop.libc.lseek(fd, 0, Interop.libc.SeekWhence.SEEK_CUR), throwOnError: false) >= 0;
                }
                return _canSeek.Value;
            }
        }
/// <summary>Gets a value indicating whether the stream was opened for I/O to be performed synchronously or asynchronously.</summary>
        /// <summary>Gets a value indicating whether the stream was opened for I/O to be performed synchronously or asynchronously.</summary>
        public override bool IsAsync
        {
            get { return _useAsyncIO; }
        }
/// <summary>Gets the length of the stream in bytes.</summary>
        /// <summary>Gets the length of the stream in bytes.</summary>
        /// <exception cref="IOException">Thrown (via __Error) when the handle is closed or the stream cannot seek.</exception>
        public override long Length
        {
            get
            {
                if (_fileHandle.IsClosed)
                {
                    throw __Error.GetFileNotOpen();
                }
                if (!_parent.CanSeek)
                {
                    throw __Error.GetSeekNotSupported();
                }
                // Get the length of the file as reported by the OS (fstat).
                long length = SysCall<int, int>((fd, _, __) =>
                {
                    Interop.libc.structStat stat;
                    int result = Interop.libc.fstat(fd, out stat);
                    return result >= 0 ? stat.st_size : result;
                });
                // But we may have buffered some data to be written that puts our length
                // beyond what the OS is aware of. Update accordingly.
                if (_writePos > 0 && _filePosition + _writePos > length)
                {
                    length = _writePos + _filePosition;
                }
                return length;
            }
        }
/// <summary>Gets the path that was passed to the constructor.</summary>
public override String Name { get { return _path; } }
/// <summary>Gets the SafeFileHandle for the file descriptor encapsulated in this stream.</summary>
        /// <summary>Gets the SafeFileHandle for the file descriptor encapsulated in this stream.</summary>
        /// <remarks>
        /// Flushes first so the descriptor's state matches the stream's, then
        /// marks the handle as exposed, which enables the position verification
        /// in VerifyOSHandlePosition from then on.
        /// </remarks>
        public override SafeFileHandle SafeFileHandle
        {
            get
            {
                _parent.Flush();
                _exposedHandle = true;
                return _fileHandle;
            }
        }
/// <summary>Gets or sets the position within the current stream</summary>
        /// <summary>Gets or sets the position within the current stream</summary>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when a negative position is set.</exception>
        public override long Position
        {
            get
            {
                VerifyBufferInvariants();
                VerifyOSHandlePosition();
                // We may have read data into our buffer from the handle, such that the handle position
                // is artificially further along than the consumer's view of the stream's position.
                // Thus, when reading, our position is really starting from the handle position negatively
                // offset by the number of bytes in the buffer and positively offset by the number of
                // bytes into that buffer we've read. When writing, both the read length and position
                // must be zero, and our position is just the handle position offset positive by how many
                // bytes we've written into the buffer.
                return (_filePosition - _readLength) + _readPos + _writePos;
            }
            set
            {
                if (value < 0)
                {
                    throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_NeedNonNegNum);
                }
                // Delegate to Seek so buffer flushing/adjustment happens in one place.
                _parent.Seek(value, SeekOrigin.Begin);
            }
        }
/// <summary>Verifies that state relating to the read/write buffer is consistent.</summary>
        /// <summary>Verifies that state relating to the read/write buffer is consistent.</summary>
        /// <remarks>Compiled away outside DEBUG builds via [Conditional].</remarks>
        [Conditional("DEBUG")]
        private void VerifyBufferInvariants()
        {
            // Read buffer values must be in range: 0 <= _bufferReadPos <= _bufferReadLength <= _bufferLength
            Debug.Assert(0 <= _readPos && _readPos <= _readLength && _readLength <= _bufferLength);
            // Write buffer values must be in range: 0 <= _bufferWritePos <= _bufferLength
            Debug.Assert(0 <= _writePos && _writePos <= _bufferLength);
            // Read buffering and write buffering can't both be active
            Debug.Assert((_readPos == 0 && _readLength == 0) || _writePos == 0);
        }
/// <summary>
/// Verify that the actual position of the OS's handle equals what we expect it to.
/// This will fail if someone else moved the UnixFileStream's handle or if
/// our position updating code is incorrect.
/// </summary>
        /// <summary>
        /// Verify that the actual position of the OS's handle equals what we expect it to.
        /// This will fail if someone else moved the UnixFileStream's handle or if
        /// our position updating code is incorrect.
        /// </summary>
        private void VerifyOSHandlePosition()
        {
            bool verifyPosition = _exposedHandle; // in release, only verify if we've given out the handle such that someone else could be manipulating it
#if DEBUG
            verifyPosition = true; // in debug, always make sure our position matches what the OS says it should be
#endif
            if (verifyPosition && _parent.CanSeek)
            {
                long oldPos = _filePosition; // SeekCore will override the current _position, so save it now
                long curPos = SeekCore(0, SeekOrigin.Current);
                if (oldPos != curPos)
                {
                    // For reads, this is non-fatal but we still could have returned corrupted
                    // data in some cases, so discard the internal buffer. For writes,
                    // this is a problem; discard the buffer and error out.
                    _readPos = _readLength = 0;
                    if (_writePos > 0)
                    {
                        _writePos = 0;
                        throw new IOException(SR.IO_FileStreamHandlePosition);
                    }
                }
            }
        }
/// <summary>Releases the unmanaged resources used by the stream.</summary>
/// <param name="disposing">true to release both managed and unmanaged resources; false to release only unmanaged resources.</param>
        /// <summary>Releases the unmanaged resources used by the stream.</summary>
        /// <param name="disposing">true to release both managed and unmanaged resources; false to release only unmanaged resources.</param>
        protected override void Dispose(bool disposing)
        {
            // Flush and close the file
            try
            {
                if (_fileHandle != null && !_fileHandle.IsClosed)
                {
                    FlushWriteBuffer();
                    // Unix doesn't directly support DeleteOnClose but we can mimick it.
                    if ((_options & FileOptions.DeleteOnClose) != 0)
                    {
                        // Since we still have the file open, this will end up deleting
                        // it (assuming we're the only link to it) once it's closed.
                        Interop.libc.unlink(_path); // ignore any error
                    }
                }
            }
            finally
            {
                // The handle is disposed even if the flush above threw.
                if (_fileHandle != null && !_fileHandle.IsClosed)
                {
                    _fileHandle.Dispose();
                }
                base.Dispose(disposing);
            }
        }
/// <summary>Finalize the stream.</summary>
        /// <summary>Finalize the stream.</summary>
        /// <remarks>Safety net only: ensures the fd is closed if Dispose was never called.</remarks>
        ~UnixFileStream()
        {
            Dispose(false);
        }
/// <summary>Clears buffers for this stream and causes any buffered data to be written to the file.</summary>
        /// <summary>Clears buffers for this stream and causes any buffered data to be written to the file.</summary>
        /// <remarks>Flushes the internal buffer only; does not fsync to disk.</remarks>
        public override void Flush()
        {
            _parent.Flush(flushToDisk: false);
        }
/// <summary>
/// Clears buffers for this stream, and if <param name="flushToDisk"/> is true,
/// causes any buffered data to be written to the file.
/// </summary>
        /// <summary>
        /// Clears buffers for this stream, and if <param name="flushToDisk"/> is true,
        /// causes any buffered data to be written to the file.
        /// </summary>
        /// <exception cref="IOException">Thrown (via __Error) when the handle is closed.</exception>
        public override void Flush(Boolean flushToDisk)
        {
            if (_fileHandle.IsClosed)
            {
                throw __Error.GetFileNotOpen();
            }
            FlushInternalBuffer();
            // fsync only when requested and the stream is writable.
            if (flushToDisk && _parent.CanWrite)
            {
                FlushOSBuffer();
            }
        }
/// <summary>Flushes the OS buffer. This does not flush the internal read/write buffer.</summary>
private void FlushOSBuffer()
{
    // fsync(2) forces the kernel's cached data for this descriptor out to the device.
    SysCall<int, int>((fd, unused1, unused2) => Interop.libc.fsync(fd));
}
/// <summary>
/// Flushes the internal read/write buffer for this stream. If write data has been buffered,
/// that data is written out to the underlying file. Or if data has been buffered for
/// reading from the stream, the data is dumped and our position in the underlying file
/// is rewound as necessary. This does not flush the OS buffer.
/// </summary>
private void FlushInternalBuffer()
{
    VerifyBufferInvariants();

    // Buffered write data takes precedence; the read branch only applies
    // when nothing is pending for writing.
    if (_writePos > 0)
    {
        FlushWriteBuffer();
        return;
    }

    if (_readPos < _readLength && _parent.CanSeek)
    {
        FlushReadBuffer();
    }
}
/// <summary>Writes any data in the write buffer to the underlying stream and resets the buffer.</summary>
private void FlushWriteBuffer()
{
    VerifyBufferInvariants();

    if (_writePos == 0)
    {
        return; // nothing buffered for writing
    }

    WriteCore(GetBuffer(), 0, _writePos);
    _writePos = 0;
}
/// <summary>Dumps any read data in the buffer and rewinds our position in the stream, accordingly, as necessary.</summary>
private void FlushReadBuffer()
{
    VerifyBufferInvariants();

    // Un-consumed buffered bytes were read ahead of the logical position;
    // seek backwards by that (non-positive) delta so the OS position matches
    // what the caller has actually consumed.
    int delta = _readPos - _readLength;
    if (delta != 0)
    {
        SeekCore(delta, SeekOrigin.Current);
    }

    _readPos = 0;
    _readLength = 0;
}
/// <summary>Asynchronously clears all buffers for this stream, causing any buffered data to be written to the underlying device.</summary>
/// <param name="cancellationToken">The token to monitor for cancellation requests.</param>
/// <returns>A task that represents the asynchronous flush operation.</returns>
public override Task FlushAsync(CancellationToken cancellationToken)
{
    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled(cancellationToken);
    }
    // NOTE: thrown synchronously rather than surfaced via a faulted task.
    if (_fileHandle.IsClosed)
    {
        throw __Error.GetFileNotOpen();
    }
    // As with Win32FileStream, flush the buffers synchronously to avoid race conditions.
    try
    {
        FlushInternalBuffer();
    }
    catch (Exception e)
    {
        // Buffer-flush failures are reported through the returned task.
        return Task.FromException(e);
    }
    // We then separately flush to disk asynchronously. This is only
    // necessary if we support writing; otherwise, we're done.
    if (_parent.CanWrite)
    {
        // Offload the fsync (FlushOSBuffer) to the thread pool; 'this' is passed
        // as state to avoid a capturing closure allocation.
        return Task.Factory.StartNew(
            state => ((UnixFileStream)state).FlushOSBuffer(),
            this,
            cancellationToken,
            TaskCreationOptions.DenyChildAttach,
            TaskScheduler.Default);
    }
    else
    {
        return Task.CompletedTask;
    }
}
/// <summary>Sets the length of this stream to the given value.</summary>
/// <param name="value">The new length of the stream.</param>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="value"/> is negative.</exception>
/// <exception cref="IOException">The stream is in append mode and <paramref name="value"/> is before the append start.</exception>
public override void SetLength(long value)
{
    if (value < 0)
    {
        throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (_fileHandle.IsClosed)
    {
        throw __Error.GetFileNotOpen();
    }
    if (!_parent.CanSeek)
    {
        throw __Error.GetSeekNotSupported();
    }
    if (!_parent.CanWrite)
    {
        throw __Error.GetWriteNotSupported();
    }
    // Drain buffered data so the truncation applies to the real file contents.
    FlushInternalBuffer();
    // In append mode, never allow truncating below the point where appending began.
    if (_appendStart != -1 && value < _appendStart)
    {
        throw new IOException(SR.IO_SetLengthAppendTruncate);
    }
    long origPos = _filePosition;
    VerifyOSHandlePosition();
    if (_filePosition != value)
    {
        SeekCore(value, SeekOrigin.Begin);
    }
    // ftruncate64 grows or shrinks the file to exactly 'value' bytes.
    SysCall<long, int>((fd, length, _) => Interop.libc.ftruncate64(fd, length), value);
    // Return file pointer to where it was before setting length
    if (origPos != value)
    {
        if (origPos < value)
        {
            // The old position still exists in the resized file.
            SeekCore(origPos, SeekOrigin.Begin);
        }
        else
        {
            // The old position was truncated away; clamp to the new end of file.
            SeekCore(0, SeekOrigin.End);
        }
    }
}
/// <summary>Reads a block of bytes from the stream and writes the data in a given buffer.</summary>
/// <param name="array">
/// When this method returns, contains the specified byte array with the values between offset and
/// (offset + count - 1) replaced by the bytes read from the current source.
/// </param>
/// <param name="offset">The byte offset in array at which the read bytes will be placed.</param>
/// <param name="count">The maximum number of bytes to read. </param>
/// <returns>
/// The total number of bytes read into the buffer. This might be less than the number of bytes requested
/// if that number of bytes are not currently available, or zero if the end of the stream is reached.
/// </returns>
public override int Read([In, Out] byte[] array, int offset, int count)
{
    ValidateReadWriteArgs(array, offset, count);
    PrepareForReading();
    // Are there any bytes available in the read buffer? If yes,
    // we can just return from the buffer. If the buffer is empty
    // or has no more available data in it, we can either refill it
    // (and then read from the buffer into the user's buffer) or
    // we can just go directly into the user's buffer, if they asked
    // for more data than we'd otherwise buffer.
    int numBytesAvailable = _readLength - _readPos;
    if (numBytesAvailable == 0)
    {
        // If we're not able to seek, then we're not able to rewind the stream (i.e. flushing
        // a read buffer), in which case we don't want to use a read buffer. Similarly, if
        // the user has asked for more data than we can buffer, we also want to skip the buffer.
        if (!_parent.CanSeek || (count >= _bufferLength))
        {
            // Read directly into the user's buffer
            int bytesRead = ReadCore(array, offset, count);
            _readPos = _readLength = 0; // reset after the read just in case read experiences an exception
            return bytesRead;
        }
        else
        {
            // Read into our buffer.
            _readLength = numBytesAvailable = ReadCore(GetBuffer(), 0, _bufferLength);
            _readPos = 0;
            if (numBytesAvailable == 0)
            {
                return 0; // end of stream
            }
        }
    }
    // Now that we know there's data in the buffer, read from it into
    // the user's buffer.
    int bytesToRead = Math.Min(numBytesAvailable, count);
    Buffer.BlockCopy(GetBuffer(), _readPos, array, offset, bytesToRead);
    _readPos += bytesToRead;
    return bytesToRead;
}
/// <summary>Unbuffered, reads a block of bytes from the stream and writes the data in a given buffer.</summary>
/// <param name="array">
/// When this method returns, contains the specified byte array with the values between offset and
/// (offset + count - 1) replaced by the bytes read from the current source.
/// </param>
/// <param name="offset">The byte offset in array at which the read bytes will be placed.</param>
/// <param name="count">The maximum number of bytes to read. </param>
/// <returns>
/// The total number of bytes read into the buffer. This might be less than the number of bytes requested
/// if that number of bytes are not currently available, or zero if the end of the stream is reached.
/// </returns>
private unsafe int ReadCore(byte[] array, int offset, int count)
{
    FlushWriteBuffer(); // we're about to read; dump the write buffer
    VerifyOSHandlePosition();
    int bytesRead;
    fixed (byte* bufPtr = array)
    {
        // read(2); SysCall retries on EINTR. A short read (fewer bytes than
        // requested) is simply passed through to the caller, which matches
        // the Stream.Read contract.
        bytesRead = (int)SysCall((fd, ptr, len) =>
        {
            long result = (long)Interop.libc.read(fd, (byte*)ptr, (IntPtr)len);
            Debug.Assert(result <= len);
            return result;
        }, (IntPtr)(bufPtr + offset), count);
    }
    // Keep our cached position in sync with the kernel's file offset.
    _filePosition += bytesRead;
    return bytesRead;
}
/// <summary>
/// Asynchronously reads a sequence of bytes from the current stream and advances
/// the position within the stream by the number of bytes read.
/// </summary>
/// <param name="buffer">The buffer to write the data into.</param>
/// <param name="offset">The byte offset in buffer at which to begin writing data from the stream.</param>
/// <param name="count">The maximum number of bytes to read.</param>
/// <param name="cancellationToken">The token to monitor for cancellation requests.</param>
/// <returns>A task that represents the asynchronous read operation.</returns>
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
    if (_useAsyncIO)
    {
        // TODO: Use async I/O instead of sync I/O
    }
    // For now always fall back to the base Stream implementation, which wraps
    // the synchronous Read, even when the stream was opened for async I/O.
    return base.ReadAsync(buffer, offset, count, cancellationToken);
}
/// <summary>
/// Reads a byte from the stream and advances the position within the stream
/// by one byte, or returns -1 if at the end of the stream.
/// </summary>
/// <returns>The unsigned byte cast to an Int32, or -1 if at the end of the stream.</returns>
public override int ReadByte()
{
    PrepareForReading();
    byte[] buffer = GetBuffer();
    // Refill the read buffer when it has been fully consumed.
    if (_readPos == _readLength)
    {
        // Order matters: _readLength is assigned before _readPos is reset so that
        // an exception from ReadCore doesn't leave the buffer appearing to hold data.
        _readLength = ReadCore(buffer, 0, _bufferLength);
        _readPos = 0;
        if (_readLength == 0)
        {
            return -1; // end of stream
        }
    }
    return buffer[_readPos++];
}
/// <summary>Validates that we're ready to read from the stream.</summary>
private void PrepareForReading()
{
    // A closed handle always takes precedence over capability checks.
    if (_fileHandle.IsClosed)
    {
        throw __Error.GetFileNotOpen();
    }

    // Only demand readability when the buffer has no leftover data to serve.
    bool bufferHasData = _readLength != 0;
    if (!bufferHasData && !_parent.CanRead)
    {
        throw __Error.GetReadNotSupported();
    }

    VerifyBufferInvariants();
}
/// <summary>Writes a block of bytes to the file stream.</summary>
/// <param name="array">The buffer containing data to write to the stream.</param>
/// <param name="offset">The zero-based byte offset in array from which to begin copying bytes to the stream.</param>
/// <param name="count">The maximum number of bytes to write.</param>
public override void Write(byte[] array, int offset, int count)
{
    ValidateReadWriteArgs(array, offset, count);
    PrepareForWriting();
    // If no data is being written, nothing more to do.
    if (count == 0)
    {
        return;
    }
    // If there's already data in our write buffer, then we need to go through
    // our buffer to ensure data isn't corrupted.
    if (_writePos > 0)
    {
        // If there's space remaining in the buffer, then copy as much as
        // we can from the user's buffer into ours.
        int spaceRemaining = _bufferLength - _writePos;
        if (spaceRemaining > 0)
        {
            int bytesToCopy = Math.Min(spaceRemaining, count);
            Buffer.BlockCopy(array, offset, GetBuffer(), _writePos, bytesToCopy);
            _writePos += bytesToCopy;
            // If we've successfully copied all of the user's data, we're done.
            if (count == bytesToCopy)
            {
                return;
            }
            // Otherwise, keep track of how much more data needs to be handled.
            offset += bytesToCopy;
            count -= bytesToCopy;
        }
        // At this point, the buffer is full, so flush it out.
        FlushWriteBuffer();
    }
    // Our buffer is now empty. If using the buffer would slow things down (because
    // the user's looking to write more data than we can store in the buffer),
    // skip the buffer. Otherwise, put the remaining data into the buffer.
    Debug.Assert(_writePos == 0);
    if (count >= _bufferLength)
    {
        // Large write: bypass the buffer and write directly to the file.
        WriteCore(array, offset, count);
    }
    else
    {
        // Small write: stash it in the buffer for a later flush.
        Buffer.BlockCopy(array, offset, GetBuffer(), _writePos, count);
        _writePos = count;
    }
}
/// <summary>Unbuffered, writes a block of bytes to the file stream.</summary>
/// <param name="array">The buffer containing data to write to the stream.</param>
/// <param name="offset">The zero-based byte offset in array from which to begin copying bytes to the stream.</param>
/// <param name="count">The maximum number of bytes to write.</param>
private unsafe void WriteCore(byte[] array, int offset, int count)
{
    VerifyOSHandlePosition();
    fixed (byte* bufPtr = array)
    {
        // write(2) may perform a partial write, transferring fewer bytes than
        // requested; the original single call silently dropped the remainder.
        // Loop until all of the caller's bytes have been written.
        while (count > 0)
        {
            long bytesWritten = SysCall((fd, ptr, len) =>
            {
                long result = (long)Interop.libc.write(fd, (byte*)ptr, (IntPtr)len);
                Debug.Assert(result <= len);
                return result;
            }, (IntPtr)(bufPtr + offset), count);

            // Keep our cached position in sync with the kernel's file offset.
            _filePosition += bytesWritten;

            if (bytesWritten == 0)
            {
                // Defensive: a zero-byte result for a non-zero request should not
                // happen for regular files; bail out rather than spin forever.
                break;
            }

            offset += (int)bytesWritten;
            count -= (int)bytesWritten;
        }
    }
}
/// <summary>
/// Asynchronously writes a sequence of bytes to the current stream, advances
/// the current position within this stream by the number of bytes written, and
/// monitors cancellation requests.
/// </summary>
/// <param name="buffer">The buffer to write data from.</param>
/// <param name="offset">The zero-based byte offset in buffer from which to begin copying bytes to the stream.</param>
/// <param name="count">The maximum number of bytes to write.</param>
/// <param name="cancellationToken">The token to monitor for cancellation requests.</param>
/// <returns>A task that represents the asynchronous write operation.</returns>
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
    if (_useAsyncIO)
    {
        // TODO: Use async I/O instead of sync I/O
    }
    // For now always fall back to the base Stream implementation, which wraps
    // the synchronous Write, even when the stream was opened for async I/O.
    return base.WriteAsync(buffer, offset, count, cancellationToken);
}
/// <summary>
/// Writes a byte to the current position in the stream and advances the position
/// within the stream by one byte.
/// </summary>
/// <param name="value">The byte to write to the stream.</param>
public override void WriteByte(byte value) // avoids an array allocation in the base implementation
{
    PrepareForWriting();

    // If the write buffer has no room left, drain it to the file first.
    bool bufferFull = _writePos == _bufferLength;
    if (bufferFull)
    {
        FlushWriteBuffer();
    }

    // Space is now guaranteed; stash the byte and advance.
    byte[] buf = GetBuffer();
    buf[_writePos] = value;
    _writePos++;
}
/// <summary>
/// Validates that we're ready to write to the stream,
/// including flushing a read buffer if necessary.
/// </summary>
private void PrepareForWriting()
{
    if (_fileHandle.IsClosed)
    {
        throw __Error.GetFileNotOpen();
    }

    // A non-empty write buffer means these checks (and the read-buffer flush)
    // already happened when buffering began, so there's nothing left to do.
    if (_writePos != 0)
    {
        return;
    }

    if (!_parent.CanWrite)
    {
        throw __Error.GetWriteNotSupported();
    }
    FlushReadBuffer();
}
/// <summary>Validates arguments to Read and Write and throws resulting exceptions.</summary>
/// <param name="array">The buffer to read from or write to.</param>
/// <param name="offset">The zero-based offset into the array.</param>
/// <param name="count">The maximum number of bytes to read or write.</param>
/// <exception cref="ArgumentNullException"><paramref name="array"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="offset"/> or <paramref name="count"/> is negative.</exception>
/// <exception cref="ArgumentException">The range described by offset and count exceeds the array's length.</exception>
private void ValidateReadWriteArgs(byte[] array, int offset, int count)
{
    // NOTE: the order of these checks determines which exception wins when
    // multiple arguments are invalid; don't reorder them.
    if (array == null)
    {
        throw new ArgumentNullException("array", SR.ArgumentNull_Buffer);
    }
    if (offset < 0)
    {
        throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (count < 0)
    {
        throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    // Written this way (rather than offset + count > Length) to avoid integer overflow.
    if (array.Length - offset < count)
    {
        throw new ArgumentException(SR.Argument_InvalidOffLen);
    }
    // Argument validation comes before the handle-state check.
    if (_fileHandle.IsClosed)
    {
        throw __Error.GetFileNotOpen();
    }
}
/// <summary>Sets the current position of this stream to the given value.</summary>
/// <param name="offset">The point relative to origin from which to begin seeking. </param>
/// <param name="origin">
/// Specifies the beginning, the end, or the current position as a reference
/// point for offset, using a value of type SeekOrigin.
/// </param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="IOException">The stream is in append mode and the target position precedes the append start.</exception>
public override long Seek(long offset, SeekOrigin origin)
{
    if (origin < SeekOrigin.Begin || origin > SeekOrigin.End)
    {
        throw new ArgumentException(SR.Argument_InvalidSeekOrigin);
    }
    if (_fileHandle.IsClosed)
    {
        throw __Error.GetFileNotOpen();
    }
    if (!_parent.CanSeek)
    {
        throw __Error.GetSeekNotSupported();
    }
    VerifyOSHandlePosition();
    // Flush our write/read buffer. FlushWrite will output any write buffer we have and reset _writePos.
    // We don't call FlushRead or FlushInternalBuffer, as that will do an unnecessary seek to rewind the read
    // buffer, and since we're about to seek and update our position, we can simply dump the buffer and reset
    // our read position. In the future, for some simple cases we could potentially add an optimization here to just
    // move data around in the buffer for short jumps, to avoid re-reading the data from disk.
    FlushWriteBuffer();
    _readPos = _readLength = 0;
    // Keep track of where we were, in case we're in append mode and need to verify
    long oldPos = 0;
    if (_appendStart >= 0)
    {
        oldPos = SeekCore(0, SeekOrigin.Current);
    }
    // Jump to the new location
    long pos = SeekCore(offset, origin);
    // Prevent users from overwriting data in a file that was opened in append mode.
    if (_appendStart != -1 && pos < _appendStart)
    {
        // Restore the pre-seek position before reporting the error.
        SeekCore(oldPos, SeekOrigin.Begin);
        throw new IOException(SR.IO_SeekAppendOverwrite);
    }
    // Return the new position
    return pos;
}
/// <summary>Sets the current position of this stream to the given value, without argument validation.</summary>
/// <param name="offset">The point relative to origin from which to begin seeking. </param>
/// <param name="origin">
/// Specifies the beginning, the end, or the current position as a reference
/// point for offset, using a value of type SeekOrigin.
/// </param>
/// <returns>The new position in the stream.</returns>
private long SeekCore(long offset, SeekOrigin origin)
{
    Debug.Assert(!_fileHandle.IsClosed && CanSeek);
    Debug.Assert(origin >= SeekOrigin.Begin && origin <= SeekOrigin.End);

    // SeekOrigin values are the same as Interop.libc.SeekWhence values.
    Interop.libc.SeekWhence whence = (Interop.libc.SeekWhence)(int)origin;
    long newPos = SysCall((fd, off, or) => Interop.libc.lseek(fd, off, or), offset, whence);

    // Cache the kernel-reported position so later operations can verify it.
    _filePosition = newPos;
    return newPos;
}
/// <summary>
/// Helper for making system calls that involve the stream's file descriptor.
/// System calls are expected to return greater than or equal to zero on success,
/// and less than zero on failure. In the case of failure, errno is expected to
/// be set to the relevant error code.
/// </summary>
/// <typeparam name="TArg1">Specifies the type of an argument to the system call.</typeparam>
/// <typeparam name="TArg2">Specifies the type of another argument to the system call.</typeparam>
/// <param name="sysCall">A delegate that invokes the system call.</param>
/// <param name="arg1">The first argument to be passed to the system call, after the file descriptor.</param>
/// <param name="arg2">The second argument to be passed to the system call.</param>
/// <param name="throwOnError">true to throw an exception if a non-interruption error occurs; otherwise, false.</param>
/// <returns>The return value of the system call.</returns>
/// <remarks>
/// Arguments are expected to be passed via <paramref name="arg1"/> and <paramref name="arg2"/>
/// so as to avoid delegate and closure allocations at the call sites.
/// </remarks>
private long SysCall<TArg1, TArg2>(
    Func<int, TArg1, TArg2, long> sysCall,
    TArg1 arg1 = default(TArg1), TArg2 arg2 = default(TArg2),
    bool throwOnError = true)
{
    bool gotRefOnHandle = false;
    try
    {
        // Get the file descriptor from the handle. We increment the ref count to help
        // ensure it's not closed out from under us.
        _fileHandle.DangerousAddRef(ref gotRefOnHandle);
        Debug.Assert(gotRefOnHandle);
        int fd = (int)_fileHandle.DangerousGetHandle();
        Debug.Assert(fd >= 0);
        // System calls may fail due to EINTR (signal interruption). We need to retry in those cases.
        while (true)
        {
            long result = sysCall(fd, arg1, arg2);
            if (result < 0)
            {
                int errno = Marshal.GetLastWin32Error();
                if (errno == Interop.Errors.EINTR)
                {
                    continue; // interrupted by a signal before completing; retry
                }
                else if (throwOnError)
                {
                    throw Interop.GetExceptionForIoErrno(errno, _path, isDirectory: false);
                }
            }
            // Success, or a failure the caller chose to observe via the return value.
            return result;
        }
    }
    finally
    {
        if (gotRefOnHandle)
        {
            _fileHandle.DangerousRelease();
        }
        else
        {
            // DangerousAddRef only fails when the handle has already been closed.
            throw new ObjectDisposedException(SR.ObjectDisposed_FileClosed);
        }
    }
}
}
}
| |
//-----------------------------------------------------------------------------
// Torque
// Copyright GarageGames, LLC 2011
//-----------------------------------------------------------------------------
// The TSShapeConstructor object allows you to apply a set of transformations
// to a 3space shape after it is loaded by Torque, but _before_ the shape is used
// by any other object (eg. Player, StaticShape etc). The sort of transformations
// available include adding, renaming and removing nodes and sequences. This GUI
// is a visual wrapper around TSShapeConstructor which allows you to build up the
// transformation set without having to get your hands dirty with TorqueScript.
//
// Removing a node, sequence, mesh or detail poses a problem. These operations
// permanently delete a potentially large amount of data scattered throughout
// the shape, and there is no easy way to restore it if the user 'undoes' the
// delete. Although it is possible to store the deleted data somewhere and restore
// it on undo, it is not easy to get right, and ugly as hell to implement. For
// example, removing a node would require storing the node name, the
// translation/rotation/scale matters bit for each sequence, all node transform
// keyframes, the IDs of any objects that were attached to the node, skin weights
// etc, then restoring all that data into the original place on undo. Frankly,
// TSShape was never designed to be modified dynamically like that.
//
// So......currently we wimp out completely and just don't support undo for those
// remove operations. Lame, I know, but the best I can do for now.
//
// This file implements all of the actions that can be applied by the GUI. Each
// action has 3 methods:
//
// doit: called the first time the action is performed
// undo: called to undo the action
// redo: called to redo the action (usually the same as doit)
//
// In each case, the appropriate change is made to the shape, and the GUI updated.
//
// TSShapeConstructor keeps track of all the changes made and provides a simple
// way to save the modifications back out to a script file.
// The ShapeEditor uses its own UndoManager
if ( !isObject( ShapeEdUndoManager ) )
{
   new UndoManager( ShapeEdUndoManager );
}
function ShapeEdUndoManager::updateUndoMenu( %this, %editMenu )
{
   // Refresh the Edit menu's Undo/Redo items to show the next action names,
   // disabling an item when there is no corresponding action.
   %undo = %this.getNextUndoName();
   %redo = %this.getNextRedoName();

   %editMenu.setItemName( 0, "Undo " @ %undo );
   %editMenu.setItemName( 1, "Redo " @ %redo );

   %editMenu.enableItem( 0, %undo !$= "" );
   %editMenu.enableItem( 1, %redo !$= "" );
}
//------------------------------------------------------------------------------
// Helper functions for creating and applying GUI operations
function ShapeEditor::createAction( %this, %class, %desc )
{
   // NOTE(review): push/pop appears intended to keep the new action out of the
   // currently active instant group — confirm against pushInstantGroup docs.
   pushInstantGroup();
   %undoAction = new UndoScriptAction()
   {
      class = %class;
      superClass = BaseShapeEdAction;
      actionName = %desc;
      done = 0;
   };
   popInstantGroup();

   return %undoAction;
}
function ShapeEditor::doAction( %this, %action )
{
   // Run the action; on failure, report and do not register it for undo.
   if ( !%action.doit() )
   {
      MessageBoxOK( "Error", %action.actionName SPC "failed. Check the console for error messages.", "" );
      return;
   }

   ShapeEditor.setDirty( true );
   %action.addToManager( ShapeEdUndoManager );
}
function BaseShapeEdAction::redo( %this )
{
   // Default redo action is the same as the doit action
   if ( !%this.doit() )
   {
      MessageBoxOK( "Error", "Redo" SPC %this.actionName SPC "failed. Check the console for error messages.", "" );
      return;
   }

   ShapeEditor.setDirty( true );
}
function BaseShapeEdAction::undo( %this )
{
   // Undoing any action means the in-memory shape again differs from what's
   // saved, so mark the editor dirty. Subclasses call Parent::undo( %this )
   // and then reverse their specific change.
   ShapeEditor.setDirty( true );
}
//------------------------------------------------------------------------------
function ShapeEditor::doRemoveShapeData( %this, %type, %name )
{
   // Removing data from the shape cannot be undone => so warn the user first.
   // The "Yes" callback string is assembled here, e.g. for %type = "Node":
   //   ShapeEditor.doRemoveNode( "<name>" );
   MessageBoxYesNo( "Warning", "Deleting a " @ %type @ " cannot be undone. Do " @
      "you want to continue?", "ShapeEditor.doRemove" @ %type @ "( \"" @ %name @ "\" );", "" );
}
//------------------------------------------------------------------------------
// Add node
function ShapeEditor::doAddNode( %this, %nodeName, %parentName, %transform )
{
   // Package the parameters into an undoable action and execute it.
   %act = %this.createAction( ActionAddNode, "Add node" );
   %act.nodeName = %nodeName;
   %act.parentName = %parentName;
   %act.transform = %transform;
   %this.doAction( %act );
}
function ActionAddNode::doit( %this )
{
   // Try the addition; only refresh the GUI when the shape accepted it.
   if ( !ShapeEditor.shape.addNode( %this.nodeName, %this.parentName, %this.transform ) )
      return false;

   ShapeEdPropWindow.update_onNodeAdded( %this.nodeName, -1 );
   return true;
}
function ActionAddNode::undo( %this )
{
   Parent::undo( %this );
   // Reverse the addition by removing the node; only update the GUI when
   // the removal succeeded.
   if ( ShapeEditor.shape.removeNode( %this.nodeName ) )
      ShapeEdPropWindow.update_onNodeRemoved( %this.nodeName, 1 );
}
//------------------------------------------------------------------------------
// Remove node
function ShapeEditor::doRemoveNode( %this, %nodeName )
{
   %act = %this.createAction( ActionRemoveNode, "Remove node" );
   %act.nodeName = %nodeName;
   %act.nodeChildIndex = ShapeEdNodeTreeView.getChildIndexByName( %nodeName );

   // Need to delete all child nodes of this node as well, so recursively collect
   // all of the names up front (returned as a field list).
   %act.nameList = %this.getNodeNames( %nodeName, "" );
   %act.nameCount = getFieldCount( %act.nameList );
   for ( %idx = 0; %idx < %act.nameCount; %idx++ )
      %act.names[%idx] = getField( %act.nameList, %idx );

   %this.doAction( %act );
}
function ActionRemoveNode::doit( %this )
{
   // Remove the node and every collected child node.
   for ( %idx = 0; %idx < %this.nameCount; %idx++ )
      ShapeEditor.shape.removeNode( %this.names[%idx] );

   // Update GUI
   ShapeEdPropWindow.update_onNodeRemoved( %this.nameList, %this.nameCount );
   return true;
}
function ActionRemoveNode::undo( %this )
{
   // Node removal is destructive and cannot be reversed (see file header);
   // only the dirty flag is updated.
   Parent::undo( %this );
}
//------------------------------------------------------------------------------
// Rename node
function ShapeEditor::doRenameNode( %this, %oldName, %newName )
{
   // Capture both names so the action can be undone by renaming back.
   %act = %this.createAction( ActionRenameNode, "Rename node" );
   %act.oldName = %oldName;
   %act.newName = %newName;
   %this.doAction( %act );
}
function ActionRenameNode::doit( %this )
{
   if ( !ShapeEditor.shape.renameNode( %this.oldName, %this.newName ) )
      return false;

   ShapeEdPropWindow.update_onNodeRenamed( %this.oldName, %this.newName );
   return true;
}
function ActionRenameNode::undo( %this )
{
   Parent::undo( %this );
   // Reverse the rename by swapping the names back.
   if ( ShapeEditor.shape.renameNode( %this.newName, %this.oldName ) )
      ShapeEdPropWindow.update_onNodeRenamed( %this.newName, %this.oldName );
}
//------------------------------------------------------------------------------
// Set node parent
function ShapeEditor::doSetNodeParent( %this, %name, %parent )
{
   // The GUI presents the shape root as "<root>"; the shape API expects "".
   if ( %parent $= "<root>" )
      %parent = "";

   %act = %this.createAction( ActionSetNodeParent, "Set parent node" );
   %act.nodeName = %name;
   %act.parentName = %parent;
   %act.oldParentName = ShapeEditor.shape.getNodeParentName( %name );
   %this.doAction( %act );
}
function ActionSetNodeParent::doit( %this )
{
   if ( !ShapeEditor.shape.setNodeParent( %this.nodeName, %this.parentName ) )
      return false;

   ShapeEdPropWindow.update_onNodeParentChanged( %this.nodeName );
   return true;
}
function ActionSetNodeParent::undo( %this )
{
   Parent::undo( %this );
   // Restore the parent captured when the action was created.
   if ( ShapeEditor.shape.setNodeParent( %this.nodeName, %this.oldParentName ) )
      ShapeEdPropWindow.update_onNodeParentChanged( %this.nodeName );
}
//------------------------------------------------------------------------------
// Edit node transform
function ShapeEditor::doEditNodeTransform( %this, %nodeName, %newTransform, %isWorld, %gizmoID )
{
   // If dragging the 3D gizmo, combine all movement into a single action. Undoing
   // that action will return the node to where it was when the gizmo drag started.
   %last = ShapeEdUndoManager.getUndoAction( ShapeEdUndoManager.getUndoCount() - 1 );
   if ( ( %last != -1 ) && ( %last.class $= ActionEditNodeTransform ) &&
      ( %last.nodeName $= %nodeName ) && ( %last.gizmoID != -1 ) && ( %last.gizmoID == %gizmoID ) )
   {
      // Use the last action to do the edit, and modify it so it only applies
      // the latest transform
      %last.newTransform = %newTransform;
      %last.isWorld = %isWorld;
      %last.doit();
      ShapeEditor.setDirty( true );
   }
   else
   {
      // Start a fresh action; capture the current transform now so undo can
      // restore the node to this state.
      %action = %this.createAction( ActionEditNodeTransform, "Edit node transform" );
      %action.nodeName = %nodeName;
      %action.newTransform = %newTransform;
      %action.isWorld = %isWorld;
      %action.gizmoID = %gizmoID;
      %action.oldTransform = %this.shape.getNodeTransform( %nodeName, %isWorld );
      %this.doAction( %action );
   }
}
function ActionEditNodeTransform::doit( %this )
{
   // Apply the stored transform and refresh the transform fields in the GUI.
   ShapeEditor.shape.setNodeTransform( %this.nodeName, %this.newTransform, %this.isWorld );
   ShapeEdPropWindow.update_onNodeTransformChanged();
   return true;
}
function ActionEditNodeTransform::undo( %this )
{
   Parent::undo( %this );
   // Restore the transform captured before the (possibly merged) edit began.
   ShapeEditor.shape.setNodeTransform( %this.nodeName, %this.oldTransform, %this.isWorld );
   ShapeEdPropWindow.update_onNodeTransformChanged();
}
//------------------------------------------------------------------------------
// Add sequence
function ShapeEditor::doAddSequence( %this, %seqName, %from, %start, %end )
{
   // origFrom keeps the caller's source name; 'from' may be replaced by a
   // backup copy inside the action's doit.
   %act = %this.createAction( ActionAddSequence, "Add sequence" );
   %act.seqName = %seqName;
   %act.origFrom = %from;
   %act.from = %from;
   %act.start = %start;
   %act.end = %end;
   %this.doAction( %act );
}
function ActionAddSequence::doit( %this )
{
   // If adding this sequence from an existing sequence, make a backup copy of
   // the existing sequence first, so we can edit the start/end frames later
   // without having to worry if the original source sequence has changed.
   if ( ShapeEditor.shape.getSequenceIndex( %this.from ) >= 0 )
   {
      %this.from = ShapeEditor.getUniqueName( "sequence", "__backup__" @ %this.origFrom @ "_" );
      ShapeEditor.shape.addSequence( %this.origFrom, %this.from );
   }

   // Add the sequence. The global force-load flag is enabled only for the
   // duration of this call, per the editor setting.
   $collada::forceLoadDAE = EditorSettings.value( "forceLoadDAE" );
   %success = ShapeEditor.shape.addSequence( %this.from, %this.seqName, %this.start, %this.end );
   $collada::forceLoadDAE = false;

   if ( %success )
   {
      ShapeEdPropWindow.update_onSequenceAdded( %this.seqName, -1 );
      return true;
   }
   return false;
}
function ActionAddSequence::undo( %this )
{
   Parent::undo( %this );

   // Remove the backup sequence if one was created (doit replaces 'from'
   // with the backup's name in that case).
   if ( %this.origFrom !$= %this.from )
   {
      ShapeEditor.shape.removeSequence( %this.from );
      %this.from = %this.origFrom;
   }

   // Remove the actual sequence
   if ( ShapeEditor.shape.removeSequence( %this.seqName ) )
      ShapeEdPropWindow.update_onSequenceRemoved( %this.seqName );
}
//------------------------------------------------------------------------------
// Remove sequence
function ShapeEditor::doRemoveSequence( %this, %seqName )
{
   %act = %this.createAction( ActionRemoveSequence, "Remove sequence" );
   %act.seqName = %seqName;
   %this.doAction( %act );
}
function ActionRemoveSequence::doit( %this )
{
   if ( !ShapeEditor.shape.removeSequence( %this.seqName ) )
      return false;

   ShapeEdPropWindow.update_onSequenceRemoved( %this.seqName );
   return true;
}
function ActionRemoveSequence::undo( %this )
{
   // Sequence removal is destructive and cannot be reversed (see file header);
   // only the dirty flag is updated.
   Parent::undo( %this );
}
//------------------------------------------------------------------------------
// Rename sequence
function ShapeEditor::doRenameSequence( %this, %oldName, %newName )
{
   // Capture both names so the action can be undone by renaming back.
   %act = %this.createAction( ActionRenameSequence, "Rename sequence" );
   %act.oldName = %oldName;
   %act.newName = %newName;
   %this.doAction( %act );
}
function ActionRenameSequence::doit( %this )
{
   if ( !ShapeEditor.shape.renameSequence( %this.oldName, %this.newName ) )
      return false;

   ShapeEdPropWindow.update_onSequenceRenamed( %this.oldName, %this.newName );
   return true;
}
function ActionRenameSequence::undo( %this )
{
   Parent::undo( %this );
   // Reverse the rename by swapping the names back.
   if ( ShapeEditor.shape.renameSequence( %this.newName, %this.oldName ) )
      ShapeEdPropWindow.update_onSequenceRenamed( %this.newName, %this.oldName );
}
//------------------------------------------------------------------------------
// Edit sequence source data ( parent, start or end )
function ShapeEditor::doEditSeqSource( %this, %seqName, %from, %start, %end )
{
   %action = %this.createAction( ActionEditSeqSource, "Edit sequence source data" );
   %action.seqName = %seqName;
   %action.origFrom = %from;
   %action.from = %from;
   %action.start = %start;
   %action.end = %end;

   // To support undo, the sequence will be renamed instead of removed (undo just
   // removes the added sequence and renames the original back). Generate a unique
   // name for the backed up sequence
   %action.seqBackup = ShapeEditor.getUniqueName( "sequence", "__backup__" @ %action.seqName @ "_" );

   // If editing an internal sequence, the source is the renamed backup
   if ( %action.from $= %action.seqName )
      %action.from = %action.seqBackup;

   %this.doAction( %action );
}
function ActionEditSeqSource::doit( %this )
{
   // If changing the source to an existing sequence, make a backup copy of
   // the existing sequence first, so we can edit the start/end frames later
   // without having to worry if the original source sequence has changed.
   if ( !startswith( %this.from, "__backup__" ) &&
      ShapeEditor.shape.getSequenceIndex( %this.from ) >= 0 )
   {
      %this.from = ShapeEditor.getUniqueName( "sequence", "__backup__" @ %this.origFrom @ "_" );
      ShapeEditor.shape.addSequence( %this.origFrom, %this.from );
   }

   // Get settings we want to retain across the re-add below.
   %priority = ShapeEditor.shape.getSequencePriority( %this.seqName );
   %cyclic = ShapeEditor.shape.getSequenceCyclic( %this.seqName );
   %blend = ShapeEditor.shape.getSequenceBlend( %this.seqName );

   // Rename this sequence (instead of removing it) so we can undo this action
   ShapeEditor.shape.renameSequence( %this.seqName, %this.seqBackup );

   // Add the new sequence
   if ( ShapeEditor.shape.addSequence( %this.from, %this.seqName, %this.start, %this.end ) )
   {
      // Restore original settings (only touching values that actually changed).
      if ( ShapeEditor.shape.getSequencePriority ( %this.seqName ) != %priority )
         ShapeEditor.shape.setSequencePriority( %this.seqName, %priority );
      if ( ShapeEditor.shape.getSequenceCyclic( %this.seqName ) != %cyclic )
         ShapeEditor.shape.setSequenceCyclic( %this.seqName, %cyclic );
      %newBlend = ShapeEditor.shape.getSequenceBlend( %this.seqName );
      if ( %newBlend !$= %blend )
      {
         // Undo current blend, then apply new one
         // (blend value is a field list: <flag> <reference> <frame>).
         ShapeEditor.shape.setSequenceBlend( %this.seqName, 0, getField( %newBlend, 1 ), getField( %newBlend, 2 ) );
         if ( getField( %blend, 0 ) == 1 )
            ShapeEditor.shape.setSequenceBlend( %this.seqName, getField( %blend, 0 ), getField( %blend, 1 ), getField( %blend, 2 ) );
      }

      // Keep the sequence list and playback controls in sync if this
      // sequence is the one currently selected.
      if ( ShapeEdSequenceList.getSelectedName() $= %this.seqName )
      {
         ShapeEdSequenceList.editColumn( %this.seqName, 3, %this.end - %this.start + 1 );
         ShapeEdPropWindow.syncPlaybackDetails();
      }

      return true;
   }
   return false;
}
// Revert the source-data edit: discard the source backup (if one was created
// by doit), remove the re-added sequence and rename the original backup back.
function ActionEditSeqSource::undo( %this )
{
   Parent::undo( %this );

   // Remove the source sequence backup if one was created
   if ( ( %this.from !$= %this.origFrom ) && ( %this.from !$= %this.seqBackup ) )
   {
      ShapeEditor.shape.removeSequence( %this.from );
      %this.from = %this.origFrom;
   }

   // Remove the added sequence, and rename the backup back
   if ( ShapeEditor.shape.removeSequence( %this.seqName ) &&
        ShapeEditor.shape.renameSequence( %this.seqBackup, %this.seqName ) )
   {
      if ( ShapeEdSequenceList.getSelectedName() $= %this.seqName )
      {
         // NOTE(review): this still shows the EDITED frame count
         // ( %this.end - %this.start + 1 ) even though the original sequence
         // has just been restored — looks suspicious, confirm intended.
         ShapeEdSequenceList.editColumn( %this.seqName, 3, %this.end - %this.start + 1 );
         ShapeEdPropWindow.syncPlaybackDetails();
      }
   }
}
//------------------------------------------------------------------------------
// Edit cyclic flag
// Create and execute an undoable action that toggles a sequence's cyclic flag.
function ShapeEditor::doEditCyclic( %this, %seqName, %cyclic )
{
   %action = %this.createAction( ActionEditCyclic, "Toggle cyclic flag" );
   %action.seqName = %seqName;
   %action.cyclic = %cyclic;

   %this.doAction( %action );
}

// Apply the new cyclic flag and notify the UI. Returns true on success.
function ActionEditCyclic::doit( %this )
{
   if ( ShapeEditor.shape.setSequenceCyclic( %this.seqName, %this.cyclic ) )
   {
      ShapeEdPropWindow.update_onSequenceCyclicChanged( %this.seqName, %this.cyclic );
      return true;
   }
   return false;
}

// Revert by setting the opposite flag value.
function ActionEditCyclic::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.setSequenceCyclic( %this.seqName, !%this.cyclic ) )
      ShapeEdPropWindow.update_onSequenceCyclicChanged( %this.seqName, !%this.cyclic );
}
//------------------------------------------------------------------------------
// Edit blend properties
// Create and execute an undoable action that edits a sequence's blend
// properties ( blend flag, reference sequence, reference frame ).
function ShapeEditor::doEditBlend( %this, %seqName, %blend, %blendSeq, %blendFrame )
{
   %action = %this.createAction( ActionEditBlend, "Edit blend properties" );
   %action.seqName = %seqName;
   %action.blend = %blend;
   %action.blendSeq = %blendSeq;
   %action.blendFrame = %blendFrame;

   // Store the current blend settings ( tab separated: flag, seq, frame )
   %oldBlend = ShapeEditor.shape.getSequenceBlend( %seqName );
   %action.oldBlend = getField( %oldBlend, 0 );
   %action.oldBlendSeq = getField( %oldBlend, 1 );
   %action.oldBlendFrame = getField( %oldBlend, 2 );

   // Use new values if the old ones do not exist ( for blend sequences embedded
   // in the DTS/DSQ file )
   if ( %action.oldBlendSeq $= "" )
      %action.oldBlendSeq = %action.blendSeq;
   if ( %action.oldBlendFrame $= "" )
      %action.oldBlendFrame = %action.blendFrame;

   %this.doAction( %action );
}

// Apply the new blend settings. Returns true on success.
function ActionEditBlend::doit( %this )
{
   // If we are changing the blend reference ( rather than just toggling the flag )
   // we need to undo the current blend first.
   if ( %this.blend && %this.oldBlend )
   {
      if ( !ShapeEditor.shape.setSequenceBlend( %this.seqName, false, %this.oldBlendSeq, %this.oldBlendFrame ) )
         return false;
   }

   if ( ShapeEditor.shape.setSequenceBlend( %this.seqName, %this.blend, %this.blendSeq, %this.blendFrame ) )
   {
      ShapeEdPropWindow.update_onSequenceBlendChanged( %this.seqName, %this.blend,
         %this.oldBlendSeq, %this.oldBlendFrame, %this.blendSeq, %this.blendFrame );
      return true;
   }
   return false;
}

// Restore the original blend settings.
function ActionEditBlend::undo( %this )
{
   Parent::undo( %this );

   // If we are changing the blend reference ( rather than just toggling the flag )
   // we need to undo the current blend first.
   if ( %this.blend && %this.oldBlend )
   {
      if ( !ShapeEditor.shape.setSequenceBlend( %this.seqName, false, %this.blendSeq, %this.blendFrame ) )
         return;
   }

   if ( ShapeEditor.shape.setSequenceBlend( %this.seqName, %this.oldBlend, %this.oldBlendSeq, %this.oldBlendFrame ) )
   {
      // NOTE(review): the UI is told the flag is !%this.blend rather than
      // %this.oldBlend — equivalent when the action toggled the flag, but
      // not when only the reference changed. Confirm intended.
      ShapeEdPropWindow.update_onSequenceBlendChanged( %this.seqName, !%this.blend,
         %this.blendSeq, %this.blendFrame, %this.oldBlendSeq, %this.oldBlendFrame );
   }
}
//------------------------------------------------------------------------------
// Edit sequence priority
// Create and execute an undoable action that changes a sequence's priority.
function ShapeEditor::doEditSequencePriority( %this, %seqName, %newPriority )
{
   %action = %this.createAction( ActionEditSequencePriority, "Edit sequence priority" );
   %action.seqName = %seqName;
   %action.newPriority = %newPriority;
   %action.oldPriority = %this.shape.getSequencePriority( %seqName );   // saved for undo

   %this.doAction( %action );
}

// Apply the new priority and notify the UI. Returns true on success.
function ActionEditSequencePriority::doit( %this )
{
   if ( ShapeEditor.shape.setSequencePriority( %this.seqName, %this.newPriority ) )
   {
      ShapeEdPropWindow.update_onSequencePriorityChanged( %this.seqName );
      return true;
   }
   return false;
}

// Restore the priority captured before the edit.
function ActionEditSequencePriority::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.setSequencePriority( %this.seqName, %this.oldPriority ) )
      ShapeEdPropWindow.update_onSequencePriorityChanged( %this.seqName );
}
//------------------------------------------------------------------------------
// Edit sequence ground speed
// Create and execute an undoable action that changes a sequence's ground speed.
function ShapeEditor::doEditSequenceGroundSpeed( %this, %seqName, %newSpeed )
{
   %action = %this.createAction( ActionEditSequenceGroundSpeed, "Edit sequence ground speed" );
   %action.seqName = %seqName;
   %action.newSpeed = %newSpeed;
   %action.oldSpeed = %this.shape.getSequenceGroundSpeed( %seqName );   // saved for undo

   %this.doAction( %action );
}

// Apply the new ground speed and notify the UI. Returns true on success.
function ActionEditSequenceGroundSpeed::doit( %this )
{
   if ( ShapeEditor.shape.setSequenceGroundSpeed( %this.seqName, %this.newSpeed ) )
   {
      ShapeEdPropWindow.update_onSequenceGroundSpeedChanged( %this.seqName );
      return true;
   }
   return false;
}

// Restore the ground speed captured before the edit.
function ActionEditSequenceGroundSpeed::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.setSequenceGroundSpeed( %this.seqName, %this.oldSpeed ) )
      ShapeEdPropWindow.update_onSequenceGroundSpeedChanged( %this.seqName );
}
//------------------------------------------------------------------------------
// Add trigger
// Create and execute an undoable action that adds a trigger to a sequence at
// the given frame with the given state.
function ShapeEditor::doAddTrigger( %this, %seqName, %frame, %state )
{
   %action = %this.createAction( ActionAddTrigger, "Add trigger" );
   %action.seqName = %seqName;
   %action.frame = %frame;
   %action.state = %state;

   %this.doAction( %action );
}

// Add the trigger and notify the UI. Returns true on success.
function ActionAddTrigger::doit( %this )
{
   if ( ShapeEditor.shape.addTrigger( %this.seqName, %this.frame, %this.state ) )
   {
      ShapeEdPropWindow.update_onTriggerAdded( %this.seqName, %this.frame, %this.state );
      return true;
   }
   return false;
}

// Revert by removing the trigger that was added.
function ActionAddTrigger::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.removeTrigger( %this.seqName, %this.frame, %this.state ) )
      ShapeEdPropWindow.update_onTriggerRemoved( %this.seqName, %this.frame, %this.state );
}
//------------------------------------------------------------------------------
// Remove trigger
// Create and execute an undoable action that removes a trigger from a
// sequence ( identified by frame and state ).
function ShapeEditor::doRemoveTrigger( %this, %seqName, %frame, %state )
{
   %action = %this.createAction( ActionRemoveTrigger, "Remove trigger" );
   %action.seqName = %seqName;
   %action.frame = %frame;
   %action.state = %state;

   %this.doAction( %action );
}

// Remove the trigger and notify the UI. Returns true on success.
function ActionRemoveTrigger::doit( %this )
{
   if ( ShapeEditor.shape.removeTrigger( %this.seqName, %this.frame, %this.state ) )
   {
      ShapeEdPropWindow.update_onTriggerRemoved( %this.seqName, %this.frame, %this.state );
      return true;
   }
   return false;
}

// Revert by re-adding the removed trigger.
function ActionRemoveTrigger::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.addTrigger( %this.seqName, %this.frame, %this.state ) )
      ShapeEdPropWindow.update_onTriggerAdded( %this.seqName, %this.frame, %this.state );
}
//------------------------------------------------------------------------------
// Edit trigger
// Create and execute an undoable action that edits a trigger by replacing its
// old frame/state with a new frame/state.
function ShapeEditor::doEditTrigger( %this, %seqName, %oldFrame, %oldState, %frame, %state )
{
   %action = %this.createAction( ActionEditTrigger, "Edit trigger" );
   %action.seqName = %seqName;
   %action.oldFrame = %oldFrame;
   %action.oldState = %oldState;
   %action.frame = %frame;
   %action.state = %state;

   %this.doAction( %action );
}

// Add the new trigger then remove the old one, updating the trigger list UI.
// NOTE(review): if addTrigger succeeds but removeTrigger fails, the new
// trigger is left in place while false is returned — confirm acceptable.
function ActionEditTrigger::doit( %this )
{
   if ( ShapeEditor.shape.addTrigger( %this.seqName, %this.frame, %this.state ) &&
        ShapeEditor.shape.removeTrigger( %this.seqName, %this.oldFrame, %this.oldState ) )
   {
      ShapeEdTriggerList.updateItem( %this.oldFrame, %this.oldState, %this.frame, %this.state );
      return true;
   }
   return false;
}

// Revert by swapping the triggers back ( add old, remove new ).
function ActionEditTrigger::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.addTrigger( %this.seqName, %this.oldFrame, %this.oldState ) &&
        ShapeEditor.shape.removeTrigger( %this.seqName, %this.frame, %this.state ) )
      ShapeEdTriggerList.updateItem( %this.frame, %this.state, %this.oldFrame, %this.oldState );
}
//------------------------------------------------------------------------------
// Rename detail
// Create and execute an undoable action that renames a detail level.
function ShapeEditor::doRenameDetail( %this, %oldName, %newName )
{
   %action = %this.createAction( ActionRenameDetail, "Rename detail" );
   %action.oldName = %oldName;
   %action.newName = %newName;

   %this.doAction( %action );
}

// Apply the rename and notify the UI. Returns true on success.
function ActionRenameDetail::doit( %this )
{
   if ( ShapeEditor.shape.renameDetailLevel( %this.oldName, %this.newName ) )
   {
      ShapeEdPropWindow.update_onDetailRenamed( %this.oldName, %this.newName );
      return true;
   }
   return false;
}

// Revert by renaming back to the original name.
function ActionRenameDetail::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.renameDetailLevel( %this.newName, %this.oldName ) )
      ShapeEdPropWindow.update_onDetailRenamed( %this.newName, %this.oldName );
}
//------------------------------------------------------------------------------
// Edit detail size
// Create and execute an undoable action that changes a detail level's size.
function ShapeEditor::doEditDetailSize( %this, %oldSize, %newSize )
{
   %action = %this.createAction( ActionEditDetailSize, "Edit detail size" );
   %action.oldSize = %oldSize;
   %action.newSize = %newSize;

   %this.doAction( %action );
}

// Apply the new size; setDetailLevelSize returns the detail index, or -1 on
// failure. Returns true on success.
function ActionEditDetailSize::doit( %this )
{
   %dl = ShapeEditor.shape.setDetailLevelSize( %this.oldSize, %this.newSize );
   if ( %dl != -1 )
   {
      ShapeEdPropWindow.update_onDetailSizeChanged( %this.oldSize, %this.newSize );
      return true;
   }
   return false;
}

// Revert by setting the size back to the original value.
function ActionEditDetailSize::undo( %this )
{
   Parent::undo( %this );

   %dl = ShapeEditor.shape.setDetailLevelSize( %this.newSize, %this.oldSize );
   if ( %dl != -1 )
      ShapeEdPropWindow.update_onDetailSizeChanged( %this.newSize, %this.oldSize );
}
//------------------------------------------------------------------------------
// Rename object
// Create and execute an undoable action that renames an object in the shape.
function ShapeEditor::doRenameObject( %this, %oldName, %newName )
{
   %action = %this.createAction( ActionRenameObject, "Rename object" );
   %action.oldName = %oldName;
   %action.newName = %newName;

   %this.doAction( %action );
}

// Apply the rename and notify the UI. Returns true on success.
function ActionRenameObject::doit( %this )
{
   if ( ShapeEditor.shape.renameObject( %this.oldName, %this.newName ) )
   {
      ShapeEdPropWindow.update_onObjectRenamed( %this.oldName, %this.newName );
      return true;
   }
   return false;
}

// Revert by renaming back to the original name.
function ActionRenameObject::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.renameObject( %this.newName, %this.oldName ) )
      ShapeEdPropWindow.update_onObjectRenamed( %this.newName, %this.oldName );
}
//------------------------------------------------------------------------------
// Edit mesh size
// Create and execute an undoable action that changes a mesh's detail size.
// The mesh name encodes the size as a trailing number ( e.g. "shape128" ),
// which is split off here into the base name and the old size.
function ShapeEditor::doEditMeshSize( %this, %meshName, %size )
{
   %action = %this.createAction( ActionEditMeshSize, "Edit mesh size" );
   %action.meshName = stripTrailingNumber( %meshName );
   %action.oldSize = getTrailingNumber( %meshName );
   %action.newSize = %size;

   %this.doAction( %action );
}

// Apply the new mesh size and notify the UI. Returns true on success.
function ActionEditMeshSize::doit( %this )
{
   if ( ShapeEditor.shape.setMeshSize( %this.meshName SPC %this.oldSize, %this.newSize ) )
   {
      ShapeEdPropWindow.update_onMeshSizeChanged( %this.meshName, %this.oldSize, %this.newSize );
      return true;
   }
   return false;
}
// Revert the mesh-size edit by setting the size back from newSize to oldSize.
function ActionEditMeshSize::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.setMeshSize( %this.meshName SPC %this.newSize, %this.oldSize ) )
   {
      // Fixed: previously notified the UI with ( oldSize, oldSize ), so the
      // "changed from" value was wrong. Mirror doit's ( from, to ) ordering:
      // the undo changes the mesh from newSize back to oldSize.
      ShapeEdPropWindow.update_onMeshSizeChanged( %this.meshName, %this.newSize, %this.oldSize );
   }
}
//------------------------------------------------------------------------------
// Edit billboard type
// Create and execute an undoable action that changes a mesh's billboard type
// ( "normal", "billboard" or "billboardzaxis" ).
function ShapeEditor::doEditMeshBillboard( %this, %meshName, %type )
{
   %action = %this.createAction( ActionEditMeshBillboard, "Edit mesh billboard" );
   %action.meshName = %meshName;
   %action.oldType = %this.shape.getMeshType( %meshName );   // saved for undo
   %action.newType = %type;

   %this.doAction( %action );
}

// Apply the new mesh type and sync the billboard-type radio control.
// Returns true on success.
function ActionEditMeshBillboard::doit( %this )
{
   if ( ShapeEditor.shape.setMeshType( %this.meshName, %this.newType ) )
   {
      switch$ ( ShapeEditor.shape.getMeshType( %this.meshName ) )
      {
         case "normal":           ShapeEdDetails-->bbType.setSelected( 0, false );
         case "billboard":        ShapeEdDetails-->bbType.setSelected( 1, false );
         case "billboardzaxis":   ShapeEdDetails-->bbType.setSelected( 2, false );
      }
      return true;
   }
   return false;
}

// Restore the original mesh type. The radio control is only synced if this
// mesh is still the selected item in the detail tree.
function ActionEditMeshBillboard::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.setMeshType( %this.meshName, %this.oldType ) )
   {
      %id = ShapeEdDetailTree.getSelectedItem();
      if ( ( %id > 1 ) && ( ShapeEdDetailTree.getItemText( %id ) $= %this.meshName ) )
      {
         switch$ ( ShapeEditor.shape.getMeshType( %this.meshName ) )
         {
            case "normal":           ShapeEdDetails-->bbType.setSelected( 0, false );
            case "billboard":        ShapeEdDetails-->bbType.setSelected( 1, false );
            case "billboardzaxis":   ShapeEdDetails-->bbType.setSelected( 2, false );
         }
      }
   }
}
//------------------------------------------------------------------------------
// Edit object node
// Create and execute an undoable action that attaches an object to a
// different node.
function ShapeEditor::doSetObjectNode( %this, %objName, %node )
{
   %action = %this.createAction( ActionSetObjectNode, "Set object node" );
   %action.objName = %objName;
   %action.oldNode = %this.shape.getObjectNode( %objName );   // saved for undo
   %action.newNode = %node;

   %this.doAction( %action );
}

// Apply the new node assignment and notify the UI. Returns true on success.
function ActionSetObjectNode::doit( %this )
{
   if ( ShapeEditor.shape.setObjectNode( %this.objName, %this.newNode ) )
   {
      ShapeEdPropWindow.update_onObjectNodeChanged( %this.objName );
      return true;
   }
   return false;
}

// Restore the original node assignment.
function ActionSetObjectNode::undo( %this )
{
   Parent::undo( %this );

   if ( ShapeEditor.shape.setObjectNode( %this.objName, %this.oldNode ) )
      ShapeEdPropWindow.update_onObjectNodeChanged( %this.objName );
}
//------------------------------------------------------------------------------
// Remove mesh
// Create and execute an undoable action that removes a mesh from the shape.
function ShapeEditor::doRemoveMesh( %this, %meshName )
{
   %action = %this.createAction( ActionRemoveMesh, "Remove mesh" );
   %action.meshName = %meshName;

   %this.doAction( %action );
}

// Remove the mesh and notify the UI. Returns true on success.
function ActionRemoveMesh::doit( %this )
{
   if ( ShapeEditor.shape.removeMesh( %this.meshName ) )
   {
      ShapeEdPropWindow.update_onMeshRemoved( %this.meshName );
      return true;
   }
   return false;
}

// Revert the removal. No mesh restoration is done here; presumably
// Parent::undo handles it ( e.g. by reloading shape state ) — TODO confirm.
function ActionRemoveMesh::undo( %this )
{
   Parent::undo( %this );
}
//------------------------------------------------------------------------------
// Add meshes from file
// Create and execute an undoable action that imports meshes from an external
// file at the given detail size.
function ShapeEditor::doAddMeshFromFile( %this, %filename, %size )
{
   %action = %this.createAction( ActionAddMeshFromFile, "Add mesh from file" );
   %action.filename = %filename;
   %action.size = %size;

   %this.doAction( %action );
}

// Import the LOD meshes; the tab-separated list of added mesh names is kept
// on the action so undo can remove them. Returns true if any were added.
function ActionAddMeshFromFile::doit( %this )
{
   %this.meshList = ShapeEditor.addLODFromFile( ShapeEditor.shape, %this.filename, %this.size, 1 );
   if ( %this.meshList !$= "" )
   {
      %count = getFieldCount( %this.meshList );
      for ( %i = 0; %i < %count; %i++ )
         ShapeEdPropWindow.update_onMeshAdded( getField( %this.meshList, %i ) );
      ShapeEdMaterials.updateMaterialList();
      return true;
   }
   return false;
}

// Revert by removing every mesh recorded in doit, then refresh the
// material list.
function ActionAddMeshFromFile::undo( %this )
{
   // Remove all the meshes we added
   %count = getFieldCount( %this.meshList );
   for ( %i = 0; %i < %count; %i ++ )
   {
      %name = getField( %this.meshList, %i );
      ShapeEditor.shape.removeMesh( %name );
      ShapeEdPropWindow.update_onMeshRemoved( %name );
   }
   ShapeEdMaterials.updateMaterialList();
}
//------------------------------------------------------------------------------
// Add/edit collision geometry
// Create and execute an undoable action that adds or edits the shape's
// collision geometry. The previous settings are read from the tab-separated
// ShapeEdColWindow.lastColSettings fields ( type, target, depth, merge,
// concavity, maxVerts, boxMax, sphereMax, capsuleMax ) so undo can restore them.
function ShapeEditor::doEditCollision( %this, %type, %target, %depth, %merge, %concavity,
                                       %maxVerts, %boxMax, %sphereMax, %capsuleMax )
{
   %colData = ShapeEdColWindow.lastColSettings;

   %action = %this.createAction( ActionEditCollision, "Edit shape collision" );

   // Previous collision settings ( for undo )
   %action.oldType = getField( %colData, 0 );
   %action.oldTarget = getField( %colData, 1 );
   %action.oldDepth = getField( %colData, 2 );
   %action.oldMerge = getField( %colData, 3 );
   %action.oldConcavity = getField( %colData, 4 );
   %action.oldMaxVerts = getField( %colData, 5 );
   %action.oldBoxMax = getField( %colData, 6 );
   %action.oldSphereMax = getField( %colData, 7 );
   %action.oldCapsuleMax = getField( %colData, 8 );

   // New collision settings
   %action.newType = %type;
   %action.newTarget = %target;
   %action.newDepth = %depth;
   %action.newMerge = %merge;
   %action.newConcavity = %concavity;
   %action.newMaxVerts = %maxVerts;
   %action.newBoxMax = %boxMax;
   %action.newSphereMax = %sphereMax;
   %action.newCapsuleMax = %capsuleMax;

   %this.doAction( %action );
}
// Shared worker for doit/undo: removes the existing collision detail (-1)
// and its nodes, then regenerates collision geometry with the given settings
// and refreshes the UI. Returns true on success, false if the engine call
// fails.
function ActionEditCollision::updateCollision( %this, %type, %target, %depth, %merge, %concavity,
                                               %maxVerts, %boxMax, %sphereMax, %capsuleMax )
{
   // Collision geometry lives at detail level -1 under a "Col-1" node
   %colDetailSize = -1;
   %colNode = "Col" @ %colDetailSize;

   // TreeView items are case sensitive, but TSShape names are not, so fixup case
   // if needed
   %index = ShapeEditor.shape.getNodeIndex( %colNode );
   if ( %index != -1 )
      %colNode = ShapeEditor.shape.getNodeName( %index );

   // First remove the old detail and collision nodes
   %meshList = ShapeEditor.getDetailMeshList( %colDetailSize );
   %meshCount = getFieldCount( %meshList );
   if ( %meshCount > 0 )
   {
      ShapeEditor.shape.removeDetailLevel( %colDetailSize );
      for ( %i = 0; %i < %meshCount; %i++ )
         ShapeEdPropWindow.update_onMeshRemoved( getField( %meshList, %i ) );
   }

   %nodeList = ShapeEditor.getNodeNames( %colNode, "" );
   %nodeCount = getFieldCount( %nodeList );
   if ( %nodeCount > 0 )
   {
      for ( %i = 0; %i < %nodeCount; %i++ )
         ShapeEditor.shape.removeNode( getField( %nodeList, %i ) );
      ShapeEdPropWindow.update_onNodeRemoved( %nodeList, %nodeCount );
   }

   // Add the new node and geometry
   // NOTE(review): empty %type means "remove only" — but this bare return
   // yields "" rather than true/false to the caller; confirm callers treat
   // that as intended.
   if ( %type $= "" )
      return;

   if ( !ShapeEditor.shape.addCollisionDetail( %colDetailSize, %type, %target,
                                               %depth, %merge, %concavity, %maxVerts,
                                               %boxMax, %sphereMax, %capsuleMax ) )
      return false;

   // Update UI
   %meshList = ShapeEditor.getDetailMeshList( %colDetailSize );
   ShapeEdPropWindow.update_onNodeAdded( %colNode, ShapeEditor.shape.getNodeCount() ); // will also add child nodes
   %count = getFieldCount( %meshList );
   for ( %i = 0; %i < %count; %i++ )
      ShapeEdPropWindow.update_onMeshAdded( getField( %meshList, %i ) );

   // Remember these settings so the next edit can undo back to them
   ShapeEdColWindow.lastColSettings = %type TAB %target TAB %depth TAB %merge TAB
      %concavity TAB %maxVerts TAB %boxMax TAB %sphereMax TAB %capsuleMax;
   ShapeEdColWindow.update_onCollisionChanged();

   return true;
}
// Regenerate collision using the new settings, showing a wait dialog while
// the geometry is built. Returns the worker's success flag.
function ActionEditCollision::doit( %this )
{
   ShapeEdWaitGui.show( "Generating collision geometry..." );
   %success = %this.updateCollision( %this.newType, %this.newTarget, %this.newDepth, %this.newMerge,
                                     %this.newConcavity, %this.newMaxVerts, %this.newBoxMax,
                                     %this.newSphereMax, %this.newCapsuleMax );
   ShapeEdWaitGui.hide();
   return %success;
}

// Regenerate collision using the settings captured before the edit.
function ActionEditCollision::undo( %this )
{
   Parent::undo( %this );

   ShapeEdWaitGui.show( "Generating collision geometry..." );
   %this.updateCollision( %this.oldType, %this.oldTarget, %this.oldDepth, %this.oldMerge,
                          %this.oldConcavity, %this.oldMaxVerts, %this.oldBoxMax,
                          %this.oldSphereMax, %this.oldCapsuleMax );
   ShapeEdWaitGui.hide();
}
//------------------------------------------------------------------------------
// Remove Detail
// Create and execute an undoable action that removes an entire detail level
// ( identified by its size ).
function ShapeEditor::doRemoveDetail( %this, %size )
{
   %action = %this.createAction( ActionRemoveDetail, "Remove detail level" );
   %action.size = %size;

   %this.doAction( %action );
}

// Remove the detail level; the mesh list is captured beforehand so the UI
// can be notified of every removed mesh. Returns true on success.
function ActionRemoveDetail::doit( %this )
{
   %meshList = ShapeEditor.getDetailMeshList( %this.size );
   if ( ShapeEditor.shape.removeDetailLevel( %this.size ) )
   {
      %meshCount = getFieldCount( %meshList );
      for ( %i = 0; %i < %meshCount; %i++ )
         ShapeEdPropWindow.update_onMeshRemoved( getField( %meshList, %i ) );
      return true;
   }
   return false;
}

// Revert the removal. No detail restoration is done here; presumably
// Parent::undo handles it — TODO confirm.
function ActionRemoveDetail::undo( %this )
{
   Parent::undo( %this );
}
//------------------------------------------------------------------------------
// Update bounds
// Create and execute an undoable action that recomputes and applies the
// shape's bounding box.
function ShapeEditor::doSetBounds( %this )
{
   %action = %this.createAction( ActionSetBounds, "Set bounds" );
   %action.oldBounds = ShapeEditor.shape.getBounds();              // saved for undo
   %action.newBounds = ShapeEdShapeView.computeShapeBounds();      // recomputed from the view

   %this.doAction( %action );
}

// Apply the recomputed bounds; returns the engine call's success flag.
function ActionSetBounds::doit( %this )
{
   return ShapeEditor.shape.setBounds( %this.newBounds );
}

// Restore the bounds captured before the edit.
function ActionSetBounds::undo( %this )
{
   Parent::undo( %this );

   ShapeEditor.shape.setBounds( %this.oldBounds );
}
//------------------------------------------------------------------------------
// Add/edit imposter
// Create and execute an undoable action that adds or edits the shape's
// imposter ( billboard ) detail level. %dl is the existing imposter detail
// index, or -1 when adding a new one.
function ShapeEditor::doEditImposter( %this, %dl, %detailSize, %bbEquatorSteps, %bbPolarSteps,
                                      %bbDetailLevel, %bbDimension, %bbIncludePoles, %bbPolarAngle )
{
   %action = %this.createAction( ActionEditImposter, "Edit imposter" );
   %action.oldDL = %dl;

   // Only capture old settings when an imposter already exists
   if ( %action.oldDL != -1 )
   {
      %action.oldSize = ShapeEditor.shape.getDetailLevelSize( %dl );
      %action.oldImposter = ShapeEditor.shape.getImposterSettings( %dl );
   }

   %action.newSize = %detailSize;
   // Tab separated: enabled-flag, equatorSteps, polarSteps, detailLevel,
   // dimension, includePoles, polarAngle
   %action.newImposter = "1" TAB %bbEquatorSteps TAB %bbPolarSteps TAB %bbDetailLevel TAB
                         %bbDimension TAB %bbIncludePoles TAB %bbPolarAngle;

   %this.doAction( %action );
}
// Generate the imposter with the new settings and refresh the UI.
// Returns true on success.
function ActionEditImposter::doit( %this )
{
   // Unpack new imposter settings ( field 0 is the enabled flag and is not
   // passed to addImposter )
   for ( %i = 0; %i < 7; %i++ )
      %val[%i] = getField( %this.newImposter, %i );

   ShapeEdWaitGui.show( "Generating imposter bitmaps..." );

   // Need to de-highlight the current material, or the imposter will have the
   // highlight effect baked in!
   ShapeEdMaterials.updateSelectedMaterial( false );

   %dl = ShapeEditor.shape.addImposter( %this.newSize, %val[1], %val[2], %val[3], %val[4], %val[5], %val[6] );
   ShapeEdWaitGui.hide();

   // Restore highlight effect
   ShapeEdMaterials.updateSelectedMaterial( ShapeEdMaterials-->highlightMaterial.getValue() );

   if ( %dl != -1 )
   {
      ShapeEdShapeView.refreshShape();
      ShapeEdShapeView.currentDL = %dl;
      ShapeEdAdvancedWindow-->detailSize.setText( %this.newSize );
      ShapeEdDetails-->meshSize.setText( %this.newSize );
      ShapeEdDetails.update_onDetailsChanged();
      return true;
   }
   return false;
}
// Revert the imposter edit: remove it entirely if the action created it,
// otherwise regenerate the imposter from the captured old settings.
function ActionEditImposter::undo( %this )
{
   Parent::undo( %this );

   // If this was a new imposter, just remove it. Otherwise restore the old settings
   if ( %this.oldDL < 0 )
   {
      if ( ShapeEditor.shape.removeImposter() )
      {
         ShapeEdShapeView.refreshShape();
         ShapeEdShapeView.currentDL = 0;
         ShapeEdDetails.update_onDetailsChanged();
      }
   }
   else
   {
      // Unpack old imposter settings ( field 0 is the enabled flag and is
      // not passed to addImposter )
      for ( %i = 0; %i < 7; %i++ )
         %val[%i] = getField( %this.oldImposter, %i );

      ShapeEdWaitGui.show( "Generating imposter bitmaps..." );

      // Need to de-highlight the current material, or the imposter will have the
      // highlight effect baked in!
      ShapeEdMaterials.updateSelectedMaterial( false );

      %dl = ShapeEditor.shape.addImposter( %this.oldSize, %val[1], %val[2], %val[3], %val[4], %val[5], %val[6] );
      ShapeEdWaitGui.hide();

      // Restore highlight effect
      ShapeEdMaterials.updateSelectedMaterial( ShapeEdMaterials-->highlightMaterial.getValue() );

      if ( %dl != -1 )
      {
         ShapeEdShapeView.refreshShape();
         ShapeEdShapeView.currentDL = %dl;
         ShapeEdAdvancedWindow-->detailSize.setText( %this.oldSize );
         ShapeEdDetails-->meshSize.setText( %this.oldSize );
      }
   }
}
//------------------------------------------------------------------------------
// Remove imposter
// Create and execute an undoable action that removes the shape's imposter
// detail level. Does nothing if the shape has no imposter.
function ShapeEditor::doRemoveImposter( %this )
{
   %action = %this.createAction( ActionRemoveImposter, "Remove imposter" );
   %dl = ShapeEditor.shape.getImposterDetailLevel();
   if ( %dl != -1 )
   {
      // Capture settings so undo can regenerate the imposter
      %action.oldSize = ShapeEditor.shape.getDetailLevelSize( %dl );
      %action.oldImposter = ShapeEditor.shape.getImposterSettings( %dl );
      %this.doAction( %action );
   }
}

// Remove the imposter and refresh the view. Returns true on success.
function ActionRemoveImposter::doit( %this )
{
   if ( ShapeEditor.shape.removeImposter() )
   {
      ShapeEdShapeView.refreshShape();
      ShapeEdShapeView.currentDL = 0;
      ShapeEdDetails.update_onDetailsChanged();
      return true;
   }
   return false;
}

// Revert by regenerating the imposter from the captured settings.
function ActionRemoveImposter::undo( %this )
{
   Parent::undo( %this );

   // Unpack the old imposter settings ( field 0 is the enabled flag and is
   // not passed to addImposter )
   for ( %i = 0; %i < 7; %i++ )
      %val[%i] = getField( %this.oldImposter, %i );

   ShapeEdWaitGui.show( "Generating imposter bitmaps..." );
   %dl = ShapeEditor.shape.addImposter( %this.oldSize, %val[1], %val[2], %val[3], %val[4], %val[5], %val[6] );
   ShapeEdWaitGui.hide();

   if ( %dl != -1 )
   {
      ShapeEdShapeView.refreshShape();
      ShapeEdShapeView.currentDL = %dl;
      ShapeEdAdvancedWindow-->detailSize.setText( %this.oldSize );
      ShapeEdDetails-->meshSize.setText( %this.oldSize );
      ShapeEdDetails.update_onDetailsChanged();
   }
}
| |
using Discord.Audio;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Model = Discord.API.Channel;
namespace Discord.Rest
{
/// <summary>
/// Represents a REST-based group-message channel.
/// </summary>
[DebuggerDisplay(@"{DebuggerDisplay,nq}")]
public class RestGroupChannel : RestChannel, IGroupChannel, IRestPrivateChannel, IRestMessageChannel, IRestAudioChannel
{
        // Icon hash from the API model; updated in Update(Model).
        private string _iconId;
        // Cached user lookup keyed by user ID; rebuilt wholesale in UpdateUsers.
        private ImmutableDictionary<ulong, RestGroupUser> _users;

        /// <inheritdoc />
        public string Name { get; private set; }

        /// <summary> Gets all cached users of this group, including the current user. </summary>
        public IReadOnlyCollection<RestGroupUser> Users => _users.ToReadOnlyCollection();
        /// <summary> Gets the cached users of this group excluding the current user. </summary>
        public IReadOnlyCollection<RestGroupUser> Recipients
            => _users.Select(x => x.Value).Where(x => x.Id != Discord.CurrentUser.Id).ToReadOnlyCollection(() => _users.Count - 1);
        internal RestGroupChannel(BaseDiscordClient discord, ulong id)
            : base(discord, id)
        {
        }
        /// <summary>
        ///     Creates a <see cref="RestGroupChannel"/> from an API channel model and
        ///     populates it via <see cref="Update(Model)"/>.
        /// </summary>
        internal new static RestGroupChannel Create(BaseDiscordClient discord, Model model)
        {
            var entity = new RestGroupChannel(discord, model.Id);
            entity.Update(model);
            return entity;
        }
        /// <summary>
        ///     Applies the fields present on the API model; unspecified (optional)
        ///     fields leave the current values untouched.
        /// </summary>
        internal override void Update(Model model)
        {
            if (model.Name.IsSpecified)
                Name = model.Name.Value;
            if (model.Icon.IsSpecified)
                _iconId = model.Icon.Value;

            if (model.Recipients.IsSpecified)
                UpdateUsers(model.Recipients.Value);
        }
internal void UpdateUsers(API.User[] models)
{
var users = ImmutableDictionary.CreateBuilder<ulong, RestGroupUser>();
for (int i = 0; i < models.Length; i++)
users[models[i].Id] = RestGroupUser.Create(Discord, models[i]);
_users = users.ToImmutable();
}
        /// <inheritdoc />
        public override async Task UpdateAsync(RequestOptions options = null)
        {
            // Re-fetch the channel model from the REST API and apply it.
            var model = await Discord.ApiClient.GetChannelAsync(Id, options).ConfigureAwait(false);
            Update(model);
        }
        /// <inheritdoc />
        public Task LeaveAsync(RequestOptions options = null)
            => ChannelHelper.DeleteAsync(this, Discord, options);
public RestUser GetUser(ulong id)
{
if (_users.TryGetValue(id, out RestGroupUser user))
return user;
return null;
}
        // Message retrieval/deletion — all delegate to ChannelHelper.

        /// <inheritdoc />
        public Task<RestMessage> GetMessageAsync(ulong id, RequestOptions options = null)
            => ChannelHelper.GetMessageAsync(this, Discord, id, options);
        /// <inheritdoc />
        public IAsyncEnumerable<IReadOnlyCollection<RestMessage>> GetMessagesAsync(int limit = DiscordConfig.MaxMessagesPerBatch, RequestOptions options = null)
            => ChannelHelper.GetMessagesAsync(this, Discord, null, Direction.Before, limit, options);
        /// <inheritdoc />
        public IAsyncEnumerable<IReadOnlyCollection<RestMessage>> GetMessagesAsync(ulong fromMessageId, Direction dir, int limit = DiscordConfig.MaxMessagesPerBatch, RequestOptions options = null)
            => ChannelHelper.GetMessagesAsync(this, Discord, fromMessageId, dir, limit, options);
        /// <inheritdoc />
        public IAsyncEnumerable<IReadOnlyCollection<RestMessage>> GetMessagesAsync(IMessage fromMessage, Direction dir, int limit = DiscordConfig.MaxMessagesPerBatch, RequestOptions options = null)
            => ChannelHelper.GetMessagesAsync(this, Discord, fromMessage.Id, dir, limit, options);
        /// <inheritdoc />
        public Task<IReadOnlyCollection<RestMessage>> GetPinnedMessagesAsync(RequestOptions options = null)
            => ChannelHelper.GetPinnedMessagesAsync(this, Discord, options);
        /// <inheritdoc />
        public Task DeleteMessageAsync(ulong messageId, RequestOptions options = null)
            => ChannelHelper.DeleteMessageAsync(this, messageId, Discord, options);
        /// <inheritdoc />
        public Task DeleteMessageAsync(IMessage message, RequestOptions options = null)
            => ChannelHelper.DeleteMessageAsync(this, message.Id, Discord, options);
        /// <inheritdoc />
        /// <exception cref="ArgumentOutOfRangeException">Message content is too long, length must be less than or equal to <see cref="DiscordConfig.MaxMessageSize"/>.</exception>
        public Task<RestUserMessage> SendMessageAsync(string text = null, bool isTTS = false, Embed embed = null, RequestOptions options = null)
            => ChannelHelper.SendMessageAsync(this, Discord, text, isTTS, embed, options);

        /// <inheritdoc />
        /// <exception cref="ArgumentException">
        /// <paramref name="filePath" /> is a zero-length string, contains only white space, or contains one or more
        /// invalid characters as defined by <see cref="System.IO.Path.GetInvalidPathChars"/>.
        /// </exception>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="filePath" /> is <c>null</c>.
        /// </exception>
        /// <exception cref="PathTooLongException">
        /// The specified path, file name, or both exceed the system-defined maximum length. For example, on
        /// Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260
        /// characters.
        /// </exception>
        /// <exception cref="DirectoryNotFoundException">
        /// The specified path is invalid, (for example, it is on an unmapped drive).
        /// </exception>
        /// <exception cref="UnauthorizedAccessException">
        /// <paramref name="filePath" /> specified a directory.-or- The caller does not have the required permission.
        /// </exception>
        /// <exception cref="FileNotFoundException">
        /// The file specified in <paramref name="filePath" /> was not found.
        /// </exception>
        /// <exception cref="NotSupportedException"><paramref name="filePath" /> is in an invalid format.</exception>
        /// <exception cref="IOException">An I/O error occurred while opening the file.</exception>
        /// <exception cref="ArgumentOutOfRangeException">Message content is too long, length must be less than or equal to <see cref="DiscordConfig.MaxMessageSize"/>.</exception>
        public Task<RestUserMessage> SendFileAsync(string filePath, string text, bool isTTS = false, Embed embed = null, RequestOptions options = null, bool isSpoiler = false)
            => ChannelHelper.SendFileAsync(this, Discord, filePath, text, isTTS, embed, options, isSpoiler);
        /// <inheritdoc />
        /// <exception cref="ArgumentOutOfRangeException">Message content is too long, length must be less than or equal to <see cref="DiscordConfig.MaxMessageSize"/>.</exception>
        public Task<RestUserMessage> SendFileAsync(Stream stream, string filename, string text, bool isTTS = false, Embed embed = null, RequestOptions options = null, bool isSpoiler = false)
            => ChannelHelper.SendFileAsync(this, Discord, stream, filename, text, isTTS, embed, options, isSpoiler);
        /// <inheritdoc />
        public Task TriggerTypingAsync(RequestOptions options = null)
            => ChannelHelper.TriggerTypingAsync(this, Discord, options);
        /// <inheritdoc />
        public IDisposable EnterTypingState(RequestOptions options = null)
            => ChannelHelper.EnterTypingState(this, Discord, options);

        /// <summary> Returns the name of the group channel. </summary>
        public override string ToString() => Name;
        private string DebuggerDisplay => $"{Name} ({Id}, Group)";

        //IRestPrivateChannel
        IReadOnlyCollection<RestUser> IRestPrivateChannel.Recipients => Recipients;

        //IPrivateChannel
        IReadOnlyCollection<IUser> IPrivateChannel.Recipients => Recipients;
//IMessageChannel
async Task<IMessage> IMessageChannel.GetMessageAsync(ulong id, CacheMode mode, RequestOptions options)
{
    // A REST entity has no cache; unless a download is allowed, report a miss.
    if (mode != CacheMode.AllowDownload)
        return null;
    return await GetMessageAsync(id, options).ConfigureAwait(false);
}
IAsyncEnumerable<IReadOnlyCollection<IMessage>> IMessageChannel.GetMessagesAsync(int limit, CacheMode mode, RequestOptions options)
{
    // Download when permitted; otherwise yield an empty paged sequence.
    return mode == CacheMode.AllowDownload
        ? GetMessagesAsync(limit, options)
        : AsyncEnumerable.Empty<IReadOnlyCollection<IMessage>>();
}
IAsyncEnumerable<IReadOnlyCollection<IMessage>> IMessageChannel.GetMessagesAsync(ulong fromMessageId, Direction dir, int limit, CacheMode mode, RequestOptions options)
{
    // Download when permitted; otherwise yield an empty paged sequence.
    return mode == CacheMode.AllowDownload
        ? GetMessagesAsync(fromMessageId, dir, limit, options)
        : AsyncEnumerable.Empty<IReadOnlyCollection<IMessage>>();
}
IAsyncEnumerable<IReadOnlyCollection<IMessage>> IMessageChannel.GetMessagesAsync(IMessage fromMessage, Direction dir, int limit, CacheMode mode, RequestOptions options)
{
    // Download when permitted; otherwise yield an empty paged sequence.
    return mode == CacheMode.AllowDownload
        ? GetMessagesAsync(fromMessage, dir, limit, options)
        : AsyncEnumerable.Empty<IReadOnlyCollection<IMessage>>();
}
// The members below bridge the IMessageChannel interface to the strongly-typed
// public methods declared earlier on this type.
async Task<IReadOnlyCollection<IMessage>> IMessageChannel.GetPinnedMessagesAsync(RequestOptions options)
    => await GetPinnedMessagesAsync(options).ConfigureAwait(false);
async Task<IUserMessage> IMessageChannel.SendFileAsync(string filePath, string text, bool isTTS, Embed embed, RequestOptions options, bool isSpoiler)
    => await SendFileAsync(filePath, text, isTTS, embed, options, isSpoiler).ConfigureAwait(false);
async Task<IUserMessage> IMessageChannel.SendFileAsync(Stream stream, string filename, string text, bool isTTS, Embed embed, RequestOptions options, bool isSpoiler)
    => await SendFileAsync(stream, filename, text, isTTS, embed, options, isSpoiler).ConfigureAwait(false);
async Task<IUserMessage> IMessageChannel.SendMessageAsync(string text, bool isTTS, Embed embed, RequestOptions options)
    => await SendMessageAsync(text, isTTS, embed, options).ConfigureAwait(false);
//IAudioChannel
/// <inheritdoc />
/// <exception cref="NotSupportedException">Connecting to a group channel is not supported.</exception>
Task<IAudioClient> IAudioChannel.ConnectAsync(bool selfDeaf, bool selfMute, bool external) { throw new NotSupportedException(); }
/// <exception cref="NotSupportedException">Voice is not supported on group channels, so there is nothing to disconnect.</exception>
Task IAudioChannel.DisconnectAsync() { throw new NotSupportedException(); }
//IChannel
// Resolves the user locally via GetUser; there is no download path for group-channel users.
Task<IUser> IChannel.GetUserAsync(ulong id, CacheMode mode, RequestOptions options)
    => Task.FromResult<IUser>(GetUser(id));
// Wraps the already-known users in a single-page async sequence.
IAsyncEnumerable<IReadOnlyCollection<IUser>> IChannel.GetUsersAsync(CacheMode mode, RequestOptions options)
    => ImmutableArray.Create<IReadOnlyCollection<IUser>>(Users).ToAsyncEnumerable();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Linq.Impl
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Linq.Expressions;
    using System.Reflection;
    using System.Text.RegularExpressions;

    /// <summary>
    /// MethodCall expression visitor: translates supported CLR method and property
    /// calls into equivalent SQL function text written to the visitor's result builder.
    /// </summary>
    internal static class MethodVisitor
    {
        /// <summary> Property visitors: maps CLR properties to SQL expression text. </summary>
        private static readonly Dictionary<MemberInfo, string> Properties = new Dictionary<MemberInfo, string>
        {
            // ReSharper disable AssignNullToNotNullAttribute
            {typeof(string).GetProperty("Length"), "length"},
            {typeof(DateTime).GetProperty("Year"), "year"},
            {typeof(DateTime).GetProperty("Month"), "month"},
            {typeof(DateTime).GetProperty("Day"), "day_of_month"},
            {typeof(DateTime).GetProperty("DayOfYear"), "day_of_year"},
            // SQL day_of_week is 1-based; .NET DayOfWeek is 0-based, hence the -1 offset.
            {typeof(DateTime).GetProperty("DayOfWeek"), "-1 + day_of_week"},
            {typeof(DateTime).GetProperty("Hour"), "hour"},
            {typeof(DateTime).GetProperty("Minute"), "minute"},
            {typeof(DateTime).GetProperty("Second"), "second"}
            // ReSharper restore AssignNullToNotNullAttribute
        };

        /// <summary> Method visit delegate: writes SQL for one method-call expression. </summary>
        private delegate void VisitMethodDelegate(MethodCallExpression expression, CacheQueryExpressionVisitor visitor);

        /// <summary>
        /// Delegates dictionary: maps supported CLR methods to their SQL emitters.
        /// Built from a list so duplicate-key mistakes fail fast in ToDictionary.
        /// </summary>
        private static readonly Dictionary<MethodInfo, VisitMethodDelegate> Delegates = new List
            <KeyValuePair<MethodInfo, VisitMethodDelegate>>
        {
            GetStringMethod("ToLower", new Type[0], GetFunc("lower")),
            GetStringMethod("ToUpper", new Type[0], GetFunc("upper")),
            // Contains/StartsWith/EndsWith become LIKE with the pattern built via SQL concatenation.
            GetStringMethod("Contains", new[] {typeof (string)}, (e, v) => VisitSqlLike(e, v, "'%' || ? || '%'")),
            GetStringMethod("StartsWith", new[] {typeof (string)}, (e, v) => VisitSqlLike(e, v, "? || '%'")),
            GetStringMethod("EndsWith", new[] {typeof (string)}, (e, v) => VisitSqlLike(e, v, "'%' || ?")),
            // SQL INSTR is 1-based while IndexOf is 0-based: adjust the result by -1.
            GetStringMethod("IndexOf", new[] {typeof (string)}, GetFunc("instr", -1)),
            GetStringMethod("IndexOf", new[] {typeof (string), typeof (int)}, GetFunc("instr", -1)),
            // SQL SUBSTRING start index is 1-based: adjust the first argument by +1.
            GetStringMethod("Substring", new[] {typeof (int)}, GetFunc("substring", 0, 1)),
            GetStringMethod("Substring", new[] {typeof (int), typeof (int)}, GetFunc("substring", 0, 1)),
            GetStringMethod("Trim", "trim"),
            GetParameterizedTrimMethod("Trim", "trim"),
            GetParameterizedTrimMethod("TrimStart", "ltrim"),
            GetParameterizedTrimMethod("TrimEnd", "rtrim"),
#if NETCOREAPP
            // .NET Core adds Trim(char) overloads that do not exist on full framework.
            GetCharTrimMethod("Trim", "trim"),
            GetCharTrimMethod("TrimStart", "ltrim"),
            GetCharTrimMethod("TrimEnd", "rtrim"),
#endif
            GetStringMethod("Replace", "replace", typeof(string), typeof(string)),
            GetStringMethod("PadLeft", "lpad", typeof (int)),
            GetStringMethod("PadLeft", "lpad", typeof (int), typeof (char)),
            GetStringMethod("PadRight", "rpad", typeof (int)),
            GetStringMethod("PadRight", "rpad", typeof (int), typeof (char)),
            GetStringMethod("Compare", new[] { typeof(string), typeof(string) }, (e, v) => VisitStringCompare(e, v, false)),
            GetStringMethod("Compare", new[] { typeof(string), typeof(string), typeof(bool) }, (e, v) => VisitStringCompare(e, v, GetStringCompareIgnoreCaseParameter(e.Arguments[2]))),
            GetRegexMethod("Replace", "regexp_replace", typeof (string), typeof (string), typeof (string)),
            GetRegexMethod("Replace", "regexp_replace", typeof (string), typeof (string), typeof (string),
                typeof(RegexOptions)),
            GetRegexMethod("IsMatch", "regexp_like", typeof (string), typeof (string)),
            GetRegexMethod("IsMatch", "regexp_like", typeof (string), typeof (string), typeof(RegexOptions)),
            // DateTime.ToString(format) -> FORMATDATETIME with fixed locale/timezone suffix.
            GetMethod(typeof (DateTime), "ToString", new[] {typeof (string)},
                (e, v) => VisitFunc(e, v, "formatdatetime", ", 'en', 'UTC'")),
            GetMathMethod("Abs", typeof (int)),
            GetMathMethod("Abs", typeof (long)),
            GetMathMethod("Abs", typeof (float)),
            GetMathMethod("Abs", typeof (double)),
            GetMathMethod("Abs", typeof (decimal)),
            GetMathMethod("Abs", typeof (sbyte)),
            GetMathMethod("Abs", typeof (short)),
            GetMathMethod("Acos", typeof (double)),
            GetMathMethod("Asin", typeof (double)),
            GetMathMethod("Atan", typeof (double)),
            GetMathMethod("Atan2", typeof (double), typeof (double)),
            GetMathMethod("Ceiling", typeof (double)),
            GetMathMethod("Ceiling", typeof (decimal)),
            GetMathMethod("Cos", typeof (double)),
            GetMathMethod("Cosh", typeof (double)),
            GetMathMethod("Exp", typeof (double)),
            GetMathMethod("Floor", typeof (double)),
            GetMathMethod("Floor", typeof (decimal)),
            GetMathMethod("Log", typeof (double)),
            GetMathMethod("Log10", typeof (double)),
            // Math.Pow maps to the differently-named SQL POWER function.
            GetMathMethod("Pow", "Power", typeof (double), typeof (double)),
            GetMathMethod("Round", typeof (double)),
            GetMathMethod("Round", typeof (double), typeof (int)),
            GetMathMethod("Round", typeof (decimal)),
            GetMathMethod("Round", typeof (decimal), typeof (int)),
            GetMathMethod("Sign", typeof (double)),
            GetMathMethod("Sign", typeof (decimal)),
            GetMathMethod("Sign", typeof (float)),
            GetMathMethod("Sign", typeof (int)),
            GetMathMethod("Sign", typeof (long)),
            GetMathMethod("Sign", typeof (short)),
            GetMathMethod("Sign", typeof (sbyte)),
            GetMathMethod("Sin", typeof (double)),
            GetMathMethod("Sinh", typeof (double)),
            GetMathMethod("Sqrt", typeof (double)),
            GetMathMethod("Tan", typeof (double)),
            GetMathMethod("Tanh", typeof (double)),
            GetMathMethod("Truncate", typeof (double)),
            GetMathMethod("Truncate", typeof (decimal)),
        }.ToDictionary(x => x.Key, x => x.Value);

        /// <summary> RegexOptions transformations: maps supported flags to SQL regex flag characters. </summary>
        private static readonly Dictionary<RegexOptions, string> RegexOptionFlags = new Dictionary<RegexOptions, string>
        {
            { RegexOptions.IgnoreCase, "i" },
            { RegexOptions.Multiline, "m" }
        };

        /// <summary>
        /// Visits the property call expression.
        /// Emits "func(target)" for mapped properties.
        /// </summary>
        /// <param name="expression">Property access expression.</param>
        /// <param name="visitor">Owning query visitor whose builder receives the SQL.</param>
        /// <returns>True if the property is supported and was emitted; otherwise false.</returns>
        public static bool VisitPropertyCall(MemberExpression expression, CacheQueryExpressionVisitor visitor)
        {
            string funcName;

            if (!Properties.TryGetValue(expression.Member, out funcName))
                return false;

            visitor.ResultBuilder.Append(funcName).Append('(');

            visitor.Visit(expression.Expression);

            visitor.ResultBuilder.Append(')');

            return true;
        }

        /// <summary>
        /// Visits the method call expression.
        /// </summary>
        /// <exception cref="NotSupportedException">The method has no registered SQL translation.</exception>
        public static void VisitMethodCall(MethodCallExpression expression, CacheQueryExpressionVisitor visitor)
        {
            var mtd = expression.Method;

            VisitMethodDelegate del;

            if (!Delegates.TryGetValue(mtd, out del))
                throw new NotSupportedException(string.Format("Method not supported: {0}.({1})",
                    mtd.DeclaringType == null ? "static" : mtd.DeclaringType.FullName, mtd));

            del(expression, visitor);
        }

        /// <summary>
        /// Visits the constant call expression.
        /// Converts a constant RegexOptions value into its SQL flag string parameter.
        /// </summary>
        /// <returns>True if the constant was a RegexOptions value and was handled; otherwise false.</returns>
        /// <exception cref="NotSupportedException">An option other than IgnoreCase/Multiline was specified.</exception>
        public static bool VisitConstantCall(ConstantExpression expression, CacheQueryExpressionVisitor visitor)
        {
            if (expression.Type != typeof(RegexOptions))
            {
                return false;
            }

            var regexOptions = expression.Value as RegexOptions? ?? RegexOptions.None;
            var result = string.Empty;

            // Translate each supported flag and clear it; anything left over is unsupported.
            foreach (var option in RegexOptionFlags)
            {
                if (regexOptions.HasFlag(option.Key))
                {
                    result += option.Value;
                    regexOptions &= ~option.Key;
                }
            }

            if (regexOptions != RegexOptions.None)
            {
                throw new NotSupportedException(string.Format("RegexOptions.{0} is not supported", regexOptions));
            }

            visitor.AppendParameter(result);

            return true;
        }

        /// <summary>
        /// Gets the function.
        /// </summary>
        /// <param name="func">SQL function name.</param>
        /// <param name="adjust">Per-position numeric adjustments (see AppendAdjustment).</param>
        private static VisitMethodDelegate GetFunc(string func, params int[] adjust)
        {
            return (e, v) => VisitFunc(e, v, func, null, adjust);
        }

        /// <summary>
        /// Visits the instance function.
        /// Emits "func(target, arg1, ..., suffix)" with optional per-argument adjustments.
        /// </summary>
        private static void VisitFunc(MethodCallExpression expression, CacheQueryExpressionVisitor visitor,
            string func, string suffix, params int[] adjust)
        {
            visitor.ResultBuilder.Append(func).Append('(');

            var isInstanceMethod = expression.Object != null;

            // For instance methods the target object becomes the first SQL argument.
            if (isInstanceMethod)
                visitor.Visit(expression.Object);

            for (int i= 0; i < expression.Arguments.Count; i++)
            {
                var arg = expression.Arguments[i];

                if (isInstanceMethod || (i > 0))
                    visitor.ResultBuilder.Append(", ");

                visitor.Visit(arg);

                // Adjustment index i + 1 applies to argument i; index 0 applies to the whole result.
                AppendAdjustment(visitor, adjust, i + 1);
            }

            visitor.ResultBuilder.Append(suffix).Append(')');

            AppendAdjustment(visitor, adjust, 0);
        }

        /// <summary>
        /// Visits the instance function for Trim specific handling.
        /// Only a single constant trim character is supported (SQL TRIM takes one).
        /// </summary>
        /// <exception cref="NotSupportedException">The trim argument is not a single char.</exception>
        private static void VisitParameterizedTrimFunc(MethodCallExpression expression,
            CacheQueryExpressionVisitor visitor, string func)
        {
            visitor.ResultBuilder.Append(func).Append('(');

            visitor.Visit(expression.Object);

            var arg = expression.Arguments[0];

            if (arg != null)
            {
                visitor.ResultBuilder.Append(", ");

                if (arg.NodeType == ExpressionType.Constant)
                {
                    var constant = (ConstantExpression) arg;

                    if (constant.Value is char)
                    {
                        // Trim(char) overload: pass the char straight through as a parameter.
                        visitor.AppendParameter((char) constant.Value);
                    }
                    else
                    {
                        // Trim(char[]) overload: accept only a one-element sequence.
                        var args = constant.Value as IEnumerable<char>;

                        if (args == null)
                        {
                            throw new NotSupportedException("String.Trim function only supports IEnumerable<char>");
                        }

                        var enumeratedArgs = args.ToArray();

                        if (enumeratedArgs.Length != 1)
                        {
                            throw new NotSupportedException("String.Trim function only supports a single argument: " +
                                                            expression);
                        }

                        visitor.AppendParameter(enumeratedArgs[0]);
                    }
                }
                else
                {
                    // Non-constant argument: let the visitor translate the expression.
                    visitor.Visit(arg);
                }
            }

            visitor.ResultBuilder.Append(')');
        }

        /// <summary>
        /// Appends the adjustment.
        /// Writes " + n" or " -n" after the preceding SQL fragment when a non-zero
        /// adjustment is registered for the given position.
        /// </summary>
        private static void AppendAdjustment(CacheQueryExpressionVisitor visitor, int[] adjust, int idx)
        {
            if (idx < adjust.Length)
            {
                var delta = adjust[idx];

                if (delta > 0)
                    visitor.ResultBuilder.AppendFormat(" + {0}", delta);
                else if (delta < 0)
                    visitor.ResultBuilder.AppendFormat(" {0}", delta);
            }
        }

        /// <summary>
        /// Visits the SQL like expression.
        /// Emits "(target like PATTERN)" and adds the search value as a query parameter.
        /// </summary>
        private static void VisitSqlLike(MethodCallExpression expression, CacheQueryExpressionVisitor visitor,
            string likeFormat)
        {
            visitor.ResultBuilder.Append('(');

            visitor.Visit(expression.Object);

            visitor.ResultBuilder.AppendFormat(" like {0}) ", likeFormat);

            // Constant arguments are used directly; anything else is evaluated client-side.
            var arg = expression.Arguments[0] as ConstantExpression;

            var paramValue = arg != null
                ? arg.Value
                : ExpressionWalker.EvaluateExpression<object>(expression.Arguments[0]);

            visitor.Parameters.Add(paramValue);
        }

        /// <summary>
        /// Get IgnoreCase parameter for string.Compare method
        /// </summary>
        /// <exception cref="NotSupportedException">The argument is not a constant bool.</exception>
        private static bool GetStringCompareIgnoreCaseParameter(Expression expression)
        {
            var constant = expression as ConstantExpression;
            if (constant != null)
            {
                if (constant.Value is bool)
                {
                    return (bool)constant.Value;
                }
            }

            throw new NotSupportedException(
                "Parameter 'ignoreCase' from 'string.Compare method should be specified as a constant expression");
        }

        /// <summary>
        /// Visits string.Compare method
        /// </summary>
        private static void VisitStringCompare(MethodCallExpression expression, CacheQueryExpressionVisitor visitor, bool ignoreCase)
        {
            // Ex: nvl2(?, casewhen(_T0.NAME = ?, 0, casewhen(_T0.NAME >= ?, 1, -1)), 1)
            // nvl2 handles a null second operand (compare against null yields 1).
            visitor.ResultBuilder.Append("nvl2(");
            visitor.Visit(expression.Arguments[1]);
            visitor.ResultBuilder.Append(", casewhen(");
            VisitArg(visitor, expression, 0, ignoreCase);
            visitor.ResultBuilder.Append(" = ");
            VisitArg(visitor, expression, 1, ignoreCase);
            visitor.ResultBuilder.Append(", 0, casewhen(");
            VisitArg(visitor, expression, 0, ignoreCase);
            visitor.ResultBuilder.Append(" >= ");
            VisitArg(visitor, expression, 1, ignoreCase);
            visitor.ResultBuilder.Append(", 1, -1)), 1)");
        }

        /// <summary>
        /// Visits member expression argument.
        /// Optionally wraps the argument in lower() for case-insensitive comparison.
        /// </summary>
        private static void VisitArg(CacheQueryExpressionVisitor visitor, MethodCallExpression expression, int idx,
            bool lower)
        {
            if (lower)
                visitor.ResultBuilder.Append("lower(");

            visitor.Visit(expression.Arguments[idx]);

            if (lower)
                visitor.ResultBuilder.Append(')');
        }

        /// <summary>
        /// Gets the method.
        /// When no delegate is given, the SQL function name defaults to the CLR method name.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetMethod(Type type, string name,
            Type[] argTypes = null, VisitMethodDelegate del = null)
        {
            var method = argTypes == null ? type.GetMethod(name) : type.GetMethod(name, argTypes);

            return new KeyValuePair<MethodInfo, VisitMethodDelegate>(method, del ?? GetFunc(name));
        }

        /// <summary>
        /// Gets the string method.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetStringMethod(string name,
            Type[] argTypes = null, VisitMethodDelegate del = null)
        {
            return GetMethod(typeof(string), name, argTypes, del);
        }

        /// <summary>
        /// Gets the string method.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetStringMethod(string name, string sqlName,
            params Type[] argTypes)
        {
            return GetMethod(typeof(string), name, argTypes, GetFunc(sqlName));
        }

        /// <summary>
        /// Gets the Regex method.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetRegexMethod(string name, string sqlName,
            params Type[] argTypes)
        {
            return GetMethod(typeof(Regex), name, argTypes, GetFunc(sqlName));
        }

        /// <summary>
        /// Gets string parameterized Trim(TrimStart, TrimEnd) method.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetParameterizedTrimMethod(string name,
            string sqlName)
        {
            return GetMethod(typeof(string), name, new[] {typeof(char[])},
                (e, v) => VisitParameterizedTrimFunc(e, v, sqlName));
        }

#if NETCOREAPP
        /// <summary>
        /// Gets string parameterized Trim(TrimStart, TrimEnd) method that takes a single char.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetCharTrimMethod(string name,
            string sqlName)
        {
            return GetMethod(typeof(string), name, new[] {typeof(char)},
                (e, v) => VisitParameterizedTrimFunc(e, v, sqlName));
        }
#endif

        /// <summary>
        /// Gets the math method.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetMathMethod(string name, string sqlName,
            params Type[] argTypes)
        {
            return GetMethod(typeof(Math), name, argTypes, GetFunc(sqlName));
        }

        /// <summary>
        /// Gets the math method.
        /// </summary>
        private static KeyValuePair<MethodInfo, VisitMethodDelegate> GetMathMethod(string name, params Type[] argTypes)
        {
            return GetMathMethod(name, name, argTypes);
        }
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// Adapted from mandelbrot C# .NET Core #7 program
// http://benchmarksgame.alioth.debian.org/u64q/program.php?test=mandelbrot&lang=csharpcore&id=7
// aka (as of 2017-10-02) rev 1.2 of https://alioth.debian.org/scm/viewvc.php/benchmarksgame/bench/mandelbrot/mandelbrot.csharp-7.csharp?root=benchmarksgame&view=log
// Best-scoring C# .NET Core version as of 2017-10-02
/* The Computer Language Benchmarks Game
http://benchmarksgame.alioth.debian.org/
started with Java #2 program (Krause/Whipkey/Bennet/AhnTran/Enotus/Stalcup)
adapted for C# by Jan de Vaan
simplified and optimised to use TPL by Anthony Lloyd
simplified to compute Cib alongside Crb by Tanner Gooding
optimized to use Vector<double> by Tanner Gooding
*/
using System;
using System.IO;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Microsoft.Xunit.Performance;
using Xunit;
[assembly: OptimizeForBenchmarks]
namespace BenchmarksGame
{
    public class MandelBrot_7
    {
        // Vector<double>.Count is treated as a constant by the JIT, don't bother
        // storing it in a temporary variable anywhere below.

        /// <summary>
        /// Computes one output byte (8 horizontal pixels) of the Mandelbrot bitmap.
        /// Two pixels are iterated per loop pass using the first two vector lanes.
        /// </summary>
        /// <param name="pCrb">Pointer to the precomputed real-axis coordinates.</param>
        /// <param name="Ciby">Imaginary-axis coordinate for this row.</param>
        /// <param name="x">Starting pixel index (multiple of 8) within the row.</param>
        /// <param name="y">Row index (unused in the computation itself).</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static unsafe byte GetByte(double* pCrb, double Ciby, int x, int y)
        {
            // This currently does not do anything special for 'Count > 2'
            var res = 0;

            for (var i = 0; i < 8; i += 2)
            {
                var Crbx = Unsafe.Read<Vector<double>>(pCrb + x + i);
                var Zr = Crbx;

                var vCiby = new Vector<double>(Ciby);
                var Zi = vCiby;

                // 'b' accumulates escape flags for the two lanes: bit 1 = lane 0, bit 0 = lane 1.
                var b = 0;
                var j = 49;      // iteration budget before a point is considered inside the set

                do
                {
                    // Standard Mandelbrot step: Z = Z^2 + C (done lane-wise).
                    var nZr = Zr * Zr - Zi * Zi + Crbx;
                    Zi = Zr * Zi + Zr * Zi + vCiby;
                    Zr = nZr;

                    var t = Zr * Zr + Zi * Zi;

                    // |Z|^2 > 4 means the lane has escaped; stop early once both lanes have.
                    if (t[0] > 4)
                    {
                        b |= 2;

                        if (b == 3)
                        {
                            break;
                        }
                    }

                    if (t[1] > 4)
                    {
                        b |= 1;

                        if (b == 3)
                        {
                            break;
                        }
                    }
                } while (--j > 0);

                res = (res << 2) + b;
            }

            // res ^ -1 flips every bit, so set bits mark non-escaped (in-set) pixels.
            return (byte)(res ^ -1);
        }

        /// <summary>
        /// Console entry point: renders the bitmap, writes it as PBM ("P4") to stdout,
        /// and returns 100 only when the output matches the known checksum for size 80.
        /// </summary>
        public static int Main(string[] args)
        {
            var size = (args.Length > 0) ? int.Parse(args[0]) : 80;
            var lineLength = size >> 3;    // 8 pixels per output byte

            var data = DoBench(size, lineLength);
            var dataLength = size * lineLength;

            Console.Out.Write("P4\n{0} {0}\n", size);
            Console.OpenStandardOutput().Write(data, 0, dataLength);

            return MatchesChecksum(data, dataLength, "3B-EF-65-05-1D-39-7F-9B-96-8D-EF-98-BF-06-CE-74") ? 100 : -1;
        }

        // Commented out data left in source to provide checksums for each case

        [Benchmark(InnerIterationCount = 7)]
        //[InlineData(1000, 125, "B2-13-51-CE-B0-29-2C-4E-75-5E-91-19-18-E4-0C-D9")]
        //[InlineData(2000, 250, "5A-21-55-9B-7B-18-2F-34-9B-33-C5-F9-B5-2C-40-56")]
        //[InlineData(3000, 375, "E5-82-85-0A-3C-89-69-B1-A8-21-63-52-75-B3-C8-33")]
        [InlineData(4000, 500, "C7-E6-66-43-66-73-F8-A8-D3-B4-D7-97-2F-FC-A1-D3")]
        //[InlineData(5000, 625, "6D-36-F1-F6-37-8F-34-EB-52-F9-2D-11-89-12-B2-2F")]
        //[InlineData(6000, 750, "8B-05-78-EB-2E-0E-98-F2-C7-39-76-ED-0F-A9-D2-B8")]
        //[InlineData(7000, 875, "01-F8-F2-2A-AB-70-C7-BA-E3-64-19-E7-D2-84-DF-57")]
        //[InlineData(8000, 1000, "C8-ED-D7-FB-65-66-3A-D9-C6-04-9E-96-E8-CA-4F-2C")]
        public static void Bench(int size, int lineLength, string checksum)
        {
            byte[] bytes = null;

            Benchmark.Iterate(() => {
                bytes = DoBench(size, lineLength);
            });

            // Validate correctness against the known-good MD5 of the bitmap.
            Assert.True(MatchesChecksum(bytes, size * lineLength, checksum));
        }

        /// <summary>
        /// Compares the MD5 of the first <paramref name="length"/> bytes against
        /// a BitConverter-formatted hex checksum string.
        /// </summary>
        static bool MatchesChecksum(byte[] bytes, int length, string checksum)
        {
            using (var md5 = MD5.Create())
            {
                byte[] hash = md5.ComputeHash(bytes, 0, length);
                return (checksum == BitConverter.ToString(hash));
            }
        }

        /// <summary>
        /// Renders the full Mandelbrot bitmap: precomputes the per-column (Crb) and
        /// per-row (Cib) coordinates, then fills each row in parallel.
        /// </summary>
        static unsafe byte[] DoBench(int size, int lineLength)
        {
            // Pad the buffers so the vector reads in GetByte stay in bounds past 'size'.
            // NOTE(review): '&= ~(Vector<double>.Count * 8)' clears only that single bit
            // rather than rounding to a multiple; the net effect here is extra headroom
            // rather than exact alignment — confirm intended.
            var adjustedSize = size + (Vector<double>.Count * 8);
            adjustedSize &= ~(Vector<double>.Count * 8);

            var Crb = new double[adjustedSize];
            var Cib = new double[adjustedSize];

            fixed (double* pCrb = &Crb[0])
            fixed (double* pCib = &Cib[0])
            {
                var invN = new Vector<double>(2.0 / size);

                var onePtFive = new Vector<double>(1.5);
                var step = new Vector<double>(Vector<double>.Count);

                // 'value' holds the lane indices 0..Count-1 for the initial vector.
                Vector<double> value;

                if (Vector<double>.Count == 2)
                {
                    // Software implementation should hit this path too
                    value = new Vector<double>(new double[] {
                        0, 1
                    });
                }
                else if (Vector<double>.Count == 4)
                {
                    value = new Vector<double>(new double[] {
                        0, 1, 2, 3
                    });
                }
                else
                {
                    // No hardware supports 'Count == 8' today; kept as a fallback.
                    value = new Vector<double>(new double[] {
                        0, 1, 2, 3, 4, 5, 6, 7
                    });
                }

                // Map pixel indices to complex-plane coordinates:
                // real in [-1.5, ...), imaginary in [-1, ...).
                for (var i = 0; i < size; i += Vector<double>.Count)
                {
                    var t = value * invN;

                    Unsafe.Write(pCrb + i, t - onePtFive);
                    Unsafe.Write(pCib + i, t - Vector<double>.One);

                    value += step;
                }
            }

            var data = new byte[adjustedSize * lineLength];

            fixed (double* pCrb = &Crb[0])
            {
                // C# doesn't let us pass a pinned variable to a lambda directly
                var _Crb = pCrb;

                Parallel.For(0, size, y => {
                    var offset = y * lineLength;

                    for (var x = 0; x < lineLength; x++)
                    {
                        data[offset + x] = GetByte(_Crb, Cib[y], x * 8, y);
                    }
                });
            }

            return data;
        }
    }
}
| |
/************************************************************************************
Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
http://www.oculusvr.com/licenses/LICENSE-3.2
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// OVRCrosshair is a component that adds a stereoscopic cross-hair into a scene.
/// </summary>
public class OVRCrosshair
{
    #region Variables
    /// <summary>Texture drawn for the crosshair cursor.</summary>
    public Texture ImageCrosshair = null;
    /// <summary>Camera rig used to anchor the crosshair ray.</summary>
    public OVRCameraRig CameraController = null;
    /// <summary>Optional player controller whose mouse rotation is suppressed while the cursor is visible.</summary>
    public OVRPlayerController PlayerController = null;
    /// <summary>Seconds for a full fade in/out.</summary>
    public float FadeTime = 0.3f;
    /// <summary>Maximum alpha of the crosshair when fully faded in.</summary>
    public float FadeScale = 0.6f;
    /// <summary>Distance (world units) of the collision ray used to hide the crosshair.</summary>
    public float CrosshairDistance = 1.0f;
    /// <summary>Key that toggles crosshair visibility.</summary>
    public KeyCode CrosshairKey = KeyCode.C;

    // Dead-zone margins (pixels) at the screen edges where mouse rotation resumes.
    private float DeadZoneX = 400.0f;
    private float DeadZoneY = 75.0f;

    // Mouse-to-cursor movement scaling.
    private float ScaleSpeedX = 7.0f;
    private float ScaleSpeedY = 7.0f;

    private bool DisplayCrosshair;          // toggled by CrosshairKey
    private bool CollisionWithGeometry;     // true while geometry blocks the crosshair
    private float FadeVal;                  // current fade amount in [0, 1]
    private Transform UIAnchor;             // center-eye anchor from the camera rig

    // Cursor position in screen pixels.
    private float XL = 0.0f;
    private float YL = 0.0f;

    // Screen dimensions cached at Init; used consistently for clamping below.
    private float ScreenWidth = 1280.0f;
    private float ScreenHeight = 800.0f;
    #endregion

    #region Public Functions
    /// <summary>
    /// Sets the crosshair texture.
    /// </summary>
    /// <param name="image">Image.</param>
    public void SetCrosshairTexture(ref Texture image)
    {
        ImageCrosshair = image;
    }

    /// <summary>
    /// Sets the OVR camera controller and caches its center-eye anchor.
    /// </summary>
    /// <param name="cameraController">Camera controller.</param>
    public void SetOVRCameraController(ref OVRCameraRig cameraController)
    {
        CameraController = cameraController;
        UIAnchor = CameraController.centerEyeAnchor;
    }

    /// <summary>
    /// Sets the OVR player controller.
    /// </summary>
    /// <param name="playerController">Player controller.</param>
    public void SetOVRPlayerController(ref OVRPlayerController playerController)
    {
        PlayerController = playerController;
    }

    /// <summary>
    /// Determines whether the crosshair is visible (any non-zero fade counts).
    /// </summary>
    /// <returns><c>true</c> if the crosshair is visible; otherwise, <c>false</c>.</returns>
    public bool IsCrosshairVisible()
    {
        return FadeVal > 0.0f;
    }

    /// <summary>
    /// Init this instance: resets state, caches screen size, and centers the cursor.
    /// </summary>
    public void Init()
    {
        DisplayCrosshair = false;
        CollisionWithGeometry = false;
        FadeVal = 0.0f;

        ScreenWidth = Screen.width;
        ScreenHeight = Screen.height;

        // Initialize screen location of cursor
        XL = ScreenWidth * 0.5f;
        YL = ScreenHeight * 0.5f;
    }

    /// <summary>
    /// Updates the crosshair. Call once per frame from Update().
    /// </summary>
    public void UpdateCrosshair()
    {
        if (ShouldDisplayCrosshair())
        {
            // Do not do these tests within OnGUI since they will be called twice
            CollisionWithGeometryCheck();
        }
    }

    /// <summary>
    /// The GUI crosshair event: fades, moves, clamps, and draws the cursor.
    /// Call from OnGUI().
    /// </summary>
    public void OnGUICrosshair()
    {
        // Fade in while displayed and unobstructed; fade out otherwise.
        if ((DisplayCrosshair == true) && (CollisionWithGeometry == false))
            FadeVal += Time.deltaTime / FadeTime;
        else
            FadeVal -= Time.deltaTime / FadeTime;

        FadeVal = Mathf.Clamp(FadeVal, 0.0f, 1.0f);

        // Check to see if crosshair influences mouse rotation
        if (PlayerController != null)
            PlayerController.SetSkipMouseRotation(false);

        if ((ImageCrosshair != null) && (FadeVal != 0.0f))
        {
            // Assume cursor is on-screen (unless it goes into the dead-zone)
            // Other systems will check this to see if it is false for example
            // allowing rotation to take place
            if (PlayerController != null)
                PlayerController.SetSkipMouseRotation(true);

            GUI.color = new Color(1, 1, 1, FadeVal * FadeScale);

            // Calculate X
            XL += Input.GetAxis("Mouse X") * ScaleSpeedX;
            if (XL < DeadZoneX)
            {
                if (PlayerController != null)
                    PlayerController.SetSkipMouseRotation(false);

                XL = DeadZoneX - 0.001f;
            }
            // Fix: compare against the cached ScreenWidth (used for the clamp below),
            // not the live Screen.width — the Y axis already uses the cached value.
            else if (XL > (ScreenWidth - DeadZoneX))
            {
                if (PlayerController != null)
                    PlayerController.SetSkipMouseRotation(false);

                XL = ScreenWidth - DeadZoneX + 0.001f;
            }

            // Calculate Y
            YL -= Input.GetAxis("Mouse Y") * ScaleSpeedY;
            if (YL < DeadZoneY)
            {
                //CursorOnScreen = false;
                if (YL < 0.0f) YL = 0.0f;
            }
            else if (YL > ScreenHeight - DeadZoneY)
            {
                //CursorOnScreen = false;
                if (YL > ScreenHeight) YL = ScreenHeight;
            }

            // Finally draw cursor
            bool skipMouseRotation = true;
            if (PlayerController != null)
                PlayerController.GetSkipMouseRotation(ref skipMouseRotation);

            if (skipMouseRotation == true)
            {
                // Draw the texture centered on the cursor position.
                GUI.DrawTexture(new Rect(XL - (ImageCrosshair.width * 0.5f),
                                         YL - (ImageCrosshair.height * 0.5f),
                                         ImageCrosshair.width,
                                         ImageCrosshair.height),
                                ImageCrosshair);
            }

            GUI.color = Color.white;
        }
    }
    #endregion

    #region Private Functions
    /// <summary>
    /// Determines whether the crosshair should be displayed, toggling on key press.
    /// </summary>
    /// <returns><c>true</c> if the crosshair should be displayed, <c>false</c> otherwise.</returns>
    bool ShouldDisplayCrosshair()
    {
        if (Input.GetKeyDown(CrosshairKey))
        {
            if (DisplayCrosshair == false)
            {
                DisplayCrosshair = true;

                // Always initialize screen location of cursor to center
                XL = ScreenWidth * 0.5f;
                YL = ScreenHeight * 0.5f;
            }
            else
                DisplayCrosshair = false;
        }

        return DisplayCrosshair;
    }

    /// <summary>
    /// Do a collision raycast on geometry for the crosshair; non-trigger hits
    /// within CrosshairDistance mark the crosshair as blocked.
    /// </summary>
    /// <returns><c>true</c> if geometry blocks the crosshair, <c>false</c> otherwise.</returns>
    bool CollisionWithGeometryCheck()
    {
        CollisionWithGeometry = false;

        Vector3 startPos = UIAnchor.position;
        Vector3 dir = Vector3.forward;
        dir = UIAnchor.rotation * dir;
        dir *= CrosshairDistance;
        Vector3 endPos = startPos + dir;

        RaycastHit hit;
        if (Physics.Linecast(startPos, endPos, out hit))
        {
            if (!hit.collider.isTrigger)
            {
                CollisionWithGeometry = true;
            }
        }

        return CollisionWithGeometry;
    }
    #endregion
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using Xunit;
namespace System.Collections.Tests
{
/// <summary>
/// Contains tests that ensure the correctness of any class that implements the nongeneric
/// IList interface
/// </summary>
public abstract class IList_NonGeneric_Tests : ICollection_NonGeneric_Tests
{
#region IList Helper methods
/// <summary>
/// Creates an instance of an IList that can be used for testing.
/// Implemented by the concrete test class for each collection type under test.
/// </summary>
/// <returns>An instance of an IList that can be used for testing.</returns>
protected abstract IList NonGenericIListFactory();
/// <summary>
/// Creates an instance of an IList that can be used for testing.
/// </summary>
/// <param name="count">The number of unique items that the returned IList contains.</param>
/// <returns>An instance of an IList that can be used for testing.</returns>
protected virtual IList NonGenericIListFactory(int count)
{
    // Build an empty list via the derived type's factory, then fill it with
    // 'count' unique, valid items.
    IList collection = NonGenericIListFactory();
    AddToCollection(collection, count);
    return collection;
}
protected virtual void AddToCollection(IList collection, int numberOfItemsToAdd)
{
    // Generate seeded items until the collection reaches the requested size,
    // skipping values already present and values the collection treats as invalid.
    int seed = 9600;
    while (collection.Count < numberOfItemsToAdd)
    {
        object item;
        do
        {
            item = CreateT(seed++);
        }
        while (collection.Contains(item) || InvalidValues.Contains(item));

        collection.Add(item);
    }
}
/// <summary>
/// Creates an object that is dependent on the seed given. The object may be either
/// a value type or a reference type, chosen based on the value of the seed.
/// </summary>
protected virtual object CreateT(int seed)
{
    // Both branches seed the generator identically, so results are deterministic
    // for a given seed.
    Random rand = new Random(seed);

    if (seed % 2 != 0)
    {
        // Odd seed: produce a value type (int).
        return rand.Next();
    }

    // Even seed: produce a reference type (a base64 string built from
    // seed % 10 + 5 random bytes).
    byte[] bytes = new byte[seed % 10 + 5];
    rand.NextBytes(bytes);
    return Convert.ToBase64String(bytes);
}
// Whether the IList under test is expected to report IsFixedSize == true.
protected virtual bool ExpectedFixedSize => false;
// Exception type expected from the indexer when given an invalid index.
protected virtual Type IList_NonGeneric_Item_InvalidIndex_ThrowType => typeof(ArgumentOutOfRangeException);
// Whether Remove of a non-existent element is expected to throw (rather than no-op).
protected virtual bool IList_NonGeneric_RemoveNonExistent_Throws => false;
/// <summary>
/// Controls the expected behavior when Current is read from an enumerator that has
/// moved past the end of the list and the list has then been extended with new items.
/// Tests are included to cover two behavioral scenarios:
/// - Throwing an InvalidOperationException
/// - Returning an undefined value.
///
/// If this property is set to true, the tests ensure that the exception is thrown.
/// The default value is the same as Enumerator_Current_UndefinedOperation_Throws.
/// </summary>
protected virtual bool IList_CurrentAfterAdd_Throws => Enumerator_Current_UndefinedOperation_Throws;
#endregion
#region ICollection Helper Methods
// An IList is an ICollection, so the nongeneric ICollection tests reuse the IList factories.
protected override ICollection NonGenericICollectionFactory() => NonGenericIListFactory();
protected override ICollection NonGenericICollectionFactory(int count) => NonGenericIListFactory(count);
/// <summary>
/// Returns a set of ModifyEnumerable delegates that modify the enumerable passed to them.
/// </summary>
/// <summary>
/// Yields one mutation delegate per requested operation (Add; Insert and item-set;
/// Remove and RemoveAt; Clear). Each delegate mutates the enumerable only when the
/// list's capabilities allow it, and reports whether it actually modified anything.
/// </summary>
protected override IEnumerable<ModifyEnumerable> GetModifyEnumerables(ModifyOperation operations)
{
if (operations.HasFlag(ModifyOperation.Add))
{
yield return (IEnumerable enumerable) =>
{
IList list = (IList)enumerable;
if (list.IsFixedSize || list.IsReadOnly)
return false;
list.Add(CreateT(2344));
return true;
};
}
if (operations.HasFlag(ModifyOperation.Insert))
{
yield return (IEnumerable enumerable) =>
{
IList list = (IList)enumerable;
if (list.IsFixedSize || list.IsReadOnly)
return false;
list.Insert(0, CreateT(12));
return true;
};
// Item-set counts as an Insert-style modification for enumerator-invalidation purposes.
yield return (IEnumerable enumerable) =>
{
IList list = (IList)enumerable;
if (list.Count == 0 || list.IsReadOnly)
return false;
list[0] = CreateT(12);
return true;
};
}
if (operations.HasFlag(ModifyOperation.Remove))
{
yield return (IEnumerable enumerable) =>
{
IList list = (IList)enumerable;
if (list.Count == 0 || list.IsFixedSize || list.IsReadOnly)
return false;
list.Remove(list[0]);
return true;
};
yield return (IEnumerable enumerable) =>
{
IList list = (IList)enumerable;
if (list.Count == 0 || list.IsFixedSize || list.IsReadOnly)
return false;
list.RemoveAt(0);
return true;
};
}
if (operations.HasFlag(ModifyOperation.Clear))
{
yield return (IEnumerable enumerable) =>
{
IList list = (IList)enumerable;
if (list.Count == 0 || list.IsFixedSize || list.IsReadOnly)
return false;
list.Clear();
return true;
};
}
}
// Route ICollection population through the IList-specific AddToCollection above.
protected override void AddToCollection(ICollection collection, int numberOfItemsToAdd) => AddToCollection((IList)collection, numberOfItemsToAdd);
#endregion
#region IsFixedSize
// IsFixedSize must match the capability declared by the derived test class.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IsFixedSize_Validity(int count)
{
IList collection = NonGenericIListFactory(count);
Assert.Equal(ExpectedFixedSize, collection.IsFixedSize);
}
#endregion
#region IsReadOnly
// IsReadOnly must match the capability declared by the derived test class.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IsReadOnly_Validity(int count)
{
IList collection = NonGenericIListFactory(count);
Assert.Equal(IsReadOnly, collection.IsReadOnly);
}
#endregion
#region Item Getter
// Indexer get with a negative index throws the configured out-of-range exception type.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemGet_NegativeIndex_ThrowsException(int count)
{
IList list = NonGenericIListFactory(count);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[-1]);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[int.MinValue]);
}
// Indexer get at or beyond Count throws the configured out-of-range exception type.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemGet_IndexGreaterThanListCount_ThrowsException(int count)
{
IList list = NonGenericIListFactory(count);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[count]);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[count + 1]);
}
// Indexer get succeeds for every index in [0, Count).
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemGet_ValidGetWithinListBounds(int count)
{
IList list = NonGenericIListFactory(count);
object result;
Assert.All(Enumerable.Range(0, count), index => result = list[index]);
}
#endregion
#region Item Setter
// Indexer set with a negative index throws and leaves the list unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_NegativeIndex_ThrowsException(int count)
{
if (!IsReadOnly)
{
IList list = NonGenericIListFactory(count);
object validAdd = CreateT(0);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[-1] = validAdd);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[int.MinValue] = validAdd);
Assert.Equal(count, list.Count);
}
}
// Indexer set at or beyond Count throws and leaves the list unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_IndexGreaterThanListCount_ThrowsException(int count)
{
if (!IsReadOnly)
{
IList list = NonGenericIListFactory(count);
object validAdd = CreateT(0);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[count] = validAdd);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list[count + 1] = validAdd);
Assert.Equal(count, list.Count);
}
}
// Indexer set on a read-only list throws NotSupportedException and leaves the list unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_OnReadOnlyList(int count)
{
if (IsReadOnly)
{
IList list = NonGenericIListFactory(count);
Assert.Throws<NotSupportedException>(() => list[count / 2] = CreateT(321432));
Assert.Equal(count, list.Count);
}
}
// Setting index 0 of a writable list stores a non-null value.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_FirstItemToNonNull(int count)
{
if (count > 0 && !IsReadOnly)
{
IList list = NonGenericIListFactory(count);
object value = CreateT(123452);
list[0] = value;
Assert.Equal(value, list[0]);
}
}
// Setting index 0 of a writable, null-accepting list stores null.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_FirstItemToNull(int count)
{
if (count > 0 && !IsReadOnly && NullAllowed)
{
IList list = NonGenericIListFactory(count);
object value = null;
list[0] = value;
Assert.Equal(value, list[0]);
}
}
// Setting the last slot of a writable list stores the new (non-null) value.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_LastItemToNonNull(int count)
{
if (count > 0 && !IsReadOnly)
{
IList list = NonGenericIListFactory(count);
object value = CreateT(123452);
// The guard above guarantees count > 0, so the last index is simply count - 1
// (the previous "count > 0 ? count - 1 : 0" ternary had a dead branch).
int lastIndex = count - 1;
list[lastIndex] = value;
Assert.Equal(value, list[lastIndex]);
}
}
// Setting the last slot of a writable, null-accepting list stores null.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_LastItemToNull(int count)
{
if (count > 0 && !IsReadOnly && NullAllowed)
{
IList list = NonGenericIListFactory(count);
object value = null;
// The guard above guarantees count > 0, so the last index is simply count - 1
// (the previous "count > 0 ? count - 1 : 0" ternary had a dead branch).
int lastIndex = count - 1;
list[lastIndex] = value;
Assert.Equal(value, list[lastIndex]);
}
}
// A duplicate-tolerant list accepts the same value in two different slots.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_DuplicateValues(int count)
{
if (count >= 2 && !IsReadOnly && DuplicateValuesAllowed)
{
IList list = NonGenericIListFactory(count);
object value = CreateT(123452);
list[0] = value;
list[1] = value;
Assert.Equal(value, list[0]);
Assert.Equal(value, list[1]);
}
}
// Setting a slot to an invalid value (wrong type, etc.) throws ArgumentException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_ItemSet_InvalidValue(int count)
{
if (!IsReadOnly)
{
Assert.All(InvalidValues, value =>
{
IList list = NonGenericIListFactory(count);
Assert.Throws<ArgumentException>(() => list[count / 2] = value);
});
}
}
#endregion
#region Add
// Adding null to a growable, null-accepting list increments Count.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_Null(int count)
{
if (NullAllowed && !IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
collection.Add(null);
Assert.Equal(count + 1, collection.Count);
}
}
// Adding an invalid value is ignored: after adding count more valid items, Count is 2*count.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_InvalidValueToMiddleOfCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
Assert.All(InvalidValues, invalidValue =>
{
IList collection = NonGenericIListFactory(count);
collection.Add(invalidValue);
for (int i = 0; i < count; i++)
collection.Add(CreateT(i));
Assert.Equal(count * 2, collection.Count);
});
}
}
// Adding an invalid value to an empty list is ignored; only the valid items count.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_InvalidValueToBeginningOfCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
Assert.All(InvalidValues, invalidValue =>
{
IList collection = NonGenericIListFactory(0);
collection.Add(invalidValue);
for (int i = 0; i < count; i++)
collection.Add(CreateT(i));
Assert.Equal(count, collection.Count);
});
}
}
// Adding an invalid value to a populated list is ignored; Count is unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_InvalidValueToEndOfCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
Assert.All(InvalidValues, invalidValue =>
{
IList collection = NonGenericIListFactory(count);
collection.Add(invalidValue);
Assert.Equal(count, collection.Count);
});
}
}
// A duplicate-tolerant list accepts the same value twice via Add.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_DuplicateValue(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
if (DuplicateValuesAllowed)
{
IList collection = NonGenericIListFactory(count);
object duplicateValue = CreateT(700);
collection.Add(duplicateValue);
collection.Add(duplicateValue);
Assert.Equal(count + 2, collection.Count);
}
}
}
// A list remains usable for Add after being cleared.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_AfterCallingClear(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
collection.Clear();
AddToCollection(collection, 5);
Assert.Equal(5, collection.Count);
}
}
// Add must still work after an element has been removed from the list.
// (Removed a snapshot array that was allocated and CopyTo'd but never read — dead code.)
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_AfterRemovingAnyValue(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
int seed = 840;
IList collection = NonGenericIListFactory(count);
// Add a value not already present, remove the head, then add another fresh value.
object toAdd = CreateT(seed++);
while (collection.Contains(toAdd))
toAdd = CreateT(seed++);
collection.Add(toAdd);
collection.RemoveAt(0);
toAdd = CreateT(seed++);
while (collection.Contains(toAdd))
toAdd = CreateT(seed++);
collection.Add(toAdd);
}
}
// Add must still work after every original element has been removed.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_AfterRemovingAllItems(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
// Hard-copy first so we don't mutate the collection while enumerating it.
object[] arr = new object[count];
collection.CopyTo(arr, 0);
for (int i = 0; i < count; i++)
collection.Remove(arr[i]);
collection.Add(CreateT(254));
Assert.Equal(1, collection.Count);
}
}
// Add on a read-only or fixed-size list throws NotSupportedException and leaves Count unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_ToReadOnlyCollection(int count)
{
if (IsReadOnly || ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
Assert.Throws<NotSupportedException>(() => collection.Add(CreateT(0)));
Assert.Equal(count, collection.Count);
}
}
// A value can be re-added immediately after being removed.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Add_AfterRemoving(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
int seed = 840;
IList collection = NonGenericIListFactory(count);
object toAdd = CreateT(seed++);
while (collection.Contains(toAdd))
toAdd = CreateT(seed++);
collection.Add(toAdd);
collection.Remove(toAdd);
collection.Add(toAdd);
}
}
#endregion
#region Clear
// Clear empties a writable list; on read-only/fixed-size lists it throws and Count is unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Clear(int count)
{
IList collection = NonGenericIListFactory(count);
if (IsReadOnly || ExpectedFixedSize)
{
Assert.Throws<NotSupportedException>(() => collection.Clear());
Assert.Equal(count, collection.Count);
}
else
{
collection.Clear();
Assert.Equal(0, collection.Count);
}
}
#endregion
#region Contains
// Contains returns false for a valid value that is not in the collection.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Contains_ValidValueOnCollectionNotContainingThatValue(int count)
{
IList collection = NonGenericIListFactory(count);
int seed = 4315;
// Generate a value guaranteed absent from the collection.
object item = CreateT(seed++);
while (collection.Contains(item))
item = CreateT(seed++);
Assert.False(collection.Contains(item));
}
// Contains returns true for every value the collection holds.
// NOTE(review): the doubled "IList_NonGeneric_" prefix in the name looks like a paste slip;
// left as-is since test names may be referenced by run filters/baselines.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IList_NonGeneric_Contains_ValidValueOnCollectionContainingThatValue(int count)
{
IList collection = NonGenericIListFactory(count);
foreach (object item in collection)
Assert.True(collection.Contains(item));
}
// Contains(null) is false when the factory-produced collection holds no nulls.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Contains_NullOnCollectionNotContainingNull(int count)
{
IList collection = NonGenericIListFactory(count);
if (NullAllowed)
Assert.False(collection.Contains(null));
}
// Contains(null) is true after null has been added.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Contains_NullOnCollectionContainingNull(int count)
{
IList collection = NonGenericIListFactory(count);
if (NullAllowed && !IsReadOnly && !ExpectedFixedSize)
{
collection.Add(null);
Assert.True(collection.Contains(null));
}
}
// Contains must report true for a value present twice in the collection.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Contains_ValidValueThatExistsTwiceInTheCollection(int count)
{
if (DuplicateValuesAllowed && !IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
object item = CreateT(12);
collection.Add(item);
collection.Add(item);
// Fix: this test is named for Contains but previously only asserted Count.
Assert.True(collection.Contains(item));
Assert.Equal(count + 2, collection.Count);
}
}
// Contains with an invalid value (wrong type, etc.) throws ArgumentException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Contains_InvalidValue_ThrowsArgumentException(int count)
{
IList collection = NonGenericIListFactory(count);
Assert.All(InvalidValues, invalidValue =>
Assert.Throws<ArgumentException>(() => collection.Contains(invalidValue))
);
}
#endregion
#region IndexOf
// IndexOf(null) is -1 when null is absent (removing it first if necessary and possible).
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IndexOf_NullNotContainedInList(int count)
{
if (NullAllowed)
{
IList list = NonGenericIListFactory(count);
object value = null;
if (list.Contains(value))
{
// Can't remove null from an immutable list; skip rather than fail.
if (IsReadOnly || ExpectedFixedSize)
return;
list.Remove(value);
}
Assert.Equal(-1, list.IndexOf(value));
}
}
// IndexOf(null) is 0 when null occupies the first slot (setting it first if necessary and possible).
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IndexOf_NullContainedInList(int count)
{
if (count > 0 && NullAllowed)
{
IList list = NonGenericIListFactory(count);
object value = null;
if (!list.Contains(value))
{
if (IsReadOnly || ExpectedFixedSize)
return;
list[0] = value;
}
Assert.Equal(0, list.IndexOf(value));
}
}
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IndexOf_ValueInCollectionMultipleTimes(int count)
{
if (count > 0 && !IsReadOnly && !ExpectedFixedSize && DuplicateValuesAllowed)
{
// IndexOf should always return the lowest index for which a matching element is found
IList list = NonGenericIListFactory(count);
object value = CreateT(12345);
list[0] = value;
list[count / 2] = value;
Assert.Equal(0, list.IndexOf(value));
}
}
// With unique factory contents, IndexOf(list[i]) == i for every i.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IndexOf_EachValueNoDuplicates(int count)
{
// Assumes no duplicate elements contained in the list returned by NonGenericIListFactory
IList list = NonGenericIListFactory(count);
Assert.All(Enumerable.Range(0, count), index =>
{
Assert.Equal(index, list.IndexOf(list[index]));
});
}
// IndexOf with an invalid value (wrong type, etc.) throws ArgumentException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IndexOf_InvalidValue(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
Assert.All(InvalidValues, value =>
{
IList list = NonGenericIListFactory(count);
Assert.Throws<ArgumentException>(() => list.IndexOf(value));
});
}
}
// After appending a duplicate of every element, IndexOf still reports the first occurrence.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_IndexOf_ReturnsFirstMatchingValue(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
object[] arr = new object[count];
list.CopyTo(arr, 0);
foreach (object duplicate in arr) // hard copies list to circumvent enumeration error
list.Add(duplicate);
object[] expected = new object[count * 2];
list.CopyTo(expected, 0);
Assert.All(Enumerable.Range(0, count), (index =>
Assert.Equal(index, list.IndexOf(expected[index]))
));
}
}
#endregion
#region Insert
// Insert at a negative index throws and leaves the list unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_NegativeIndex_ThrowsException(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
object validAdd = CreateT(0);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list.Insert(-1, validAdd));
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list.Insert(int.MinValue, validAdd));
Assert.Equal(count, list.Count);
}
}
// Insert at index == Count appends (valid per the IList contract).
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_IndexGreaterThanListCount_Appends(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
object validAdd = CreateT(12350);
list.Insert(count, validAdd);
Assert.Equal(count + 1, list.Count);
Assert.Equal(validAdd, list[count]);
}
}
// Insert on a read-only or fixed-size list throws NotSupportedException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_ToReadOnlyList(int count)
{
if (IsReadOnly || ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
Assert.Throws<NotSupportedException>(() => list.Insert(count / 2, CreateT(321432)));
Assert.Equal(count, list.Count);
}
}
// Insert at index 0 places a non-null value at the head.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_FirstItemToNonNull(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
object value = CreateT(123452);
list.Insert(0, value);
Assert.Equal(value, list[0]);
Assert.Equal(count + 1, list.Count);
}
}
// Insert at index 0 places null at the head when nulls are allowed.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_FirstItemToNull(int count)
{
if (!IsReadOnly && !ExpectedFixedSize && NullAllowed)
{
IList list = NonGenericIListFactory(count);
object value = null;
list.Insert(0, value);
Assert.Equal(value, list[0]);
Assert.Equal(count + 1, list.Count);
}
}
// Insert at the last occupied index shifts elements and stores the value there.
// (Here the ternary IS needed: count may be 0 and index 0 is then the only valid slot.)
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_LastItemToNonNull(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
object value = CreateT(123452);
int lastIndex = count > 0 ? count - 1 : 0;
list.Insert(lastIndex, value);
Assert.Equal(value, list[lastIndex]);
Assert.Equal(count + 1, list.Count);
}
}
// Same as above but inserting null.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_LastItemToNull(int count)
{
if (!IsReadOnly && !ExpectedFixedSize && NullAllowed)
{
IList list = NonGenericIListFactory(count);
object value = null;
int lastIndex = count > 0 ? count - 1 : 0;
list.Insert(lastIndex, value);
Assert.Equal(value, list[lastIndex]);
Assert.Equal(count + 1, list.Count);
}
}
// A duplicate-tolerant list accepts the same value inserted twice.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_DuplicateValues(int count)
{
if (!IsReadOnly && !ExpectedFixedSize && DuplicateValuesAllowed)
{
IList list = NonGenericIListFactory(count);
object value = CreateT(123452);
list.Insert(0, value);
list.Insert(1, value);
Assert.Equal(value, list[0]);
Assert.Equal(value, list[1]);
Assert.Equal(count + 2, list.Count);
}
}
// Insert with an invalid value (wrong type, etc.) throws ArgumentException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Insert_InvalidValue(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
Assert.All(InvalidValues, value =>
{
IList list = NonGenericIListFactory(count);
Assert.Throws<ArgumentException>(() => list.Insert(count / 2, value));
});
}
}
#endregion
#region Remove
// Remove on a read-only or fixed-size list throws NotSupportedException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IListNonGeneric_Remove_OnReadOnlyCollection_ThrowsNotSupportedException(int count)
{
if (IsReadOnly || ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
Assert.Throws<NotSupportedException>(() => collection.Remove(CreateT(34543)));
}
}
// Removing null from a collection that does not contain null must be a no-op.
// (Removed the unused local "seed" — null is never generated, so no seed is needed.)
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_NullNotContainedInCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize && NullAllowed && !Enumerable.Contains(InvalidValues, null))
{
IList collection = NonGenericIListFactory(count);
object value = null;
// Drain any pre-existing nulls so the final Remove targets an absent value.
while (collection.Contains(value))
{
collection.Remove(value);
count--;
}
collection.Remove(value);
Assert.Equal(count, collection.Count);
}
}
// Removing an absent non-null value either no-ops or throws, per the derived class's contract.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_NonNullNotContainedInCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
int seed = count * 251;
IList list = NonGenericIListFactory(count);
// Generate a value guaranteed to be absent and valid.
object value = CreateT(seed++);
while (list.Contains(value) || Enumerable.Contains(InvalidValues, value))
value = CreateT(seed++);
list.Remove(value);
if (IList_NonGeneric_RemoveNonExistent_Throws)
{
Assert.Throws<ArgumentException>(() => list.Remove(value));
}
else
{
Assert.Equal(count, list.Count);
}
}
}
// Removing null from a collection that contains null removes exactly one occurrence.
// (Removed the unused local "seed" — null is never generated, so no seed is needed.)
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_NullContainedInCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize && NullAllowed && !Enumerable.Contains(InvalidValues, null))
{
IList collection = NonGenericIListFactory(count);
object value = null;
// Ensure null is present before removing it.
if (!collection.Contains(value))
{
collection.Add(value);
count++;
}
collection.Remove(value);
Assert.Equal(count - 1, collection.Count);
}
}
// Removing a present non-null value decrements Count by exactly one.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_NonNullContainedInCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
int seed = count * 251;
IList collection = NonGenericIListFactory(count);
object value = CreateT(seed++);
if (!collection.Contains(value))
{
collection.Add(value);
count++;
}
collection.Remove(value);
Assert.Equal(count - 1, collection.Count);
}
}
// Remove takes out only one occurrence of a duplicated value.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_ValueThatExistsTwiceInCollection(int count)
{
if (!IsReadOnly && !ExpectedFixedSize && DuplicateValuesAllowed)
{
int seed = count * 90;
IList collection = NonGenericIListFactory(count);
object value = CreateT(seed++);
collection.Add(value);
collection.Add(value);
count += 2;
collection.Remove(value);
// The second occurrence must survive the single Remove.
Assert.True(collection.Contains(value));
Assert.Equal(count - 1, collection.Count);
}
}
// Removing every element (via a snapshot, to avoid enumerating while mutating) empties the list.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_EveryValue(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
object[] arr = new object[count];
collection.CopyTo(arr, 0);
Assert.All(arr, value =>
{
collection.Remove(value);
});
Assert.Empty(collection);
}
}
// Remove with an invalid value (wrong type, etc.) throws and leaves Count unchanged.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_Remove_InvalidValue_ThrowsArgumentException(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
Assert.All(InvalidValues, value =>
{
Assert.Throws<ArgumentException>(() => collection.Remove(value));
});
Assert.Equal(count, collection.Count);
}
}
#endregion
#region RemoveAt
// RemoveAt with a negative index throws and leaves the list unchanged.
// (Removed the unused local "validAdd" — RemoveAt takes no value argument.)
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_RemoveAt_NegativeIndex_ThrowsException(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list.RemoveAt(-1));
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list.RemoveAt(int.MinValue));
Assert.Equal(count, list.Count);
}
}
// RemoveAt at or beyond Count throws and leaves the list unchanged.
// (Removed the unused local "validAdd" — RemoveAt takes no value argument.)
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_RemoveAt_IndexGreaterThanListCount_ThrowsException(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list.RemoveAt(count));
Assert.Throws(IList_NonGeneric_Item_InvalidIndex_ThrowType, () => list.RemoveAt(count + 1));
Assert.Equal(count, list.Count);
}
}
// RemoveAt on a read-only (non-fixed-size) list throws NotSupportedException.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_RemoveAt_OnReadOnlyList(int count)
{
if (IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
Assert.Throws<NotSupportedException>(() => list.RemoveAt(count / 2));
Assert.Equal(count, list.Count);
}
}
// Removing from the back to the front succeeds at every index and shrinks Count each time.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_RemoveAt_AllValidIndices(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
Assert.Equal(count, list.Count);
Assert.All(Enumerable.Range(0, count).Reverse(), index =>
{
list.RemoveAt(index);
Assert.Equal(index, list.Count);
});
}
}
// Repeatedly removing index 0 drains the list one element at a time.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_RemoveAt_ZeroMultipleTimes(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList list = NonGenericIListFactory(count);
Assert.All(Enumerable.Range(0, count), index =>
{
list.RemoveAt(0);
Assert.Equal(count - index - 1, list.Count);
});
}
}
#endregion
#region Enumerator.Current
// Test Enumerator.Current at end after new elements was added
// Verifies the configured behavior of Enumerator.Current both when positioned past the end
// and after the underlying list is mutated: either throwing InvalidOperationException or
// returning an undefined (but non-throwing) value.
[Theory]
[MemberData(nameof(ValidCollectionSizes))]
public void IList_NonGeneric_CurrentAtEnd_AfterAdd(int count)
{
if (!IsReadOnly && !ExpectedFixedSize)
{
IList collection = NonGenericIListFactory(count);
IEnumerator enumerator = collection.GetEnumerator();
while (enumerator.MoveNext()) ; // Go to end of enumerator
if (Enumerator_Current_UndefinedOperation_Throws)
{
Assert.Throws<InvalidOperationException>(() => enumerator.Current); // Enumerator.Current should fail
}
else
{
var current = enumerator.Current; // Enumerator.Current should not fail
}
// Test after add
int seed = 523561;
for (int i = 0; i < 3; i++)
{
collection.Add(CreateT(seed++));
if (IList_CurrentAfterAdd_Throws)
{
Assert.Throws<InvalidOperationException>(() => enumerator.Current); // Enumerator.Current should fail
}
else
{
var current = enumerator.Current; // Enumerator.Current should not fail
}
}
}
}
#endregion
}
}
| |
/*
* OANDA v20 REST API
*
* The full OANDA v20 REST API Specification. This specification defines how to interact with v20 Accounts, Trades, Orders, Pricing and more.
*
* OpenAPI spec version: 3.0.15
* Contact: api@oanda.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace Oanda.RestV20.Model
{
/// <summary>
/// InlineResponse20029
/// </summary>
[DataContract]
public partial class InlineResponse20029 : IEquatable<InlineResponse20029>, IValidatableObject
{
/// <summary>
/// The granularity of the candlesticks provided.
/// </summary>
/// <value>The granularity of the candlesticks provided.</value>
[JsonConverter(typeof(StringEnumConverter))]
public enum GranularityEnum
{
/// <summary>
/// Enum S5 for "S5"
/// </summary>
[EnumMember(Value = "S5")]
S5,
/// <summary>
/// Enum S10 for "S10"
/// </summary>
[EnumMember(Value = "S10")]
S10,
/// <summary>
/// Enum S15 for "S15"
/// </summary>
[EnumMember(Value = "S15")]
S15,
/// <summary>
/// Enum S30 for "S30"
/// </summary>
[EnumMember(Value = "S30")]
S30,
/// <summary>
/// Enum M1 for "M1"
/// </summary>
[EnumMember(Value = "M1")]
M1,
/// <summary>
/// Enum M2 for "M2"
/// </summary>
[EnumMember(Value = "M2")]
M2,
/// <summary>
/// Enum M4 for "M4"
/// </summary>
[EnumMember(Value = "M4")]
M4,
/// <summary>
/// Enum M5 for "M5"
/// </summary>
[EnumMember(Value = "M5")]
M5,
/// <summary>
/// Enum M10 for "M10"
/// </summary>
[EnumMember(Value = "M10")]
M10,
/// <summary>
/// Enum M15 for "M15"
/// </summary>
[EnumMember(Value = "M15")]
M15,
/// <summary>
/// Enum M30 for "M30"
/// </summary>
[EnumMember(Value = "M30")]
M30,
/// <summary>
/// Enum H1 for "H1"
/// </summary>
[EnumMember(Value = "H1")]
H1,
/// <summary>
/// Enum H2 for "H2"
/// </summary>
[EnumMember(Value = "H2")]
H2,
/// <summary>
/// Enum H3 for "H3"
/// </summary>
[EnumMember(Value = "H3")]
H3,
/// <summary>
/// Enum H4 for "H4"
/// </summary>
[EnumMember(Value = "H4")]
H4,
/// <summary>
/// Enum H6 for "H6"
/// </summary>
[EnumMember(Value = "H6")]
H6,
/// <summary>
/// Enum H8 for "H8"
/// </summary>
[EnumMember(Value = "H8")]
H8,
/// <summary>
/// Enum H12 for "H12"
/// </summary>
[EnumMember(Value = "H12")]
H12,
/// <summary>
/// Enum D for "D"
/// </summary>
[EnumMember(Value = "D")]
D,
/// <summary>
/// Enum W for "W"
/// </summary>
[EnumMember(Value = "W")]
W,
/// <summary>
/// Enum M for "M"
/// </summary>
[EnumMember(Value = "M")]
M
}
/// <summary>
/// The granularity of the candlesticks provided.
/// </summary>
/// <value>The granularity of the candlesticks provided.</value>
[DataMember(Name="granularity", EmitDefaultValue=false)]
public GranularityEnum? Granularity { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="InlineResponse20029" /> class.
/// </summary>
/// <param name="Instrument">The instrument whose Prices are represented by the candlesticks..</param>
/// <param name="Granularity">The granularity of the candlesticks provided..</param>
/// <param name="Candles">The list of candlesticks that satisfy the request..</param>
public InlineResponse20029(string Instrument = default(string), GranularityEnum? Granularity = default(GranularityEnum?), List<Candlestick> Candles = default(List<Candlestick>))
{
this.Instrument = Instrument;
this.Granularity = Granularity;
this.Candles = Candles;
}
/// <summary>
/// The instrument whose Prices are represented by the candlesticks.
/// </summary>
/// <value>The instrument whose Prices are represented by the candlesticks.</value>
[DataMember(Name="instrument", EmitDefaultValue=false)]
public string Instrument { get; set; }
/// <summary>
/// The list of candlesticks that satisfy the request.
/// </summary>
/// <value>The list of candlesticks that satisfy the request.</value>
[DataMember(Name="candles", EmitDefaultValue=false)]
public List<Candlestick> Candles { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class InlineResponse20029 {\n");
sb.Append("  Instrument: ").Append(Instrument).Append("\n");
sb.Append("  Granularity: ").Append(Granularity).Append("\n");
sb.Append("  Candles: ").Append(Candles).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
// credit: http://stackoverflow.com/a/10454552/677735
return this.Equals(obj as InlineResponse20029);
}
/// <summary>
/// Returns true if InlineResponse20029 instances are equal
/// </summary>
/// <param name="other">Instance of InlineResponse20029 to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(InlineResponse20029 other)
{
// credit: http://stackoverflow.com/a/10454552/677735
if (other == null)
return false;
return
(
this.Instrument == other.Instrument ||
this.Instrument != null &&
this.Instrument.Equals(other.Instrument)
) &&
(
this.Granularity == other.Granularity ||
this.Granularity != null &&
this.Granularity.Equals(other.Granularity)
) &&
(
this.Candles == other.Candles ||
// Fix: guard both sides — SequenceEqual throws ArgumentNullException when its
// argument is null, so comparing a non-null list against null must return false,
// not throw. (Both-null is already handled by the == check above.)
this.Candles != null && other.Candles != null &&
this.Candles.SequenceEqual(other.Candles)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
// credit: http://stackoverflow.com/a/263416/677735
unchecked // Overflow is fine, just wrap
{
int hash = 41;
// Suitable nullity checks etc, of course :)
if (this.Instrument != null)
hash = hash * 59 + this.Instrument.GetHashCode();
if (this.Granularity != null)
hash = hash * 59 + this.Granularity.GetHashCode();
if (this.Candles != null)
hash = hash * 59 + this.Candles.GetHashCode();
return hash;
}
}
/// <summary>
/// To validate all properties of the instance
/// </summary>
/// <param name="validationContext">Validation context</param>
/// <returns>Validation Result</returns>
IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
{
yield break;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Xml;
using System.Collections;
using System.Globalization;
using System.Diagnostics;
using System.Data.Common;
namespace System.Data
{
internal sealed class XDRSchema : XMLSchema
{
internal string _schemaName;
internal string _schemaUri;
internal XmlElement _schemaRoot;
internal DataSet _ds;
private static readonly char[] s_colonArray = new char[] { ':' };
// Initializes an empty schema reader bound to 'ds'; actual parsing happens
// in LoadSchema. NOTE(review): fInline is not used by this constructor.
internal XDRSchema(DataSet ds, bool fInline)
{
    _schemaUri = string.Empty;
    _schemaName = string.Empty;
    _schemaRoot = null;
    _ds = ds;
}
// Parses an XDR <Schema> element into 'ds': every top-level <ElementType>
// child becomes a candidate table, and the schema name (decoded) becomes
// the DataSet name unless a table already claimed it.
internal void LoadSchema(XmlElement schemaRoot, DataSet ds)
{
    if (schemaRoot == null)
        return;
    _schemaRoot = schemaRoot;
    _ds = ds;
    _schemaName = schemaRoot.GetAttribute(Keywords.NAME);
    _schemaUri = string.Empty;
    Debug.Assert(FEqualIdentity(schemaRoot, Keywords.XDR_SCHEMA, Keywords.XDRNS), "Illegal node");
    // Get Locale and CaseSensitive properties
    if (_schemaName == null || _schemaName.Length == 0)
        _schemaName = "NewDataSet";
    ds.Namespace = _schemaUri;
    // Walk all the top level Element tags.
    for (XmlNode n = schemaRoot.FirstChild; n != null; n = n.NextSibling)
    {
        if (!(n is XmlElement))
            continue;
        XmlElement child = (XmlElement)n;
        if (FEqualIdentity(child, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS))
        {
            HandleTable(child);
        }
    }
    // XML-encoded names (e.g. "_x0020_") become their display form.
    _schemaName = XmlConvert.DecodeName(_schemaName);
    // Only rename the DataSet if no table already uses the schema name.
    if (ds.Tables[_schemaName] == null)
        ds.DataSetName = _schemaName;
}
// Resolves the <ElementType>/<AttributeType> definition for 'node'.
// An <ElementType> is its own definition; an <element>/<attribute> is
// resolved by matching its TYPE attribute against NAME attributes found in
// a manual document-order walk of the whole document. Returns null when no
// definition exists.
internal XmlElement FindTypeNode(XmlElement node)
{
    string strType;
    XmlNode vn;
    XmlNode vnRoof;
    Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS) ||
                 FEqualIdentity(node, Keywords.XDR_SCHEMA, Keywords.XDRNS) ||
                 FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS) ||
                 FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS),
                 "Invalid node type " + node.LocalName);
    if (FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS))
        return node;
    strType = node.GetAttribute(Keywords.TYPE);
    if (FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS) ||
        FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS))
    {
        if (strType == null || strType.Length == 0)
            return null;
        // Find an ELEMENTTYPE or ATTRIBUTETYPE with name=strType
        vn = node.OwnerDocument.FirstChild;
        vnRoof = node.OwnerDocument;
        while (vn != vnRoof)
        {
            // elements match only ElementType, attributes only AttributeType.
            if ((FEqualIdentity(vn, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS) &&
                 FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS)) ||
                (FEqualIdentity(vn, Keywords.XDR_ATTRIBUTETYPE, Keywords.XDRNS) &&
                 FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS)))
            {
                if (vn is XmlElement && ((XmlElement)vn).GetAttribute(Keywords.NAME) == strType)
                    return (XmlElement)vn;
            }
            // Move vn node: depth-first — child, then sibling, then climb back
            // up until an unvisited sibling (or the document root) is found.
            if (vn.FirstChild != null)
                vn = vn.FirstChild;
            else if (vn.NextSibling != null)
                vn = vn.NextSibling;
            else
            {
                while (vn != vnRoof)
                {
                    vn = vn.ParentNode;
                    if (vn.NextSibling != null)
                    {
                        vn = vn.NextSibling;
                        break;
                    }
                }
            }
        }
        return null;
    }
    return null;
}
// Returns true when the <ElementType> holds only text content (i.e. maps to
// a column rather than a table). With no explicit "content" attribute, the
// presence of a dt:type attribute decides. An unrecognized value throws.
internal bool IsTextOnlyContent(XmlElement node)
{
    Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS), "Invalid node type " + node.LocalName);
    string value = node.GetAttribute(Keywords.CONTENT);
    if (value == null || value.Length == 0)
    {
        // No content attribute: text-only iff a datatype is declared.
        string type = node.GetAttribute(Keywords.DT_TYPE, Keywords.DTNS);
        return !string.IsNullOrEmpty(type);
    }
    if (value == Keywords.EMPTY || value == Keywords.ELTONLY || value == Keywords.ELEMENTONLY || value == Keywords.MIXED)
    {
        return false;
    }
    if (value == Keywords.TEXTONLY)
    {
        return true;
    }
    throw ExceptionBuilder.InvalidAttributeValue("content", value);
}
// Decides whether 'node' (with resolved definition 'typeNode') is a simple
// field (column) rather than a table: text-only content, no child
// element/attribute declarations, and — for elements — at most one occurrence.
internal bool IsXDRField(XmlElement node, XmlElement typeNode)
{
    int min = 1;
    int max = 1;
    if (!IsTextOnlyContent(typeNode))
        return false;
    // Any nested <element>/<attribute> declaration means structure → a table.
    for (XmlNode n = typeNode.FirstChild; n != null; n = n.NextSibling)
    {
        if (FEqualIdentity(n, Keywords.XDR_ELEMENT, Keywords.XDRNS) ||
            FEqualIdentity(n, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS))
            return false;
    }
    if (FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS))
    {
        // Repeating elements (maxOccurs "*" → -1, or > 1) are tables, not fields.
        GetMinMax(node, ref min, ref max);
        if (max == -1 || max > 1)
            return false;
    }
    return true;
}
// Turns 'node' into a DataTable when it describes one. A repeating element
// with no type definition becomes a single-column "simple" table; a node
// with a definition that is not a plain field becomes a full table; a plain
// field (or a node with no definition at all) yields null.
internal DataTable HandleTable(XmlElement node)
{
    XmlElement typeNode;
    Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS) ||
                 FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS), "Invalid node type");
    // Figure out if this really is a table. If not, bail out.
    typeNode = FindTypeNode(node);
    string occurs = node.GetAttribute(Keywords.MINOCCURS);
    // minOccurs > 1 on an untyped element → repeated scalar → simple table.
    if (occurs != null && occurs.Length > 0)
        if ((Convert.ToInt32(occurs, CultureInfo.InvariantCulture) > 1) && (typeNode == null))
        {
            return InstantiateSimpleTable(_ds, node);
        }
    occurs = node.GetAttribute(Keywords.MAXOCCURS);
    // Likewise for any maxOccurs other than "1" (including "*").
    if (occurs != null && occurs.Length > 0)
        if (!string.Equals(occurs, "1", StringComparison.Ordinal) && (typeNode == null))
        {
            return InstantiateSimpleTable(_ds, node);
        }
    if (typeNode == null)
        return null;
    if (IsXDRField(node, typeNode))
        return null;
    return InstantiateTable(_ds, node, typeNode);
}
// Pairs an XDR datatype name with the CLR type it maps to. The entry is
// comparable against a plain string (ordinal), which lets a sorted
// NameType[] be probed with Array.BinarySearch(array, string).
private sealed class NameType : IComparable
{
    public string name;
    public Type type;

    public NameType(string xdrName, Type clrType)
    {
        name = xdrName;
        type = clrType;
    }

    // 'obj' is always the search key (a string), never another NameType.
    public int CompareTo(object obj) => string.Compare(name, (string)obj, StringComparison.Ordinal);
};
// XDR spec: http://www.ltg.ed.ac.uk/~ht/XMLData-Reduced.htm
// Maps every XDR datatype name to its CLR type.
// INVARIANT: entries must stay sorted by name in ordinal order —
// FindNameType binary-searches this array (and a DEBUG loop there verifies
// the ordering). Insert new entries in sorted position.
private static NameType[] s_mapNameTypeXdr = {
    new NameType("bin.base64"          , typeof(byte[])  ), /* XDR */
    new NameType("bin.hex"             , typeof(byte[])  ), /* XDR */
    new NameType("boolean"             , typeof(bool)    ), /* XDR */
    new NameType("byte"                , typeof(sbyte)   ), /* XDR */
    new NameType("char"                , typeof(char)    ), /* XDR */
    new NameType("date"                , typeof(DateTime)), /* XDR */
    new NameType("dateTime"            , typeof(DateTime)), /* XDR */
    new NameType("dateTime.tz"         , typeof(DateTime)), /* XDR */
    new NameType("entities"            , typeof(string)  ), /* XDR */
    new NameType("entity"              , typeof(string)  ), /* XDR */
    new NameType("enumeration"         , typeof(string)  ), /* XDR */
    new NameType("fixed.14.4"          , typeof(decimal) ), /* XDR */
    new NameType("float"               , typeof(double)  ), /* XDR */
    new NameType("i1"                  , typeof(sbyte)   ), /* XDR */
    new NameType("i2"                  , typeof(short)   ), /* XDR */
    new NameType("i4"                  , typeof(int)     ), /* XDR */
    new NameType("i8"                  , typeof(long)    ), /* XDR */
    new NameType("id"                  , typeof(string)  ), /* XDR */
    new NameType("idref"               , typeof(string)  ), /* XDR */
    new NameType("idrefs"              , typeof(string)  ), /* XDR */
    new NameType("int"                 , typeof(int)     ), /* XDR */
    new NameType("nmtoken"             , typeof(string)  ), /* XDR */
    new NameType("nmtokens"            , typeof(string)  ), /* XDR */
    new NameType("notation"            , typeof(string)  ), /* XDR */
    new NameType("number"              , typeof(decimal) ), /* XDR */
    new NameType("r4"                  , typeof(float)   ), /* XDR */
    new NameType("r8"                  , typeof(double)  ), /* XDR */
    new NameType("string"              , typeof(string)  ), /* XDR */
    new NameType("time"                , typeof(DateTime)), /* XDR */
    new NameType("time.tz"             , typeof(DateTime)), /* XDR */
    new NameType("ui1"                 , typeof(byte)    ), /* XDR */
    new NameType("ui2"                 , typeof(ushort)  ), /* XDR */
    new NameType("ui4"                 , typeof(uint)    ), /* XDR */
    new NameType("ui8"                 , typeof(ulong)   ), /* XDR */
    new NameType("uri"                 , typeof(string)  ), /* XDR */
    new NameType("uuid"                , typeof(Guid)    ), /* XDR */
};
// Looks up the NameType entry for an XDR type name; throws UndefinedDatatype
// when the name is unknown. The binary search passes the string key directly:
// NameType.CompareTo casts its argument to string, so the comparison works.
private static NameType FindNameType(string name)
{
#if DEBUG
    // Guard the sorted invariant the binary search depends on.
    for (int i = 1; i < s_mapNameTypeXdr.Length; ++i)
    {
        Debug.Assert((s_mapNameTypeXdr[i - 1].CompareTo(s_mapNameTypeXdr[i].name)) < 0, "incorrect sorting");
    }
#endif
    int index = Array.BinarySearch(s_mapNameTypeXdr, name);
    if (index < 0)
    {
#if DEBUG
        // Let's check that we really don't have this name:
        foreach (NameType nt in s_mapNameTypeXdr)
        {
            Debug.Assert(nt.name != name, "FindNameType('" + name + "') -- failed. Existed name not found");
        }
#endif
        throw ExceptionBuilder.UndefinedDatatype(name);
    }
    Debug.Assert(s_mapNameTypeXdr[index].name == name, "FindNameType('" + name + "') -- failed. Wrong name found");
    return s_mapNameTypeXdr[index];
}
// Cached "enumeration" entry so ParseDataType can detect it by reference.
private static NameType s_enumerationNameType = FindNameType("enumeration");
// Maps an XDR dt:type value (optionally prefixed, e.g. "dt:int") onto a CLR
// type. An enumeration without dt:values is rejected.
private Type ParseDataType(string dt, string dtValues)
{
    string strType = dt;
    string[] parts = dt.Split(s_colonArray); // ":"
    if (parts.Length > 2)
    {
        throw ExceptionBuilder.InvalidAttributeValue("type", dt);
    }
    else if (parts.Length == 2)
    {
        // CONSIDER: check that we have valid prefix
        strType = parts[1];
    }
    NameType nt = FindNameType(strType);
    if (nt == s_enumerationNameType && (dtValues == null || dtValues.Length == 0))
        throw ExceptionBuilder.MissingAttribute("type", Keywords.DT_VALUES);
    return nt.type;
}
// Extracts the instance name of an XDR node. Type definitions
// (<ElementType>/<AttributeType>) carry it in NAME; element/attribute
// references carry it in TYPE. Throws MissingAttribute when absent.
internal string GetInstanceName(XmlElement node)
{
    bool isTypeDefinition =
        FEqualIdentity(node, Keywords.XDR_ELEMENTTYPE, Keywords.XDRNS) ||
        FEqualIdentity(node, Keywords.XDR_ATTRIBUTETYPE, Keywords.XDRNS);

    string attributeName = isTypeDefinition ? Keywords.NAME : Keywords.TYPE;
    string instanceName = node.GetAttribute(attributeName);

    if (instanceName == null || instanceName.Length == 0)
    {
        throw ExceptionBuilder.MissingAttribute("Element", attributeName);
    }

    return instanceName;
}
// Creates a DataColumn on 'table' from an XDR <element> or <attribute>
// node: resolves its type definition, maps the datatype, applies min/max
// occurrence, default value, namespace and (msdata) expression.
internal void HandleColumn(XmlElement node, DataTable table)
{
    Debug.Assert(FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS) ||
                 FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS), "Illegal node type");
    string instanceName;
    string strName;
    Type type;
    string strType;
    string strValues;
    int minOccurs = 0;
    int maxOccurs = 1;
    string strDefault;
    DataColumn column;
    // NOTE(review): strUse is read but never used in this method.
    string strUse = node.GetAttribute(Keywords.USE);
    // Get the name
    if (node.Attributes.Count > 0)
    {
        string strRef = node.GetAttribute(Keywords.REF);
        if (strRef != null && strRef.Length > 0)
            return; //skip ref nodes. B2 item
        strName = instanceName = GetInstanceName(node);
        column = table.Columns[instanceName, _schemaUri];
        if (column != null)
        {
            // Same name may appear once as attribute and once as element;
            // a duplicate within the same mapping kind is an error. Otherwise
            // the new column gets a uniquified name.
            if (column.ColumnMapping == MappingType.Attribute)
            {
                if (FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS))
                    throw ExceptionBuilder.DuplicateDeclaration(strName);
            }
            else
            {
                if (FEqualIdentity(node, Keywords.XDR_ELEMENT, Keywords.XDRNS))
                {
                    throw ExceptionBuilder.DuplicateDeclaration(strName);
                }
            }
            instanceName = GenUniqueColumnName(strName, table);
        }
    }
    else
    {
        strName = instanceName = string.Empty;
    }
    // Now get the type
    XmlElement typeNode = FindTypeNode(node);
    SimpleType xsdType = null;
    if (typeNode == null)
    {
        strType = node.GetAttribute(Keywords.TYPE);
        throw ExceptionBuilder.UndefinedDatatype(strType);
    }
    strType = typeNode.GetAttribute(Keywords.DT_TYPE, Keywords.DTNS);
    strValues = typeNode.GetAttribute(Keywords.DT_VALUES, Keywords.DTNS);
    if (strType == null || strType.Length == 0)
    {
        // No declared datatype: default to string.
        strType = string.Empty;
        type = typeof(string);
    }
    else
    {
        type = ParseDataType(strType, strValues);
        // HACK: temp work around special types. For these, the XmlDataType is
        // cleared and (except float) a SimpleType carries the XDR specifics.
        if (strType == "float")
        {
            strType = string.Empty;
        }
        if (strType == "char")
        {
            strType = string.Empty;
            xsdType = SimpleType.CreateSimpleType(StorageType.Char, type);
        }
        if (strType == "enumeration")
        {
            strType = string.Empty;
            xsdType = SimpleType.CreateEnumeratedType(strValues);
        }
        if (strType == "bin.base64")
        {
            strType = string.Empty;
            xsdType = SimpleType.CreateByteArrayType("base64");
        }
        if (strType == "bin.hex")
        {
            strType = string.Empty;
            xsdType = SimpleType.CreateByteArrayType("hex");
        }
    }
    bool isAttribute = FEqualIdentity(node, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS);
    GetMinMax(node, isAttribute, ref minOccurs, ref maxOccurs);
    strDefault = null;
    // Does XDR have a default?
    strDefault = node.GetAttribute(Keywords.DEFAULT);
    bool bNullable = false;
    column = new DataColumn(XmlConvert.DecodeName(instanceName), type, null,
        isAttribute ? MappingType.Attribute : MappingType.Element);
    SetProperties(column, node.Attributes); // xmlschema.SetProperties will skip setting expressions
    column.XmlDataType = strType;
    column.SimpleType = xsdType;
    // minOccurs == 0 means the value may be absent → nullable column.
    column.AllowDBNull = (minOccurs == 0) || bNullable;
    column.Namespace = (isAttribute) ? string.Empty : _schemaUri;
    // We will skip handling expression columns in SetProperties, so we need set the expressions here
    if (node.Attributes != null)
    {
        for (int i = 0; i < node.Attributes.Count; i++)
        {
            if (node.Attributes[i].NamespaceURI == Keywords.MSDNS)
            {
                if (node.Attributes[i].LocalName == "Expression")
                {
                    column.Expression = node.Attributes[i].Value;
                    break;
                }
            }
        }
    }
    // An explicit targetNamespace wins over the schema URI set above.
    string targetNamespace = node.GetAttribute(Keywords.TARGETNAMESPACE);
    if (targetNamespace != null && targetNamespace.Length > 0)
        column.Namespace = targetNamespace;
    table.Columns.Add(column);
    if (strDefault != null && strDefault.Length != 0)
        try
        {
            column.DefaultValue = SqlConvert.ChangeTypeForXML(strDefault, type);
        }
        catch (System.FormatException)
        {
            // Re-surface as a schema error that names the offending value.
            throw ExceptionBuilder.CannotConvert(strDefault, type.FullName);
        }
}
// Convenience overload: reads minOccurs/maxOccurs with isAttribute = false.
internal void GetMinMax(XmlElement elNode, ref int minOccurs, ref int maxOccurs)
{
    GetMinMax(elNode, false, ref minOccurs, ref maxOccurs);
}
// Reads the minOccurs/maxOccurs attributes of 'elNode' into the ref
// parameters. "*" for maxOccurs yields -1 (unbounded); any other value must
// parse to exactly 1. Missing attributes leave the refs untouched.
// NOTE(review): isAttribute is not used in this body.
internal void GetMinMax(XmlElement elNode, bool isAttribute, ref int minOccurs, ref int maxOccurs)
{
    string occurs = elNode.GetAttribute(Keywords.MINOCCURS);
    if (occurs != null && occurs.Length > 0)
    {
        try
        {
            minOccurs = int.Parse(occurs, CultureInfo.InvariantCulture);
        }
        catch (Exception e) when (ADP.IsCatchableExceptionType(e))
        {
            // Unparsable value → schema error naming the legal values.
            throw ExceptionBuilder.AttributeValues(nameof(minOccurs), "0", "1");
        }
    }
    occurs = elNode.GetAttribute(Keywords.MAXOCCURS);
    if (occurs != null && occurs.Length > 0)
    {
        int bZeroOrMore = string.Compare(occurs, Keywords.STAR, StringComparison.Ordinal);
        if (bZeroOrMore == 0)
        {
            // maxOccurs="*" → unbounded.
            maxOccurs = -1;
        }
        else
        {
            try
            {
                maxOccurs = int.Parse(occurs, CultureInfo.InvariantCulture);
            }
            catch (Exception e) when (ADP.IsCatchableExceptionType(e))
            {
                throw ExceptionBuilder.AttributeValues(nameof(maxOccurs), "1", Keywords.STAR);
            }
            // Only "1" or "*" are legal for maxOccurs.
            if (maxOccurs != 1)
            {
                throw ExceptionBuilder.AttributeValues(nameof(maxOccurs), "1", Keywords.STAR);
            }
        }
    }
}
// Walks the direct children of a type node: nested <element>s that resolve
// to tables are collected into 'tableChildren'; everything else (attributes,
// and elements that are plain fields) becomes a column on 'table'.
internal void HandleTypeNode(XmlElement typeNode, DataTable table, ArrayList tableChildren)
{
    for (XmlNode child = typeNode.FirstChild; child != null; child = child.NextSibling)
    {
        XmlElement element = child as XmlElement;
        if (element == null)
            continue;

        if (FEqualIdentity(element, Keywords.XDR_ELEMENT, Keywords.XDRNS))
        {
            DataTable nestedTable = HandleTable(element);
            if (nestedTable != null)
            {
                tableChildren.Add(nestedTable);
                continue;
            }
            // Not a table after all: treat the element as a column.
            HandleColumn(element, table);
            continue;
        }

        if (FEqualIdentity(element, Keywords.XDR_ATTRIBUTE, Keywords.XDRNS))
        {
            HandleColumn(element, table);
        }
    }
}
// Materializes a DataTable (and nested child relations) from an XDR node
// and its type definition. Returns an existing table of the same name/URI
// instead of creating a duplicate.
internal DataTable InstantiateTable(DataSet dataSet, XmlElement node, XmlElement typeNode)
{
    string typeName = string.Empty;
    XmlAttributeCollection attrs = node.Attributes;
    DataTable table;
    int minOccurs = 1;
    int maxOccurs = 1;
    // NOTE(review): keys is never assigned after this — the primary-key
    // branch below is dead code in the visible implementation.
    string keys = null;
    ArrayList tableChildren = new ArrayList();
    if (attrs.Count > 0)
    {
        typeName = GetInstanceName(node);
        table = dataSet.Tables.GetTable(typeName, _schemaUri);
        if (table != null)
        {
            return table;
        }
    }
    table = new DataTable(XmlConvert.DecodeName(typeName));
    // fxcop: new DataTable should inherit the CaseSensitive, Locale from DataSet and possibly updating during SetProperties
    table.Namespace = _schemaUri;
    GetMinMax(node, ref minOccurs, ref maxOccurs);
    table.MinOccurs = minOccurs;
    table.MaxOccurs = maxOccurs;
    // Add before recursing so nested HandleTable calls can find this table.
    _ds.Tables.Add(table);
    HandleTypeNode(typeNode, table, tableChildren);
    SetProperties(table, attrs);
    // check to see if we have a unique constraint
    if (keys != null)
    {
        string[] list = keys.TrimEnd(null).Split(null);
        int keyLength = list.Length;
        var cols = new DataColumn[keyLength];
        for (int i = 0; i < keyLength; i++)
        {
            DataColumn col = table.Columns[list[i], _schemaUri];
            if (col == null)
                throw ExceptionBuilder.ElementTypeNotFound(list[i]);
            cols[i] = col;
        }
        table.PrimaryKey = cols;
    }
    // Wire each nested child table to this one with a nested relation,
    // unless such a relation already exists.
    foreach (DataTable _tableChild in tableChildren)
    {
        DataRelation relation = null;
        DataRelationCollection childRelations = table.ChildRelations;
        for (int j = 0; j < childRelations.Count; j++)
        {
            if (!childRelations[j].Nested)
                continue;
            if (_tableChild == childRelations[j].ChildTable)
                relation = childRelations[j];
        }
        if (relation != null)
            continue;
        DataColumn parentKey = table.AddUniqueKey();
        // foreign key in the child table
        DataColumn childKey = _tableChild.AddForeignKey(parentKey);
        // create relationship
        // setup relationship between parent and this table
        relation = new DataRelation(table.TableName + "_" + _tableChild.TableName, parentKey, childKey, true);
        relation.CheckMultipleNested = false; // disable the check for multiple nested parent
        relation.Nested = true;
        _tableChild.DataSet.Relations.Add(relation);
        relation.CheckMultipleNested = true; // enable the check for multiple nested parent
    }
    return table;
}
// Builds a single-column table for a repeating scalar element ("simple"
// table). Unlike InstantiateTable, an existing table of the same name is a
// duplicate-declaration error rather than a cache hit.
internal DataTable InstantiateSimpleTable(DataSet dataSet, XmlElement node)
{
    string typeName;
    XmlAttributeCollection attrs = node.Attributes;
    DataTable table;
    int minOccurs = 1;
    int maxOccurs = 1;
    typeName = GetInstanceName(node);
    table = dataSet.Tables.GetTable(typeName, _schemaUri);
    if (table != null)
    {
        throw ExceptionBuilder.DuplicateDeclaration(typeName);
    }
    string tbName = XmlConvert.DecodeName(typeName);
    table = new DataTable(tbName);
    // fxcop: new DataTable will either inherit the CaseSensitive, Locale from DataSet or be set during SetProperties
    table.Namespace = _schemaUri;
    GetMinMax(node, ref minOccurs, ref maxOccurs);
    table.MinOccurs = minOccurs;
    table.MaxOccurs = maxOccurs;
    SetProperties(table, attrs);
    table._repeatableElement = true;
    // The element itself supplies the single column, renamed "<table>_Column".
    HandleColumn(node, table);
    table.Columns[0].ColumnName = tbName + "_Column";
    _ds.Tables.Add(table);
    return table;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Diagnostics;
using System.Text;
#pragma warning disable 1591 // disable warnings on XML comments not being present
// This code was automatically generated by the TraceParserGen tool, which converts
// an ETW event manifest into strongly typed C# classes.
namespace Microsoft.Diagnostics.Tracing.Parsers
{
using Microsoft.Diagnostics.Tracing.Parsers.MicrosoftXunitBenchmark;
using Microsoft.Diagnostics.Tracing.Session;
[System.CodeDom.Compiler.GeneratedCode("traceparsergen", "2.0")]
public sealed class MicrosoftXunitBenchmarkTraceEventParser : TraceEventParser
{
public static string ProviderName => "Microsoft-Xunit-Benchmark";
public static Guid ProviderGuid => TraceEventProviders.GetEventSourceGuidFromName(ProviderName);
public enum Keywords : long
{
Session3 = 0x100000000000,
Session2 = 0x200000000000,
Session1 = 0x400000000000,
Session0 = 0x800000000000,
};
public MicrosoftXunitBenchmarkTraceEventParser(TraceEventSource source) : base(source) { }
public event Action<BenchmarkIterationStartArgs> BenchmarkIterationStart
{
add
{
source.RegisterEventTemplate(BenchmarkIterationStartTemplate(value));
}
remove
{
source.UnregisterEventTemplate(value, 3, ProviderGuid);
}
}
public event Action<BenchmarkIterationStopArgs> BenchmarkIterationStop
{
add
{
source.RegisterEventTemplate(BenchmarkIterationStopTemplate(value));
}
remove
{
source.UnregisterEventTemplate(value, 4, ProviderGuid);
}
}
public event Action<BenchmarkStartArgs> BenchmarkStart
{
add
{
source.RegisterEventTemplate(BenchmarkStartTemplate(value));
}
remove
{
source.UnregisterEventTemplate(value, 1, ProviderGuid);
}
}
public event Action<BenchmarkStopArgs> BenchmarkStop
{
add
{
source.RegisterEventTemplate(BenchmarkStopTemplate(value));
}
remove
{
source.UnregisterEventTemplate(value, 2, ProviderGuid);
}
}
public event Action<EventSourceMessageArgs> EventSourceMessage
{
add
{
source.RegisterEventTemplate(EventSourceMessageTemplate(value));
}
remove
{
source.UnregisterEventTemplate(value, 0, ProviderGuid);
}
}
#region private
protected override string GetProviderName() { return ProviderName; }
static private BenchmarkIterationStartArgs BenchmarkIterationStartTemplate(Action<BenchmarkIterationStartArgs> action)
{ // action, eventid, taskid, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName
return new BenchmarkIterationStartArgs(action, 3, 2, "BenchmarkIteration", Guid.Empty, 1, "Start", ProviderGuid, ProviderName);
}
static private BenchmarkIterationStopArgs BenchmarkIterationStopTemplate(Action<BenchmarkIterationStopArgs> action)
{ // action, eventid, taskid, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName
return new BenchmarkIterationStopArgs(action, 4, 2, "BenchmarkIteration", Guid.Empty, 2, "Stop", ProviderGuid, ProviderName);
}
static private BenchmarkStartArgs BenchmarkStartTemplate(Action<BenchmarkStartArgs> action)
{ // action, eventid, taskid, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName
return new BenchmarkStartArgs(action, 1, 1, "Benchmark", Guid.Empty, 1, "Start", ProviderGuid, ProviderName);
}
static private BenchmarkStopArgs BenchmarkStopTemplate(Action<BenchmarkStopArgs> action)
{ // action, eventid, taskid, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName
return new BenchmarkStopArgs(action, 2, 1, "Benchmark", Guid.Empty, 2, "Stop", ProviderGuid, ProviderName);
}
static private EventSourceMessageArgs EventSourceMessageTemplate(Action<EventSourceMessageArgs> action)
{ // action, eventid, taskid, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName
return new EventSourceMessageArgs(action, 0, 65534, "EventSourceMessage", Guid.Empty, 0, "", ProviderGuid, ProviderName);
}
// Lazily-built template cache; volatile publishes the fully-initialized
// array. Worst case under a race, two threads build it twice — benign,
// since the contents are identical.
static private volatile TraceEvent[] s_templates;
// Enumerates one template per event this parser knows, invoking 'callback'
// for each one the (optional) observer filter accepts.
protected override void EnumerateTemplates(Func<string, string, EventFilterResponse> eventsToObserve, Action<TraceEvent> callback)
{
    if (s_templates == null)
    {
        var templates = new TraceEvent[5];
        templates[0] = EventSourceMessageTemplate(null);
        templates[1] = BenchmarkStartTemplate(null);
        templates[2] = BenchmarkStopTemplate(null);
        templates[3] = BenchmarkIterationStartTemplate(null);
        templates[4] = BenchmarkIterationStopTemplate(null);
        s_templates = templates;
    }
    foreach (var template in s_templates)
        if (eventsToObserve == null || eventsToObserve(template.ProviderName, template.EventName) == EventFilterResponse.AcceptEvent)
            callback(template);
}
#endregion
}
}
namespace Microsoft.Diagnostics.Tracing.Parsers.MicrosoftXunitBenchmark
{
// Payload for the BenchmarkIteration/Start ETW event: two Unicode strings
// followed by an Int32 iteration number, decoded positionally from the raw
// event data.
public sealed class BenchmarkIterationStartArgs : TraceEvent
{
    public string RunId { get { return GetUnicodeStringAt(0); } }
    public string BenchmarkName { get { return GetUnicodeStringAt(SkipUnicodeString(0)); } }
    // Iteration is the Int32 immediately after the two strings.
    public int Iteration { get { return GetInt32At(SkipUnicodeString(SkipUnicodeString(0))); } }
    #region Private
    internal BenchmarkIterationStartArgs(Action<BenchmarkIterationStartArgs> target, int eventID, int task, string taskName, Guid taskGuid, int opcode, string opcodeName, Guid providerGuid, string providerName)
        : base(eventID, task, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName)
    {
        this.m_target = target;
    }
    protected override void Dispatch()
    {
        m_target(this);
    }
    protected override void Validate()
    {
        // NOTE(review): the fixed-size payload after the two strings is a
        // single Int32 (4 bytes), yet these checks use +12 — the same layout
        // as the Stop event (Int32 + Int64). Possibly copied from there;
        // confirm against the event manifest.
        Debug.Assert(!(Version == 0 && EventDataLength != SkipUnicodeString(SkipUnicodeString(0)) + 12));
        Debug.Assert(!(Version > 0 && EventDataLength < SkipUnicodeString(SkipUnicodeString(0)) + 12));
    }
    protected override Delegate Target
    {
        get { return m_target; }
        set { m_target = (Action<BenchmarkIterationStartArgs>)value; }
    }
    public override StringBuilder ToXml(StringBuilder sb)
    {
        Prefix(sb);
        XmlAttrib(sb, "RunId", RunId);
        XmlAttrib(sb, "BenchmarkName", BenchmarkName);
        XmlAttrib(sb, "Iteration", Iteration);
        sb.Append("/>");
        return sb;
    }
    public override string[] PayloadNames
    {
        get
        {
            // Must stay index-aligned with PayloadValue below.
            if (payloadNames == null)
                payloadNames = new string[] { "RunId", "BenchmarkName", "Iteration" };
            return payloadNames;
        }
    }
    public override object PayloadValue(int index)
    {
        switch (index)
        {
            case 0:
                return RunId;
            case 1:
                return BenchmarkName;
            case 2:
                return Iteration;
            default:
                Debug.Assert(false, "Bad field index");
                return null;
        }
    }
    private event Action<BenchmarkIterationStartArgs> m_target;
    #endregion
}
public sealed class BenchmarkIterationStopArgs : TraceEvent
{
public string RunId { get { return GetUnicodeStringAt(0); } }
public string BenchmarkName { get { return GetUnicodeStringAt(SkipUnicodeString(0)); } }
public int Iteration { get { return GetInt32At(SkipUnicodeString(SkipUnicodeString(0))); } }
public long AllocatedBytes { get { return GetInt64At(SkipUnicodeString(SkipUnicodeString(0)) + 4); } }
#region Private
internal BenchmarkIterationStopArgs(Action<BenchmarkIterationStopArgs> target, int eventID, int task, string taskName, Guid taskGuid, int opcode, string opcodeName, Guid providerGuid, string providerName)
: base(eventID, task, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName)
{
this.m_target = target;
}
protected override void Dispatch()
{
m_target(this);
}
protected override void Validate()
{
Debug.Assert(!(Version == 0 && EventDataLength != SkipUnicodeString(SkipUnicodeString(0)) + 12));
Debug.Assert(!(Version > 0 && EventDataLength < SkipUnicodeString(SkipUnicodeString(0)) + 12));
}
protected override Delegate Target
{
get { return m_target; }
set { m_target = (Action<BenchmarkIterationStopArgs>)value; }
}
public override StringBuilder ToXml(StringBuilder sb)
{
Prefix(sb);
XmlAttrib(sb, "RunId", RunId);
XmlAttrib(sb, "BenchmarkName", BenchmarkName);
XmlAttrib(sb, "Iteration", Iteration);
XmlAttrib(sb, "AllocatedBytes", AllocatedBytes);
sb.Append("/>");
return sb;
}
public override string[] PayloadNames
{
get
{
if (payloadNames == null)
payloadNames = new string[] { "RunId", "BenchmarkName", "Iteration", "AllocatedBytes" };
return payloadNames;
}
}
public override object PayloadValue(int index)
{
switch (index)
{
case 0:
return RunId;
case 1:
return BenchmarkName;
case 2:
return Iteration;
case 3:
return AllocatedBytes;
default:
Debug.Assert(false, "Bad field index");
return null;
}
}
private event Action<BenchmarkIterationStopArgs> m_target;
#endregion
}
public sealed class BenchmarkStartArgs : TraceEvent
{
public string RunId { get { return GetUnicodeStringAt(0); } }
public string BenchmarkName { get { return GetUnicodeStringAt(SkipUnicodeString(0)); } }
#region Private
internal BenchmarkStartArgs(Action<BenchmarkStartArgs> target, int eventID, int task, string taskName, Guid taskGuid, int opcode, string opcodeName, Guid providerGuid, string providerName)
: base(eventID, task, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName)
{
this.m_target = target;
}
protected override void Dispatch()
{
m_target(this);
}
protected override void Validate()
{
Debug.Assert(!(Version == 0 && EventDataLength != SkipUnicodeString(SkipUnicodeString(0))));
Debug.Assert(!(Version > 0 && EventDataLength < SkipUnicodeString(SkipUnicodeString(0))));
}
protected override Delegate Target
{
get { return m_target; }
set { m_target = (Action<BenchmarkStartArgs>)value; }
}
public override StringBuilder ToXml(StringBuilder sb)
{
Prefix(sb);
XmlAttrib(sb, "RunId", RunId);
XmlAttrib(sb, "BenchmarkName", BenchmarkName);
sb.Append("/>");
return sb;
}
public override string[] PayloadNames
{
get
{
if (payloadNames == null)
payloadNames = new string[] { "RunId", "BenchmarkName" };
return payloadNames;
}
}
public override object PayloadValue(int index)
{
switch (index)
{
case 0:
return RunId;
case 1:
return BenchmarkName;
default:
Debug.Assert(false, "Bad field index");
return null;
}
}
private event Action<BenchmarkStartArgs> m_target;
#endregion
}
public sealed class BenchmarkStopArgs : TraceEvent
{
public string RunId { get { return GetUnicodeStringAt(0); } }
public string BenchmarkName { get { return GetUnicodeStringAt(SkipUnicodeString(0)); } }
public string StopReason { get { return GetUnicodeStringAt(SkipUnicodeString(SkipUnicodeString(0))); } }
#region Private
internal BenchmarkStopArgs(Action<BenchmarkStopArgs> target, int eventID, int task, string taskName, Guid taskGuid, int opcode, string opcodeName, Guid providerGuid, string providerName)
: base(eventID, task, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName)
{
this.m_target = target;
}
protected override void Dispatch()
{
m_target(this);
}
protected override void Validate()
{
Debug.Assert(!(Version == 0 && EventDataLength != SkipUnicodeString(SkipUnicodeString(SkipUnicodeString(0)))));
Debug.Assert(!(Version > 0 && EventDataLength < SkipUnicodeString(SkipUnicodeString(SkipUnicodeString(0)))));
}
protected override Delegate Target
{
get { return m_target; }
set { m_target = (Action<BenchmarkStopArgs>)value; }
}
public override StringBuilder ToXml(StringBuilder sb)
{
Prefix(sb);
XmlAttrib(sb, "RunId", RunId);
XmlAttrib(sb, "BenchmarkName", BenchmarkName);
XmlAttrib(sb, "StopReason", StopReason);
sb.Append("/>");
return sb;
}
public override string[] PayloadNames
{
get
{
if (payloadNames == null)
payloadNames = new string[] { "RunId", "BenchmarkName", "StopReason" };
return payloadNames;
}
}
public override object PayloadValue(int index)
{
switch (index)
{
case 0:
return RunId;
case 1:
return BenchmarkName;
case 2:
return StopReason;
default:
Debug.Assert(false, "Bad field index");
return null;
}
}
private event Action<BenchmarkStopArgs> m_target;
#endregion
}
public sealed class EventSourceMessageArgs : TraceEvent
{
public string message { get { return GetUnicodeStringAt(0); } }
#region Private
internal EventSourceMessageArgs(Action<EventSourceMessageArgs> target, int eventID, int task, string taskName, Guid taskGuid, int opcode, string opcodeName, Guid providerGuid, string providerName)
: base(eventID, task, taskName, taskGuid, opcode, opcodeName, providerGuid, providerName)
{
this.m_target = target;
}
protected override void Dispatch()
{
m_target(this);
}
protected override void Validate()
{
Debug.Assert(!(Version == 0 && EventDataLength != SkipUnicodeString(0)));
Debug.Assert(!(Version > 0 && EventDataLength < SkipUnicodeString(0)));
}
protected override Delegate Target
{
get { return m_target; }
set { m_target = (Action<EventSourceMessageArgs>)value; }
}
public override StringBuilder ToXml(StringBuilder sb)
{
Prefix(sb);
XmlAttrib(sb, "message", message);
sb.Append("/>");
return sb;
}
public override string[] PayloadNames
{
get
{
if (payloadNames == null)
payloadNames = new string[] { "message" };
return payloadNames;
}
}
public override object PayloadValue(int index)
{
switch (index)
{
case 0:
return message;
default:
Debug.Assert(false, "Bad field index");
return null;
}
}
private event Action<EventSourceMessageArgs> m_target;
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
#region Using directives
using System;
using System.Collections;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Management.Automation;
using System.Management.Automation.Internal;
using System.Management.Automation.Language;
using System.Management.Automation.Security;
using System.Reflection;
using System.Runtime.InteropServices;
#if !UNIX
using System.Threading;
#endif
using Dbg = System.Management.Automation.Diagnostics;
#endregion
namespace Microsoft.PowerShell.Commands
{
    /// <summary>Create a new .NET or COM object.</summary>
    [Cmdlet(VerbsCommon.New, "Object", DefaultParameterSetName = netSetName, HelpUri = "https://go.microsoft.com/fwlink/?LinkID=2096620")]
    public sealed class NewObjectCommand : PSCmdlet
    {
        #region parameters

        /// <summary>
        /// The full name of the .NET type to instantiate (".Net" parameter set).
        /// </summary>
        [Parameter(ParameterSetName = netSetName, Mandatory = true, Position = 0)]
        [ValidateTrustedData]
        public string TypeName { get; set; }

#if !UNIX
        // CLSID resolved from the -ComObject ProgID; stays Guid.Empty until
        // NewObjectNativeMethods.CLSIDFromProgID fills it in BeginProcessing.
        private Guid _comObjectClsId = Guid.Empty;

        /// <summary>
        /// The ProgID of the Com object.
        /// </summary>
        [Parameter(ParameterSetName = "Com", Mandatory = true, Position = 0)]
        [ValidateTrustedData]
        public string ComObject { get; set; }
#endif

        /// <summary>
        /// The parameters for the constructor.
        /// </summary>
        /// <value></value>
        [Parameter(ParameterSetName = netSetName, Mandatory = false, Position = 1)]
        [ValidateTrustedData]
        [Alias("Args")]
        public object[] ArgumentList { get; set; }

        /// <summary>
        /// True if we should raise an error when a COM object resolves to an interop assembly type.
        /// </summary>
        [Parameter(ParameterSetName = "Com")]
        public SwitchParameter Strict { get; set; }

        // Updated from Hashtable to IDictionary to support the work around ordered hashtables.
        /// <summary>
        /// Gets the properties to be set on the newly created object.
        /// </summary>
        [Parameter]
        [ValidateTrustedData]
        [SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public IDictionary Property { get; set; }

        #endregion parameters

        #region private

        // Invokes one of the given constructors with the supplied arguments.
        // A MethodException is converted into a terminating error; any other
        // exception is allowed to propagate to the caller.
        private object CallConstructor(Type type, ConstructorInfo[] constructors, object[] args)
        {
            object result = null;
            try
            {
                result = DotNetAdapter.ConstructorInvokeDotNet(type, constructors, args);
            }
            catch (MethodException e)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "ConstructorInvokedThrowException",
                        ErrorCategory.InvalidOperation, null));
            }
            // let other exceptions propagate
            return result;
        }

        // Callback used by LanguagePrimitives.SetObjectProperties when a key in
        // -Property does not match a member on the created object.
        // NOTE(review): pso and resultType are unused here, and the first format
        // argument is passed as null — presumably the resource string's {0} slot
        // is unused; confirm against NewObjectStrings.MemberNotFound.
        private void CreateMemberNotFoundError(PSObject pso, DictionaryEntry property, Type resultType)
        {
            string message = StringUtil.Format(NewObjectStrings.MemberNotFound, null, property.Key.ToString(), ParameterSet2ResourceString(ParameterSetName));
            ThrowTerminatingError(
                new ErrorRecord(
                    new InvalidOperationException(message),
                    "InvalidOperationException",
                    ErrorCategory.InvalidOperation,
                    null));
        }

        // Callback used by LanguagePrimitives.SetObjectProperties when assigning
        // a -Property value to a member fails.
        private void CreateMemberSetValueError(SetValueException e)
        {
            Exception ex = new(StringUtil.Format(NewObjectStrings.InvalidValue, e));
            ThrowTerminatingError(
                new ErrorRecord(ex, "SetValueException", ErrorCategory.InvalidData, null));
        }

        // Maps the parameter-set name to the display string used in error messages.
        private static string ParameterSet2ResourceString(string parameterSet)
        {
            if (parameterSet.Equals(netSetName, StringComparison.OrdinalIgnoreCase))
            {
                return ".NET";
            }
            else if (parameterSet.Equals("Com", StringComparison.OrdinalIgnoreCase))
            {
                return "COM";
            }
            else
            {
                Dbg.Assert(false, "Should never get here - unknown parameter set");
                return parameterSet;
            }
        }

        #endregion private

        #region Overrides

        /// <summary>
        /// Create the object. For the ".Net" set: resolve the type name, apply
        /// language-mode/lockdown restrictions, pick a constructor (default,
        /// value-type parameterless, or best match for -ArgumentList), apply
        /// -Property, and write the result. For the "Com" set (Windows only):
        /// resolve the ProgID to a CLSID and create the COM instance.
        /// </summary>
        protected override void BeginProcessing()
        {
            Type type = null;
            PSArgumentException mshArgE = null;

            if (string.Equals(ParameterSetName, netSetName, StringComparison.Ordinal))
            {
                object _newObject = null;
                try
                {
                    type = LanguagePrimitives.ConvertTo(TypeName, typeof(Type), CultureInfo.InvariantCulture) as Type;
                }
                catch (Exception e)
                {
                    // These complications in exception handling aim to make error messages better.
                    if (e is InvalidCastException || e is ArgumentException)
                    {
                        // Ambiguous type name gets its own error id; anything else
                        // that failed conversion is reported as "type not found".
                        if (e.InnerException != null && e.InnerException is TypeResolver.AmbiguousTypeException)
                        {
                            ThrowTerminatingError(
                                new ErrorRecord(
                                    e,
                                    "AmbiguousTypeReference",
                                    ErrorCategory.InvalidType,
                                    targetObject: null));
                        }

                        mshArgE = PSTraceSource.NewArgumentException(
                            "TypeName",
                            NewObjectStrings.TypeNotFound,
                            TypeName);
                        ThrowTerminatingError(
                            new ErrorRecord(
                                mshArgE,
                                "TypeNotFound",
                                ErrorCategory.InvalidType,
                                targetObject: null));
                    }

                    throw;
                }

                Diagnostics.Assert(type != null, "LanguagePrimitives.TryConvertTo failed but returned true");

                // ByRef-like (ref struct) types cannot live on the heap, so they
                // cannot be boxed into an object for the pipeline.
                if (type.IsByRefLike)
                {
                    ThrowTerminatingError(
                        new ErrorRecord(
                            PSTraceSource.NewInvalidOperationException(
                                NewObjectStrings.CannotInstantiateBoxedByRefLikeType,
                                type),
                            nameof(NewObjectStrings.CannotInstantiateBoxedByRefLikeType),
                            ErrorCategory.InvalidOperation,
                            targetObject: null));
                }

                // ConstrainedLanguage: only whitelisted core types may be created.
                if (Context.LanguageMode == PSLanguageMode.ConstrainedLanguage)
                {
                    if (!CoreTypes.Contains(type))
                    {
                        ThrowTerminatingError(
                            new ErrorRecord(
                                new PSNotSupportedException(NewObjectStrings.CannotCreateTypeConstrainedLanguage), "CannotCreateTypeConstrainedLanguage", ErrorCategory.PermissionDenied, null));
                    }
                }

                // NoLanguage/RestrictedLanguage under system lockdown: same
                // core-type restriction, different error id/message.
                switch (Context.LanguageMode)
                {
                    case PSLanguageMode.NoLanguage:
                    case PSLanguageMode.RestrictedLanguage:
                        if (SystemPolicy.GetSystemLockdownPolicy() == SystemEnforcementMode.Enforce
                            && !CoreTypes.Contains(type))
                        {
                            ThrowTerminatingError(
                                new ErrorRecord(
                                    new PSNotSupportedException(
                                        string.Format(NewObjectStrings.CannotCreateTypeLanguageMode, Context.LanguageMode.ToString())),
                                    nameof(NewObjectStrings.CannotCreateTypeLanguageMode),
                                    ErrorCategory.PermissionDenied,
                                    targetObject: null));
                        }

                        break;
                }

                // WinRT does not support creating instances of attribute & delegate WinRT types.
                if (WinRTHelper.IsWinRTType(type) && ((typeof(System.Attribute)).IsAssignableFrom(type) || (typeof(System.Delegate)).IsAssignableFrom(type)))
                {
                    ThrowTerminatingError(new ErrorRecord(new InvalidOperationException(NewObjectStrings.CannotInstantiateWinRTType),
                        "CannotInstantiateWinRTType", ErrorCategory.InvalidOperation, null));
                }

                if (ArgumentList == null || ArgumentList.Length == 0)
                {
                    // No arguments: prefer the public default constructor.
                    ConstructorInfo ci = type.GetConstructor(Type.EmptyTypes);
                    if (ci != null && ci.IsPublic)
                    {
                        _newObject = CallConstructor(type, new ConstructorInfo[] { ci }, Array.Empty<object>());
                        if (_newObject != null && Property != null)
                        {
                            // The method invocation is disabled for "Hashtable to Object conversion" (Win8:649519), but we need to keep it enabled for New-Object for compatibility to PSv2
                            _newObject = LanguagePrimitives.SetObjectProperties(_newObject, Property, type, CreateMemberNotFoundError, CreateMemberSetValueError, enableMethodCall: true);
                        }

                        WriteObject(_newObject);
                        return;
                    }
                    else if (type.IsValueType)
                    {
                        // This is for default parameterless struct ctor which is not returned by
                        // Type.GetConstructor(System.Type.EmptyTypes).
                        try
                        {
                            _newObject = Activator.CreateInstance(type);
                            if (_newObject != null && Property != null)
                            {
                                // Win8:649519
                                _newObject = LanguagePrimitives.SetObjectProperties(_newObject, Property, type, CreateMemberNotFoundError, CreateMemberSetValueError, enableMethodCall: true);
                            }
                        }
                        catch (TargetInvocationException e)
                        {
                            // Surface the constructor's own exception, not the reflection wrapper.
                            ThrowTerminatingError(
                                new ErrorRecord(
                                    e.InnerException ?? e,
                                    "ConstructorCalledThrowException",
                                    ErrorCategory.InvalidOperation, null));
                        }

                        WriteObject(_newObject);
                        return;
                    }
                }
                else
                {
                    // Arguments supplied: let overload resolution pick among all
                    // constructors.
                    ConstructorInfo[] ctorInfos = type.GetConstructors();
                    if (ctorInfos.Length != 0)
                    {
                        _newObject = CallConstructor(type, ctorInfos, ArgumentList);
                        if (_newObject != null && Property != null)
                        {
                            // Win8:649519
                            _newObject = LanguagePrimitives.SetObjectProperties(_newObject, Property, type, CreateMemberNotFoundError, CreateMemberSetValueError, enableMethodCall: true);
                        }

                        WriteObject(_newObject);
                        return;
                    }
                }

                // Fell through every constructor path: nothing suitable exists.
                mshArgE = PSTraceSource.NewArgumentException(
                    "TypeName", NewObjectStrings.CannotFindAppropriateCtor, TypeName);
                ThrowTerminatingError(
                    new ErrorRecord(
                        mshArgE,
                        "CannotFindAppropriateCtor",
                        ErrorCategory.ObjectNotFound, null));
            }
#if !UNIX
            else // Parameterset -Com
            {
                int result = NewObjectNativeMethods.CLSIDFromProgID(ComObject, out _comObjectClsId);

                // If we're in ConstrainedLanguage, do additional restrictions
                if (Context.LanguageMode == PSLanguageMode.ConstrainedLanguage)
                {
                    bool isAllowed = false;

                    // If it's a system-wide lockdown, we may allow additional COM types
                    if (SystemPolicy.GetSystemLockdownPolicy() == SystemEnforcementMode.Enforce)
                    {
                        if ((result >= 0) &&
                            SystemPolicy.IsClassInApprovedList(_comObjectClsId))
                        {
                            isAllowed = true;
                        }
                    }

                    if (!isAllowed)
                    {
                        ThrowTerminatingError(
                            new ErrorRecord(
                                new PSNotSupportedException(NewObjectStrings.CannotCreateTypeConstrainedLanguage), "CannotCreateComTypeConstrainedLanguage", ErrorCategory.PermissionDenied, null));
                        return;
                    }
                }

                object comObject = CreateComObject();

                // A type name other than System.__ComObject means the runtime
                // bound to a registered interop assembly instead of the raw
                // COM object; warn (or error with -Strict).
                string comObjectTypeName = comObject.GetType().FullName;
                if (!comObjectTypeName.Equals("System.__ComObject"))
                {
                    mshArgE = PSTraceSource.NewArgumentException(
                        "TypeName", NewObjectStrings.ComInteropLoaded, comObjectTypeName);
                    WriteVerbose(mshArgE.Message);
                    if (Strict)
                    {
                        WriteError(new ErrorRecord(
                            mshArgE,
                            "ComInteropLoaded",
                            ErrorCategory.InvalidArgument, comObject));
                    }
                }

                if (comObject != null && Property != null)
                {
                    // Win8:649519
                    comObject = LanguagePrimitives.SetObjectProperties(comObject, Property, type, CreateMemberNotFoundError, CreateMemberSetValueError, enableMethodCall: true);
                }

                WriteObject(comObject);
            }
#endif
        }

        #endregion Overrides

#if !UNIX
        #region Com

        // Activator.CreateInstance with every expected failure converted to a
        // terminating error carrying a specific error id.
        private object SafeCreateInstance(Type t)
        {
            object result = null;
            try
            {
                result = Activator.CreateInstance(t);
            }
            // Does not catch InvalidComObjectException because ComObject is obtained from GetTypeFromProgID
            catch (ArgumentException e)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "CannotNewNonRuntimeType",
                        ErrorCategory.InvalidOperation, null));
            }
            catch (NotSupportedException e)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "CannotNewTypeBuilderTypedReferenceArgIteratorRuntimeArgumentHandle",
                        ErrorCategory.InvalidOperation, null));
            }
            catch (MethodAccessException e)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "CtorAccessDenied",
                        ErrorCategory.PermissionDenied, null));
            }
            catch (MissingMethodException e)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "NoPublicCtorMatch",
                        ErrorCategory.InvalidOperation, null));
            }
            catch (MemberAccessException e)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "CannotCreateAbstractClass",
                        ErrorCategory.InvalidOperation, null));
            }
            catch (COMException e)
            {
                // RPC_E_CHANGED_MODE must bubble up so CreateComObject can
                // retry the creation on a dedicated STA thread.
                if (e.HResult == RPC_E_CHANGED_MODE)
                {
                    throw;
                }

                ThrowTerminatingError(
                    new ErrorRecord(
                        e,
                        "NoCOMClassIdentified",
                        ErrorCategory.ResourceUnavailable, null));
            }

            return result;
        }

        // Mutable result holder passed to the STA worker thread.
        private sealed class ComCreateInfo
        {
            public object objectCreated;   // the created COM instance, when success
            public bool success;           // whether creation completed without throwing
            public Exception e;            // the failure, when success is false
        }

        private ComCreateInfo createInfo;

        // Thread entry point: creates the COM object on an STA thread and
        // reports the outcome through the shared ComCreateInfo.
        // NOTE(review): uses Type.GetTypeFromCLSID here while CreateComObject
        // uses Marshal.GetTypeFromCLSID — presumably equivalent; confirm.
        private void STAComCreateThreadProc(object createstruct)
        {
            ComCreateInfo info = (ComCreateInfo)createstruct;
            try
            {
                Type type = Type.GetTypeFromCLSID(_comObjectClsId);
                if (type == null)
                {
                    PSArgumentException mshArgE = PSTraceSource.NewArgumentException(
                        "ComObject",
                        NewObjectStrings.CannotLoadComObjectType,
                        ComObject);

                    info.e = mshArgE;
                    info.success = false;
                    return;
                }

                info.objectCreated = SafeCreateInstance(type);
                info.success = true;
            }
            catch (Exception e)
            {
                info.e = e;
                info.success = false;
            }
        }

        // Creates the COM object for _comObjectClsId. If the current thread's
        // apartment mode is incompatible (RPC_E_CHANGED_MODE), retries once on
        // a dedicated STA thread.
        private object CreateComObject()
        {
            try
            {
                Type type = Marshal.GetTypeFromCLSID(_comObjectClsId);
                if (type == null)
                {
                    PSArgumentException mshArgE = PSTraceSource.NewArgumentException(
                        "ComObject",
                        NewObjectStrings.CannotLoadComObjectType,
                        ComObject);
                    ThrowTerminatingError(
                        new ErrorRecord(
                            mshArgE,
                            "CannotLoadComObjectType",
                            ErrorCategory.InvalidType,
                            targetObject: null));
                }

                return SafeCreateInstance(type);
            }
            catch (COMException e)
            {
                // Check Error Code to see if Error is because of Com apartment Mismatch.
                if (e.HResult == RPC_E_CHANGED_MODE)
                {
                    createInfo = new ComCreateInfo();

                    Thread thread = new(new ParameterizedThreadStart(STAComCreateThreadProc));
                    thread.SetApartmentState(ApartmentState.STA);
                    thread.Start(createInfo);
                    thread.Join();

                    if (createInfo.success)
                    {
                        return createInfo.objectCreated;
                    }

                    ThrowTerminatingError(
                        new ErrorRecord(createInfo.e, "NoCOMClassIdentified",
                            ErrorCategory.ResourceUnavailable, null));
                }
                else
                {
                    ThrowTerminatingError(
                        new ErrorRecord(
                            e,
                            "NoCOMClassIdentified",
                            ErrorCategory.ResourceUnavailable, null));
                }

                return null;
            }
        }

        #endregion Com
#endif

        // HResult code '-2147417850' - Cannot change thread mode after it is set.
        private const int RPC_E_CHANGED_MODE = unchecked((int)0x80010106);

        // Name of the .NET parameter set (also the cmdlet's default set).
        private const string netSetName = "Net";
    }
    /// <summary>
    /// Native methods for dealing with COM objects.
    /// </summary>
    internal static class NewObjectNativeMethods
    {
        /// Return Type: HRESULT->LONG->int
        // Resolves a COM ProgID (e.g. "Excel.Application") to its CLSID.
        // A negative return value is a failed HRESULT; callers must check it.
        [DllImport(PinvokeDllNames.CLSIDFromProgIDDllName)]
        internal static extern int CLSIDFromProgID([MarshalAs(UnmanagedType.LPWStr)] string lpszProgID, out Guid pclsid);
    }
}
| |
using System;
using System.Collections.Generic;
using Microsoft.Xna.Framework;
using XmasHell.Extensions;
using XmasHell.BulletML;
using MonoGame.Extended;
namespace XmasHell.Entities.Bosses.XmasBell
{
class XmasBellBehaviour4 : AbstractBossBehaviour
{
private TimeSpan _bulletFrequence;
private bool _trollPattern;
private bool _centerPattern;
private TimeSpan _trollPatternDuration;
private TimeSpan _centerPatternDuration;
public XmasBellBehaviour4(Boss boss) : base(boss)
{
}
public override void Start()
{
base.Start();
_bulletFrequence = TimeSpan.Zero;
_trollPattern = false;
_centerPattern = false;
Boss.Speed = GameConfig.BossDefaultSpeed * 2.5f;
Boss.CurrentAnimator.Rotation = 0;
Boss.CurrentAnimator.Speed = 1f;
// Events
Boss.CurrentAnimator.AnimationFinished += AnimationFinished;
Boss.CurrentAnimator.EventTriggered += AnimationEventTriggered;
Boss.MoveOutside();
}
private void AnimationFinished(string animationName)
{
if (animationName.StartsWith("Troll"))
{
GetNewRandomPosition();
PlayRandomTrollAnimation();
}
}
private void AnimationEventTriggered(string eventName)
{
if (eventName == "shoot")
{
Boss.Game.GameManager.MoverManager.TriggerPattern(
"XmasBell/pattern4", BulletType.Type2, false, Boss.ActionPointPosition(), Boss.ActionPointDirection()
);
}
}
private void PlayRandomTrollAnimation()
{
var randomNumber = Boss.Game.GameManager.Random.Next(4);
if (randomNumber == 0)
Boss.CurrentAnimator.Play("Troll");
else if(randomNumber == 1)
Boss.CurrentAnimator.Play("Troll2");
else if (randomNumber == 2)
Boss.CurrentAnimator.Play("Troll3");
else
{
if (Boss.Position().X < 0 || Boss.Position().X > Boss.Game.ViewportAdapter.VirtualWidth)
Boss.CurrentAnimator.Play("Troll4W");
else if(Boss.Position().Y < 0 || Boss.Position().Y > Boss.Game.ViewportAdapter.VirtualHeight)
Boss.CurrentAnimator.Play("Troll4H");
}
}
public override void Stop()
{
base.Stop();
Boss.CurrentAnimator.Rotation = 0;
Boss.ShootTimerFinished -= ShootTimerFinished;
}
private void GetNewRandomPosition()
{
var side = new List<ScreenSide>()
{
ScreenSide.Left, ScreenSide.Top, ScreenSide.Right, ScreenSide.Bottom
};
var randomSideIndex = Boss.Game.GameManager.Random.Next(side.Count);
float newXPosition;
float newYPosition;
switch (side[randomSideIndex])
{
case ScreenSide.Left:
Boss.CurrentAnimator.Rotation = 0;
newXPosition = -Boss.Width() / 2;
newYPosition = Boss.Game.GameManager.Random.NextFloat(
Boss.Height() / 2f,
GameConfig.VirtualResolution.Y - Boss.Height() / 2f
);
break;
case ScreenSide.Top:
Boss.CurrentAnimator.Rotation = MathHelper.ToRadians(90f);
newXPosition = Boss.Game.GameManager.Random.NextFloat(
Boss.Width() / 2f,
GameConfig.VirtualResolution.X - Boss.Width() / 2f
);
newYPosition = -Boss.Width() / 2f;
break;
case ScreenSide.Right:
Boss.CurrentAnimator.Rotation = MathHelper.ToRadians(180f);
newXPosition = GameConfig.VirtualResolution.X + (Boss.Width() / 2f);
newYPosition = Boss.Game.GameManager.Random.NextFloat(
Boss.Height() / 2f,
GameConfig.VirtualResolution.Y - Boss.Height() / 2f
);
break;
case ScreenSide.Bottom:
Boss.CurrentAnimator.Rotation = MathHelper.ToRadians(-90f);
newXPosition = Boss.Game.GameManager.Random.NextFloat(
Boss.Width() / 2f,
GameConfig.VirtualResolution.X - Boss.Width() / 2f
);
newYPosition = GameConfig.VirtualResolution.Y + (Boss.Width() / 2f);
break;
default:
throw new ArgumentOutOfRangeException();
}
Boss.CurrentAnimator.Position = new Vector2(newXPosition, newYPosition);
}
private void StartTrollPattern()
{
_trollPattern = true;
_trollPatternDuration = TimeSpan.FromSeconds(Boss.Game.GameManager.Random.Next(10, 30));
GetNewRandomPosition();
PlayRandomTrollAnimation();
}
private void StopTrollPattern()
{
_trollPattern = false;
StartCenterPattern();
}
private void StartCenterPattern()
{
Boss.CurrentAnimator.Play("Idle");
Boss.CurrentAnimator.Rotation = 0;
Boss.Invincible = true;
_centerPattern = true;
_centerPatternDuration = TimeSpan.FromSeconds(Boss.Game.GameManager.Random.Next(5, 10));
Boss.MoveToCenter();
Boss.ShootTimerTime = 0.2f;
Boss.ShootTimerFinished += ShootTimerFinished;
}
private void ShootTimerFinished(object sender, float e)
{
Boss.Game.GameManager.MoverManager.TriggerPattern("XmasBell/pattern4", BulletType.Type2, false, Boss.ActionPointPosition(), Boss.ActionPointDirection());
}
private void StopCenterPattern()
{
_centerPattern = false;
Boss.StartShootTimer = false;
Boss.Invincible = false;
Boss.ShootTimerFinished -= ShootTimerFinished;
Boss.Rotation(0);
Boss.CurrentAnimator.Play("Idle");
Boss.MoveOutside();
}
public override void Update(GameTime gameTime)
{
base.Update(gameTime);
if (!_centerPattern && !Boss.TargetingPosition && Boss.IsOutside && Boss.CurrentAnimator.CurrentAnimation.Name == "Idle")
StartTrollPattern();
// Increase animation speed over time
Boss.CurrentAnimator.Speed = MathHelper.Clamp(Boss.CurrentAnimator.Speed + 0.01f * gameTime.GetElapsedSeconds(), 1f, 4f);
UpdateTrollPattern(gameTime);
UpdateCenterPattern(gameTime);
}
private void UpdateTrollPattern(GameTime gameTime)
{
if (!_trollPattern)
return;
if (_trollPatternDuration.TotalMilliseconds > 0)
_trollPatternDuration -= gameTime.ElapsedGameTime;
else
StopTrollPattern();
}
private void UpdateCenterPattern(GameTime gameTime)
{
if (!_centerPattern)
return;
if (Boss.Position() == Boss.Game.ViewportAdapter.Center.ToVector2())
{
Boss.StartShootTimer = true;
Boss.Invincible = false;
Boss.CurrentAnimator.Play("No_Animation");
if (_centerPatternDuration.TotalMilliseconds > 0)
{
_centerPatternDuration -= gameTime.ElapsedGameTime;
Boss.Rotation(Boss.Rotation() + (2.5f * gameTime.GetElapsedSeconds()));
}
else
StopCenterPattern();
}
}
}
}
| |
using System;
using System.IO;
using Xunit;
// Exercism "grep" exercise test suite. Each test writes three fixture files
// into the temp directory (constructor), runs Grep.Match with a pattern, a
// space-separated flag string and a file list, and compares against the exact
// expected output; Dispose removes the fixtures. All but the first test carry
// the exercism-style Skip marker that students remove as they progress.
public class GrepTests : IDisposable
{
    [Fact]
    public void One_file_one_match_no_flags()
    {
        var pattern = "Agamemnon";
        var flags = "";
        var files = new[] { "iliad.txt" };
        var expected = "Of Atreus, Agamemnon, King of men.";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_one_match_print_line_numbers_flag()
    {
        // -n prefixes each match with its 1-based line number.
        var pattern = "Forbidden";
        var flags = "-n";
        var files = new[] { "paradise-lost.txt" };
        var expected = "2:Of that Forbidden Tree, whose mortal tast";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_one_match_case_insensitive_flag()
    {
        // -i matches case-insensitively.
        var pattern = "FORBIDDEN";
        var flags = "-i";
        var files = new[] { "paradise-lost.txt" };
        var expected = "Of that Forbidden Tree, whose mortal tast";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_one_match_print_file_names_flag()
    {
        // -l prints only the names of files containing a match.
        var pattern = "Forbidden";
        var flags = "-l";
        var files = new[] { "paradise-lost.txt" };
        var expected = "paradise-lost.txt";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_one_match_match_entire_lines_flag()
    {
        // -x requires the pattern to match the whole line.
        var pattern = "With loss of Eden, till one greater Man";
        var flags = "-x";
        var files = new[] { "paradise-lost.txt" };
        var expected = "With loss of Eden, till one greater Man";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_one_match_multiple_flags()
    {
        var pattern = "OF ATREUS, Agamemnon, KIng of MEN.";
        var flags = "-n -i -x";
        var files = new[] { "iliad.txt" };
        var expected = "9:Of Atreus, Agamemnon, King of men.";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_several_matches_no_flags()
    {
        var pattern = "may";
        var flags = "";
        var files = new[] { "midsummer-night.txt" };
        var expected =
            "Nor how it may concern my modesty,\n" +
            "But I beseech your grace that I may know\n" +
            "The worst that may befall me in this case,";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_several_matches_print_line_numbers_flag()
    {
        var pattern = "may";
        var flags = "-n";
        var files = new[] { "midsummer-night.txt" };
        var expected =
            "3:Nor how it may concern my modesty,\n" +
            "5:But I beseech your grace that I may know\n" +
            "6:The worst that may befall me in this case,";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_several_matches_match_entire_lines_flag()
    {
        // No full line equals "may", so the result is empty.
        var pattern = "may";
        var flags = "-x";
        var files = new[] { "midsummer-night.txt" };
        var expected = "";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_several_matches_case_insensitive_flag()
    {
        var pattern = "ACHILLES";
        var flags = "-i";
        var files = new[] { "iliad.txt" };
        var expected =
            "Achilles sing, O Goddess! Peleus' son;\n" +
            "The noble Chief Achilles from the son";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_several_matches_inverted_flag()
    {
        // -v inverts the match: lines NOT containing the pattern.
        var pattern = "Of";
        var flags = "-v";
        var files = new[] { "paradise-lost.txt" };
        var expected =
            "Brought Death into the World, and all our woe,\n" +
            "With loss of Eden, till one greater Man\n" +
            "Restore us, and regain the blissful Seat,\n" +
            "Sing Heav'nly Muse, that on the secret top\n" +
            "That Shepherd, who first taught the chosen Seed";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_no_matches_various_flags()
    {
        var pattern = "Gandalf";
        var flags = "-n -l -x -i";
        var files = new[] { "iliad.txt" };
        var expected = "";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_one_match_file_flag_takes_precedence_over_line_flag()
    {
        var pattern = "ten";
        var flags = "-n -l";
        var files = new[] { "iliad.txt" };
        var expected = "iliad.txt";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void One_file_several_matches_inverted_and_match_entire_lines_flags()
    {
        var pattern = "Illustrious into Ades premature,";
        var flags = "-x -v";
        var files = new[] { "iliad.txt" };
        var expected =
            "Achilles sing, O Goddess! Peleus' son;\n" +
            "His wrath pernicious, who ten thousand woes\n" +
            "Caused to Achaia's host, sent many a soul\n" +
            "And Heroes gave (so stood the will of Jove)\n" +
            "To dogs and to all ravening fowls a prey,\n" +
            "When fierce dispute had separated once\n" +
            "The noble Chief Achilles from the son\n" +
            "Of Atreus, Agamemnon, King of men.";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_one_match_no_flags()
    {
        // With multiple files, each match line is prefixed with its file name.
        var pattern = "Agamemnon";
        var flags = "";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected = "iliad.txt:Of Atreus, Agamemnon, King of men.";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_several_matches_no_flags()
    {
        var pattern = "may";
        var flags = "";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "midsummer-night.txt:Nor how it may concern my modesty,\n" +
            "midsummer-night.txt:But I beseech your grace that I may know\n" +
            "midsummer-night.txt:The worst that may befall me in this case,";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_several_matches_print_line_numbers_flag()
    {
        var pattern = "that";
        var flags = "-n";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "midsummer-night.txt:5:But I beseech your grace that I may know\n" +
            "midsummer-night.txt:6:The worst that may befall me in this case,\n" +
            "paradise-lost.txt:2:Of that Forbidden Tree, whose mortal tast\n" +
            "paradise-lost.txt:6:Sing Heav'nly Muse, that on the secret top";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_one_match_print_file_names_flag()
    {
        var pattern = "who";
        var flags = "-l";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "iliad.txt\n" +
            "paradise-lost.txt";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_several_matches_case_insensitive_flag()
    {
        var pattern = "TO";
        var flags = "-i";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "iliad.txt:Caused to Achaia's host, sent many a soul\n" +
            "iliad.txt:Illustrious into Ades premature,\n" +
            "iliad.txt:And Heroes gave (so stood the will of Jove)\n" +
            "iliad.txt:To dogs and to all ravening fowls a prey,\n" +
            "midsummer-night.txt:I do entreat your grace to pardon me.\n" +
            "midsummer-night.txt:In such a presence here to plead my thoughts;\n" +
            "midsummer-night.txt:If I refuse to wed Demetrius.\n" +
            "paradise-lost.txt:Brought Death into the World, and all our woe,\n" +
            "paradise-lost.txt:Restore us, and regain the blissful Seat,\n" +
            "paradise-lost.txt:Sing Heav'nly Muse, that on the secret top";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_several_matches_inverted_flag()
    {
        var pattern = "a";
        var flags = "-v";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "iliad.txt:Achilles sing, O Goddess! Peleus' son;\n" +
            "iliad.txt:The noble Chief Achilles from the son\n" +
            "midsummer-night.txt:If I refuse to wed Demetrius.";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_one_match_match_entire_lines_flag()
    {
        var pattern = "But I beseech your grace that I may know";
        var flags = "-x";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected = "midsummer-night.txt:But I beseech your grace that I may know";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_one_match_multiple_flags()
    {
        var pattern = "WITH LOSS OF EDEN, TILL ONE GREATER MAN";
        var flags = "-n -i -x";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected = "paradise-lost.txt:4:With loss of Eden, till one greater Man";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_no_matches_various_flags()
    {
        var pattern = "Frodo";
        var flags = "-n -l -x -i";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected = "";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_several_matches_file_flag_takes_precedence_over_line_number_flag()
    {
        var pattern = "who";
        var flags = "-n -l";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "iliad.txt\n" +
            "paradise-lost.txt";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    [Fact(Skip = "Remove this Skip property to run this test")]
    public void Multiple_files_several_matches_inverted_and_match_entire_lines_flags()
    {
        var pattern = "Illustrious into Ades premature,";
        var flags = "-x -v";
        var files = new[] { "iliad.txt", "midsummer-night.txt", "paradise-lost.txt" };
        var expected =
            "iliad.txt:Achilles sing, O Goddess! Peleus' son;\n" +
            "iliad.txt:His wrath pernicious, who ten thousand woes\n" +
            "iliad.txt:Caused to Achaia's host, sent many a soul\n" +
            "iliad.txt:And Heroes gave (so stood the will of Jove)\n" +
            "iliad.txt:To dogs and to all ravening fowls a prey,\n" +
            "iliad.txt:When fierce dispute had separated once\n" +
            "iliad.txt:The noble Chief Achilles from the son\n" +
            "iliad.txt:Of Atreus, Agamemnon, King of men.\n" +
            "midsummer-night.txt:I do entreat your grace to pardon me.\n" +
            "midsummer-night.txt:I know not by what power I am made bold,\n" +
            "midsummer-night.txt:Nor how it may concern my modesty,\n" +
            "midsummer-night.txt:In such a presence here to plead my thoughts;\n" +
            "midsummer-night.txt:But I beseech your grace that I may know\n" +
            "midsummer-night.txt:The worst that may befall me in this case,\n" +
            "midsummer-night.txt:If I refuse to wed Demetrius.\n" +
            "paradise-lost.txt:Of Mans First Disobedience, and the Fruit\n" +
            "paradise-lost.txt:Of that Forbidden Tree, whose mortal tast\n" +
            "paradise-lost.txt:Brought Death into the World, and all our woe,\n" +
            "paradise-lost.txt:With loss of Eden, till one greater Man\n" +
            "paradise-lost.txt:Restore us, and regain the blissful Seat,\n" +
            "paradise-lost.txt:Sing Heav'nly Muse, that on the secret top\n" +
            "paradise-lost.txt:Of Oreb, or of Sinai, didst inspire\n" +
            "paradise-lost.txt:That Shepherd, who first taught the chosen Seed";
        Assert.Equal(expected, Grep.Match(pattern, flags, files));
    }

    // Fixture file names and contents. Line numbers asserted above (-n flag)
    // refer to positions inside these literals, so they must not change.
    private const string IliadFileName = "iliad.txt";

    private const string IliadContents =
        "Achilles sing, O Goddess! Peleus' son;\n" +
        "His wrath pernicious, who ten thousand woes\n" +
        "Caused to Achaia's host, sent many a soul\n" +
        "Illustrious into Ades premature,\n" +
        "And Heroes gave (so stood the will of Jove)\n" +
        "To dogs and to all ravening fowls a prey,\n" +
        "When fierce dispute had separated once\n" +
        "The noble Chief Achilles from the son\n" +
        "Of Atreus, Agamemnon, King of men.\n";

    private const string MidsummerNightFileName = "midsummer-night.txt";

    private const string MidsummerNightContents =
        "I do entreat your grace to pardon me.\n" +
        "I know not by what power I am made bold,\n" +
        "Nor how it may concern my modesty,\n" +
        "In such a presence here to plead my thoughts;\n" +
        "But I beseech your grace that I may know\n" +
        "The worst that may befall me in this case,\n" +
        "If I refuse to wed Demetrius.\n";

    private const string ParadiseLostFileName = "paradise-lost.txt";

    private const string ParadiseLostContents =
        "Of Mans First Disobedience, and the Fruit\n" +
        "Of that Forbidden Tree, whose mortal tast\n" +
        "Brought Death into the World, and all our woe,\n" +
        "With loss of Eden, till one greater Man\n" +
        "Restore us, and regain the blissful Seat,\n" +
        "Sing Heav'nly Muse, that on the secret top\n" +
        "Of Oreb, or of Sinai, didst inspire\n" +
        "That Shepherd, who first taught the chosen Seed\n";

    // xUnit runs the constructor before each test: write the fixture files
    // into the temp directory so Grep.Match can open them by relative name.
    public GrepTests()
    {
        Directory.SetCurrentDirectory(Path.GetTempPath());
        File.WriteAllText(IliadFileName, IliadContents);
        File.WriteAllText(MidsummerNightFileName, MidsummerNightContents);
        File.WriteAllText(ParadiseLostFileName, ParadiseLostContents);
    }

    // Per-test teardown: remove the fixture files.
    public void Dispose()
    {
        Directory.SetCurrentDirectory(Path.GetTempPath());
        File.Delete(IliadFileName);
        File.Delete(MidsummerNightFileName);
        File.Delete(ParadiseLostFileName);
    }
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace TeamManager.Areas.HelpPage.SampleGeneration
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
private const int DefaultCollectionSize = 3;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
        // Core dispatcher: routes the requested type to the matching generator.
        // The check order is significant — simple types first, then arrays and
        // generics, then EXACT interface types (IDictionary, IList/IEnumerable/
        // ICollection) before the broader IsAssignableFrom checks, so abstract
        // interface requests get a concrete default (Hashtable / ArrayList)
        // while concrete collection types are instantiated as themselves.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    // Abstract non-generic dictionary request: use Hashtable.
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    // Abstract non-generic collection request: use ArrayList.
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    // Fallback: treat as a POCO and populate its public members.
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            // Non-public or otherwise unsupported type.
            return null;
        }
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Table;
using Microsoft.WindowsAzure.Storage.Table.Queryable;
using Orleans.Runtime;
namespace Orleans.AzureUtils
{
/// <summary>
/// Utility class to encapsulate row-based access to Azure table storage.
/// </summary>
/// <remarks>
/// These functions are mostly intended for internal usage by Orleans runtime, but due to certain assembly packaging constraints this class needs to have public visibility.
/// </remarks>
/// <typeparam name="T">Table data entry used by this table / manager.</typeparam>
public class AzureTableDataManager<T> where T : class, ITableEntity, new()
{
/// <summary> Name of the table this instance is managing. </summary>
public string TableName { get; private set; }
/// <summary> Logger for this table manager instance. </summary>
protected internal Logger Logger { get; private set; }
/// <summary> Connection string for the Azure storage account used to host this table. </summary>
protected string ConnectionString { get; set; }
// Reference to the underlying table; assigned by InitTableAsync and used by all data operations.
private CloudTable tableReference;
// Statistic counter tracking Azure "server busy" occurrences (StatisticNames.AZURE_SERVER_BUSY).
private readonly CounterStatistic numServerBusy = CounterStatistic.FindOrCreate(StatisticNames.AZURE_SERVER_BUSY, true);
/// <summary>
/// Constructor
/// </summary>
/// <param name="tableName">Name of the table to be connected to.</param>
/// <param name="storageConnectionString">Connection string for the Azure storage account used to host this table.</param>
/// <param name="logger">Logger to use; when null a per-entity-type runtime logger is created.</param>
public AzureTableDataManager(string tableName, string storageConnectionString, Logger logger = null)
{
    // Fall back to a logger named after the entity type when none was supplied.
    Logger = logger ?? LogManager.GetLogger("AzureTableDataManager-" + typeof(T).Name, LoggerType.Runtime);
    TableName = tableName;
    ConnectionString = storageConnectionString;
    // Rejects names that do not conform to Azure table naming rules.
    AzureStorageUtils.ValidateTableName(tableName);
}
/// <summary>
/// Connects to, or creates and initializes a new Azure table if it does not already exist.
/// </summary>
/// <returns>Completion promise for this operation.</returns>
public async Task InitTableAsync()
{
    const string operation = "InitTable";
    var startTime = DateTime.UtcNow;
    try
    {
        // Table creation uses a separate client (see GetCloudTableCreationClient)
        // from the one used for per-row data operations below.
        CloudTableClient tableCreationClient = GetCloudTableCreationClient();
        CloudTable tableRef = tableCreationClient.GetTableReference(TableName);
        // FromAsync wraps the storage SDK's Begin/End (APM) pair as an awaitable Task.
        bool didCreate = await Task<bool>.Factory.FromAsync(
            tableRef.BeginCreateIfNotExists,
            tableRef.EndCreateIfNotExists,
            null);
        Logger.Info(ErrorCode.AzureTable_01, "{0} Azure storage table {1}", (didCreate ? "Created" : "Attached to"), TableName);
        // Cache the table reference used by all subsequent data operations.
        CloudTableClient tableOperationsClient = GetCloudTableOperationsClient();
        tableReference = tableOperationsClient.GetTableReference(TableName);
    }
    catch (Exception exc)
    {
        Logger.Error(ErrorCode.AzureTable_02, $"Could not initialize connection to storage table {TableName}", exc);
        throw;
    }
    finally
    {
        // Always record/alert if this call took suspiciously long.
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Deletes the Azure table.
/// </summary>
/// <returns>Completion promise for this operation.</returns>
public async Task DeleteTableAsync()
{
    const string operation = "DeleteTable";
    var startTime = DateTime.UtcNow;
    try
    {
        CloudTableClient tableCreationClient = GetCloudTableCreationClient();
        CloudTable tableRef = tableCreationClient.GetTableReference(TableName);
        // FromAsync wraps the storage SDK's Begin/End (APM) pair as an awaitable Task.
        bool didDelete = await Task<bool>.Factory.FromAsync(
            tableRef.BeginDeleteIfExists,
            tableRef.EndDeleteIfExists,
            null);
        if (didDelete)
        {
            Logger.Info(ErrorCode.AzureTable_03, "Deleted Azure storage table {0}", TableName);
        }
    }
    catch (Exception exc)
    {
        // BUG FIX: the previous message "Could not delete storage table {0}" contained a
        // format placeholder that was never substituted (the exception is passed as the
        // log's Exception argument, not as a format argument), so logs printed a literal
        // "{0}". Interpolate the table name, matching the AzureTable_02 message above.
        Logger.Error(ErrorCode.AzureTable_04, $"Could not delete storage table {TableName}", exc);
        throw;
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Deletes all entities the Azure table.
/// </summary>
/// <returns>Completion promise for this operation.</returns>
public async Task ClearTableAsync()
{
    // Read every row, then delete per-partition in batches no larger than the
    // bulk-update limit, running all batch deletions concurrently.
    var entries = await ReadAllTableEntriesAsync();
    var deletions = entries
        .GroupBy(pair => pair.Item1.PartitionKey)
        .SelectMany(group => group.ToBatch(AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS))
        .Select(chunk => DeleteTableEntriesAsync(chunk.ToList()));
    await Task.WhenAll(deletions);
}
/// <summary>
/// Create a new data entry in the Azure table (insert new, not update existing).
/// Fails if the data already exists.
/// </summary>
/// <param name="data">Data to be inserted into the table.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
public async Task<string> CreateTableEntryAsync(T data)
{
    const string operation = "CreateTableEntry";
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("Creating {0} table entry: {1}", TableName, data);
    try
    {
        // WAS:
        // svc.AddObject(TableName, data);
        // SaveChangesOptions.None
        try
        {
            // Presumably FromAsync(BeginExecute, EndExecute) has a slightly better performance than CreateIfNotExistsAsync.
            // TableOperation.Insert fails (storage exception) if the row already exists.
            var opResult = await Task<TableResult>.Factory.FromAsync(
                tableReference.BeginExecute,
                tableReference.EndExecute,
                TableOperation.Insert(data),
                null);
            // Return the new eTag so callers can perform subsequent conditional updates.
            return opResult.Etag;
        }
        catch (Exception exc)
        {
            // Classify/alert on the write error before surfacing it to the caller.
            CheckAlertWriteError(operation, data, null, exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Inserts a data entry in the Azure table: creates a new one if it does not exist, or overwrites (without eTag) an already existing version (the "update in place" semantics).
/// </summary>
/// <param name="data">Data to be inserted or replaced in the table.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
public async Task<string> UpsertTableEntryAsync(T data)
{
    const string operation = "UpsertTableEntry";
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} entry {1} into table {2}", operation, data, TableName);
    try
    {
        try
        {
            // InsertOrReplace needs no eTag: it unconditionally creates or overwrites the row.
            var result = await Task<TableResult>.Factory.FromAsync(
                tableReference.BeginExecute,
                tableReference.EndExecute,
                TableOperation.InsertOrReplace(data),
                null);
            return result.Etag;
        }
        catch (Exception exc)
        {
            Logger.Warn(ErrorCode.AzureTable_06,
                $"Intermediate error upserting entry {(data == null ? "null" : data.ToString())} to the table {TableName}", exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Merges a data entry in the Azure table.
/// </summary>
/// <param name="data">Data to be merged in the table.</param>
/// <param name="eTag">ETag to apply.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
internal async Task<string> MergeTableEntryAsync(T data, string eTag)
{
    const string operation = "MergeTableEntry";
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} entry {1} into table {2}", operation, data, TableName);
    try
    {
        try
        {
            // WAS:
            // svc.AttachTo(TableName, data, ANY_ETAG);
            // svc.UpdateObject(data);
            data.ETag = eTag;
            // Merge requires an ETag (which may be the '*' wildcard).
            var opResult = await Task<TableResult>.Factory.FromAsync(
                tableReference.BeginExecute,
                tableReference.EndExecute,
                TableOperation.Merge(data),
                null);
            // Return the new eTag so callers can perform subsequent conditional updates.
            return opResult.Etag;
        }
        catch (Exception exc)
        {
            Logger.Warn(ErrorCode.AzureTable_07,
                $"Intermediate error merging entry {(data == null ? "null" : data.ToString())} to the table {TableName}", exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Updates a data entry in the Azure table: updates an already existing data in the table, by using eTag.
/// Fails if the data does not already exist or if eTag does not match.
/// </summary>
/// <param name="data">Data to be updated into the table.</param>
/// <param name="dataEtag">ETag to use.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
public async Task<string> UpdateTableEntryAsync(T data, string dataEtag)
{
    const string operation = "UpdateTableEntryAsync";
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} table {1} entry {2}", operation, TableName, data);
    try
    {
        try
        {
            // Replace is conditional on the eTag matching the stored row.
            data.ETag = dataEtag;
            var result = await Task<TableResult>.Factory.FromAsync(
                tableReference.BeginExecute,
                tableReference.EndExecute,
                TableOperation.Replace(data),
                null);
            // The new ETag is needed for any further conditional operations.
            return result.Etag;
        }
        catch (Exception exc)
        {
            CheckAlertWriteError(operation, data, null, exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Deletes an already existing data in the table, by using eTag.
/// Fails if the data does not already exist or if eTag does not match.
/// </summary>
/// <param name="data">Data entry to be deleted from the table.</param>
/// <param name="eTag">ETag to use.</param>
/// <returns>Completion promise for this storage operation.</returns>
public async Task DeleteTableEntryAsync(T data, string eTag)
{
    const string operation = "DeleteTableEntryAsync";
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} table {1} entry {2}", operation, TableName, data);
    try
    {
        // Delete is conditional on the eTag matching the stored row.
        data.ETag = eTag;
        try
        {
            // Presumably FromAsync(BeginExecute, EndExecute) has a slightly better performance than DeleteIfExistsAsync.
            await Task<TableResult>.Factory.FromAsync(
                tableReference.BeginExecute,
                tableReference.EndExecute,
                TableOperation.Delete(data),
                null);
        }
        catch (Exception exc)
        {
            Logger.Warn(ErrorCode.AzureTable_08,
                $"Intermediate error deleting entry {data} from the table {TableName}.", exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Read a single table entry from the storage table.
/// </summary>
/// <param name="partitionKey">The partition key for the entry.</param>
/// <param name="rowKey">The row key for the entry.</param>
/// <returns>Value promise for tuple containing the data entry and its corresponding etag; null when no matching row exists.</returns>
public async Task<Tuple<T, string>> ReadSingleTableEntryAsync(string partitionKey, string rowKey)
{
    const string operation = "ReadSingleTableEntryAsync";
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} table {1} partitionKey {2} rowKey = {3}", operation, TableName, partitionKey, rowKey);
    T retrievedResult = default(T);
    try
    {
        try
        {
            // Point query: filter on exact PartitionKey + RowKey, so at most one row matches.
            string queryString = TableQueryFilterBuilder.MatchPartitionKeyAndRowKeyFilter(partitionKey, rowKey);
            var query = new TableQuery<T>().Where(queryString);
            TableQuerySegment<T> segment = await Task.Factory
                .FromAsync<TableQuery<T>, TableContinuationToken, TableQuerySegment<T>>(
                    tableReference.BeginExecuteQuerySegmented,
                    tableReference.EndExecuteQuerySegmented<T>, query, null, null);
            retrievedResult = segment.Results.SingleOrDefault();
        }
        catch (StorageException exception)
        {
            // "Not found" is an expected outcome and is translated into a null result below;
            // any other storage error propagates.
            if (!AzureStorageUtils.TableStorageDataNotFound(exception))
                throw;
        }
        //The ETag of data is needed in further operations.
        if (retrievedResult != null) return new Tuple<T, string>(retrievedResult, retrievedResult.ETag);
        if (Logger.IsVerbose) Logger.Verbose("Could not find table entry for PartitionKey={0} RowKey={1}", partitionKey, rowKey);
        return null; // No data
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Read all entries in one partition of the storage table.
/// NOTE: This could be an expensive and slow operation for large table partitions!
/// </summary>
/// <param name="partitionKey">The key for the partition to be searched.</param>
/// <returns>Enumeration of all entries in the specified table partition.</returns>
public Task<IEnumerable<Tuple<T, string>>> ReadAllTableEntriesForPartitionAsync(string partitionKey)
{
    // Filter server-side by partition key; the lambda is captured as an expression tree.
    return ReadTableEntriesAndEtagsAsync(instance => instance.PartitionKey == partitionKey);
}
/// <summary>
/// Read all entries in the table.
/// NOTE: This could be a very expensive and slow operation for large tables!
/// </summary>
/// <returns>Enumeration of all entries in the table.</returns>
public Task<IEnumerable<Tuple<T, string>>> ReadAllTableEntriesAsync()
    // A null predicate means "no filter": read the whole table.
    => ReadTableEntriesAndEtagsAsync(null);
/// <summary>
/// Deletes a set of already existing data entries in the table, by using eTag.
/// Fails if the data does not already exist or if eTag does not match.
/// </summary>
/// <param name="collection">Data entries and their corresponding etags to be deleted from the table.</param>
/// <returns>Completion promise for this storage operation.</returns>
public async Task DeleteTableEntriesAsync(IReadOnlyCollection<Tuple<T, string>> collection)
{
    const string operation = "DeleteTableEntries";
    var startTime = DateTime.UtcNow;
    // BUG FIX: validate before logging. The verbose log enumerates the collection,
    // so with verbose logging enabled a null argument previously surfaced as a
    // NullReferenceException instead of the intended ArgumentNullException.
    if (collection == null) throw new ArgumentNullException(nameof(collection));
    if (collection.Count > AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS)
    {
        throw new ArgumentOutOfRangeException(nameof(collection), collection.Count,
            "Too many rows for bulk delete - max " + AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS);
    }
    if (collection.Count == 0)
    {
        return;
    }
    if (Logger.IsVerbose2) Logger.Verbose2("Deleting {0} table entries: {1}", TableName, Utils.EnumerableToString(collection));
    try
    {
        // Build one batch (entity group transaction): all deletes succeed or fail together.
        // Each entry carries its eTag so deletion remains conditional per row.
        var entityBatch = new TableBatchOperation();
        foreach (var tuple in collection)
        {
            T item = tuple.Item1;
            item.ETag = tuple.Item2;
            entityBatch.Delete(item);
        }
        try
        {
            await Task<IList<TableResult>>.Factory.FromAsync(
                tableReference.BeginExecuteBatch,
                tableReference.EndExecuteBatch,
                entityBatch,
                null);
        }
        catch (Exception exc)
        {
            Logger.Warn(ErrorCode.AzureTable_08,
                $"Intermediate error deleting entries {Utils.EnumerableToString(collection)} from the table {TableName}.", exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Read data entries and their corresponding eTags from the Azure table.
/// </summary>
/// <param name="predicate">Predicate function to use for querying the table and filtering the results; null reads the whole table.</param>
/// <returns>Enumeration of entries in the table which match the query condition.</returns>
/// <exception cref="OrleansException">Thrown when the read fails after all retries are exhausted.</exception>
public async Task<IEnumerable<Tuple<T, string>>> ReadTableEntriesAndEtagsAsync(Expression<Func<T, bool>> predicate)
{
    const string operation = "ReadTableEntriesAndEtags";
    var startTime = DateTime.UtcNow;
    try
    {
        TableQuery<T> cloudTableQuery = predicate == null
            ? tableReference.CreateQuery<T>()
            : tableReference.CreateQuery<T>().Where(predicate).AsTableQuery();
        try
        {
            // Drain every result segment, following continuation tokens until the
            // service reports no more pages.
            Func<Task<List<T>>> executeQueryHandleContinuations = async () =>
            {
                TableQuerySegment<T> querySegment = null;
                var list = new List<T>();
                while (querySegment == null || querySegment.ContinuationToken != null)
                {
                    querySegment = await cloudTableQuery.ExecuteSegmentedAsync(querySegment?.ContinuationToken);
                    list.AddRange(querySegment);
                }
                return list;
            };
            // Retry the whole (multi-segment) read with a fixed pause between attempts;
            // AnalyzeReadException decides whether a given failure is retryable.
            IBackoffProvider backoff = new FixedBackoff(AzureTableDefaultPolicies.PauseBetweenTableOperationRetries);
            List<T> results = await AsyncExecutorWithRetries.ExecuteWithRetries(
                counter => executeQueryHandleContinuations(),
                AzureTableDefaultPolicies.MaxTableOperationRetries,
                (exc, counter) => AzureStorageUtils.AnalyzeReadException(exc.GetBaseException(), counter, TableName, Logger),
                AzureTableDefaultPolicies.TableOperationTimeout,
                backoff);
            // Data was read successfully if we got to here
            return results.Select(i => Tuple.Create(i, i.ETag)).ToList();
        }
        catch (Exception exc)
        {
            // Out of retries...
            var errorMsg = $"Failed to read Azure storage table {TableName}: {exc.Message}";
            // "Not found" is not warned about; everything is wrapped in OrleansException either way.
            if (!AzureStorageUtils.TableStorageDataNotFound(exc))
            {
                Logger.Warn(ErrorCode.AzureTable_09, errorMsg, exc);
            }
            throw new OrleansException(errorMsg, exc);
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Inserts a set of new data entries into the table.
/// Fails if any of the data already exists.
/// </summary>
/// <param name="collection">Data entries to be inserted into the table.</param>
/// <returns>Completion promise for this storage operation.</returns>
public async Task BulkInsertTableEntries(IReadOnlyCollection<T> collection)
{
    const string operation = "BulkInsertTableEntries";
    if (collection == null) throw new ArgumentNullException(nameof(collection));
    if (collection.Count > AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS)
    {
        throw new ArgumentOutOfRangeException(nameof(collection), collection.Count,
            "Too many rows for bulk update - max " + AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS);
    }
    if (collection.Count == 0)
    {
        return;
    }
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("Bulk inserting {0} entries to {1} table", collection.Count, TableName);
    try
    {
        // Batch == entity group transaction: all inserts succeed or fail together.
        // http://msdn.microsoft.com/en-us/library/hh452241.aspx
        var entityBatch = new TableBatchOperation();
        foreach (T entry in collection)
        {
            entityBatch.Insert(entry);
        }
        try
        {
            await Task<IList<TableResult>>.Factory.FromAsync(
                tableReference.BeginExecuteBatch,
                tableReference.EndExecuteBatch,
                entityBatch,
                null);
        }
        catch (Exception exc)
        {
            Logger.Warn(ErrorCode.AzureTable_37,
                $"Intermediate error bulk inserting {collection.Count} entries in the table {TableName}", exc);
            // BUG FIX: previously the exception was swallowed after logging, so callers
            // could not detect a failed bulk insert. Rethrow, consistent with every
            // other write path in this class.
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
#region Internal functions
/// <summary>
/// Atomically (in one batch) inserts data1 as a new entry and conditionally replaces data2
/// using the supplied eTag. Both rows must belong to the same partition for the batch to succeed.
/// </summary>
/// <param name="data1">New entry to insert; fails if it already exists.</param>
/// <param name="data2">Existing entry to replace conditionally.</param>
/// <param name="data2Etag">ETag that data2 must match for the replace to succeed.</param>
/// <returns>Tuple of the new eTags for data1 and data2 respectively.</returns>
internal async Task<Tuple<string, string>> InsertTwoTableEntriesConditionallyAsync(T data1, T data2, string data2Etag)
{
    const string operation = "InsertTableEntryConditionally";
    string data2Str = (data2 == null ? "null" : data2.ToString());
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} into table {1} data1 {2} data2 {3}", operation, TableName, data1, data2Str);
    try
    {
        try
        {
            // WAS:
            // Only AddObject, do NOT AttachTo. If we did both UpdateObject and AttachTo, it would have been equivalent to InsertOrReplace.
            // svc.AddObject(TableName, data);
            // ---
            // svc.AttachTo(TableName, tableVersion, tableVersionEtag);
            // svc.UpdateObject(tableVersion);
            // SaveChangesOptions.ReplaceOnUpdate | SaveChangesOptions.Batch,
            // EntityDescriptor dataResult = svc.GetEntityDescriptor(data);
            // return dataResult.ETag;
            var entityBatch = new TableBatchOperation();
            entityBatch.Add(TableOperation.Insert(data1));
            data2.ETag = data2Etag;
            entityBatch.Add(TableOperation.Replace(data2));
            var opResults = await Task<IList<TableResult>>.Factory.FromAsync(
                tableReference.BeginExecuteBatch,
                tableReference.EndExecuteBatch,
                entityBatch,
                null);
            //The batch results are returned in order of execution,
            //see reference at https://msdn.microsoft.com/en-us/library/microsoft.windowsazure.storage.table.cloudtable.executebatch.aspx.
            //The ETag of data is needed in further operations.
            return new Tuple<string, string>(opResults[0].Etag, opResults[1].Etag);
        }
        catch (Exception exc)
        {
            CheckAlertWriteError(operation, data1, data2Str, exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
/// <summary>
/// Atomically (in one batch) replaces data1 conditionally on data1Etag and, when supplied,
/// replaces data2 conditionally on data2Etag. Both rows must belong to the same partition.
/// </summary>
/// <param name="data1">Existing entry to replace conditionally.</param>
/// <param name="data1Etag">ETag that data1 must match for the replace to succeed.</param>
/// <param name="data2">Optional second entry to replace; skipped when it or its eTag is null.</param>
/// <param name="data2Etag">ETag that data2 must match for the replace to succeed.</param>
/// <returns>Tuple of the new eTags for data1 and data2 (null when data2 was not updated).</returns>
internal async Task<Tuple<string, string>> UpdateTwoTableEntriesConditionallyAsync(T data1, string data1Etag, T data2, string data2Etag)
{
    const string operation = "UpdateTableEntryConditionally";
    string data2Str = (data2 == null ? "null" : data2.ToString());
    var startTime = DateTime.UtcNow;
    if (Logger.IsVerbose2) Logger.Verbose2("{0} table {1} data1 {2} data2 {3}", operation, TableName, data1, data2Str);
    try
    {
        try
        {
            var entityBatch = new TableBatchOperation();
            data1.ETag = data1Etag;
            entityBatch.Add(TableOperation.Replace(data1));
            if (data2 != null && data2Etag != null)
            {
                data2.ETag = data2Etag;
                entityBatch.Add(TableOperation.Replace(data2));
            }
            var opResults = await Task<IList<TableResult>>.Factory.FromAsync(
                tableReference.BeginExecuteBatch,
                tableReference.EndExecuteBatch,
                entityBatch,
                null);
            //The batch results are returned in order of execution,
            //see reference at https://msdn.microsoft.com/en-us/library/microsoft.windowsazure.storage.table.cloudtable.executebatch.aspx.
            //The ETag of data is needed in further operations.
            // BUG FIX: when data2 (or data2Etag) was null the batch contained a single
            // operation, and indexing opResults[1] threw ArgumentOutOfRangeException.
            // Return null for the second eTag in that case.
            string data2NewEtag = opResults.Count > 1 ? opResults[1].Etag : null;
            return new Tuple<string, string>(opResults[0].Etag, data2NewEtag);
        }
        catch (Exception exc)
        {
            CheckAlertWriteError(operation, data1, data2Str, exc);
            throw;
        }
    }
    finally
    {
        CheckAlertSlowAccess(startTime, operation);
    }
}
// Utility methods
/// <summary>
/// Builds a CloudTableClient configured for regular table operations
/// (operation retry policy, operation timeout, JsonNoMetadata payload).
/// </summary>
private CloudTableClient GetCloudTableOperationsClient()
{
    try
    {
        var account = AzureStorageUtils.GetCloudStorageAccount(ConnectionString);
        var client = account.CreateCloudTableClient();
        client.DefaultRequestOptions.RetryPolicy = AzureTableDefaultPolicies.TableOperationRetryPolicy;
        client.DefaultRequestOptions.ServerTimeout = AzureTableDefaultPolicies.TableOperationTimeout;
        // Supported payload formats: AtomPub, Json, JsonFullMetadata, JsonNoMetadata (Json is the default).
        client.DefaultRequestOptions.PayloadFormat = TablePayloadFormat.JsonNoMetadata;
        return client;
    }
    catch (Exception exc)
    {
        Logger.Error(ErrorCode.AzureTable_17, "Error creating CloudTableOperationsClient.", exc);
        throw;
    }
}
/// <summary>
/// Builds a CloudTableClient configured for table creation
/// (creation retry policy, creation timeout, JsonNoMetadata payload).
/// </summary>
private CloudTableClient GetCloudTableCreationClient()
{
    try
    {
        var account = AzureStorageUtils.GetCloudStorageAccount(ConnectionString);
        var client = account.CreateCloudTableClient();
        client.DefaultRequestOptions.RetryPolicy = AzureTableDefaultPolicies.TableCreationRetryPolicy;
        client.DefaultRequestOptions.ServerTimeout = AzureTableDefaultPolicies.TableCreationTimeout;
        // Supported payload formats: AtomPub, Json, JsonFullMetadata, JsonNoMetadata (Json is the default).
        client.DefaultRequestOptions.PayloadFormat = TablePayloadFormat.JsonNoMetadata;
        return client;
    }
    catch (Exception exc)
    {
        Logger.Error(ErrorCode.AzureTable_18, "Error creating CloudTableCreationClient.", exc);
        throw;
    }
}
/// <summary>
/// Logs a table write failure. Contention errors (e.g. precondition failures on
/// conditional writes) are logged at Verbose, since they are expected; anything
/// else is logged as an error.
/// </summary>
private void CheckAlertWriteError(string operation, object data1, string data2, Exception exc)
{
    HttpStatusCode httpStatusCode;
    string restStatus;
    bool isContention = AzureStorageUtils.EvaluateException(exc, out httpStatusCode, out restStatus)
        && AzureStorageUtils.IsContentionError(httpStatusCode);
    if (isContention)
    {
        // Failure of a conditional write is not an error. Will analyze and warn later, if required.
        if (Logger.IsVerbose)
        {
            Logger.Verbose(ErrorCode.AzureTable_13,
                $"Intermediate Azure table write error {operation} to table {TableName} data1 {(data1 ?? "null")} data2 {(data2 ?? "null")}", exc);
        }
    }
    else
    {
        Logger.Error(ErrorCode.AzureTable_14,
            $"Azure table access write error {operation} to table {TableName} entry {data1}", exc);
    }
}
/// <summary>Warns when a table operation exceeded the configured operation timeout.</summary>
private void CheckAlertSlowAccess(DateTime startOperation, string operation)
{
    var elapsed = DateTime.UtcNow - startOperation;
    if (elapsed <= AzureTableDefaultPolicies.TableOperationTimeout)
        return;
    Logger.Warn(ErrorCode.AzureTable_15, "Slow access to Azure Table {0} for {1}, which took {2}.", TableName, operation, elapsed);
}
#endregion
}
/// <summary>Helpers for splitting a sequence into lazily-produced batches.</summary>
internal static class TableDataManagerInternalExtensions
{
    /// <summary>
    /// Splits <paramref name="source"/> into consecutive batches of at most
    /// <paramref name="size"/> items. Batches are produced lazily over a single
    /// shared enumerator, so each inner batch must be consumed before the next
    /// one is requested.
    /// </summary>
    internal static IEnumerable<IEnumerable<TItem>> ToBatch<TItem>(this IEnumerable<TItem> source, int size)
    {
        using (var cursor = source.GetEnumerator())
        {
            while (cursor.MoveNext())
            {
                yield return Take(cursor, size);
            }
        }
    }

    /// <summary>Yields the enumerator's current item plus up to size-1 following items.</summary>
    private static IEnumerable<TItem> Take<TItem>(IEnumerator<TItem> source, int size)
    {
        yield return source.Current;
        for (int taken = 1; taken < size && source.MoveNext(); taken++)
        {
            yield return source.Current;
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using StructureMap;
using LibGit2Sharp;
using GitTfs.Commands;
using Branch = LibGit2Sharp.Branch;
using GitTfs.Util;
namespace GitTfs.Core
{
public class GitRepository : GitHelpers, IGitRepository
{
private readonly IContainer _container;
private readonly Globals _globals;
private IDictionary<string, IGitTfsRemote> _cachedRemotes;
private readonly Repository _repository;
private readonly RemoteConfigConverter _remoteConfigReader;
public GitRepository(string gitDir, IContainer container, Globals globals, RemoteConfigConverter remoteConfigReader)
: base(container)
{
_container = container;
_globals = globals;
GitDir = gitDir;
_repository = new Repository(GitDir);
_remoteConfigReader = remoteConfigReader;
}
// Finalizer: releases the underlying LibGit2Sharp repository handle.
// NOTE(review): calling Dispose() on a managed object from a finalizer is
// generally unsafe (the Repository instance may already have been finalized);
// the conventional fix is to implement IDisposable on this class — confirm
// with callers before changing.
~GitRepository()
{
    if (_repository != null)
        _repository.Dispose();
}
/// <summary>
/// Writes a commit described by <paramref name="logEntry"/> directly into the
/// git object database (no working tree / index involved) and caches its sha
/// under the TFS changeset id for later changeset-to-commit lookups.
/// </summary>
public GitCommit Commit(LogEntry logEntry)
{
    var parents = logEntry.CommitParents.Select(sha => _repository.Lookup<Commit>(sha));
    // Final 'false' argument: do not prettify/alter the commit message —
    // presumably so metadata lines survive verbatim; TODO confirm against the
    // LibGit2Sharp CreateCommit overload in use.
    var commit = _repository.ObjectDatabase.CreateCommit(
        new Signature(logEntry.AuthorName, logEntry.AuthorEmail, logEntry.Date.ToUniversalTime()),
        new Signature(logEntry.CommitterName, logEntry.CommitterEmail, logEntry.Date.ToUniversalTime()),
        logEntry.Log,
        logEntry.Tree,
        parents,
        false);
    // Cache for FindCommitByChangesetId and friends.
    changesetsCache[logEntry.ChangesetId] = commit.Sha;
    return new GitCommit(commit);
}
/// <summary>
/// Creates or force-updates a git ref to point at the given commit sha,
/// optionally recording a reflog message.
/// </summary>
public void UpdateRef(string gitRefName, string shaCommit, string message = null)
{
    if (message != null)
        _repository.Refs.Add(gitRefName, shaCommit, message, true);
    else
        _repository.Refs.Add(gitRefName, shaCommit, allowOverwrite: true);
}
/// <summary>Expands a short branch name to its full local ref ("refs/heads/...").</summary>
public static string ShortToLocalName(string branchName)
{
    return string.Concat("refs/heads/", branchName);
}

/// <summary>Expands a short branch name to its full TFS remote ref ("refs/remotes/tfs/...").</summary>
public static string ShortToTfsRemoteName(string branchName)
{
    return string.Concat("refs/remotes/tfs/", branchName);
}
public string GitDir { get; set; }
public string WorkingCopyPath { get; set; }
public string WorkingCopySubdir { get; set; }
protected override GitProcess Start(string[] command, Action<ProcessStartInfo> initialize)
{
return base.Start(command, initialize.And(SetUpPaths));
}
private void SetUpPaths(ProcessStartInfo gitCommand)
{
if (GitDir != null)
gitCommand.EnvironmentVariables["GIT_DIR"] = GitDir;
if (WorkingCopyPath != null)
gitCommand.WorkingDirectory = WorkingCopyPath;
if (WorkingCopySubdir != null)
gitCommand.WorkingDirectory = Path.Combine(gitCommand.WorkingDirectory, WorkingCopySubdir);
}
/// <summary>Reads a string config value; null when the key is absent.</summary>
public string GetConfig(string key)
{
    var entry = _repository.Config.Get<string>(key);
    return entry?.Value;
}

/// <summary>Reads a typed config value; default(T) when the key is absent or unreadable.</summary>
public T GetConfig<T>(string key)
{
    return GetConfig(key, default(T));
}

/// <summary>
/// Reads a typed config value, falling back to <paramref name="defaultValue"/>
/// when the key is absent or the stored value cannot be converted.
/// </summary>
public T GetConfig<T>(string key, T defaultValue)
{
    try
    {
        var entry = _repository.Config.Get<T>(key);
        return entry == null ? defaultValue : entry.Value;
    }
    catch (Exception)
    {
        // Deliberate best-effort read: a malformed value yields the default.
        return defaultValue;
    }
}
public void SetConfig(string key, string value)
{
_repository.Config.Set<string>(key, value, ConfigurationLevel.Local);
}
public void SetConfig(string key, bool value)
{
SetConfig(key, value.ToString().ToLower());
}
public IEnumerable<IGitTfsRemote> ReadAllTfsRemotes()
{
var remotes = GetTfsRemotes().Values;
foreach (var remote in remotes)
remote.EnsureTfsAuthenticated();
return remotes;
}
public IGitTfsRemote ReadTfsRemote(string remoteId)
{
if (!HasRemote(remoteId))
throw new GitTfsException("Unable to locate git-tfs remote with id = " + remoteId)
.WithRecommendation("Try using `git tfs bootstrap` to auto-init TFS remotes.");
var remote = GetTfsRemotes()[remoteId];
remote.EnsureTfsAuthenticated();
return remote;
}
/// <summary>
/// Finds the configured TFS remote matching the given url + repository path.
/// Returns a DerivedGitTfsRemote when none matches; when several match, logs a
/// warning and returns the first.
/// </summary>
private IGitTfsRemote ReadTfsRemote(string tfsUrl, string tfsRepositoryPath)
{
    // Materialize once: the original kept a deferred query and enumerated it
    // twice (Count() then First()), re-running the match predicate per remote.
    var matchingRemotes = GetTfsRemotes().Values
        .Where(remote => remote.MatchesUrlAndRepositoryPath(tfsUrl, tfsRepositoryPath))
        .ToList();
    if (matchingRemotes.Count == 0)
        return new DerivedGitTfsRemote(tfsUrl, tfsRepositoryPath);
    if (matchingRemotes.Count > 1)
        Trace.WriteLine("More than one remote matched!");
    return matchingRemotes[0];
}
public IEnumerable<string> GetGitRemoteBranches(string gitRemote)
{
gitRemote = gitRemote + "/";
var references = _repository.Branches.Where(b => b.IsRemote && b.FriendlyName.StartsWith(gitRemote) && !b.FriendlyName.EndsWith("/HEAD"));
return references.Select(r => r.FriendlyName);
}
private IDictionary<string, IGitTfsRemote> GetTfsRemotes()
{
return _cachedRemotes ?? (_cachedRemotes = ReadTfsRemotes());
}
public IGitTfsRemote CreateTfsRemote(RemoteInfo remote, string autocrlf = null, string ignorecase = null)
{
if (HasRemote(remote.Id))
throw new GitTfsException("A remote with id \"" + remote.Id + "\" already exists.");
// The autocrlf default (as indicated by a null) is false and is set to override the system-wide setting.
// When creating branches we use the empty string to indicate that we do not want to set the value at all.
if (autocrlf == null)
autocrlf = "false";
if (autocrlf != string.Empty)
_repository.Config.Set("core.autocrlf", autocrlf);
if (ignorecase != null)
_repository.Config.Set("core.ignorecase", ignorecase);
foreach (var entry in _remoteConfigReader.Dump(remote))
{
if (entry.Value != null)
{
_repository.Config.Set(entry.Key, entry.Value);
}
else
{
_repository.Config.Unset(entry.Key);
}
}
var gitTfsRemote = BuildRemote(remote);
gitTfsRemote.EnsureTfsAuthenticated();
return _cachedRemotes[remote.Id] = gitTfsRemote;
}
public void DeleteTfsRemote(IGitTfsRemote remote)
{
if (remote == null)
throw new GitTfsException("error: the name of the remote to delete is invalid!");
UnsetTfsRemoteConfig(remote.Id);
_repository.Refs.Remove(remote.RemoteRef);
}
private void UnsetTfsRemoteConfig(string remoteId)
{
foreach (var entry in _remoteConfigReader.Delete(remoteId))
{
_repository.Config.Unset(entry.Key);
}
_cachedRemotes = null;
}
public void MoveRemote(string oldRemoteName, string newRemoteName)
{
if (!Reference.IsValidName(ShortToLocalName(oldRemoteName)))
throw new GitTfsException("error: the name of the remote to move is invalid!");
if (!Reference.IsValidName(ShortToLocalName(newRemoteName)))
throw new GitTfsException("error: the new name of the remote is invalid!");
if (HasRemote(newRemoteName))
throw new GitTfsException(string.Format("error: this remote name \"{0}\" is already used!", newRemoteName));
var oldRemote = ReadTfsRemote(oldRemoteName);
if (oldRemote == null)
throw new GitTfsException(string.Format("error: the remote \"{0}\" doesn't exist!", oldRemoteName));
var remoteInfo = oldRemote.RemoteInfo;
remoteInfo.Id = newRemoteName;
CreateTfsRemote(remoteInfo);
var newRemote = ReadTfsRemote(newRemoteName);
_repository.Refs.Rename(oldRemote.RemoteRef, newRemote.RemoteRef);
UnsetTfsRemoteConfig(oldRemoteName);
}
/// <summary>Renames a local branch; returns null when <paramref name="oldName"/> does not exist.</summary>
public Branch RenameBranch(string oldName, string newName)
{
    var existing = _repository.Branches[oldName];
    return existing == null ? null : _repository.Branches.Rename(existing, newName);
}
/// <summary>
/// Loads all TFS remotes from the git config and applies the metadata-export
/// setting to each one.
/// </summary>
private IDictionary<string, IGitTfsRemote> ReadTfsRemotes()
{
    // does this need to ensuretfsauthenticated?
    // Touch the config to force a reload: `git tfs init` and `git tfs clone` use
    // Process.Start to update the config, so _repository's cached copy may be out of date.
    _repository.Config.Set("tfs.touch", "1");
    var remotes = _remoteConfigReader.Load(_repository.Config).Select(x => BuildRemote(x)).ToDictionary(x => x.Id);
    bool shouldExport = GetConfig(GitTfsConstants.ExportMetadatasConfigKey) == "true";
    // Hoisted out of the loop: the initializer is loop-invariant (the original
    // allocated a new ExportMetadatasInitializer per remote).
    var metadataExportInitializer = new ExportMetadatasInitializer(_globals);
    foreach (var remote in remotes.Values)
    {
        metadataExportInitializer.InitializeRemote(remote, shouldExport);
    }
    return remotes;
}
private IGitTfsRemote BuildRemote(RemoteInfo remoteInfo)
{
return _container.With(remoteInfo).With<IGitRepository>(this).GetInstance<IGitTfsRemote>();
}
public bool HasRemote(string remoteId)
{
return GetTfsRemotes().ContainsKey(remoteId);
}
public bool IsInSameTeamProjectAsDefaultRepository(string tfsRepositoryPath)
{
IGitTfsRemote defaultRepository;
if (!GetTfsRemotes().TryGetValue(GitTfsConstants.DefaultRepositoryId, out defaultRepository))
{
return true;
}
var teamProjectPath = defaultRepository.TfsRepositoryPath.ToTfsTeamProjectRepositoryPath();
//add ending '/' because there can be overlapping names ($/myproject/ and $/myproject other/)
return tfsRepositoryPath.StartsWith(teamProjectPath + "/");
}
public bool HasRef(string gitRef)
{
return _repository.Refs[gitRef] != null;
}
public void MoveTfsRefForwardIfNeeded(IGitTfsRemote remote)
{
MoveTfsRefForwardIfNeeded(remote, "HEAD");
}
public void MoveTfsRefForwardIfNeeded(IGitTfsRemote remote, string @ref)
{
int currentMaxChangesetId = remote.MaxChangesetId;
var untrackedTfsChangesets = from cs in GetLastParentTfsCommits(@ref)
where cs.Remote.Id == remote.Id && cs.ChangesetId > currentMaxChangesetId
orderby cs.ChangesetId
select cs;
foreach (var cs in untrackedTfsChangesets)
{
// UpdateTfsHead sets tag with TFS changeset id on each commit so we can't just update to latest
remote.UpdateTfsHead(cs.GitCommit, cs.ChangesetId);
}
}
/// <summary>Looks up a commit by any commitish; returns null when not found.</summary>
public GitCommit GetCommit(string commitish)
{
    var found = _repository.Lookup<Commit>(commitish);
    if (found is null)
        return null;
    return new GitCommit(found);
}
public MergeResult Merge(string commitish)
{
var commit = _repository.Lookup<Commit>(commitish);
if (commit == null)
throw new GitTfsException("error: commit '" + commitish + "' can't be found and merged into!");
return _repository.Merge(commit, _repository.Config.BuildSignature(new DateTimeOffset(DateTime.Now)));
}
public String GetCurrentCommit()
{
return _repository.Head.Commits.First().Sha;
}
public IEnumerable<TfsChangesetInfo> GetLastParentTfsCommits(string head)
{
var changesets = new List<TfsChangesetInfo>();
var commit = _repository.Lookup<Commit>(head);
if (commit == null)
return changesets;
FindTfsParentCommits(changesets, commit);
return changesets;
}
private void FindTfsParentCommits(List<TfsChangesetInfo> changesets, Commit commit)
{
var commitsToFollow = new Stack<Commit>();
commitsToFollow.Push(commit);
var alreadyVisitedCommits = new HashSet<string>();
while (commitsToFollow.Any())
{
commit = commitsToFollow.Pop();
alreadyVisitedCommits.Add(commit.Sha);
var changesetInfo = TryParseChangesetInfo(commit.Message, commit.Sha);
if (changesetInfo == null)
{
// If commit was not a TFS commit, continue searching all new parents of the commit
// Add parents in reverse order to keep topology (main parent should be treated first!)
foreach (var parent in commit.Parents.Where(x => !alreadyVisitedCommits.Contains(x.Sha)).Reverse())
commitsToFollow.Push(parent);
}
else
{
changesets.Add(changesetInfo);
}
}
Trace.WriteLine("Commits visited count:" + alreadyVisitedCommits.Count);
}
public TfsChangesetInfo GetTfsChangesetById(string remoteRef, int changesetId)
{
var commit = FindCommitByChangesetId(changesetId, remoteRef);
if (commit == null)
return null;
return TryParseChangesetInfo(commit.Message, commit.Sha);
}
public TfsChangesetInfo GetCurrentTfsCommit()
{
var currentCommit = _repository.Head.Commits.First();
return TryParseChangesetInfo(currentCommit.Message, currentCommit.Sha);
}
public TfsChangesetInfo GetTfsCommit(GitCommit commit)
{
if (commit is null) throw new ArgumentNullException(nameof(commit));
return TryParseChangesetInfo(commit.Message, commit.Sha);
}
public TfsChangesetInfo GetTfsCommit(string sha)
{
var gitCommit = GetCommit(sha);
return gitCommit is null ? null : GetTfsCommit(gitCommit);
}
private TfsChangesetInfo TryParseChangesetInfo(string gitTfsMetaInfo, string commit)
{
var match = GitTfsConstants.TfsCommitInfoRegex.Match(gitTfsMetaInfo);
if (match.Success)
{
var commitInfo = _container.GetInstance<TfsChangesetInfo>();
commitInfo.Remote = ReadTfsRemote(match.Groups["url"].Value, match.Groups["repository"].Success ? match.Groups["repository"].Value : null);
commitInfo.ChangesetId = Convert.ToInt32(match.Groups["changeset"].Value);
commitInfo.GitCommit = commit;
return commitInfo;
}
return null;
}
public IDictionary<string, GitObject> CreateObjectsDictionary()
{
return new Dictionary<string, GitObject>(StringComparer.InvariantCultureIgnoreCase);
}
public IDictionary<string, GitObject> GetObjects(string commit, IDictionary<string, GitObject> entries)
{
if (commit != null)
{
ParseEntries(entries, _repository.Lookup<Commit>(commit).Tree, commit);
}
return entries;
}
public IDictionary<string, GitObject> GetObjects(string commit)
{
var entries = CreateObjectsDictionary();
return GetObjects(commit, entries);
}
public IGitTreeBuilder GetTreeBuilder(string commit)
{
if (commit == null)
{
return new GitTreeBuilder(_repository.ObjectDatabase);
}
else
{
return new GitTreeBuilder(_repository.ObjectDatabase, _repository.Lookup<Commit>(commit).Tree);
}
}
/// <summary>
/// Concatenates the messages of all commits reachable from <paramref name="head"/>
/// but not from <paramref name="parentCommitish"/>, normalized to CRLF line
/// endings, with any git-tfs-id lines stripped and the result trimmed.
/// </summary>
public string GetCommitMessage(string head, string parentCommitish)
{
    var filter = new CommitFilter { IncludeReachableFrom = head, ExcludeReachableFrom = parentCommitish };
    var message = new System.Text.StringBuilder();
    foreach (Commit comm in _repository.Commits.QueryBy(filter))
    {
        // Normalize commit message line endings to CR+LF style, so that the
        // message is shown correctly in the TFS commit dialog.
        message.AppendLine(NormalizeLineEndings(comm.Message));
    }
    return GitTfsConstants.TfsCommitInfoRegex.Replace(message.ToString(), "").Trim(' ', '\r', '\n');
}

/// <summary>Converts all line endings (CRLF, lone CR, lone LF) to CRLF; passes null/empty through.</summary>
private static string NormalizeLineEndings(string input)
{
    if (string.IsNullOrEmpty(input))
        return input;
    // Collapse everything to LF first so existing CRLF pairs are not doubled.
    return input.Replace("\r\n", "\n").Replace("\r", "\n").Replace("\n", "\r\n");
}
private void ParseEntries(IDictionary<string, GitObject> entries, Tree treeInfo, string commit)
{
var treesToDescend = new Queue<Tree>(new[] { treeInfo });
while (treesToDescend.Any())
{
var currentTree = treesToDescend.Dequeue();
foreach (var item in currentTree)
{
if (item.TargetType == TreeEntryTargetType.Tree)
{
treesToDescend.Enqueue((Tree)item.Target);
}
var path = item.Path.Replace('\\', '/');
entries[path] = new GitObject
{
Mode = item.Mode,
Sha = item.Target.Sha,
ObjectType = item.TargetType,
Path = path,
Commit = commit
};
}
}
}
public IEnumerable<IGitChangedFile> GetChangedFiles(string from, string to)
{
using (var diffOutput = CommandOutputPipe("diff-tree", "-r", "-M", "-z", from, to))
{
var changes = GitChangeInfo.GetChangedFiles(diffOutput);
foreach (var change in changes)
{
yield return BuildGitChangedFile(change);
}
}
}
private IGitChangedFile BuildGitChangedFile(GitChangeInfo change)
{
return change.ToGitChangedFile(_container.With((IGitRepository)this));
}
/// <summary>
/// True when the working copy has changes that are neither ignored nor
/// newly-created untracked files. Always false for bare repositories.
/// </summary>
public bool WorkingCopyHasUnstagedOrUncommitedChanges
{
    get
    {
        if (IsBare)
            return false;
        return _repository.RetrieveStatus()
            .Any(entry => entry.State != FileStatus.Ignored && entry.State != FileStatus.NewInWorkdir);
    }
}
public void CopyBlob(string sha, string outputFile)
{
Blob blob;
var destination = new FileInfo(outputFile);
if (!destination.Directory.Exists)
destination.Directory.Create();
if ((blob = _repository.Lookup<Blob>(sha)) != null)
using (Stream stream = blob.GetContentStream(new FilteringOptions(string.Empty)))
using (var outstream = File.Create(destination.FullName))
stream.CopyTo(outstream);
}
public string AssertValidBranchName(string gitBranchName)
{
if (!Reference.IsValidName(ShortToLocalName(gitBranchName)))
throw new GitTfsException("The name specified for the new git branch is not allowed. Choose another one!");
return gitBranchName;
}
private bool IsRefNameUsed(string gitBranchName)
{
var parts = gitBranchName.Split('/');
var refName = parts.First();
for (int i = 1; i <= parts.Length; i++)
{
if (HasRef(ShortToLocalName(refName)) || HasRef(ShortToTfsRemoteName(refName)))
return true;
if (i < parts.Length)
refName += '/' + parts[i];
}
return false;
}
public bool CreateBranch(string gitBranchName, string target)
{
Reference reference;
try
{
reference = _repository.Refs.Add(gitBranchName, target);
}
catch (Exception)
{
return false;
}
return reference != null;
}
private readonly Dictionary<int, string> changesetsCache = new Dictionary<int, string>();
private bool cacheIsFull = false;
/// <summary>Returns the sha of the commit for a TFS changeset id, or null when unknown.</summary>
public string FindCommitHashByChangesetId(int changesetId)
{
    var commit = FindCommitByChangesetId(changesetId);
    return commit?.Sha;
}
private static readonly Regex tfsIdRegex = new Regex("^git-tfs-id: .*;C([0-9]+)\r?$", RegexOptions.Multiline | RegexOptions.Compiled | RegexOptions.RightToLeft);
/// <summary>
/// Extracts the TFS changeset id from a commit message's trailing
/// "git-tfs-id: ...;C&lt;id&gt;" line. Returns false (with id 0) when absent.
/// </summary>
public static bool TryParseChangesetId(string commitMessage, out int changesetId)
{
    var match = tfsIdRegex.Match(commitMessage);
    if (!match.Success)
    {
        changesetId = 0;
        return false;
    }
    changesetId = int.Parse(match.Groups[1].Value);
    return true;
}
/// <summary>
/// Scans commits reachable from remote branches for the one carrying the given
/// TFS changeset id (in the commit message or, failing that, its notes),
/// populating the changeset-to-sha cache along the way. Returns null when not found.
/// </summary>
/// <param name="changesetId">TFS changeset id to look for.</param>
/// <param name="remoteRef">When non-null, restricts the scan to remote branches whose canonical name ends with it (and bypasses the cache).</param>
private Commit FindCommitByChangesetId(int changesetId, string remoteRef = null)
{
    Trace.WriteLine("Looking for changeset " + changesetId + " in git repository...");
    if (remoteRef == null)
    {
        string sha;
        if (changesetsCache.TryGetValue(changesetId, out sha))
        {
            Trace.WriteLine("Changeset " + changesetId + " found at " + sha);
            return _repository.Lookup<Commit>(sha);
        }
        if (cacheIsFull)
        {
            Trace.WriteLine("Looking for changeset " + changesetId + " in git repository: CacheIsFull, stopped looking.");
            return null;
        }
    }
    var reachableFromRemoteBranches = new CommitFilter
    {
        IncludeReachableFrom = _repository.Branches.Where(p => p.IsRemote),
        SortBy = CommitSortStrategies.Time
    };
    if (remoteRef != null)
    {
        var query = _repository.Branches.Where(p => p.IsRemote && p.CanonicalName.EndsWith(remoteRef));
        Trace.WriteLine("Looking for changeset " + changesetId + " in git repository: Adding remotes:");
        foreach (var reachable in query)
        {
            Trace.WriteLine(reachable.CanonicalName + "reachable from " + remoteRef);
        }
        reachableFromRemoteBranches.IncludeReachableFrom = query;
    }
    var commitsFromRemoteBranches = _repository.Commits.QueryBy(reachableFromRemoteBranches);
    Commit commit = null;
    foreach (var c in commitsFromRemoteBranches)
    {
        int id;
        if (TryParseChangesetId(c.Message, out id))
        {
            changesetsCache[id] = c.Sha;
            if (id == changesetId)
            {
                commit = c;
                break;
            }
        }
        else
        {
            foreach (var note in c.Notes)
            {
                if (TryParseChangesetId(note.Message, out id))
                {
                    changesetsCache[id] = c.Sha;
                    if (id == changesetId)
                    {
                        commit = c;
                        break;
                    }
                }
            }
            // BUG FIX: the inner break above only exits the notes loop; without
            // this the outer scan continued through the entire remaining history
            // even after the commit had been found.
            if (commit != null)
                break;
        }
    }
    if (remoteRef == null && commit == null)
        cacheIsFull = true; // repository fully scanned
    Trace.WriteLine((commit == null) ? " => Commit " + changesetId + " not found!" : " => Commit " + changesetId + " found! hash: " + commit.Sha);
    return commit;
}
public void CreateTag(string name, string sha, string comment, string Owner, string emailOwner, DateTime creationDate)
{
if (_repository.Tags[name] == null)
_repository.ApplyTag(name, sha, new Signature(Owner, emailOwner, new DateTimeOffset(creationDate)), comment);
}
public void CreateNote(string sha, string content, string owner, string emailOwner, DateTime creationDate)
{
Signature author = new Signature(owner, emailOwner, creationDate);
_repository.Notes.Add(new ObjectId(sha), content, author, author, "commits");
}
public void ResetHard(string sha)
{
_repository.Reset(ResetMode.Hard, sha);
}
public bool IsBare { get { return _repository.Info.IsBare; } }
/// <summary>
/// Gets all configured "subtree" remotes which point to the same Tfs URL as the given remote.
/// If the given remote is itself a subtree, an empty enumerable is returned.
/// </summary>
public IEnumerable<IGitTfsRemote> GetSubtrees(IGitTfsRemote owner)
{
//a subtree remote cannot have subtrees itself.
if (owner.IsSubtree)
return Enumerable.Empty<IGitTfsRemote>();
return ReadAllTfsRemotes().Where(x => x.IsSubtree && string.Equals(x.OwningRemoteId, owner.Id, StringComparison.InvariantCultureIgnoreCase));
}
public void ResetRemote(IGitTfsRemote remoteToReset, string target)
{
_repository.Refs.UpdateTarget(remoteToReset.RemoteRef, target);
}
public string GetCurrentBranch()
{
return _repository.Head.CanonicalName;
}
public void GarbageCollect(bool auto, string additionalMessage)
{
if (Globals.DisableGarbageCollect)
return;
try
{
if (auto)
_globals.Repository.CommandNoisy("gc", "--auto");
else
_globals.Repository.CommandNoisy("gc");
}
catch (Exception e)
{
Trace.WriteLine(e);
Trace.TraceWarning("Warning: `git gc` failed! " + additionalMessage);
}
}
public bool Checkout(string commitish)
{
try
{
LibGit2Sharp.Commands.Checkout(_repository, commitish);
return true;
}
catch (CheckoutConflictException)
{
return false;
}
}
/// <summary>
/// Returns the first-parent chain of commits from <paramref name="to"/>
/// (exclusive) up to <paramref name="from"/> (inclusive), oldest first.
/// Returns an empty list when the chain is broken (a commit's parents do not
/// include the previous link).
/// </summary>
public IEnumerable<GitCommit> FindParentCommits(string @from, string to)
{
    // Materialized once: the original kept a deferred query and enumerated it
    // twice (validation loop here, then again by the caller), re-running the
    // revision walk.
    var commits = _repository.Commits.QueryBy(
        new CommitFilter() { IncludeReachableFrom = @from, ExcludeReachableFrom = to, SortBy = CommitSortStrategies.Reverse, FirstParentOnly = true })
        .Select(c => new GitCommit(c))
        .ToList();
    var parent = to;
    foreach (var gitCommit in commits)
    {
        if (gitCommit.Parents.All(c => c.Sha != parent))
            return new List<GitCommit>();
        parent = gitCommit.Sha;
    }
    return commits;
}
public bool IsPathIgnored(string relativePath)
{
return _repository.Ignore.IsPathIgnored(relativePath);
}
public string CommitGitIgnore(string pathToGitIgnoreFile)
{
if (!File.Exists(pathToGitIgnoreFile))
{
Trace.TraceWarning("warning: the .gitignore file specified '{0}' does not exist!", pathToGitIgnoreFile);
}
var gitTreeBuilder = new GitTreeBuilder(_repository.ObjectDatabase);
gitTreeBuilder.Add(".gitignore", pathToGitIgnoreFile, LibGit2Sharp.Mode.NonExecutableFile);
var tree = gitTreeBuilder.GetTree();
var signature = new Signature("git-tfs", "git-tfs@noreply.com", new DateTimeOffset(2000, 1, 1, 0, 0, 0, new TimeSpan(0)));
var sha = _repository.ObjectDatabase.CreateCommit(signature, signature, ".gitignore", tree, new Commit[0], false).Sha;
Trace.WriteLine(".gitignore commit created: " + sha);
_repository.Refs.Add(ShortToTfsRemoteName("default"), new ObjectId(sha));
_repository.Refs.Add(ShortToLocalName("master"), new ObjectId(sha));
return sha;
}
public void UseGitIgnore(string pathToGitIgnoreFile)
{
//Should add ourself the rules to the temporary rules because committing directly to the git database
//prevent libgit2sharp to detect the new .gitignore file
_repository.Ignore.AddTemporaryRules(File.ReadLines(pathToGitIgnoreFile));
}
/// <summary>
/// Walks every commit in the repository and returns a map of TFS changeset id
/// to commit sha, parsed from the commit message or, failing that, the commit's notes.
/// </summary>
public IDictionary<int, string> GetCommitChangeSetPairs()
{
    var allCommits = _repository.Commits.QueryBy(new CommitFilter());
    var pairs = new Dictionary<int, string>();
    foreach (var c in allCommits)
    {
        int changesetId;
        if (TryParseChangesetId(c.Message, out changesetId))
        {
            // Indexer instead of Add: a duplicate changeset id (the same id recorded
            // on more than one commit/note) must not throw ArgumentException;
            // consistent with how changesetsCache is populated elsewhere.
            pairs[changesetId] = c.Sha;
        }
        else
        {
            foreach (var note in c.Notes)
            {
                if (TryParseChangesetId(note.Message, out changesetId))
                {
                    pairs[changesetId] = c.Sha;
                }
            }
        }
    }
    return pairs;
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="FrameApi.cs" company="Google">
//
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// </copyright>
//-----------------------------------------------------------------------
namespace GoogleARCoreInternal
{
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using GoogleARCore;
using UnityEngine;
#if UNITY_IOS && !UNITY_EDITOR
using AndroidImport = GoogleARCoreInternal.DllImportNoop;
using IOSImport = System.Runtime.InteropServices.DllImportAttribute;
#else
using AndroidImport = System.Runtime.InteropServices.DllImportAttribute;
using IOSImport = GoogleARCoreInternal.DllImportNoop;
#endif
internal class FrameApi
{
private NativeSession m_NativeSession;
// Throttle warnings to at most once every N seconds.
private ThrottledLogMessage m_FailedToAcquireWarning = new ThrottledLogMessage(5f);
public FrameApi(NativeSession nativeSession)
{
m_NativeSession = nativeSession;
}
public void Release(IntPtr frameHandle)
{
ExternApi.ArFrame_release(frameHandle);
}
public long GetTimestamp()
{
long timestamp = 0;
ExternApi.ArFrame_getTimestamp(m_NativeSession.SessionHandle, m_NativeSession.FrameHandle,
ref timestamp);
return timestamp;
}
public IntPtr AcquireCamera()
{
IntPtr cameraHandle = IntPtr.Zero;
ExternApi.ArFrame_acquireCamera(m_NativeSession.SessionHandle, m_NativeSession.FrameHandle,
ref cameraHandle);
return cameraHandle;
}
/// <summary>
/// Attempts to acquire the current frame's camera image. On failure, logs a
/// throttled warning and returns a CameraImageBytes wrapping IntPtr.Zero.
/// </summary>
public CameraImageBytes AcquireCameraImageBytes()
{
    IntPtr cameraImageHandle = IntPtr.Zero;
    ApiArStatus status = ExternApi.ArFrame_acquireCameraImage(m_NativeSession.SessionHandle,
        m_NativeSession.FrameHandle, ref cameraImageHandle);
    if (status == ApiArStatus.Success)
    {
        // Track the handle so the session can release it later.
        m_NativeSession.MarkHandleAcquired(cameraImageHandle);
        return new CameraImageBytes(cameraImageHandle);
    }

    m_FailedToAcquireWarning.ThrottledLogWarningFormat(
        "Failed to acquire camera image with status {0}.\n" +
        "Will continue to retry.", status);
    return new CameraImageBytes(IntPtr.Zero);
}
public bool TryAcquirePointCloudHandle(out IntPtr pointCloudHandle)
{
pointCloudHandle = IntPtr.Zero;
ApiArStatus status = ExternApi.ArFrame_acquirePointCloud(m_NativeSession.SessionHandle,
m_NativeSession.FrameHandle, ref pointCloudHandle);
if (status != ApiArStatus.Success)
{
Debug.LogWarningFormat("Failed to acquire point cloud with status {0}", status);
return false;
}
return true;
}
/// <summary>
/// Acquires the camera image metadata for the current frame.
/// </summary>
/// <param name="imageMetadataHandle">Receives the acquired metadata handle on success.</param>
/// <returns>True on success; false (with an error logged) otherwise.</returns>
public bool AcquireImageMetadata(ref IntPtr imageMetadataHandle)
{
    var status = ExternApi.ArFrame_acquireImageMetadata(m_NativeSession.SessionHandle,
        m_NativeSession.FrameHandle, ref imageMetadataHandle);
    if (status != ApiArStatus.Success)
    {
        // Fixed typo in the log message ("aquire" -> "acquire").
        Debug.LogErrorFormat("Failed to acquire camera image metadata with status {0}", status);
        return false;
    }
    return true;
}
public LightEstimate GetLightEstimate()
{
IntPtr lightEstimateHandle = m_NativeSession.LightEstimateApi.Create();
ExternApi.ArFrame_getLightEstimate(m_NativeSession.SessionHandle, m_NativeSession.FrameHandle,
lightEstimateHandle);
LightEstimateState state = m_NativeSession.LightEstimateApi.GetState(lightEstimateHandle);
Color colorCorrection = m_NativeSession.LightEstimateApi.GetColorCorrection(lightEstimateHandle);
m_NativeSession.LightEstimateApi.Destroy(lightEstimateHandle);
return new LightEstimate(state, colorCorrection.a,
new Color(colorCorrection.r, colorCorrection.g, colorCorrection.b, 1f));
}
public void TransformDisplayUvCoords(ref ApiDisplayUvCoords uv)
{
ApiDisplayUvCoords uvOut = new ApiDisplayUvCoords();
ExternApi.ArFrame_transformDisplayUvCoords(m_NativeSession.SessionHandle, m_NativeSession.FrameHandle,
ApiDisplayUvCoords.NumFloats, ref uv, ref uvOut);
uv = uvOut;
}
/// <summary>
/// Fills <paramref name="trackables"/> with the trackables updated in the
/// current frame. The list is cleared first; the native trackable list is
/// created and destroyed within this call.
/// </summary>
public void GetUpdatedTrackables(List<Trackable> trackables)
{
    IntPtr listHandle = m_NativeSession.TrackableListApi.Create();
    ExternApi.ArFrame_getUpdatedTrackables(m_NativeSession.SessionHandle, m_NativeSession.FrameHandle,
        ApiTrackableType.BaseTrackable, listHandle);
    trackables.Clear();
    int count = m_NativeSession.TrackableListApi.GetCount(listHandle);
    for (int i = 0; i < count; i++)
    {
        IntPtr trackableHandle = m_NativeSession.TrackableListApi.AcquireItem(listHandle, i);

        // TODO:: Remove conditional when b/75291352 is fixed.
        // Skip this internal trackable type (magic constant from the native API),
        // releasing its handle so it is not leaked.
        ApiTrackableType trackableType = m_NativeSession.TrackableApi.GetType(trackableHandle);
        if ((int)trackableType == 0x41520105)
        {
            m_NativeSession.TrackableApi.Release(trackableHandle);
            continue;
        }

        Trackable trackable = m_NativeSession.TrackableFactory(trackableHandle);
        if (trackable != null)
        {
            trackables.Add(trackable);
        }
        else
        {
            // The factory could not wrap this handle; release it to avoid leaking.
            m_NativeSession.TrackableApi.Release(trackableHandle);
        }
    }
    m_NativeSession.TrackableListApi.Destroy(listHandle);
}
// P/Invoke declarations binding this class to the native ARCore ArFrame C API.
private struct ExternApi
{
    [DllImport(ApiConstants.ARCoreNativeApi)]
    public static extern void ArFrame_release(IntPtr frame);

    [DllImport(ApiConstants.ARCoreNativeApi)]
    public static extern void ArFrame_getTimestamp(IntPtr sessionHandle,
        IntPtr frame, ref long timestamp);

    // Warning 626 (no body on extern) is expected for [AndroidImport]-attributed methods.
#pragma warning disable 626
    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern void ArFrame_acquireCamera(IntPtr sessionHandle, IntPtr frameHandle,
        ref IntPtr cameraHandle);

    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern ApiArStatus ArFrame_acquireCameraImage(IntPtr sessionHandle, IntPtr frameHandle,
        ref IntPtr imageHandle);

    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern ApiArStatus ArFrame_acquirePointCloud(IntPtr sessionHandle, IntPtr frameHandle,
        ref IntPtr pointCloudHandle);

    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern void ArFrame_transformDisplayUvCoords(IntPtr session, IntPtr frame,
        int numElements, ref ApiDisplayUvCoords uvsIn, ref ApiDisplayUvCoords uvsOut);

    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern void ArFrame_getUpdatedTrackables(IntPtr sessionHandle, IntPtr frameHandle,
        ApiTrackableType filterType, IntPtr outTrackableList);

    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern void ArFrame_getLightEstimate(IntPtr sessionHandle, IntPtr frameHandle,
        IntPtr lightEstimateHandle);

    [AndroidImport(ApiConstants.ARCoreNativeApi)]
    public static extern ApiArStatus ArFrame_acquireImageMetadata(IntPtr sessionHandle, IntPtr frameHandle,
        ref IntPtr outMetadata);
#pragma warning restore 626
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Impl.Cache.Store
{
using System.Collections;
using System.Diagnostics;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Cache.Store;
using Apache.Ignite.Core.Common;
using Apache.Ignite.Core.Impl.Binary;
using Apache.Ignite.Core.Impl.Binary.IO;
using Apache.Ignite.Core.Impl.Handle;
using Apache.Ignite.Core.Impl.Resource;
using Apache.Ignite.Core.Impl.Unmanaged;
/// <summary>
/// Interop cache store: receives serialized store operations from the Java side,
/// dispatches them to the user-provided <see cref="ICacheStore"/> implementation,
/// and streams results back through an unmanaged callback.
/// </summary>
internal class CacheStore
{
    /** Operation code: load cache (LoadCache with optional args). */
    private const byte OpLoadCache = 0;

    /** Operation code: load a single entry. */
    private const byte OpLoad = 1;

    /** Operation code: load multiple entries. */
    private const byte OpLoadAll = 2;

    /** Operation code: write a single entry. */
    private const byte OpPut = 3;

    /** Operation code: write multiple entries. */
    private const byte OpPutAll = 4;

    /** Operation code: delete a single entry. */
    private const byte OpRmv = 5;

    /** Operation code: delete multiple entries. */
    private const byte OpRmvAll = 6;

    /** Operation code: end the store session. */
    private const byte OpSesEnd = 7;

    /** Whether binary objects are deserialized before being handed to the store. */
    private readonly bool _convertBinary;

    /** Store. */
    private readonly ICacheStore _store;

    /** Session. */
    private readonly CacheStoreSessionProxy _sesProxy;

    /** Handle allocated in the registry for this store instance. */
    private readonly long _handle;

    /// <summary>
    /// Initializes a new instance of the <see cref="CacheStore" /> class.
    /// </summary>
    /// <param name="store">Store.</param>
    /// <param name="convertBinary">Whether to convert binary objects.</param>
    /// <param name="registry">The handle registry.</param>
    private CacheStore(ICacheStore store, bool convertBinary, HandleRegistry registry)
    {
        Debug.Assert(store != null);
        _store = store;
        _convertBinary = convertBinary;
        _sesProxy = new CacheStoreSessionProxy();
        // Make the session proxy available to the store via resource injection.
        ResourceProcessor.InjectStoreSession(store, _sesProxy);
        // Critical handle: must be explicitly released (see OpSesEnd handling).
        _handle = registry.AllocateCritical(this);
    }

    /// <summary>
    /// Creates interop cache store from a stream.
    /// </summary>
    /// <param name="memPtr">Memory pointer.</param>
    /// <param name="registry">The handle registry.</param>
    /// <returns>
    /// Interop cache store.
    /// </returns>
    internal static CacheStore CreateInstance(long memPtr, HandleRegistry registry)
    {
        using (var stream = IgniteManager.Memory.Get(memPtr).GetStream())
        {
            var reader = BinaryUtils.Marshaller.StartUnmarshal(stream);
            var convertBinary = reader.ReadBoolean();
            var factory = reader.ReadObject<IFactory<ICacheStore>>();
            ICacheStore store;
            if (factory != null)
                store = factory.CreateInstance();
            else
            {
                // No factory configured: instantiate by class name and apply properties.
                var className = reader.ReadString();
                var propertyMap = reader.ReadDictionaryAsGeneric<string, object>();
                store = IgniteUtils.CreateInstance<ICacheStore>(className);
                IgniteUtils.SetProperties(store, propertyMap);
            }
            return new CacheStore(store, convertBinary, registry);
        }
    }

    /// <summary>
    /// Gets the handle.
    /// </summary>
    public long Handle
    {
        get { return _handle; }
    }

    /// <summary>
    /// Initializes this instance with a grid.
    /// </summary>
    /// <param name="grid">Grid.</param>
    public void Init(Ignite grid)
    {
        ResourceProcessor.Inject(_store, grid);
    }

    /// <summary>
    /// Invokes a store operation.
    /// </summary>
    /// <param name="input">Input stream.</param>
    /// <param name="cb">Callback.</param>
    /// <param name="grid">Grid.</param>
    /// <returns>Invocation result.</returns>
    /// <exception cref="IgniteException">Invalid operation type: + opType</exception>
    public int Invoke(IBinaryStream input, IUnmanagedTarget cb, Ignite grid)
    {
        IBinaryReader reader = grid.Marshaller.StartUnmarshal(input,
            _convertBinary ? BinaryMode.Deserialize : BinaryMode.ForceBinary);
        IBinaryRawReader rawReader = reader.GetRawReader();
        int opType = rawReader.ReadByte();
        // Setup cache session for this invocation.
        long sesId = rawReader.ReadLong();
        CacheStoreSession ses = grid.HandleRegistry.Get<CacheStoreSession>(sesId, true);
        ses.CacheName = rawReader.ReadString();
        _sesProxy.SetSession(ses);
        try
        {
            // Perform operation.
            switch (opType)
            {
                case OpLoadCache:
                    _store.LoadCache((k, v) => WriteObjects(cb, grid, k, v), rawReader.ReadArray<object>());
                    break;
                case OpLoad:
                    object val = _store.Load(rawReader.ReadObject<object>());
                    if (val != null)
                        WriteObjects(cb, grid, val);
                    break;
                case OpLoadAll:
                    var keys = rawReader.ReadCollection();
                    var result = _store.LoadAll(keys);
                    foreach (DictionaryEntry entry in result)
                        WriteObjects(cb, grid, entry.Key, entry.Value);
                    break;
                case OpPut:
                    _store.Write(rawReader.ReadObject<object>(), rawReader.ReadObject<object>());
                    break;
                case OpPutAll:
                    var size = rawReader.ReadInt();
                    var dict = new Hashtable(size);
                    for (int i = 0; i < size; i++)
                        dict[rawReader.ReadObject<object>()] = rawReader.ReadObject<object>();
                    _store.WriteAll(dict);
                    break;
                case OpRmv:
                    _store.Delete(rawReader.ReadObject<object>());
                    break;
                case OpRmvAll:
                    _store.DeleteAll(rawReader.ReadCollection());
                    break;
                case OpSesEnd:
                    // Session is over: release its registry handle before notifying the store.
                    grid.HandleRegistry.Release(sesId);
                    _store.SessionEnd(rawReader.ReadBoolean());
                    break;
                default:
                    throw new IgniteException("Invalid operation type: " + opType);
            }
            return 0;
        }
        finally
        {
            // Always detach the session from the proxy, even when the store throws.
            _sesProxy.ClearSession();
        }
    }

    /// <summary>
    /// Writes objects to the marshaller.
    /// </summary>
    /// <param name="cb">Optional callback.</param>
    /// <param name="grid">Grid.</param>
    /// <param name="objects">Objects.</param>
    private static void WriteObjects(IUnmanagedTarget cb, Ignite grid, params object[] objects)
    {
        using (var stream = IgniteManager.Memory.Allocate().GetStream())
        {
            BinaryWriter writer = grid.Marshaller.StartMarshal(stream);
            try
            {
                foreach (var obj in objects)
                {
                    // Copy into a local so the lambda does not capture the loop variable.
                    var obj0 = obj;
                    writer.WithDetach(w => w.WriteObject(obj0));
                }
            }
            finally
            {
                grid.Marshaller.FinishMarshal(writer);
            }
            if (cb != null)
            {
                stream.SynchronizeOutput();
                UnmanagedUtils.CacheStoreCallbackInvoke(cb, stream.MemoryPointer);
            }
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using com.calitha.goldparser;
using Epi.Data;
using Epi.Web;
using Epi.Analysis.Statistics;
namespace Epi.Core.AnalysisInterpreter.Rules
{
/// <summary>
/// Analysis rule for the GRAPH command: parses the command's token tree in the
/// constructor and, on Execute, hands the collected settings to the "Graph"
/// statistic plug-in.
/// </summary>
public class Rule_Graph : AnalysisRule
{
    // Guards against executing the same parsed command twice.
    bool HasRun = false;
    // Name/value settings passed to the Graph statistic (case-insensitive keys).
    Dictionary<string, string> inputVariableList = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    // Raw text of the parsed command.
    string _commandText = string.Empty;
    string _strataVar = string.Empty;
    // Comma-separated list of independent (main) variables.
    string _independentVariableList = string.Empty;
    List<string> _optionList = new List<string>();
    string _graphType = string.Empty;
    string _graphCrossTab = string.Empty;
    string _graphTitle = string.Empty;
    string _graphXAxisLabel = string.Empty;
    string _graphYAxisLabel = string.Empty;
    string _weightVar = string.Empty;
    string _aggregateFunction = string.Empty;
    string _graphDateFormat = string.Empty;
    string _graphInterval = string.Empty;
    string _graphIntervalUnits = string.Empty;
    string _graphStartFrom = string.Empty;
    // Statistic plug-in instance; held only for the duration of Execute.
    EpiInfo.Plugin.IAnalysisStatistic GraphStatistic;

    /// <summary>
    /// Parses the GRAPH command's token tree, populating the fields above from
    /// each recognized grammar production.
    /// </summary>
    /// <param name="pContext">Interpreter context.</param>
    /// <param name="pToken">Root nonterminal token of the GRAPH command.</param>
    public Rule_Graph(Rule_Context pContext, NonterminalToken pToken) : base(pContext)
    {
        _commandText = this.ExtractTokens(pToken.Tokens);
        string caseSymbol = string.Empty;
        foreach (Token token in pToken.Tokens)
        {
            if (token is NonterminalToken)
            {
                NonterminalToken nonterminalToken = (NonterminalToken)token;
                caseSymbol = nonterminalToken.Symbol.ToString();
                switch (caseSymbol)
                {
                    case "<Graph_Type>":
                        _graphType = nonterminalToken.Tokens[0].ToString();
                        break;
                    case "<Graph_Interval>":
                        // Interval is a quoted string like "3 Days": split into value and units.
                        string intervalText = (((NonterminalToken)nonterminalToken).Tokens[2]).ToString();
                        intervalText = intervalText.Trim(new char[] { '"' });
                        string[] intervalArray = intervalText.Split(' ');
                        _graphInterval = intervalArray[0];
                        _graphIntervalUnits = intervalArray[1];
                        break;
                    case "<Graph_StartFrom>":
                        _graphStartFrom = this.GetCommandElement(nonterminalToken.Tokens, 2);
                        break;
                    case "<Graph_Variable>":
                        // Variable names may be bracketed, e.g. [Age]: strip the brackets.
                        _independentVariableList = nonterminalToken.Tokens[0].ToString().Trim(new char[] {'[',']'});
                        break;
                    case "<Graph_Variable_List>":
                        this.SetIdentifierList(nonterminalToken);
                        break;
                    case "<Graph_CrossTab>":
                        _graphCrossTab = nonterminalToken.Tokens[0].ToString().Trim(new char[] { '[', ']' });
                        break;
                    case "<Graph_Option_List>":
                        this.SetOptionList(nonterminalToken);
                        break;
                    case "<Graph_Title>":
                        _graphTitle = this.GetCommandElement(nonterminalToken.Tokens, 2);
                        break;
                    case "<Graph_XAxisLabel>":
                        _graphXAxisLabel = this.GetCommandElement(nonterminalToken.Tokens, 2);
                        break;
                    case "<Graph_YAxisLabel>":
                        _graphYAxisLabel = this.GetCommandElement(nonterminalToken.Tokens, 2);
                        break;
                    case "<Graph_StrataVar>":
                        _strataVar = (((NonterminalToken)nonterminalToken).Tokens[2]).ToString();
                        break;
                    case "<Graph_DateFormat>":
                        _graphDateFormat = (((NonterminalToken)nonterminalToken).Tokens[2]).ToString();
                        break;
                    case "<Graph_WeightVar>":
                        // The weight may be a plain variable (terminal) or an aggregate
                        // expression such as COUNT([x]) or PERCENT() (nonterminal).
                        object tokenObject = (((NonterminalToken)nonterminalToken).Tokens[2]);
                        if (tokenObject is TerminalToken)
                        {
                            _weightVar = ((TerminalToken)tokenObject).Text.Trim(new char[] { '[', ']' });
                        }
                        else
                        {
                            _aggregateFunction = this.GetCommandElement(((NonterminalToken)tokenObject).Tokens, 0);
                            Token tok = null;
                            if (((NonterminalToken)tokenObject).Tokens.Length == 1)
                            {
                                tok = ((NonterminalToken)tokenObject).Tokens[0];
                                if (_aggregateFunction.ToUpperInvariant() == "PERCENT()")
                                {
                                    // PERCENT() takes no argument: weight by the main variable itself.
                                    _aggregateFunction = _aggregateFunction.TrimEnd(new char[] { '(', ')' });
                                    _weightVar = _independentVariableList.Trim(new char[] { ',' });
                                    break;
                                }
                            }
                            else
                            {
                                // Aggregate with an argument: drill down to the variable token.
                                tok = ((NonterminalToken)tokenObject).Tokens[2];
                                tok = ((NonterminalToken)(tok)).Tokens[0];
                            }
                            _weightVar = ((TerminalToken)(tok)).Text;
                            _weightVar = _weightVar.Trim(new char[] { '[', ']' });
                        }
                        break;
                }
            }
            else
            {
                // A bare identifier at the top level is the (single) independent variable.
                TerminalToken terminalToken = (TerminalToken)token;
                switch (terminalToken.Symbol.ToString())
                {
                    case "Identifier":
                        _independentVariableList = terminalToken.ToString().Trim(new char[] { '[', ']' });
                        break;
                }
            }
        }
    }

    /// <summary>
    /// Recursively appends the identifiers of a &lt;Graph_Variable_List&gt; production
    /// to <see cref="_independentVariableList"/> (comma separated, upper-cased, unbracketed).
    /// </summary>
    private void SetIdentifierList(NonterminalToken nonTerm)
    {
        _independentVariableList = _independentVariableList + "," + this.GetCommandElement(nonTerm.Tokens, 0).ToUpperInvariant().Trim(new char[] { '[', ']' });
        if (nonTerm.Tokens.Length > 1)
        {
            this.SetIdentifierList((NonterminalToken)nonTerm.Tokens[1]);
        }
    }

    /// <summary>
    /// Recursively walks a &lt;Graph_Option_List&gt; production, recording each
    /// recognized option into the corresponding field.
    /// </summary>
    private void SetOptionList(NonterminalToken nonTerm)
    {
        this._optionList.Add(this.GetCommandElement(nonTerm.Tokens, 0));
        if (nonTerm.Tokens.Length > 0)
        {
            foreach (Token optionToken in nonTerm.Tokens)
            {
                if (optionToken is NonterminalToken)
                {
                    NonterminalToken nonterminalOptionToken = (NonterminalToken) optionToken;
                    string symbol = nonterminalOptionToken.Symbol.ToString();
                    switch (symbol)
                    {
                        case "<Graph_Option>":
                            break;
                        case "<Graph_Option_List>":
                            // Nested option list: recurse.
                            this.SetOptionList(nonterminalOptionToken);
                            break;
                        case "<Graph_Title>":
                            _graphTitle = this.GetCommandElement(nonterminalOptionToken.Tokens, 2);
                            break;
                        case "<Graph_XAxisLabel>":
                            _graphXAxisLabel = this.GetCommandElement(nonterminalOptionToken.Tokens, 2);
                            break;
                        case "<Graph_YAxisLabel>":
                            _graphYAxisLabel = this.GetCommandElement(nonterminalOptionToken.Tokens, 2);
                            break;
                        case "<Graph_StrataVar>":
                            _strataVar = (((NonterminalToken)nonterminalOptionToken).Tokens[2]).ToString();
                            break;
                        case "<Graph_DateFormat>":
                            _graphDateFormat = (((NonterminalToken)nonterminalOptionToken).Tokens[2]).ToString();
                            break;
                        case "<Graph_WeightVar>":
                            // Same terminal-vs-aggregate handling as in the constructor.
                            object tokenObject = (((NonterminalToken)nonterminalOptionToken).Tokens[2]);
                            if (tokenObject is TerminalToken)
                            {
                                _weightVar = ((TerminalToken)tokenObject).Text.Trim(new char[] { '[', ']' });
                            }
                            else
                            {
                                if (((NonterminalToken)tokenObject).Tokens.Length >= 3)
                                {
                                    Token ntt = (NonterminalToken)((NonterminalToken)tokenObject).Tokens[2];
                                    string weightTok = ((com.calitha.goldparser.TerminalToken)(((com.calitha.goldparser.NonterminalToken)(ntt)).Tokens[0])).Text;
                                    _weightVar = weightTok.Trim(new char[] { '[', ']' });
                                    _aggregateFunction = this.GetCommandElement(((NonterminalToken)tokenObject).Tokens, 0);
                                }
                                else
                                {
                                    _weightVar = this.GetCommandElement(((NonterminalToken)tokenObject).Tokens, 0);
                                }
                            }
                            break;
                        case "<Graph_Interval>":
                            string intervalText = (((NonterminalToken)nonterminalOptionToken).Tokens[2]).ToString();
                            intervalText = intervalText.Trim(new char[] { '"' });
                            string[] intervalArray = intervalText.Split(' ');
                            _graphInterval = intervalArray[0];
                            _graphIntervalUnits = intervalArray[1];
                            break;
                        case "<Graph_StartFrom>":
                            _graphStartFrom = (((NonterminalToken)nonterminalOptionToken).Tokens[2]).ToString();
                            _graphStartFrom = _graphStartFrom.Trim(new char[] { '"' });
                            break;
                    }
                }
            }
        }
    }

    /// <summary>
    /// Executes the GRAPH command once: packs all non-empty settings into
    /// <see cref="inputVariableList"/> and runs the "Graph" statistic plug-in.
    /// </summary>
    /// <returns>Always null.</returns>
    public override object Execute()
    {
        object result = null;
        if (!this.HasRun)
        {
            Dictionary<string, string> setProperties = this.Context.GetGlobalSettingProperties();
            if (!string.IsNullOrEmpty(_commandText)) inputVariableList.Add("Command_Text", _commandText);
            if (!string.IsNullOrEmpty(_independentVariableList)) inputVariableList.Add("Independent_Variable_List", _independentVariableList.Trim(','));
            if (!string.IsNullOrEmpty(_graphCrossTab)) inputVariableList.Add("Cross_Tabulation_Variable", _graphCrossTab);
            if (!string.IsNullOrEmpty(_strataVar)) inputVariableList.Add("StrataVar", _strataVar);
            if (!string.IsNullOrEmpty(_graphType)) inputVariableList.Add("Graph_Type", _graphType);
            if (!string.IsNullOrEmpty(_graphTitle)) inputVariableList.Add("Graph_Title", _graphTitle);
            if (!string.IsNullOrEmpty(_graphXAxisLabel)) inputVariableList.Add("Graph_Independent_Value_Axis_Label", _graphXAxisLabel);
            if (!string.IsNullOrEmpty(_graphYAxisLabel)) inputVariableList.Add("Graph_Dependent_Value_Axis_Label", _graphYAxisLabel);
            if (!string.IsNullOrEmpty(_weightVar)) inputVariableList.Add("Weight_Variable", _weightVar);
            if (!string.IsNullOrEmpty(_aggregateFunction)) inputVariableList.Add("Aggregate_Function", _aggregateFunction);
            if (!string.IsNullOrEmpty(_graphDateFormat)) inputVariableList.Add("Date_Format", _graphDateFormat);
            if (!string.IsNullOrEmpty(_graphInterval)) inputVariableList.Add("Interval", _graphInterval);
            if (!string.IsNullOrEmpty(_graphIntervalUnits)) inputVariableList.Add("Interval_Units", _graphIntervalUnits);
            if (!string.IsNullOrEmpty(_graphStartFrom)) inputVariableList.Add("Start_From", _graphStartFrom);
            EpiInfo.Plugin.IDataSource DataSource = this.Context.GetDefaultIDataSource();
            AnalysisStatisticExecuteHost statisticHost = new AnalysisStatisticExecuteHost(this.Context, setProperties, DataSource, inputVariableList, this.Context.CurrentSelect.ToString(), this.Context.AnalysisInterpreterHost);
            this.GraphStatistic = this.Context.GetStatistic("Graph", statisticHost);
            this.GraphStatistic.Execute();
            this.GraphStatistic = null;
            this.HasRun = true;
        }
        return result;
    }

    /// <summary>
    /// Builds an in-memory copy of the context's "output" table: clones its column
    /// schema, then imports the rows returned by Context.GetOutput().
    /// </summary>
    private System.Data.DataTable BuildTempTableFromContext()
    {
        System.Data.DataTable tempTable = new DataTable();
        foreach (DataColumn column in this.Context.DataSet.Tables["output"].Columns)
        {
            DataColumn newColumn = new DataColumn(column.ColumnName);
            newColumn.DataType = column.DataType;
            tempTable.Columns.Add(newColumn);
        }
        foreach (DataRow row in this.Context.GetOutput())
        {
            tempTable.ImportRow(row);
        }
        return tempTable;
    }

    /// <summary>
    /// Tallies the occurrences of each non-null value of <paramref name="seriesName"/>
    /// across <paramref name="rows"/> into <paramref name="indDep_PreSORT_Values"/>.
    /// </summary>
    private static void AggregateCount(List<DataRow> rows, string seriesName, Dictionary<object, double> indDep_PreSORT_Values)
    {
        foreach (DataRow row in rows)
        {
            object item = row[seriesName];
            if (item != System.DBNull.Value)
            {
                if (indDep_PreSORT_Values.ContainsKey(item))
                {
                    indDep_PreSORT_Values[item]++;
                }
                else
                {
                    indDep_PreSORT_Values.Add(item, 1);
                }
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Text.RegularExpressions;
using System.Globalization;
using Xunit;
public class RegexLangElementsCoverageTests
{
// This class mainly exists to hit language elements that were missed in other test cases.
// Runs every entry of s_regexTests and fails if any case reports a mismatch
// or throws; per-case details are written to the console for diagnosis.
[Fact]
public static void RegexLangElementsCoverage()
{
    String strLoc = "Loc_000oo";
    int iCountErrors = 0;
    int iCountTestcases = 0;

    try
    {
        for (int i = 0; i < s_regexTests.Length; i++)
        {
            RegexTestCase testCase = s_regexTests[i];
            try
            {
                iCountTestcases++;
                bool passed = testCase.Run();
                if (!passed)
                {
                    Console.WriteLine("Err_79872asnko! Test {0} FAILED Pattern={1}, Input={2}\n", i, testCase.Pattern, testCase.Input);
                    iCountErrors++;
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Err_79872asnko! Test {0} FAILED Pattern={1}, Input={2} threw the following exception:", i, testCase.Pattern, testCase.Input);
                Console.WriteLine(e);
                iCountErrors++;
            }
        }
    }
    catch (Exception exc_general)
    {
        ++iCountErrors;
        Console.WriteLine("Error Err_8888yyy! strLoc==" + strLoc + ", exc_general==" + exc_general.ToString());
    }

    Assert.Equal(0, iCountErrors);
}
// Legacy LCID constants kept for reference; replaced by the CultureInfo fields below.
//private const int GERMAN_PHONEBOOK = 0x10407;
//private const int ENGLISH_US = 0x0409;
//private const int INVARIANT = 0x007F;
//private const int CZECH = 0x0405;
//private const int DANISH = 0x0406;
//private const int TURKISH = 0x041F;
//private const int LATIN_AZERI = 0x042C;
// Cultures used by the culture-sensitive regex test cases below.
private static CultureInfo _GERMAN_PHONEBOOK = new CultureInfo("de-DE");
private static CultureInfo _ENGLISH_US = new CultureInfo("en-US");
private static CultureInfo _INVARIANT = new CultureInfo("");
private static CultureInfo _CZECH = new CultureInfo("cs-CZ");
private static CultureInfo _DANISH = new CultureInfo("da-DK");
private static CultureInfo _TURKISH = new CultureInfo("tr-TR");
private static CultureInfo _LATIN_AZERI = new CultureInfo("az-Latn-AZ");
private static RegexTestCase[] s_regexTests = new RegexTestCase[] {
/*********************************************************
Unicode Char Classes
*********************************************************/
new RegexTestCase(@"(\p{Lu}\w*)\s(\p{Lu}\w*)", "Hello World", new string[] {"Hello World", "Hello", "World"}),
new RegexTestCase(@"(\p{Lu}\p{Ll}*)\s(\p{Lu}\p{Ll}*)", "Hello World", new string[] {"Hello World", "Hello", "World"}),
new RegexTestCase(@"(\P{Ll}\p{Ll}*)\s(\P{Ll}\p{Ll}*)", "Hello World", new string[] {"Hello World", "Hello", "World"}),
new RegexTestCase(@"(\P{Lu}+\p{Lu})\s(\P{Lu}+\p{Lu})", "hellO worlD", new string[] {"hellO worlD", "hellO", "worlD"}),
new RegexTestCase(@"(\p{Lt}\w*)\s(\p{Lt}*\w*)", "\u01C5ello \u01C5orld", new string[] {"\u01C5ello \u01C5orld", "\u01C5ello", "\u01C5orld"}),
new RegexTestCase(@"(\P{Lt}\w*)\s(\P{Lt}*\w*)", "Hello World", new string[] {"Hello World", "Hello", "World"}),
/*********************************************************
Character ranges IgnoreCase
*********************************************************/
new RegexTestCase(@"[@-D]+", RegexOptions.IgnoreCase, "eE?@ABCDabcdeE", new string[] {"@ABCDabcd"}),
new RegexTestCase(@"[>-D]+", RegexOptions.IgnoreCase, "eE=>?@ABCDabcdeE", new string[] {">?@ABCDabcd"}),
new RegexTestCase(@"[\u0554-\u0557]+", RegexOptions.IgnoreCase, "\u0583\u0553\u0554\u0555\u0556\u0584\u0585\u0586\u0557\u0558", new string[] {"\u0554\u0555\u0556\u0584\u0585\u0586\u0557"}),
new RegexTestCase(@"[X-\]]+", RegexOptions.IgnoreCase, "wWXYZxyz[\\]^", new string[] {"XYZxyz[\\]"}),
new RegexTestCase(@"[X-\u0533]+", RegexOptions.IgnoreCase, "\u0551\u0554\u0560AXYZaxyz\u0531\u0532\u0533\u0561\u0562\u0563\u0564", new string[] {"AXYZaxyz\u0531\u0532\u0533\u0561\u0562\u0563"}),
new RegexTestCase(@"[X-a]+", RegexOptions.IgnoreCase, "wWAXYZaxyz", new string[] {"AXYZaxyz"}),
new RegexTestCase(@"[X-c]+", RegexOptions.IgnoreCase, "wWABCXYZabcxyz", new string[] {"ABCXYZabcxyz"}),
new RegexTestCase(@"[X-\u00C0]+", RegexOptions.IgnoreCase, "\u00C1\u00E1\u00C0\u00E0wWABCXYZabcxyz", new string[] {"\u00C0\u00E0wWABCXYZabcxyz"}),
new RegexTestCase(@"[\u0100\u0102\u0104]+", RegexOptions.IgnoreCase, "\u00FF \u0100\u0102\u0104\u0101\u0103\u0105\u0106", new string[] {"\u0100\u0102\u0104\u0101\u0103\u0105"}),
new RegexTestCase(@"[B-D\u0130]+", RegexOptions.IgnoreCase, "aAeE\u0129\u0131\u0068 BCDbcD\u0130\u0069\u0070", new string[] {"BCDbcD\u0130\u0069"}),
new RegexTestCase(@"[\u013B\u013D\u013F]+", RegexOptions.IgnoreCase, "\u013A\u013B\u013D\u013F\u013C\u013E\u0140\u0141", new string[] {"\u013B\u013D\u013F\u013C\u013E\u0140"}),
new RegexTestCase(@"[\uFFFD-\uFFFF]+", RegexOptions.IgnoreCase, "\uFFFC\uFFFD\uFFFE\uFFFF", new string[] {"\uFFFD\uFFFE\uFFFF"}),
new RegexTestCase(@"[\uFFFC-\uFFFE]+", RegexOptions.IgnoreCase, "\uFFFB\uFFFC\uFFFD\uFFFE\uFFFF", new string[] {"\uFFFC\uFFFD\uFFFE"}),
/*********************************************************
Escape Chars
*********************************************************/
new RegexTestCase("(Cat)\r(Dog)", "Cat\rDog", new string[] {"Cat\rDog", "Cat", "Dog"}),
new RegexTestCase("(Cat)\t(Dog)", "Cat\tDog", new string[] {"Cat\tDog", "Cat", "Dog"}),
new RegexTestCase("(Cat)\f(Dog)", "Cat\fDog", new string[] {"Cat\fDog", "Cat", "Dog"}),
/*********************************************************
Miscellaneous { witout matching }
*********************************************************/
new RegexTestCase(@"\p{klsak", typeof(ArgumentException)),
new RegexTestCase(@"{5", "hello {5 world", new string[] {"{5"}),
new RegexTestCase(@"{5,", "hello {5, world", new string[] {"{5,"}),
new RegexTestCase(@"{5,6", "hello {5,6 world", new string[] {"{5,6"}),
/*********************************************************
Miscellaneous inline options
*********************************************************/
new RegexTestCase(@"(?r:cat)", typeof(ArgumentException)),
new RegexTestCase(@"(?c:cat)", typeof(ArgumentException)),
new RegexTestCase(@"(?n:(?<cat>cat)(\s+)(?<dog>dog))", "cat dog", new string[] {"cat dog", "cat", "dog"}),
new RegexTestCase(@"(?n:(cat)(\s+)(dog))", "cat dog", new string[] {"cat dog"}),
new RegexTestCase(@"(?n:(cat)(?<SpaceChars>\s+)(dog))", "cat dog", new string[] {"cat dog", " "}),
new RegexTestCase(@"(?x:
(?<cat>cat) # Cat statement
(\s+) # Whitespace chars
(?<dog>dog # Dog statement
))", "cat dog", new string[] {"cat dog", " ", "cat", "dog"}),
new RegexTestCase(@"(?e:cat)", typeof(ArgumentException)),
new RegexTestCase(@"(?+i:cat)", "CAT", new string[] {"CAT"}),
/*********************************************************
\d, \D, \s, \S, \w, \W, \P, \p inside character range
*********************************************************/
new RegexTestCase(@"cat([\d]*)dog", "hello123cat230927dog1412d", new string[] {"cat230927dog", "230927"}),
new RegexTestCase(@"([\D]*)dog", "65498catdog58719", new string[] {"catdog", "cat"}),
new RegexTestCase(@"cat([\s]*)dog", "wiocat dog3270", new string[] {"cat dog", " "}),
new RegexTestCase(@"cat([\S]*)", "sfdcatdog 3270", new string[] {"catdog", "dog"}),
new RegexTestCase(@"cat([\w]*)", "sfdcatdog 3270", new string[] {"catdog", "dog"}),
new RegexTestCase(@"cat([\W]*)dog", "wiocat dog3270", new string[] {"cat dog", " "}),
new RegexTestCase(@"([\p{Lu}]\w*)\s([\p{Lu}]\w*)", "Hello World", new string[] {"Hello World", "Hello", "World"}),
new RegexTestCase(@"([\P{Ll}][\p{Ll}]*)\s([\P{Ll}][\p{Ll}]*)", "Hello World", new string[] {"Hello World", "Hello", "World"}),
new RegexTestCase(@"cat([a-\d]*)dog", typeof(ArgumentException)),
new RegexTestCase(@"([5-\D]*)dog", typeof(ArgumentException)),
new RegexTestCase(@"cat([6-\s]*)dog", typeof(ArgumentException)),
new RegexTestCase(@"cat([c-\S]*)", typeof(ArgumentException)),
new RegexTestCase(@"cat([7-\w]*)", typeof(ArgumentException)),
new RegexTestCase(@"cat([a-\W]*)dog", typeof(ArgumentException)),
new RegexTestCase(@"([f-\p{Lu}]\w*)\s([\p{Lu}]\w*)", typeof(ArgumentException)),
new RegexTestCase(@"([1-\P{Ll}][\p{Ll}]*)\s([\P{Ll}][\p{Ll}]*)", typeof(ArgumentException)),
new RegexTestCase(@"[\p]", typeof(ArgumentException)),
new RegexTestCase(@"[\P]", typeof(ArgumentException)),
new RegexTestCase(@"([\pcat])", typeof(ArgumentException)),
new RegexTestCase(@"([\Pcat])", typeof(ArgumentException)),
new RegexTestCase(@"(\p{", typeof(ArgumentException)),
new RegexTestCase(@"(\p{Ll", typeof(ArgumentException)),
/*********************************************************
\x, \u, \a, \b, \e, \f, \n, \r, \t, \v, \c, inside character range
*********************************************************/
new RegexTestCase(@"(cat)([\x41]*)(dog)", "catAAAdog", new String[] {"catAAAdog", "cat", "AAA", "dog"}),
new RegexTestCase(@"(cat)([\u0041]*)(dog)", "catAAAdog", new String[] {"catAAAdog", "cat", "AAA", "dog"}),
new RegexTestCase(@"(cat)([\a]*)(dog)", "cat\a\a\adog", new String[] {"cat\a\a\adog", "cat", "\a\a\a", "dog"}),
new RegexTestCase(@"(cat)([\b]*)(dog)", "cat\b\b\bdog", new String[] {"cat\b\b\bdog", "cat", "\b\b\b", "dog"}),
new RegexTestCase(@"(cat)([\e]*)(dog)", "cat\u001B\u001B\u001Bdog", new String[] {"cat\u001B\u001B\u001Bdog", "cat", "\u001B\u001B\u001B", "dog"}),
new RegexTestCase(@"(cat)([\f]*)(dog)", "cat\f\f\fdog", new String[] {"cat\f\f\fdog", "cat", "\f\f\f", "dog"}),
new RegexTestCase(@"(cat)([\r]*)(dog)", "cat\r\r\rdog", new String[] {"cat\r\r\rdog", "cat", "\r\r\r", "dog"}),
new RegexTestCase(@"(cat)([\v]*)(dog)", "cat\v\v\vdog", new String[] {"cat\v\v\vdog", "cat", "\v\v\v", "dog"}),
new RegexTestCase(@"(cat)([\o]*)(dog)", typeof(ArgumentException)),
/*********************************************************
\d, \D, \s, \S, \w, \W, \P, \p inside character range ([0-5]) with ECMA Option
*********************************************************/
new RegexTestCase(@"cat([\d]*)dog", RegexOptions.ECMAScript, "hello123cat230927dog1412d", new string[] {"cat230927dog", "230927"}),
new RegexTestCase(@"([\D]*)dog", RegexOptions.ECMAScript, "65498catdog58719", new string[] {"catdog", "cat"}),
new RegexTestCase(@"cat([\s]*)dog", RegexOptions.ECMAScript, "wiocat dog3270", new string[] {"cat dog", " "}),
new RegexTestCase(@"cat([\S]*)", RegexOptions.ECMAScript, "sfdcatdog 3270", new string[] {"catdog", "dog"}),
new RegexTestCase(@"cat([\w]*)", RegexOptions.ECMAScript, "sfdcatdog 3270", new string[] {"catdog", "dog"}),
new RegexTestCase(@"cat([\W]*)dog", RegexOptions.ECMAScript, "wiocat dog3270", new string[] {"cat dog", " "}),
new RegexTestCase(@"([\p{Lu}]\w*)\s([\p{Lu}]\w*)", RegexOptions.ECMAScript, "Hello World", new string[] {"Hello World", "Hello", "World"}),
new RegexTestCase(@"([\P{Ll}][\p{Ll}]*)\s([\P{Ll}][\p{Ll}]*)", RegexOptions.ECMAScript, "Hello World", new string[] {"Hello World", "Hello", "World"}),
/*********************************************************
\d, \D, \s, \S, \w, \W, \P, \p outside character range ([0-5]) with ECMA Option
*********************************************************/
new RegexTestCase(@"(cat)\d*dog", RegexOptions.ECMAScript, "hello123cat230927dog1412d", new string[] {"cat230927dog", "cat"}),
new RegexTestCase(@"\D*(dog)", RegexOptions.ECMAScript, "65498catdog58719", new string[] {"catdog", "dog"}),
new RegexTestCase(@"(cat)\s*(dog)", RegexOptions.ECMAScript, "wiocat dog3270", new string[] {"cat dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\S*", RegexOptions.ECMAScript, "sfdcatdog 3270", new string[] {"catdog", "cat"}),
new RegexTestCase(@"(cat)\w*", RegexOptions.ECMAScript, "sfdcatdog 3270", new string[] {"catdog", "cat"}),
new RegexTestCase(@"(cat)\W*(dog)", RegexOptions.ECMAScript, "wiocat dog3270", new string[] {"cat dog", "cat", "dog"}),
new RegexTestCase(@"\p{Lu}(\w*)\s\p{Lu}(\w*)", RegexOptions.ECMAScript, "Hello World", new string[] {"Hello World", "ello", "orld"}),
new RegexTestCase(@"\P{Ll}\p{Ll}*\s\P{Ll}\p{Ll}*", RegexOptions.ECMAScript, "Hello World", new string[] {"Hello World"}),
/*********************************************************
Use < in a group
*********************************************************/
new RegexTestCase(@"cat(?<0>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<1dog>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<dog)_*>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<dog!>)_*>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<dog >)_*>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<dog<>)_*>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<>dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<->dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?<dog121>dog)", "catcatdogdogcat", new string[] {"catdog", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s*(?<cat>dog)", "catcat dogdogcat", new string[] {"cat dog", "dog"}),
new RegexTestCase(@"(?<1>cat)\s*(?<1>dog)", "catcat dogdogcat", new string[] {"cat dog", "dog"}),
new RegexTestCase(@"(?<2048>cat)\s*(?<2048>dog)", "catcat dogdogcat", new string[] {"cat dog", "dog"}),
new RegexTestCase(@"(?<cat>cat)\w+(?<dog-cat>dog)", "cat_Hello_World_dog", new string[] {"cat_Hello_World_dog", "", "_Hello_World_"}),
new RegexTestCase(@"(?<cat>cat)\w+(?<-cat>dog)", "cat_Hello_World_dog", new string[] {"cat_Hello_World_dog", ""}),
new RegexTestCase(@"(?<cat>cat)\w+(?<cat-cat>dog)", "cat_Hello_World_dog", new string[] {"cat_Hello_World_dog", "_Hello_World_"}),
new RegexTestCase(@"(?<1>cat)\w+(?<dog-1>dog)", "cat_Hello_World_dog", new string[] {"cat_Hello_World_dog", "", "_Hello_World_"}),
new RegexTestCase(@"(?<cat>cat)\w+(?<2-cat>dog)", "cat_Hello_World_dog", new string[] {"cat_Hello_World_dog", "", "_Hello_World_"}),
new RegexTestCase(@"(?<1>cat)\w+(?<2-1>dog)", "cat_Hello_World_dog", new string[] {"cat_Hello_World_dog", "", "_Hello_World_"}),
new RegexTestCase(@"(?<cat>cat)\w+(?<dog-16>dog)", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\w+(?<dog-1uosn>dog)", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\w+(?<dog-catdog>dog)", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\w+(?<dog-()*!@>dog)", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\w+(?<dog-0>dog)", "cat_Hello_World_dog", null),
/*********************************************************
Quantifiers
*********************************************************/
new RegexTestCase(@"(?<cat>cat){", "STARTcat{", new string[] {"cat{", "cat"}),
new RegexTestCase(@"(?<cat>cat){fdsa", "STARTcat{fdsa", new string[] {"cat{fdsa", "cat"}),
new RegexTestCase(@"(?<cat>cat){1", "STARTcat{1", new string[] {"cat{1", "cat"}),
new RegexTestCase(@"(?<cat>cat){1END", "STARTcat{1END", new string[] {"cat{1END", "cat"}),
new RegexTestCase(@"(?<cat>cat){1,", "STARTcat{1,", new string[] {"cat{1,", "cat"}),
new RegexTestCase(@"(?<cat>cat){1,END", "STARTcat{1,END", new string[] {"cat{1,END", "cat"}),
new RegexTestCase(@"(?<cat>cat){1,2", "STARTcat{1,2", new string[] {"cat{1,2", "cat"}),
new RegexTestCase(@"(?<cat>cat){1,2END", "STARTcat{1,2END", new string[] {"cat{1,2END", "cat"}),
/*********************************************************
Use (? in a group
*********************************************************/
new RegexTestCase(@"cat(?(?#COMMENT)cat)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?(?'cat'cat)dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?(?<cat>cat)dog)", typeof(ArgumentException)),
new RegexTestCase(@"cat(?(?afdcat)dog)", typeof(ArgumentException)),
/*********************************************************
Use IgnorePatternWhitespace
*********************************************************/
new RegexTestCase(@"(cat) #cat
\s+ #followed by 1 or more whitespace
(dog) #followed by dog
", RegexOptions.IgnorePatternWhitespace, "cat dog", new String[] {"cat dog", "cat", "dog" }),
new RegexTestCase(@"(cat) #cat
\s+ #followed by 1 or more whitespace
(dog) #followed by dog", RegexOptions.IgnorePatternWhitespace, "cat dog", new String[] {"cat dog", "cat", "dog" }),
new RegexTestCase(@"(cat) (?#cat) \s+ (?#followed by 1 or more whitespace) (dog) (?#followed by dog)",
RegexOptions.IgnorePatternWhitespace, "cat dog", new String[] {"cat dog", "cat", "dog" }),
new RegexTestCase(@"(cat) (?#cat) \s+ (?#followed by 1 or more whitespace",
RegexOptions.IgnorePatternWhitespace, typeof(ArgumentException)),
/*********************************************************
Without IgnorePatternWhitespace
*********************************************************/
new RegexTestCase(@"(cat) (?#cat) \s+ (?#followed by 1 or more whitespace", typeof(ArgumentException)),
/*********************************************************
Back Reference
*********************************************************/
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k<cat>", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k'cat'", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\<cat>", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\'cat'", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k<1>", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k'1'", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\<1>", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\'1'", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\1", "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\1", RegexOptions.ECMAScript, "asdfcat dogcat dog", new string[] {"cat dogcat", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k<dog>", "asdfcat dogdog dog", new string[] {"cat dogdog", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\2", "asdfcat dogdog dog", new string[] {"cat dogdog", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\2", RegexOptions.ECMAScript, "asdfcat dogdog dog", new string[] {"cat dogdog", "cat", "dog"}),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\kcat", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k<cat2>", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k<8>cat", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k<8>cat", RegexOptions.ECMAScript, typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k8", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\k8", RegexOptions.ECMAScript, typeof(ArgumentException)),
/*********************************************************
Octal
*********************************************************/
new RegexTestCase(@"(cat)(\077)", "hellocat?dogworld", new string[] {"cat?", "cat", "?"}),
new RegexTestCase(@"(cat)(\77)", "hellocat?dogworld", new string[] {"cat?", "cat", "?"}),
new RegexTestCase(@"(cat)(\176)", "hellocat~dogworld", new string[] {"cat~", "cat", "~"}),
new RegexTestCase(@"(cat)(\400)", "hellocat\0dogworld", new string[] {"cat\0", "cat", "\0"}),
new RegexTestCase(@"(cat)(\300)", "hellocat\u00C0dogworld", new string[] {"cat\u00C0", "cat", "\u00C0"}),
new RegexTestCase(@"(cat)(\300)", "hellocat\u00C0dogworld", new string[] {"cat\u00C0", "cat", "\u00C0"}),
new RegexTestCase(@"(cat)(\477)", "hellocat\u003Fdogworld", new string[] {"cat\u003F", "cat", "\u003F"}),
new RegexTestCase(@"(cat)(\777)", "hellocat\u00FFdogworld", new string[] {"cat\u00FF", "cat", "\u00FF"}),
new RegexTestCase(@"(cat)(\7770)", "hellocat\u00FF0dogworld", new string[] {"cat\u00FF0", "cat", "\u00FF0"}),
new RegexTestCase(@"(cat)(\7)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\077)", RegexOptions.ECMAScript, "hellocat?dogworld", new string[] {"cat?", "cat", "?"}),
new RegexTestCase(@"(cat)(\77)", RegexOptions.ECMAScript, "hellocat?dogworld", new string[] {"cat?", "cat", "?"}),
new RegexTestCase(@"(cat)(\7)", RegexOptions.ECMAScript, "hellocat\adogworld", new string[] {"cat\a", "cat", "\a"}),
new RegexTestCase(@"(cat)(\40)", RegexOptions.ECMAScript, "hellocat dogworld", new string[] {"cat ", "cat", " "}),
new RegexTestCase(@"(cat)(\040)", RegexOptions.ECMAScript, "hellocat dogworld", new string[] {"cat ", "cat", " "}),
new RegexTestCase(@"(cat)(\176)", RegexOptions.ECMAScript, "hellocatcat76dogworld", new string[] {"catcat76", "cat", "cat76"}),
new RegexTestCase(@"(cat)(\377)", RegexOptions.ECMAScript, "hellocat\u00FFdogworld", new string[] {"cat\u00FF", "cat", "\u00FF"}),
new RegexTestCase(@"(cat)(\400)", RegexOptions.ECMAScript, "hellocat 0Fdogworld", new string[] {"cat 0", "cat", " 0"}),
/*********************************************************
Decimal
*********************************************************/
new RegexTestCase(@"(cat)\s+(?<2147483646>dog)", "asdlkcat dogiwod", new string[] {"cat dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(?<2147483647>dog)", "asdlkcat dogiwod", new string[] {"cat dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(?<2147483648>dog)", typeof(System.ArgumentException)),
new RegexTestCase(@"(cat)\s+(?<21474836481097>dog)", typeof(System.ArgumentException)),
/*********************************************************
Hex
*********************************************************/
new RegexTestCase(@"(cat)(\x2a*)(dog)", "asdlkcat***dogiwod", new string[] {"cat***dog", "cat", "***", "dog"}),
new RegexTestCase(@"(cat)(\x2b*)(dog)", "asdlkcat+++dogiwod", new string[] {"cat+++dog", "cat", "+++", "dog"}),
new RegexTestCase(@"(cat)(\x2c*)(dog)", "asdlkcat,,,dogiwod", new string[] {"cat,,,dog", "cat", ",,,", "dog"}),
new RegexTestCase(@"(cat)(\x2d*)(dog)", "asdlkcat---dogiwod", new string[] {"cat---dog", "cat", "---", "dog"}),
new RegexTestCase(@"(cat)(\x2e*)(dog)", "asdlkcat...dogiwod", new string[] {"cat...dog", "cat", "...", "dog"}),
new RegexTestCase(@"(cat)(\x2f*)(dog)", "asdlkcat///dogiwod", new string[] {"cat///dog", "cat", "///", "dog"}),
new RegexTestCase(@"(cat)(\x2A*)(dog)", "asdlkcat***dogiwod", new string[] {"cat***dog", "cat", "***", "dog"}),
new RegexTestCase(@"(cat)(\x2B*)(dog)", "asdlkcat+++dogiwod", new string[] {"cat+++dog", "cat", "+++", "dog"}),
new RegexTestCase(@"(cat)(\x2C*)(dog)", "asdlkcat,,,dogiwod", new string[] {"cat,,,dog", "cat", ",,,", "dog"}),
new RegexTestCase(@"(cat)(\x2D*)(dog)", "asdlkcat---dogiwod", new string[] {"cat---dog", "cat", "---", "dog"}),
new RegexTestCase(@"(cat)(\x2E*)(dog)", "asdlkcat...dogiwod", new string[] {"cat...dog", "cat", "...", "dog"}),
new RegexTestCase(@"(cat)(\x2F*)(dog)", "asdlkcat///dogiwod", new string[] {"cat///dog", "cat", "///", "dog"}),
/*********************************************************
ScanControl
*********************************************************/
new RegexTestCase(@"(cat)(\c*)(dog)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)\c", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\c *)(dog)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\c?*)(dog)", typeof(ArgumentException)),
new RegexTestCase("(cat)(\\c\0*)(dog)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\c`*)(dog)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\c\|*)(dog)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\c\[*)(dog)", typeof(ArgumentException)),
new RegexTestCase(@"(cat)(\c@*)(dog)", "asdlkcat\0\0dogiwod", new string[] {"cat\0\0dog", "cat", "\0\0", "dog"}),
new RegexTestCase(@"(cat)(\cA*)(dog)", "asdlkcat\u0001dogiwod", new string[] {"cat\u0001dog", "cat", "\u0001", "dog"}),
new RegexTestCase(@"(cat)(\ca*)(dog)", "asdlkcat\u0001dogiwod", new string[] {"cat\u0001dog", "cat", "\u0001", "dog"}),
new RegexTestCase(@"(cat)(\cC*)(dog)", "asdlkcat\u0003dogiwod", new string[] {"cat\u0003dog", "cat", "\u0003", "dog"}),
new RegexTestCase(@"(cat)(\cc*)(dog)", "asdlkcat\u0003dogiwod", new string[] {"cat\u0003dog", "cat", "\u0003", "dog"}),
new RegexTestCase(@"(cat)(\cD*)(dog)", "asdlkcat\u0004dogiwod", new string[] {"cat\u0004dog", "cat", "\u0004", "dog"}),
new RegexTestCase(@"(cat)(\cd*)(dog)", "asdlkcat\u0004dogiwod", new string[] {"cat\u0004dog", "cat", "\u0004", "dog"}),
new RegexTestCase(@"(cat)(\cX*)(dog)", "asdlkcat\u0018dogiwod", new string[] {"cat\u0018dog", "cat", "\u0018", "dog"}),
new RegexTestCase(@"(cat)(\cx*)(dog)", "asdlkcat\u0018dogiwod", new string[] {"cat\u0018dog", "cat", "\u0018", "dog"}),
new RegexTestCase(@"(cat)(\cZ*)(dog)", "asdlkcat\u001adogiwod", new string[] {"cat\u001adog", "cat", "\u001a", "dog"}),
new RegexTestCase(@"(cat)(\cz*)(dog)", "asdlkcat\u001adogiwod", new string[] {"cat\u001adog", "cat", "\u001a", "dog"}),
/*********************************************************
Atomic Zero-Width Assertions \A \Z \z \G \b \B
*********************************************************/
//\A
new RegexTestCase(@"\A(cat)\s+(dog)", "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"\A(cat)\s+(dog)", RegexOptions.Multiline, "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"\A(cat)\s+(dog)", RegexOptions.ECMAScript, "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"\A(cat)\s+(dog)", "cat \n\n\ncat dog", null),
new RegexTestCase(@"\A(cat)\s+(dog)", RegexOptions.Multiline, "cat \n\n\ncat dog", null),
new RegexTestCase(@"\A(cat)\s+(dog)", RegexOptions.ECMAScript, "cat \n\n\ncat dog", null),
//\Z
new RegexTestCase(@"(cat)\s+(dog)\Z", "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\Z", RegexOptions.Multiline, "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\Z", RegexOptions.ECMAScript, "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\Z", "cat \n\n\n dog\n", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\Z", RegexOptions.Multiline, "cat \n\n\n dog\n", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\Z", RegexOptions.ECMAScript, "cat \n\n\n dog\n", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\Z", "cat dog\n\n\ncat", null),
new RegexTestCase(@"(cat)\s+(dog)\Z", RegexOptions.Multiline, "cat dog\n\n\ncat ", null),
new RegexTestCase(@"(cat)\s+(dog)\Z", RegexOptions.ECMAScript, "cat dog\n\n\ncat ", null),
//\z
new RegexTestCase(@"(cat)\s+(dog)\z", "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\z", RegexOptions.Multiline, "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\z", RegexOptions.ECMAScript, "cat \n\n\n dog", new string[] {"cat \n\n\n dog", "cat", "dog"}),
new RegexTestCase(@"(cat)\s+(dog)\z", "cat dog\n\n\ncat", null),
new RegexTestCase(@"(cat)\s+(dog)\z", RegexOptions.Multiline, "cat dog\n\n\ncat ", null),
new RegexTestCase(@"(cat)\s+(dog)\z", RegexOptions.ECMAScript, "cat dog\n\n\ncat ", null),
new RegexTestCase(@"(cat)\s+(dog)\z", "cat \n\n\n dog\n", null),
new RegexTestCase(@"(cat)\s+(dog)\z", RegexOptions.Multiline, "cat \n\n\n dog\n", null),
new RegexTestCase(@"(cat)\s+(dog)\z", RegexOptions.ECMAScript, "cat \n\n\n dog\n", null),
//\b
new RegexTestCase(@"\b@cat", "123START123@catEND", new string[] {"@cat"}),
new RegexTestCase(@"\b\<cat", "123START123<catEND", new string[] {"<cat"}),
new RegexTestCase(@"\b,cat", "satwe,,,START,catEND", new string[] {",cat"}),
new RegexTestCase(@"\b\[cat", "`12START123[catEND", new string[] {"[cat"}),
new RegexTestCase(@"\b@cat", "123START123;@catEND", null),
new RegexTestCase(@"\b\<cat", "123START123'<catEND", null),
new RegexTestCase(@"\b,cat", "satwe,,,START',catEND", null),
new RegexTestCase(@"\b\[cat", "`12START123'[catEND", null),
//\B
new RegexTestCase(@"\B@cat", "123START123;@catEND", new string[] {"@cat"}),
new RegexTestCase(@"\B\<cat", "123START123'<catEND", new string[] {"<cat"}),
new RegexTestCase(@"\B,cat", "satwe,,,START',catEND", new string[] {",cat"}),
new RegexTestCase(@"\B\[cat", "`12START123'[catEND", new string[] {"[cat"}),
new RegexTestCase(@"\B@cat", "123START123@catEND", null),
new RegexTestCase(@"\B\<cat", "123START123<catEND", null),
new RegexTestCase(@"\B,cat", "satwe,,,START,catEND", null),
new RegexTestCase(@"\B\[cat", "`12START123[catEND", null),
/*********************************************************
\w matching \p{Lm} (Letter, Modifier)
*********************************************************/
new RegexTestCase(@"(\w+)\s+(\w+)", "cat\u02b0 dog\u02b1", new string[] {"cat\u02b0 dog\u02b1", "cat\u02b0", "dog\u02b1"}),
new RegexTestCase(@"(cat\w+)\s+(dog\w+)", "STARTcat\u30FC dog\u3005END", new string[] {"cat\u30FC dog\u3005END", "cat\u30FC", "dog\u3005END"}),
new RegexTestCase(@"(cat\w+)\s+(dog\w+)", "STARTcat\uff9e dog\uff9fEND", new string[] {"cat\uff9e dog\uff9fEND", "cat\uff9e", "dog\uff9fEND"}),
/*********************************************************
positive and negative character classes [a-c]|[^b-c]
*********************************************************/
new RegexTestCase(@"[^a]|d", "d", new string[] {"d"}),
new RegexTestCase(@"([^a]|[d])*", "Hello Worlddf", new string[] {"Hello Worlddf", "f"}),
new RegexTestCase(@"([^{}]|\n)+", "{{{{Hello\n World \n}END", new string[] {"Hello\n World \n", "\n"}),
new RegexTestCase(@"([a-d]|[^abcd])+", "\tonce\n upon\0 a- ()*&^%#time?", new string[] {"\tonce\n upon\0 a- ()*&^%#time?", "?"}),
new RegexTestCase(@"([^a]|[a])*", "once upon a time", new string[] {"once upon a time", "e"}),
new RegexTestCase(@"([a-d]|[^abcd]|[x-z]|^wxyz])+", "\tonce\n upon\0 a- ()*&^%#time?", new string[] {"\tonce\n upon\0 a- ()*&^%#time?", "?"}),
new RegexTestCase(@"([a-d]|[e-i]|[^e]|wxyz])+", "\tonce\n upon\0 a- ()*&^%#time?", new string[] {"\tonce\n upon\0 a- ()*&^%#time?", "?"}),
/*********************************************************
canonical and noncanonical char class, where one group is in it's
simplest form [a-e] and another is more complex .
*********************************************************/
new RegexTestCase(@"^(([^b]+ )|(.* ))$", "aaa ", new string[] {"aaa ", "aaa ", "aaa ", ""}),
new RegexTestCase(@"^(([^b]+ )|(.*))$", "aaa", new string[] {"aaa", "aaa", "", "aaa"}),
new RegexTestCase(@"^(([^b]+ )|(.* ))$", "bbb ", new string[] {"bbb ", "bbb ", "", "bbb "}),
new RegexTestCase(@"^(([^b]+ )|(.*))$", "bbb", new string[] {"bbb", "bbb", "", "bbb"}),
new RegexTestCase(@"^((a*)|(.*))$", "aaa", new string[] {"aaa", "aaa", "aaa", ""}),
new RegexTestCase(@"^((a*)|(.*))$", "aaabbb", new string[] {"aaabbb", "aaabbb", "", "aaabbb"}),
new RegexTestCase(@"(([0-9])|([a-z])|([A-Z]))*", "{hello 1234567890 world}", new string[] {"", "", "", "", ""}),
new RegexTestCase(@"(([0-9])|([a-z])|([A-Z]))+", "{hello 1234567890 world}", new string[] {"hello", "o", "", "o", ""}),
new RegexTestCase(@"(([0-9])|([a-z])|([A-Z]))*", "{HELLO 1234567890 world}", new string[] {"", "", "", "", ""}),
new RegexTestCase(@"(([0-9])|([a-z])|([A-Z]))+", "{HELLO 1234567890 world}", new string[] {"HELLO", "O", "", "", "O"}),
new RegexTestCase(@"(([0-9])|([a-z])|([A-Z]))*", "{1234567890 hello world}", new string[] {"", "", "", "", ""}),
new RegexTestCase(@"(([0-9])|([a-z])|([A-Z]))+", "{1234567890 hello world}", new string[] {"1234567890", "0", "0", "", ""}),
new RegexTestCase(@"^(([a-d]*)|([a-z]*))$", "aaabbbcccdddeeefff", new string[] {"aaabbbcccdddeeefff", "aaabbbcccdddeeefff", "", "aaabbbcccdddeeefff"}),
new RegexTestCase(@"^(([d-f]*)|([c-e]*))$", "dddeeeccceee", new string[] {"dddeeeccceee", "dddeeeccceee", "", "dddeeeccceee"}),
new RegexTestCase(@"^(([c-e]*)|([d-f]*))$", "dddeeeccceee", new string[] {"dddeeeccceee", "dddeeeccceee", "dddeeeccceee", ""}),
new RegexTestCase(@"(([a-d]*)|([a-z]*))", "aaabbbcccdddeeefff", new string[] {"aaabbbcccddd", "aaabbbcccddd", "aaabbbcccddd", ""}),
new RegexTestCase(@"(([d-f]*)|([c-e]*))", "dddeeeccceee", new string[] {"dddeee", "dddeee", "dddeee", ""}),
new RegexTestCase(@"(([c-e]*)|([d-f]*))", "dddeeeccceee", new string[] {"dddeeeccceee", "dddeeeccceee", "dddeeeccceee", ""}),
new RegexTestCase(@"(([a-d]*)|(.*))", "aaabbbcccdddeeefff", new string[] {"aaabbbcccddd", "aaabbbcccddd", "aaabbbcccddd", ""}),
new RegexTestCase(@"(([d-f]*)|(.*))", "dddeeeccceee", new string[] {"dddeee", "dddeee", "dddeee", ""}),
new RegexTestCase(@"(([c-e]*)|(.*))", "dddeeeccceee", new string[] {"dddeeeccceee", "dddeeeccceee", "dddeeeccceee", ""}),
/*********************************************************
\p{Pi} (Punctuation Initial quote) \p{Pf} (Punctuation Final quote)
*********************************************************/
new RegexTestCase(@"\p{Pi}(\w*)\p{Pf}", "\u00ABCat\u00BB \u00BBDog\u00AB'", new string[] {"\u00ABCat\u00BB", "Cat"}),
new RegexTestCase(@"\p{Pi}(\w*)\p{Pf}", "\u2018Cat\u2019 \u2019Dog\u2018'", new string[] {"\u2018Cat\u2019", "Cat"}),
/*********************************************************
Use special unicode characters
*********************************************************/
/* new RegexTest(@"AE", "\u00C4", new string[] {"Hello World", "Hello", "World"}, GERMAN_PHONEBOOK),
new RegexTest(@"oe", "\u00F6", new string[] {"Hello World", "Hello", "World"}, GERMAN_PHONEBOOK),
new RegexTest("\u00D1", "\u004E\u0303", new string[] {"Hello World", "Hello", "World"}, ENGLISH_US),
new RegexTest("\u00D1", "\u004E\u0303", new string[] {"Hello World", "Hello", "World"}, INVARIANT),
new RegexTest("\u00D1", RegexOptions.IgnoreCase, "\u006E\u0303", new string[] {"Hello World", "Hello", "World"}, ENGLISH_US),
new RegexTest("\u00D1", RegexOptions.IgnoreCase, "\u006E\u0303", new string[] {"Hello World", "Hello", "World"}, INVARIANT),
new RegexTest("\u00F1", RegexOptions.IgnoreCase, "\u004E\u0303", new string[] {"Hello World", "Hello", "World"}, ENGLISH_US),
new RegexTest("\u00F1", RegexOptions.IgnoreCase, "\u004E\u0303", new string[] {"Hello World", "Hello", "World"}, INVARIANT),
new RegexTest("\u00F1", "\u006E\u0303", new string[] {"Hello World", "Hello", "World"}, ENGLISH_US),
new RegexTest("\u00F1", "\u006E\u0303", new string[] {"Hello World", "Hello", "World"}, ENGLISH_US),
*/
new RegexTestCase("CH", RegexOptions.IgnoreCase, "Ch", new string[] {"Ch"}, _ENGLISH_US),
new RegexTestCase("CH", RegexOptions.IgnoreCase, "Ch", new string[] {"Ch"}, _CZECH),
new RegexTestCase("cH", RegexOptions.IgnoreCase, "Ch", new string[] {"Ch"}, _ENGLISH_US),
new RegexTestCase("cH", RegexOptions.IgnoreCase, "Ch", new string[] {"Ch"}, _CZECH),
new RegexTestCase("AA", RegexOptions.IgnoreCase, "Aa", new string[] {"Aa"}, _ENGLISH_US),
new RegexTestCase("AA", RegexOptions.IgnoreCase, "Aa", new string[] {"Aa"}, _DANISH),
new RegexTestCase("aA", RegexOptions.IgnoreCase, "Aa", new string[] {"Aa"}, _ENGLISH_US),
new RegexTestCase("aA", RegexOptions.IgnoreCase, "Aa", new string[] {"Aa"}, _DANISH),
new RegexTestCase("\u0131", RegexOptions.IgnoreCase, "\u0049", new string[] {"\u0049"}, _TURKISH),
new RegexTestCase("\u0130", RegexOptions.IgnoreCase, "\u0069", new string[] {"\u0069"}, _TURKISH),
new RegexTestCase("\u0131", RegexOptions.IgnoreCase, "\u0049", new string[] {"\u0049"}, _LATIN_AZERI),
new RegexTestCase("\u0130", RegexOptions.IgnoreCase, "\u0069", new string[] {"\u0069"}, _LATIN_AZERI),
new RegexTestCase("\u0131", RegexOptions.IgnoreCase, "\u0049", null, _ENGLISH_US),
new RegexTestCase("\u0131", RegexOptions.IgnoreCase, "\u0069", null, _ENGLISH_US),
new RegexTestCase("\u0130", RegexOptions.IgnoreCase, "\u0049", new string[] {"\u0049"}, _ENGLISH_US),
new RegexTestCase("\u0130", RegexOptions.IgnoreCase, "\u0069", new string[] {"\u0069"}, _ENGLISH_US),
new RegexTestCase("\u0131", RegexOptions.IgnoreCase, "\u0049", null, _INVARIANT),
new RegexTestCase("\u0131", RegexOptions.IgnoreCase, "\u0069", null, _INVARIANT),
new RegexTestCase("\u0130", RegexOptions.IgnoreCase, "\u0049", null, _INVARIANT),
new RegexTestCase("\u0130", RegexOptions.IgnoreCase, "\u0069", null, _INVARIANT),
/*********************************************************
ECMAScript
*********************************************************/
new RegexTestCase(@"(?<cat>cat)\s+(?<dog>dog)\s+\123\s+\234", RegexOptions.ECMAScript, "asdfcat dog cat23 dog34eia", new string[] {"cat dog cat23 dog34", "cat", "dog"}),
/*********************************************************
Balanced Matching
*********************************************************/
new RegexTestCase(@"<div>
(?>
<div>(?<DEPTH>) |
</div> (?<-DEPTH>) |
.?
)*?
(?(DEPTH)(?!))
</div>", RegexOptions.IgnorePatternWhitespace,
"<div>this is some <div>red</div> text</div></div></div>",
new string[] {"<div>this is some <div>red</div> text</div>", ""}),
new RegexTestCase(@"(
((?'open'<+)[^<>]*)+
((?'close-open'>+)[^<>]*)+
)+", RegexOptions.IgnorePatternWhitespace,
"<01deep_01<02deep_01<03deep_01>><02deep_02><02deep_03<03deep_03>>>",
new string[] {"<01deep_01<02deep_01<03deep_01>><02deep_02><02deep_03<03deep_03>>>", "<02deep_03<03deep_03>>>",
"<03deep_03", ">>>", "<", "03deep_03"}),
new RegexTestCase(@"(
(?<start><)?
[^<>]?
(?<end-start>>)?
)*", RegexOptions.IgnorePatternWhitespace,
"<01deep_01<02deep_01<03deep_01>><02deep_02><02deep_03<03deep_03>>>",
new string[] {"<01deep_01<02deep_01<03deep_01>><02deep_02><02deep_03<03deep_03>>>", "", "",
"01deep_01<02deep_01<03deep_01>><02deep_02><02deep_03<03deep_03>>"}),
new RegexTestCase(@"(
(?<start><[^/<>]*>)?
[^<>]?
(?<end-start></[^/<>]*>)?
)*", RegexOptions.IgnorePatternWhitespace,
"<b><a>Cat</a></b>",
new string[] {"<b><a>Cat</a></b>", "", "", "<a>Cat</a>"}),
new RegexTestCase(@"(
(?<start><(?<TagName>[^/<>]*)>)?
[^<>]?
(?<end-start></\k<TagName>>)?
)*", RegexOptions.IgnorePatternWhitespace,
"<b>cat</b><a>dog</a>",
new string[] {"<b>cat</b><a>dog</a>", "", "", "a", "dog"}),
/*********************************************************
Balanced Matching With Backtracking
*********************************************************/
new RegexTestCase(@"(
(?<start><[^/<>]*>)?
.?
(?<end-start></[^/<>]*>)?
)*
(?(start)(?!)) ", RegexOptions.IgnorePatternWhitespace,
"<b><a>Cat</a></b><<<<c>>>><<d><e<f>><g><<<>>>>",
new string[] {"<b><a>Cat</a></b><<<<c>>>><<d><e<f>><g><<<>>>>", "", "", "<a>Cat"}),
/*********************************************************
Character Classes and Lazy quantifier
*********************************************************/
new RegexTestCase(@"([0-9]+?)([\w]+?)", RegexOptions.ECMAScript, "55488aheiaheiad", new string[] {"55", "5", "5"}),
new RegexTestCase(@"([0-9]+?)([a-z]+?)", RegexOptions.ECMAScript, "55488aheiaheiad", new string[] {"55488a", "55488", "a"}),
/*********************************************************
Miscellaneous/Regression scenarios
*********************************************************/
new RegexTestCase(@"(?<openingtag>1)(?<content>.*?)(?=2)", RegexOptions.Singleline | RegexOptions.ExplicitCapture,
"1" + Environment.NewLine + "<Projecaa DefaultTargets=\"x\"/>" + Environment.NewLine + "2",
new string[] {"1" + Environment.NewLine + "<Projecaa DefaultTargets=\"x\"/>" + Environment.NewLine, "1",
Environment.NewLine + "<Projecaa DefaultTargets=\"x\"/>"+ Environment.NewLine }),
new RegexTestCase(@"\G<%#(?<code>.*?)?%>", RegexOptions.Singleline,
@"<%# DataBinder.Eval(this, ""MyNumber"") %>", new string[] {@"<%# DataBinder.Eval(this, ""MyNumber"") %>", @" DataBinder.Eval(this, ""MyNumber"") "}),
/*********************************************************
Nested Quantifiers
*********************************************************/
new RegexTestCase(@"^[abcd]{0,0x10}*$", "a{0,0x10}}}", new string[] {"a{0,0x10}}}"}),
new RegexTestCase(@"^[abcd]{0,16}*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]{1,}*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]{1}*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]{0,16}?*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]{1,}?*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]{1}?*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]*+$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]+*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]?*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]*?+$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]+?*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]??*$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]*{0,5}$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]+{0,5}$", typeof(ArgumentException)),
new RegexTestCase(@"^[abcd]?{0,5}$", typeof(ArgumentException)),
/*********************************************************
Lazy operator Backtracking
*********************************************************/
new RegexTestCase(@"http://([a-zA-z0-9\-]*\.?)*?(:[0-9]*)??/", RegexOptions.IgnoreCase, "http://www.msn.com", null),
new RegexTestCase(@"http://([a-zA-z0-9\-]*\.?)*?(:[0-9]*)??/", RegexOptions.IgnoreCase, "http://www.msn.com/", new string[] {"http://www.msn.com/", "com", String.Empty}),
new RegexTestCase(@"http://([a-zA-Z0-9\-]*\.?)*?/", RegexOptions.IgnoreCase, @"http://www.google.com/", new string[] {"http://www.google.com/", "com"}),
new RegexTestCase(@"([a-z]*?)([\w])", RegexOptions.IgnoreCase, "cat", new string[] {"c", String.Empty, "c"}),
new RegexTestCase(@"^([a-z]*?)([\w])$", RegexOptions.IgnoreCase, "cat", new string[] {"cat", "ca", "t"}),
// TODO: Come up with more scenarios here
/*********************************************************
Backtracking
*********************************************************/
new RegexTestCase(@"([a-z]*)([\w])", RegexOptions.IgnoreCase, "cat", new string[] {"cat", "ca", "t"}),
new RegexTestCase(@"^([a-z]*)([\w])$", RegexOptions.IgnoreCase, "cat", new string[] {"cat", "ca", "t"}),
// TODO: Come up with more scenarios here
/*********************************************************
Character Escapes Invalid Regular Expressions
*********************************************************/
new RegexTestCase(@"\u", typeof(ArgumentException)),
new RegexTestCase(@"\ua", typeof(ArgumentException)),
new RegexTestCase(@"\u0", typeof(ArgumentException)),
new RegexTestCase(@"\x", typeof(ArgumentException)),
new RegexTestCase(@"\x2", typeof(ArgumentException)),
/*********************************************************
Character class Invalid Regular Expressions
*********************************************************/
new RegexTestCase(@"[", typeof(ArgumentException)),
new RegexTestCase(@"[]", typeof(ArgumentException)),
new RegexTestCase(@"[a", typeof(ArgumentException)),
new RegexTestCase(@"[^", typeof(ArgumentException)),
new RegexTestCase(@"[cat", typeof(ArgumentException)),
new RegexTestCase(@"[^cat", typeof(ArgumentException)),
new RegexTestCase(@"[a-", typeof(ArgumentException)),
new RegexTestCase(@"[a-]+", "ba-b", new string[] {"a-"}),
new RegexTestCase(@"\p{", typeof(ArgumentException)),
new RegexTestCase(@"\p{cat", typeof(ArgumentException)),
new RegexTestCase(@"\p{cat}", typeof(ArgumentException)),
new RegexTestCase(@"\P{", typeof(ArgumentException)),
new RegexTestCase(@"\P{cat", typeof(ArgumentException)),
new RegexTestCase(@"\P{cat}", typeof(ArgumentException)),
/*********************************************************
Quantifiers
*********************************************************/
new RegexTestCase(@"(cat){", "cat{", new string[] {"cat{", "cat"}),
new RegexTestCase(@"(cat){}", "cat{}", new string[] {"cat{}", "cat"}),
new RegexTestCase(@"(cat){,", "cat{,", new string[] {"cat{,", "cat"}),
new RegexTestCase(@"(cat){,}", "cat{,}", new string[] {"cat{,}", "cat"}),
new RegexTestCase(@"(cat){cat}", "cat{cat}", new string[] {"cat{cat}", "cat"}),
new RegexTestCase(@"(cat){cat,5}", "cat{cat,5}", new string[] {"cat{cat,5}", "cat"}),
new RegexTestCase(@"(cat){5,dog}", "cat{5,dog}", new string[] {"cat{5,dog}", "cat"}),
new RegexTestCase(@"(cat){cat,dog}", "cat{cat,dog}", new string[] {"cat{cat,dog}", "cat"}),
new RegexTestCase(@"(cat){,}?", "cat{,}?", new string[] {"cat{,}", "cat"}),
new RegexTestCase(@"(cat){cat}?", "cat{cat}?", new string[] {"cat{cat}", "cat"}),
new RegexTestCase(@"(cat){cat,5}?", "cat{cat,5}?", new string[] {"cat{cat,5}", "cat"}),
new RegexTestCase(@"(cat){5,dog}?", "cat{5,dog}?", new string[] {"cat{5,dog}", "cat"}),
new RegexTestCase(@"(cat){cat,dog}?", "cat{cat,dog}?", new string[] {"cat{cat,dog}", "cat"}),
/*********************************************************
Grouping Constructs Invalid Regular Expressions
*********************************************************/
new RegexTestCase(@"(", typeof(ArgumentException)),
new RegexTestCase(@"(?", typeof(ArgumentException)),
new RegexTestCase(@"(?<", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>", typeof(ArgumentException)),
new RegexTestCase(@"(?'", typeof(ArgumentException)),
new RegexTestCase(@"(?'cat'", typeof(ArgumentException)),
new RegexTestCase(@"(?:", typeof(ArgumentException)),
new RegexTestCase(@"(?imn", typeof(ArgumentException)),
new RegexTestCase(@"(?imn )", typeof(ArgumentException)),
new RegexTestCase(@"(?=", typeof(ArgumentException)),
new RegexTestCase(@"(?!", typeof(ArgumentException)),
new RegexTestCase(@"(?<=", typeof(ArgumentException)),
new RegexTestCase(@"(?<!", typeof(ArgumentException)),
new RegexTestCase(@"(?>", typeof(ArgumentException)),
new RegexTestCase(@"()", "cat", new string[] {String.Empty, String.Empty}),
new RegexTestCase(@"(?)", typeof(ArgumentException)),
new RegexTestCase(@"(?<)", typeof(ArgumentException)),
new RegexTestCase(@"(?<cat>)", "cat", new string[] {String.Empty, String.Empty}),
new RegexTestCase(@"(?')", typeof(ArgumentException)),
new RegexTestCase(@"(?'cat')", "cat", new string[] {String.Empty, String.Empty}),
new RegexTestCase(@"(?:)", "cat", new string[] {String.Empty}),
new RegexTestCase(@"(?imn)", "cat", new string[] {String.Empty}),
new RegexTestCase(@"(?imn)cat", "(?imn)cat", new string[] {"cat"}),
new RegexTestCase(@"(?=)", "cat", new string[] {String.Empty}),
new RegexTestCase(@"(?!)", "(?!)cat"),
new RegexTestCase(@"(?<=)", "cat", new string[] {String.Empty}),
new RegexTestCase(@"(?<!)", "(?<!)cat"),
new RegexTestCase(@"(?>)", "cat", new string[] {String.Empty}),
/*********************************************************
Grouping Constructs Invalid Regular Expressions
*********************************************************/
new RegexTestCase(@"\1", typeof(ArgumentException)),
new RegexTestCase(@"\1", typeof(ArgumentException)),
new RegexTestCase(@"\k", typeof(ArgumentException)),
new RegexTestCase(@"\k<", typeof(ArgumentException)),
new RegexTestCase(@"\k<1", typeof(ArgumentException)),
new RegexTestCase(@"\k<cat", typeof(ArgumentException)),
new RegexTestCase(@"\k<>", typeof(ArgumentException)),
/*********************************************************
Alternation construct Invalid Regular Expressions
*********************************************************/
new RegexTestCase(@"(?(", typeof(ArgumentException)),
new RegexTestCase(@"(?()|", typeof(ArgumentException)),
new RegexTestCase(@"(?()|)", "(?()|)", new string[] {""}),
new RegexTestCase(@"(?(cat", typeof(ArgumentException)),
new RegexTestCase(@"(?(cat)|", typeof(ArgumentException)),
new RegexTestCase(@"(?(cat)|)", "cat", new string[] {""}),
new RegexTestCase(@"(?(cat)|)", "dog", new string[] {""}),
new RegexTestCase(@"(?(cat)catdog|)", "catdog", new string[] {"catdog"}),
new RegexTestCase(@"(?(cat)catdog|)", "dog", new string[] {""}),
new RegexTestCase(@"(?(cat)dog|)", "dog", new string[] {""}),
new RegexTestCase(@"(?(cat)dog|)", "cat", new string[] {""}),
new RegexTestCase(@"(?(cat)|catdog)", "cat", new string[] {""}),
new RegexTestCase(@"(?(cat)|catdog)", "catdog", new string[] {""}),
new RegexTestCase(@"(?(cat)|dog)", "dog", new string[] {"dog"}),
new RegexTestCase(@"(?(cat)|dog)", "oof"),
/*********************************************************
Empty Match
*********************************************************/
new RegexTestCase(@"([a*]*)+?$", "ab", new string[] {"", ""}),
new RegexTestCase(@"(a*)+?$", "b", new string[] {"", ""}),
};
}
| |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;
namespace StatsViewer
{
/// <summary>
/// The stats table shared memory segment contains this
/// header structure.
/// </summary>
// Sequential layout: the field order and sizes must match the header
// structure written into the shared memory segment by Chrome.
[StructLayout(LayoutKind.Sequential)]
internal struct StatsFileHeader {
// Format version of the table.
public int version;
// Total size of the table in bytes.
public int size;
// Capacity: number of counter slots in the table.
public int max_counters;
// Capacity: number of thread slots (columns) in the table.
public int max_threads;
};
/// <summary>
/// Identifies one named row in the StatsTable and resolves its current
/// value through the owning table.
/// </summary>
class StatsTableEntry {
  private int id_;
  private string name_;
  private StatsTable owner_table_;

  public StatsTableEntry(int id, string name, StatsTable table) {
    this.id_ = id;
    this.name_ = name;
    this.owner_table_ = table;
  }

  /// <summary>
  /// The unique id for this entry.
  /// </summary>
  public int id { get { return this.id_; } }

  /// <summary>
  /// The name for this entry.
  /// </summary>
  public string name { get { return this.name_; } }

  /// <summary>
  /// The value of this entry now, optionally filtered to one process
  /// (filter_pid == 0 means all processes).
  /// </summary>
  public int GetValue(int filter_pid) {
    return this.owner_table_.GetValue(this.id_, filter_pid);
  }
}
// An interface for StatsCounters
// Implemented by StatsCounter, StatsTimer and StatsCounterRate so the
// viewer can handle all table entries uniformly by name.
interface IStatsCounter {
// The name of the counter
string name { get; }
}
// A plain counter: wraps a StatsTableEntry whose value is a count.
class StatsCounter : IStatsCounter {
  private StatsTableEntry entry_;

  public StatsCounter(StatsTableEntry entry) {
    this.entry_ = entry;
  }

  // Name is delegated to the wrapped table entry.
  public string name {
    get { return this.entry_.name; }
  }

  // Current count; filter_pid == 0 sums across all processes.
  public int GetValue(int filter_pid) {
    return this.entry_.GetValue(filter_pid);
  }
}
// A timer: wraps a StatsTableEntry whose value is a time measurement.
class StatsTimer : IStatsCounter {
  private StatsTableEntry entry_;

  public StatsTimer(StatsTableEntry entry) {
    this.entry_ = entry;
  }

  // Name is delegated to the wrapped table entry.
  public string name {
    get { return this.entry_.name; }
  }

  // Current value; filter_pid == 0 sums across all processes.
  public int GetValue(int filter_pid) {
    return this.entry_.GetValue(filter_pid);
  }
}
// A rate: a counter paired with a timer. Built by
// StatsTableCounters.UpgradeCounter when a counter and a timer share
// the same name.
class StatsCounterRate : IStatsCounter {
  private StatsCounter count_part_;
  private StatsTimer time_part_;

  public StatsCounterRate(StatsCounter counter, StatsTimer timer) {
    this.count_part_ = counter;
    this.time_part_ = timer;
  }

  // The rate takes its name from the counter half of the pair.
  public string name { get { return this.count_part_.name; } }

  // The count component.
  public int GetCount(int filter_pid) {
    return this.count_part_.GetValue(filter_pid);
  }

  // The time component.
  public int GetTime(int filter_pid) {
    return this.time_part_.GetValue(filter_pid);
  }
}
/// <summary>
/// This is a C# reader for the chrome stats_table.
/// Maps the shared memory segment and exposes its counters/processes.
/// </summary>
class StatsTable {
// Fixed-width name slot sizes, in characters (names are stored as
// fixed-size UTF-16 slots; see the * 2 byte math in the offsets below).
internal const int kMaxThreadNameLength = 32;
internal const int kMaxCounterNameLength = 32;
/// <summary>
/// Open a StatsTable
/// </summary>
public StatsTable() {
}
#region Public Properties
/// <summary>
/// Get access to the counters in the table.
/// </summary>
public StatsTableCounters Counters() {
return new StatsTableCounters(this);
}
/// <summary>
/// Get access to the processes in the table
/// </summary>
public ICollection Processes {
get {
return new StatsTableProcesses(this);
}
}
#endregion
#region Internal Properties
//
// The internal methods are accessible to the enumerators
// and helper classes below.
//
// NOTE: all the "offsets" below are absolute addresses (base address of
// the mapped view plus structure offsets), not offsets from zero; they
// are passed directly to Marshal.ReadInt32/WriteInt32 as pointers.
//
/// <summary>
/// Access to the table header
/// </summary>
internal StatsFileHeader Header {
get { return header_; }
}
/// <summary>
/// Get the offset of the ThreadName table
/// (immediately after the header in the mapped view).
/// </summary>
internal long ThreadNamesOffset {
get {
return memory_.ToInt64() + Marshal.SizeOf(typeof(StatsFileHeader));
}
}
/// <summary>
/// Get the offset of the PIDs table
/// </summary>
internal long PidsOffset {
get {
long offset = ThreadNamesOffset;
// Thread names table (max_threads fixed slots, 2 bytes per character)
offset += AlignedSize(header_.max_threads * kMaxThreadNameLength * 2);
// Thread TID table
offset += AlignedSize(header_.max_threads *
Marshal.SizeOf(typeof(int)));
return offset;
}
}
/// <summary>
/// Get the offset of the CounterName table
/// </summary>
internal long CounterNamesOffset {
get {
long offset = PidsOffset;
// Thread PID table
offset += AlignedSize(header_.max_threads *
Marshal.SizeOf(typeof(int)));
return offset;
}
}
/// <summary>
/// Get the offset of the Data table
/// (one row of max_threads ints per counter; see GetValue).
/// </summary>
internal long DataOffset {
get {
long offset = CounterNamesOffset;
// Counter names table
offset += AlignedSize(header_.max_counters *
kMaxCounterNameLength * 2);
return offset;
}
}
#endregion
#region Public Methods
/// <summary>
/// Opens the memory map and reads a copy of the table header.
/// </summary>
/// <returns>true on success; false if the mapping could not be
/// opened or the view could not be mapped (the handle is closed
/// before returning in the latter case).</returns>
/// <param name="name">The name of the file to open</param>
public bool Open(string name) {
map_handle_ =
Win32.OpenFileMapping((int)Win32.MapAccess.FILE_MAP_WRITE, false,
name);
if (map_handle_ == IntPtr.Zero)
return false;
memory_ =
Win32.MapViewOfFile(map_handle_, (int)Win32.MapAccess.FILE_MAP_WRITE,
0,0, 0);
if (memory_ == IntPtr.Zero) {
Win32.CloseHandle(map_handle_);
return false;
}
// Copy the header out of the view; later reads go through header_.
header_ = (StatsFileHeader)Marshal.PtrToStructure(memory_, header_.GetType());
return true;
}
/// <summary>
/// Close the mapped file.
/// </summary>
public void Close() {
Win32.UnmapViewOfFile(memory_);
Win32.CloseHandle(map_handle_);
}
/// <summary>
/// Zero out the stats file.
/// Walks the entire max_threads x max_counters data region and writes
/// zero into each int slot. Requires a successful Open() first.
/// </summary>
public void Zero() {
long offset = DataOffset;
for (int threads = 0; threads < header_.max_threads; threads++) {
for (int counters = 0; counters < header_.max_counters; counters++) {
// offset is an absolute address into the mapped view (see DataOffset).
Marshal.WriteInt32((IntPtr) offset, 0);
offset += Marshal.SizeOf(typeof(int));
}
}
}
/// <summary>
/// Get the value for a StatsCounterEntry now.
/// Sums the counter's row across all thread columns whose PID
/// matches the filter.
/// </summary>
/// <returns></returns>
/// <param name="filter_pid">If a specific PID is being queried, filter to this PID. 0 means use all data.</param>
/// <param name="id">The id of the CounterEntry to get the value for.</param>
public int GetValue(int id, int filter_pid) {
long pid_offset = PidsOffset;
// Each counter id owns one contiguous row of max_threads ints.
long data_offset = DataOffset;
data_offset += id * (Header.max_threads *
Marshal.SizeOf(typeof(int)));
int rv = 0;
for (int cols = 0; cols < Header.max_threads; cols++)
{
int pid = Marshal.ReadInt32((IntPtr)pid_offset);
if (filter_pid == 0 || filter_pid == pid)
{
rv += Marshal.ReadInt32((IntPtr)data_offset);
}
data_offset += Marshal.SizeOf(typeof(int));
pid_offset += Marshal.SizeOf(typeof(int));
}
return rv;
}
#endregion
#region Private Methods
/// <summary>
/// Align to 4-byte boundaries
/// (rounds size up to the next multiple of sizeof(int)).
/// </summary>
/// <param name="size"></param>
/// <returns></returns>
private long AlignedSize(long size) {
Debug.Assert(sizeof(int) == 4);
return size + (sizeof(int) - (size % sizeof(int))) % sizeof(int);
}
#endregion
#region Private Members
// Base address of the mapped view; IntPtr.Zero until Open() succeeds.
private IntPtr memory_;
// Win32 file-mapping handle owned by this object.
private IntPtr map_handle_;
// Copy of the header read once in Open().
private StatsFileHeader header_;
#endregion
}
/// <summary>
/// Enumerable list of Counters in the StatsTable
/// </summary>
class StatsTableCounters : ICollection {
/// <summary>
/// Create the list of counters
/// </summary>
/// <param name="table">The table to scan for counters.</param>
public StatsTableCounters(StatsTable table) {
table_ = table;
// -1 so the first FindCounters() pass starts scanning at index 0.
counter_hi_water_mark_ = -1;
counters_ = new List<IStatsCounter>();
FindCounters();
}
/// <summary>
/// Scans the table for new entries.
/// </summary>
public void Update() {
FindCounters();
}
#region IEnumerable Members
public IEnumerator GetEnumerator() {
return counters_.GetEnumerator();
}
#endregion
#region ICollection Members
// Not supported; this collection is enumerate-only.
public void CopyTo(Array array, int index) {
throw new Exception("The method or operation is not implemented.");
}
public int Count {
get {
return counters_.Count;
}
}
// Not supported.
public bool IsSynchronized {
get {
throw new Exception("The method or operation is not implemented.");
}
}
// Not supported.
public object SyncRoot {
get {
throw new Exception("The method or operation is not implemented.");
}
}
#endregion
#region Private Methods
/// <summary>
/// Create a counter based on an entry.
/// Names may carry a type prefix: "t:" creates a StatsTimer,
/// "c:" a StatsCounter; unprefixed names default to StatsCounter.
/// Returns null for an unrecognized type prefix.
/// </summary>
/// <param name="id"></param>
/// <param name="name"></param>
/// <returns></returns>
private IStatsCounter NameToCounter(int id, string name)
{
IStatsCounter rv = null;
// check if the name has a type encoded
if (name.Length > 2 && name[1] == ':')
{
StatsTableEntry entry = new StatsTableEntry(id, name.Substring(2), table_);
switch (name[0])
{
case 't':
rv = new StatsTimer(entry);
break;
case 'c':
rv = new StatsCounter(entry);
break;
}
}
else
{
StatsTableEntry entry = new StatsTableEntry(id, name, table_);
rv = new StatsCounter(entry);
}
return rv;
}
// If we have two StatsTableEntries with the same name,
// attempt to upgrade them to a higher level type.
// Example: A counter + a timer == a rate!
// The old entry is replaced by the combined rate in counters_.
private void UpgradeCounter(IStatsCounter old_counter, IStatsCounter counter)
{
if (old_counter is StatsCounter && counter is StatsTimer)
{
StatsCounterRate rate = new StatsCounterRate(old_counter as StatsCounter,
counter as StatsTimer);
counters_.Remove(old_counter);
counters_.Add(rate);
}
else if (old_counter is StatsTimer && counter is StatsCounter)
{
StatsCounterRate rate = new StatsCounterRate(counter as StatsCounter,
old_counter as StatsTimer);
counters_.Remove(old_counter);
counters_.Add(rate);
}
}
/// <summary>
/// Find the counters in the table and insert into the counters_
/// list. Resumes from one past counter_hi_water_mark_ so repeated
/// calls (via Update) only process entries not seen before, up to
/// the table's max_counters capacity.
/// </summary>
private void FindCounters()
{
Debug.Assert(table_.Header.max_counters > 0);
int index = counter_hi_water_mark_;
do
{
// Find an entry in the table.
index++;
// Counter names are fixed-width UTF-16 slots in the mapped view.
long offset = table_.CounterNamesOffset +
(index * StatsTable.kMaxCounterNameLength * 2);
string name = Marshal.PtrToStringUni((IntPtr)offset);
// Empty slot: nothing registered at this index (yet).
if (name.Length == 0)
continue;
// Record that we've already looked at this StatsTableEntry.
counter_hi_water_mark_ = index;
IStatsCounter counter = NameToCounter(index, name);
if (counter != null)
{
IStatsCounter old_counter = FindExistingCounter(counter.name);
if (old_counter != null)
UpgradeCounter(old_counter, counter);
else
counters_.Add(counter);
}
} while (index < table_.Header.max_counters - 1);
}
/// <summary>
/// Find an existing counter in our table by name; null if absent.
/// </summary>
/// <param name="name"></param>
private IStatsCounter FindExistingCounter(string name) {
foreach (IStatsCounter ctr in counters_)
{
if (ctr.name == name)
return ctr;
}
return null;
}
#endregion
#region Private Members
private StatsTable table_;
private List<IStatsCounter> counters_;
// Highest index of counters processed.
private int counter_hi_water_mark_;
#endregion
}
/// <summary>
/// A collection of processes (unique PIDs) found in a StatsTable.
/// </summary>
class StatsTableProcesses : ICollection
{
/// <summary>
/// Constructor
/// </summary>
/// <param name="table">The table whose thread slots are scanned.</param>
public StatsTableProcesses(StatsTable table) {
table_ = table;
pids_ = new List<int>();
Initialize();
}
#region ICollection Members
// Not supported; this collection is enumerate-only.
public void CopyTo(Array array, int index) {
throw new Exception("The method or operation is not implemented.");
}
public int Count {
get {
return pids_.Count;
}
}
// Not supported.
public bool IsSynchronized {
get {
throw new Exception("The method or operation is not implemented.");
}
}
// Not supported.
public object SyncRoot {
get {
throw new Exception("The method or operation is not implemented.");
}
}
#endregion
#region IEnumerable Members
public IEnumerator GetEnumerator() {
return pids_.GetEnumerator();
}
#endregion
/// <summary>
/// Initialize the pid list: walk the fixed-width thread-name slots,
/// and for every occupied slot read the matching PID, de-duplicating
/// as we go.
/// </summary>
private void Initialize() {
long offset = table_.ThreadNamesOffset;
for (int index = 0; index < table_.Header.max_threads; index++) {
string thread_name = Marshal.PtrToStringUni((IntPtr)offset);
if (thread_name.Length > 0) {
// The PID table is parallel to the thread-name table.
long pidOffset = table_.PidsOffset + index *
Marshal.SizeOf(typeof(int));
int pid = Marshal.ReadInt32((IntPtr)pidOffset);
if (!pids_.Contains(pid))
pids_.Add(pid);
}
// Advance one fixed-width UTF-16 name slot.
offset += StatsTable.kMaxThreadNameLength * 2;
}
}
#region Private Members
private StatsTable table_;
private List<int> pids_;
#endregion
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
[AddComponentMenu("2D Toolkit/Camera/tk2dCamera")]
[ExecuteInEditMode]
/// <summary>
/// Maintains a screen resolution camera.
/// Whole number increments seen through this camera represent one pixel.
/// For example, setting an object to 300, 300 will position it at exactly that pixel position.
/// </summary>
public class tk2dCamera : MonoBehaviour
{
// Serialized data version; CURRENT_VERSION is the latest. Presumably
// consumed by Upgrade() (called from Awake) — confirm.
static int CURRENT_VERSION = 1;
public int version = 0;
// Backing store for the CameraSettings property below.
[SerializeField] private tk2dCameraSettings cameraSettings = new tk2dCameraSettings();
/// <summary>
/// The unity camera settings.
/// Use this instead of camera.XXX to change parameters.
/// </summary>
public tk2dCameraSettings CameraSettings {
get {
return cameraSettings;
}
}
/// <summary>
/// Resolution overrides, if necessary. See <see cref="tk2dCameraResolutionOverride"/>
/// Defaults to a single entry containing the default override.
/// </summary>
public tk2dCameraResolutionOverride[] resolutionOverride = new tk2dCameraResolutionOverride[1] {
tk2dCameraResolutionOverride.DefaultOverride
};
/// <summary>
/// The resolution override matching the current screen (or, in the
/// editor, the detected/forced game window) resolution, taken from the
/// settings root; null when no override matches.
/// </summary>
public tk2dCameraResolutionOverride CurrentResolutionOverride {
    get {
        tk2dCamera settings = SettingsRoot;
        Camera cam = ScreenCamera;
        float pixelWidth = cam.pixelWidth;
        float pixelHeight = cam.pixelHeight;
#if UNITY_EDITOR
        // Editor: prefer the live game-window size; fall back to the
        // user-forced resolution when detection is unavailable.
        if (settings.useGameWindowResolutionInEditor) {
            pixelWidth = settings.gameWindowResolution.x;
            pixelHeight = settings.gameWindowResolution.y;
        }
        else if (settings.forceResolutionInEditor)
        {
            pixelWidth = settings.forceResolution.x;
            pixelHeight = settings.forceResolution.y;
        }
#endif
        // The original guarded this scan with a staleness check on a
        // local that had just been set to null, which was vacuously
        // true — the dead condition has been removed.
        tk2dCameraResolutionOverride currentResolutionOverride = null;
        // Find an override matching the current resolution, if any.
        if (settings.resolutionOverride != null)
        {
            foreach (var ovr in settings.resolutionOverride)
            {
                if (ovr.Match((int)pixelWidth, (int)pixelHeight))
                {
                    currentResolutionOverride = ovr;
                    break;
                }
            }
        }
        return currentResolutionOverride;
    }
}
/// <summary>
/// A tk2dCamera to inherit configuration from.
/// All resolution and override settings will be pulled from the root
/// inherited camera, so a prefab / master camera can drive multiple
/// tk2dCamera instances with identical parameters.
/// </summary>
public tk2dCamera InheritConfig {
    get { return inheritSettings; }
    set {
        // No change: keep the cached settings root.
        if (inheritSettings == value) {
            return;
        }
        inheritSettings = value;
        // Force SettingsRoot to re-resolve on next access.
        _settingsRoot = null;
    }
}
// Serialized backing field for InheritConfig.
[SerializeField]
private tk2dCamera inheritSettings = null;
/// <summary>
/// Native resolution width of the camera. Override this in the inspector.
/// Don't change this at runtime unless you understand the implications.
/// </summary>
public int nativeResolutionWidth = 960;
/// <summary>
/// Native resolution height of the camera. Override this in the inspector.
/// Don't change this at runtime unless you understand the implications.
/// </summary>
public int nativeResolutionHeight = 640;
// Cached Camera component; resolved lazily by the UnityCamera property.
[SerializeField]
private Camera _unityCamera;
// Lazily caches and returns the Camera component on this GameObject.
// Logs an error when none is attached (the component requires one).
// NOTE(review): the "== null" checks presumably rely on Unity's
// overloaded equality for destroyed objects — confirm before
// refactoring to reference comparisons.
private Camera UnityCamera {
get {
if (_unityCamera == null) {
_unityCamera = GetComponent<Camera>();
if (_unityCamera == null) {
Debug.LogError("A unity camera must be attached to the tk2dCamera script");
}
}
return _unityCamera;
}
}
// Most recently enabled non-clipping tk2dCamera (assigned in OnEnable).
static tk2dCamera inst;
/// <summary>
/// Global instance, used by sprite and textmesh class to quickly find the tk2dCamera instance.
/// </summary>
public static tk2dCamera Instance {
get {
return inst;
}
}
// Global instance of active tk2dCameras, used to quickly find cameras matching a particular layer.
private static List<tk2dCamera> allCameras = new List<tk2dCamera>();
/// <summary>
/// Returns the first camera in the list that can "see" this layer, or
/// null if none can be found.
/// </summary>
public static tk2dCamera CameraForLayer( int layer ) {
    int layerMask = 1 << layer;
    // Scan the registered cameras in registration order.
    foreach (tk2dCamera candidate in allCameras) {
        if ((candidate.UnityCamera.cullingMask & layerMask) == layerMask) {
            return candidate;
        }
    }
    return null;
}
/// <summary>
/// Returns screen extents - top, bottom, left and right will be the extent of the physical screen
/// Regardless of resolution or override
/// </summary>
public Rect ScreenExtents { get { return _screenExtents; } }
/// <summary>
/// Returns screen extents - top, bottom, left and right will be the extent of the native screen
/// before it gets scaled and processed by overrides
/// </summary>
public Rect NativeScreenExtents { get { return _nativeScreenExtents; } }
/// <summary>
/// Enable/disable viewport clipping.
/// ScreenCamera must be valid for it to be actually enabled when rendering.
/// </summary>
public bool viewportClippingEnabled = false;
/// <summary>
/// Viewport clipping region (x, y, width, height); interpreted as
/// native pixels by the projection-matrix code.
/// </summary>
public Vector4 viewportRegion = new Vector4(0, 0, 100, 100);
/// <summary>
/// Target resolution
/// The target resolution currently being used.
/// If displaying on a 960x640 display, this will be the number returned here, regardless of scale, etc.
/// If the editor resolution is forced, the returned value will be the forced resolution.
/// </summary>
public Vector2 TargetResolution { get { return _targetResolution; } }
Vector2 _targetResolution = Vector2.zero;
/// <summary>
/// Native resolution
/// The native resolution of this camera.
/// This is the native resolution of the camera before any scaling is performed.
/// The resolution the game is set up to run at initially.
/// </summary>
public Vector2 NativeResolution { get { return new Vector2(nativeResolutionWidth, nativeResolutionHeight); } }
// Some obsolete functions, use ScreenExtents instead
[System.Obsolete] public Vector2 ScreenOffset { get { return new Vector2(ScreenExtents.xMin - NativeScreenExtents.xMin, ScreenExtents.yMin - NativeScreenExtents.yMin); } }
[System.Obsolete] public Vector2 resolution { get { return new Vector2( ScreenExtents.xMax, ScreenExtents.yMax ); } }
[System.Obsolete] public Vector2 ScreenResolution { get { return new Vector2( ScreenExtents.xMax, ScreenExtents.yMax ); } }
[System.Obsolete] public Vector2 ScaledResolution { get { return new Vector2( ScreenExtents.width, ScreenExtents.height ); } }
/// <summary>
/// Zooms the current display
/// A zoom factor of 2 will zoom in 2x, i.e. the object on screen will be twice as large
/// Anchors will still be anchored, but will be scaled with the zoomScale.
/// It is recommended to use a second camera for HUDs if necessary to avoid this behaviour.
/// </summary>
public float ZoomFactor {
get { return zoomFactor; }
// Clamped to a small positive value: the projection code divides by
// ZoomFactor, so zero must never be stored.
set { zoomFactor = Mathf.Max(0.01f, value); }
}
/// <summary>
/// Obsolete - use <see cref="ZoomFactor"/> instead.
/// Stored internally as its reciprocal (zoomFactor); both accessors
/// clamp to avoid division by zero.
/// </summary>
[System.Obsolete]
public float zoomScale {
get { return 1.0f / Mathf.Max(0.001f, zoomFactor); }
set { ZoomFactor = 1.0f / Mathf.Max(0.001f, value); }
}
// Serialized backing field for ZoomFactor.
[SerializeField] float zoomFactor = 1.0f;
[HideInInspector]
/// <summary>
/// Forces the resolution in the editor. This option is only used when tk2dCamera can't detect the game window resolution.
/// </summary>
public bool forceResolutionInEditor = false;
[HideInInspector]
/// <summary>
/// The resolution to force the game window to when <see cref="forceResolutionInEditor"/> is enabled.
/// </summary>
public Vector2 forceResolution = new Vector2(960, 640);
#if UNITY_EDITOR
// When true, overrides the "forceResolutionInEditor" flag above
bool useGameWindowResolutionInEditor = false;
// Used when useGameWindowResolutionInEditor == true
Vector2 gameWindowResolution = new Vector2(960, 640);
#endif
/// <summary>
/// The camera that sees the screen - i.e. if viewport clipping is enabled, its the camera that sees the entire screen
/// </summary>
public Camera ScreenCamera {
get {
// Clipping only applies when an inherit camera exists and that
// camera's viewport covers the full unit rect; otherwise fall back
// to this camera.
bool viewportClippingEnabled = this.viewportClippingEnabled && this.inheritSettings != null && this.inheritSettings.UnityCamera.rect == unitRect;
return viewportClippingEnabled ? this.inheritSettings.UnityCamera : UnityCamera;
}
}
// Unity lifecycle: runs component upgrade, registers this camera in the
// global list, and applies the root's transparency sort mode for
// perspective projections.
void Awake () {
    Upgrade();
    // Register in the global camera list, guarding against duplicates.
    if (!allCameras.Contains(this)) {
        allCameras.Add(this);
    }
    // Perspective cameras take their transparency sort mode from the
    // settings root.
    tk2dCamera rootCamera = SettingsRoot;
    tk2dCameraSettings rootSettings = rootCamera.CameraSettings;
    if (rootSettings.projection == tk2dCameraSettings.ProjectionType.Perspective) {
        UnityCamera.transparencySortMode = rootSettings.transparencySortMode;
    }
}
// Unity lifecycle: refreshes the projection, claims the global Instance
// slot (non-clipping cameras only) and registers in the camera list.
void OnEnable() {
if (UnityCamera != null) {
UpdateCameraMatrix();
}
else {
// NOTE(review): when UnityCamera is null, GetComponent<Camera>()
// here will likely also be null — verify this disable path.
this.GetComponent<Camera>().enabled = false;
}
// Only a full-screen (non-clipping) camera becomes the global Instance.
if (!viewportClippingEnabled) // the main camera can't display rect
inst = this;
if (allCameras.IndexOf(this) == -1) {
allCameras.Add(this);
}
}
// Unity lifecycle: deregister from the global camera list.
void OnDestroy() {
    // List.Remove is a no-op when this camera was never registered,
    // matching the original IndexOf/RemoveAt pair.
    allCameras.Remove(this);
}
// Unity lifecycle: called just before this camera culls the scene.
void OnPreCull() {
// Commit all pending changes - this more or less guarantees
// everything is committed before drawing this camera.
tk2dUpdateManager.FlushQueues();
UpdateCameraMatrix();
}
#if UNITY_EDITOR
// Edit-mode refresh: in play mode the matrix updates in OnPreCull, so
// this only runs when the editor is not playing.
void LateUpdate() {
if (!Application.isPlaying) {
UpdateCameraMatrix();
}
}
#endif
// Cached extents published via ScreenExtents / NativeScreenExtents.
Rect _screenExtents;
Rect _nativeScreenExtents;
// Full-viewport rect, used to detect an unclipped inherit camera.
Rect unitRect = new Rect(0, 0, 1, 1);
// Gives you the size of one pixel in world units at the native resolution
// For perspective cameras, it is dependent on the distance to the camera.
public float GetSizeAtDistance(float distance) {
tk2dCameraSettings cameraSettings = SettingsRoot.CameraSettings;
switch (cameraSettings.projection) {
case tk2dCameraSettings.ProjectionType.Orthographic:
if (cameraSettings.orthographicType == tk2dCameraSettings.OrthographicType.PixelsPerMeter) {
return 1.0f / cameraSettings.orthographicPixelsPerMeter;
}
else {
return 2.0f * cameraSettings.orthographicSize / SettingsRoot.nativeResolutionHeight;
}
case tk2dCameraSettings.ProjectionType.Perspective:
// NOTE(review): this reads this camera's CameraSettings.fieldOfView
// while the rest of the method uses SettingsRoot's settings —
// confirm whether the root's field of view was intended here.
return Mathf.Tan(CameraSettings.fieldOfView * Mathf.Deg2Rad * 0.5f) * distance * 2.0f / SettingsRoot.nativeResolutionHeight;
}
// Fallback for any projection value not handled above.
return 1;
}
// This returns the tk2dCamera object which has the settings stored on it
// Trace back to the source, however far up the hierarchy that may be
// You can't change this at runtime
tk2dCamera _settingsRoot;
public tk2dCamera SettingsRoot {
get {
// Cached; cleared when InheritConfig changes. Recurses up the
// inherit chain, stopping at a camera with no (or self) inherit.
if (_settingsRoot == null) {
_settingsRoot = (inheritSettings == null || inheritSettings == this) ? this : inheritSettings.SettingsRoot;
}
return _settingsRoot;
}
}
#if UNITY_EDITOR
// Editor-only accessor: falls back to searching the scene when no
// instance has registered itself yet (e.g. outside play mode).
public static tk2dCamera Editor__Inst {
get {
if (inst != null) {
return inst;
}
return GameObject.FindObjectOfType(typeof(tk2dCamera)) as tk2dCamera;
}
}
#endif
#if UNITY_EDITOR
// Set once the reflection failure has been logged, to avoid log spam.
static bool Editor__getGameViewSizeError = false;
// True when the last Editor__GetGameViewSize call hit a reflection error.
public static bool Editor__gameViewReflectionError = false;
// Try and get game view size
// Will return true if it is able to work this out
// If width / height == 0, it means the user has selected an aspect ratio "Resolution"
// Implemented via reflection into UnityEditor.GameView internals, so it
// is Unity-version dependent; two code paths cover different versions.
public static bool Editor__GetGameViewSize(out float width, out float height, out float aspect) {
try {
Editor__gameViewReflectionError = false;
System.Type gameViewType = System.Type.GetType("UnityEditor.GameView,UnityEditor");
System.Reflection.MethodInfo GetMainGameView = gameViewType.GetMethod("GetMainGameView", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic);
object mainGameViewInst = GetMainGameView.Invoke(null, null);
if (mainGameViewInst == null) {
width = height = aspect = 0;
return false;
}
System.Reflection.FieldInfo s_viewModeResolutions = gameViewType.GetField("s_viewModeResolutions", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic);
if (s_viewModeResolutions == null) {
// Newer path: read currentGameViewSize off the GameView instance.
System.Reflection.PropertyInfo currentGameViewSize = gameViewType.GetProperty("currentGameViewSize", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic);
object gameViewSize = currentGameViewSize.GetValue(mainGameViewInst, null);
System.Type gameViewSizeType = gameViewSize.GetType();
int gvWidth = (int)gameViewSizeType.GetProperty("width").GetValue(gameViewSize, null);
int gvHeight = (int)gameViewSizeType.GetProperty("height").GetValue(gameViewSize, null);
int gvSizeType = (int)gameViewSizeType.GetProperty("sizeType").GetValue(gameViewSize, null);
if (gvWidth == 0 || gvHeight == 0) {
width = height = aspect = 0;
return false;
}
else if (gvSizeType == 0) {
// Aspect-ratio mode: only the aspect is meaningful.
width = height = 0;
aspect = (float)gvWidth / (float)gvHeight;
return true;
}
else {
width = gvWidth; height = gvHeight;
aspect = (float)gvWidth / (float)gvHeight;
return true;
}
}
else {
// Older path: static view-mode tables indexed by m_AspectRatio.
Vector2[] viewModeResolutions = (Vector2[])s_viewModeResolutions.GetValue(null);
float[] viewModeAspects = (float[])gameViewType.GetField("s_viewModeAspects", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic).GetValue(null);
string[] viewModeStrings = (string[])gameViewType.GetField("s_viewModeAspectStrings", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic).GetValue(null);
if (mainGameViewInst != null
&& viewModeStrings != null
&& viewModeResolutions != null && viewModeAspects != null) {
int aspectRatio = (int)gameViewType.GetField("m_AspectRatio", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic).GetValue(mainGameViewInst);
string thisViewModeString = viewModeStrings[aspectRatio];
if (thisViewModeString.Contains("Standalone")) {
width = UnityEditor.PlayerSettings.defaultScreenWidth; height = UnityEditor.PlayerSettings.defaultScreenHeight;
aspect = width / height;
}
else if (thisViewModeString.Contains("Web")) {
width = UnityEditor.PlayerSettings.defaultWebScreenWidth; height = UnityEditor.PlayerSettings.defaultWebScreenHeight;
aspect = width / height;
}
else {
width = viewModeResolutions[ aspectRatio ].x; height = viewModeResolutions[ aspectRatio ].y;
aspect = viewModeAspects[ aspectRatio ];
// this is an error state
if (width == 0 && height == 0 && aspect == 0) {
return false;
}
}
return true;
}
}
}
catch (System.Exception e) {
// Log the breakage only once per session, then fall through to the
// failure return below.
if (Editor__getGameViewSizeError == false) {
Debug.LogError("tk2dCamera.GetGameViewSize - has a Unity update broken this?\nThis is not a fatal error, but a warning that you've probably not got the latest 2D Toolkit update.\n\n" + e.ToString());
Editor__getGameViewSizeError = true;
}
Editor__gameViewReflectionError = true;
}
width = height = aspect = 0;
return false;
}
#endif
// Builds an off-center orthographic projection matrix whose x/y terms
// are additionally multiplied by the supplied scale.
// (The historical "half texel offset" note referred to an adjustment
// that is not applied in this implementation.)
public Matrix4x4 OrthoOffCenter(Vector2 scale, float left, float right, float bottom, float top, float near, float far) {
    float x = (2.0f) / (right - left) * scale.x;
    float y = (2.0f) / (top - bottom) * scale.y;
    float z = -2.0f / (far - near);
    float a = -(right + left) / (right - left);
    float b = -(bottom + top) / (top - bottom);
    float c = -(far + near) / (far - near);
    // Assemble row by row; translation terms live in the last column.
    Matrix4x4 m = new Matrix4x4();
    m.SetRow(0, new Vector4(x, 0, 0, a));
    m.SetRow(1, new Vector4(0, y, 0, b));
    m.SetRow(2, new Vector4(0, 0, z, c));
    m.SetRow(3, new Vector4(0, 0, 0, 1));
    return m;
}
// Computes the x/y scale an override applies at the given screen size.
// Returns identity scale when no override is active.
Vector2 GetScaleForOverride(tk2dCamera settings, tk2dCameraResolutionOverride currentOverride, float width, float height) {
    if (currentOverride == null) {
        return Vector2.one;
    }

    Vector2 scale = Vector2.one;
    float s = 1.0f;
    tk2dCameraResolutionOverride.AutoScaleMode mode = currentOverride.autoScaleMode;

    if (mode == tk2dCameraResolutionOverride.AutoScaleMode.PixelPerfect) {
        // One native pixel == one screen pixel.
        s = 1;
        scale.Set(s, s);
    }
    else if (mode == tk2dCameraResolutionOverride.AutoScaleMode.FitHeight) {
        s = height / settings.nativeResolutionHeight;
        scale.Set(s, s);
    }
    else if (mode == tk2dCameraResolutionOverride.AutoScaleMode.FitWidth) {
        s = width / settings.nativeResolutionWidth;
        scale.Set(s, s);
    }
    else if (mode == tk2dCameraResolutionOverride.AutoScaleMode.FitVisible ||
             mode == tk2dCameraResolutionOverride.AutoScaleMode.ClosestMultipleOfTwo) {
        // Fit along the constraining axis so the native area stays visible.
        float nativeAspect = (float)settings.nativeResolutionWidth / settings.nativeResolutionHeight;
        float currentAspect = width / height;
        if (currentAspect < nativeAspect) {
            s = width / settings.nativeResolutionWidth;
        }
        else {
            s = height / settings.nativeResolutionHeight;
        }
        if (mode == tk2dCameraResolutionOverride.AutoScaleMode.ClosestMultipleOfTwo) {
            if (s > 1.0f) {
                s = Mathf.Floor(s); // round number
            }
            else {
                s = Mathf.Pow(2, Mathf.Floor(Mathf.Log(s, 2))); // minimise only as power of two
            }
        }
        scale.Set(s, s);
    }
    else if (mode == tk2dCameraResolutionOverride.AutoScaleMode.StretchToFit) {
        // Non-uniform scale filling both axes exactly.
        scale.Set(width / settings.nativeResolutionWidth, height / settings.nativeResolutionHeight);
    }
    else if (mode == tk2dCameraResolutionOverride.AutoScaleMode.Fill) {
        s = Mathf.Max(width / settings.nativeResolutionWidth, height / settings.nativeResolutionHeight);
        scale.Set(s, s);
    }
    else {
        // AutoScaleMode.None and any unrecognized mode: explicit scale.
        s = currentOverride.scale;
        scale.Set(s, s);
    }
    return scale;
}
// Computes the pixel offset an override applies at the given screen
// size and scale. Returns zero when no override is active.
Vector2 GetOffsetForOverride(tk2dCamera settings, tk2dCameraResolutionOverride currentOverride, Vector2 scale, float width, float height) {
    if (currentOverride == null) {
        return Vector2.zero;
    }

    Vector2 offset = Vector2.zero;
    if (currentOverride.fitMode == tk2dCameraResolutionOverride.FitMode.Center) {
        // Centering is only needed with a bottom-left orthographic
        // origin; a centered origin is already symmetric.
        if (settings.cameraSettings.orthographicOrigin == tk2dCameraSettings.OrthographicOrigin.BottomLeft) {
            offset = new Vector2(Mathf.Round((settings.nativeResolutionWidth * scale.x - width ) / 2.0f),
                                 Mathf.Round((settings.nativeResolutionHeight * scale.y - height) / 2.0f));
        }
    }
    else {
        // FitMode.Constant and any unrecognized mode: explicit offset.
        offset = -currentOverride.offsetPixels;
    }
    return offset;
}
#if UNITY_EDITOR
// Editor-only: perspective projection built from the native aspect
// ratio and this camera's clip planes.
private Matrix4x4 Editor__GetPerspectiveMatrix() {
float aspect = (float)nativeResolutionWidth / (float)nativeResolutionHeight;
return Matrix4x4.Perspective(SettingsRoot.CameraSettings.fieldOfView, aspect, UnityCamera.nearClipPlane, UnityCamera.farClipPlane);
}
// Editor-only: projection at native resolution, ignoring any override.
public Matrix4x4 Editor__GetNativeProjectionMatrix( ) {
tk2dCamera settings = SettingsRoot;
if (settings.CameraSettings.projection == tk2dCameraSettings.ProjectionType.Perspective) {
return Editor__GetPerspectiveMatrix();
}
// The extents out-parameters are discarded here.
Rect rect1 = new Rect(0, 0, 1, 1);
Rect rect2 = new Rect(0, 0, 1, 1);
return GetProjectionMatrixForOverride( settings, null, nativeResolutionWidth, nativeResolutionHeight, false, out rect1, out rect2 );
}
// Editor-only: projection at the current screen resolution with the
// matching resolution override applied.
public Matrix4x4 Editor__GetFinalProjectionMatrix( ) {
tk2dCamera settings = SettingsRoot;
if (settings.CameraSettings.projection == tk2dCameraSettings.ProjectionType.Perspective) {
return Editor__GetPerspectiveMatrix();
}
Vector2 resolution = GetScreenPixelDimensions(settings);
Rect rect1 = new Rect(0, 0, 1, 1);
Rect rect2 = new Rect(0, 0, 1, 1);
return GetProjectionMatrixForOverride( settings, settings.CurrentResolutionOverride, resolution.x, resolution.y, false, out rect1, out rect2 );
}
#endif
// Builds the orthographic projection matrix for the supplied settings and resolution
// override, and writes back the visible world-space extents:
//   screenExtents         - extents after the override's scale/offset are applied
//   unscaledScreenExtents - extents of the native resolution, unscaled
// Handles the legacy viewport-clipping path, centered origin, zoom, and the
// half-texel offset needed on Windows/D3D targets.
Matrix4x4 GetProjectionMatrixForOverride( tk2dCamera settings, tk2dCameraResolutionOverride currentOverride, float pixelWidth, float pixelHeight, bool halfTexelOffset, out Rect screenExtents, out Rect unscaledScreenExtents ) {
    Vector2 scale = GetScaleForOverride( settings, currentOverride, pixelWidth, pixelHeight );
    Vector2 offset = GetOffsetForOverride( settings, currentOverride, scale, pixelWidth, pixelHeight);

    // Frustum edges in offset pixel units, before any clipping fixups below.
    float left = offset.x, bottom = offset.y;
    float right = pixelWidth + offset.x, top = pixelHeight + offset.y;

    Vector2 nativeResolutionOffset = Vector2.zero;
    bool usingLegacyViewportClipping = false;

    // Correct for viewport clipping rendering
    // Coordinates in subrect are "native" pixels, but origin is from the extrema of screen
    if (this.viewportClippingEnabled && this.InheritConfig != null) {
        // Viewport size expressed in native pixels.
        float vw = (right - left) / scale.x;
        float vh = (top - bottom) / scale.y;
        // viewportRegion is truncated to whole native pixels (x, y, width, height).
        Vector4 sr = new Vector4((int)this.viewportRegion.x, (int)this.viewportRegion.y,
            (int)this.viewportRegion.z, (int)this.viewportRegion.w);

        usingLegacyViewportClipping = true;

        // Normalized viewport rect for the Unity camera.
        float viewportLeft = -offset.x / pixelWidth + sr.x / vw;
        float viewportBottom = -offset.y / pixelHeight + sr.y / vh;
        float viewportWidth = sr.z / vw;
        float viewportHeight = sr.w / vh;

        // Centered origin shifts the viewport by half the letterbox margin.
        if (settings.cameraSettings.orthographicOrigin == tk2dCameraSettings.OrthographicOrigin.Center) {
            viewportLeft += (pixelWidth - settings.nativeResolutionWidth * scale.x) / pixelWidth / 2.0f;
            viewportBottom += (pixelHeight - settings.nativeResolutionHeight * scale.y) / pixelHeight / 2.0f;
        }

        Rect r = new Rect( viewportLeft, viewportBottom, viewportWidth, viewportHeight );
        // Only touch Camera.rect when it actually changed (avoids redundant driver work).
        if (UnityCamera.rect.x != viewportLeft ||
            UnityCamera.rect.y != viewportBottom ||
            UnityCamera.rect.width != viewportWidth ||
            UnityCamera.rect.height != viewportHeight) {
            UnityCamera.rect = r;
        }

        // Clamp the usable area to what remains on screen.
        float maxWidth = Mathf.Min( 1.0f - r.x, r.width );
        float maxHeight = Mathf.Min( 1.0f - r.y, r.height );

        // Translate the frustum so the clipped region maps to the viewport.
        float rectOffsetX = sr.x * scale.x - offset.x;
        float rectOffsetY = sr.y * scale.y - offset.y;

        if (settings.cameraSettings.orthographicOrigin == tk2dCameraSettings.OrthographicOrigin.Center) {
            rectOffsetX -= settings.nativeResolutionWidth * 0.5f * scale.x;
            rectOffsetY -= settings.nativeResolutionHeight * 0.5f * scale.y;
        }

        // Regions hanging off the left/bottom edge get shifted back on screen.
        if (r.x < 0.0f) {
            rectOffsetX += -r.x * pixelWidth;
            maxWidth = (r.x + r.width);
        }
        if (r.y < 0.0f) {
            rectOffsetY += -r.y * pixelHeight;
            maxHeight = (r.y + r.height);
        }

        left += rectOffsetX;
        bottom += rectOffsetY;
        right = pixelWidth * maxWidth + offset.x + rectOffsetX;
        top = pixelHeight * maxHeight + offset.y + rectOffsetY;
    }
    else {
        // Non-clipped path: mirror the user-configured camera rect.
        if (UnityCamera.rect != CameraSettings.rect) {
            UnityCamera.rect = CameraSettings.rect;
        }

        // By default the camera is orthographic, bottom left, 1 pixel per meter
        if (settings.cameraSettings.orthographicOrigin == tk2dCameraSettings.OrthographicOrigin.Center) {
            float w = (right - left) * 0.5f;
            left -= w; right -= w;
            float h = (top - bottom) * 0.5f;
            top -= h; bottom -= h;
            nativeResolutionOffset.Set(-nativeResolutionWidth / 2.0f, -nativeResolutionHeight / 2.0f);
        }
    }

    float zoomScale = 1.0f / ZoomFactor;

    // Only need the half texel offset on PC/D3D
    bool needHalfTexelOffset = (Application.platform == RuntimePlatform.WindowsPlayer ||
        Application.platform == RuntimePlatform.WindowsWebPlayer ||
        Application.platform == RuntimePlatform.WindowsEditor);
    float halfTexel = (halfTexelOffset && needHalfTexelOffset) ? 0.5f : 0.0f;

    // World units per pixel, derived from the configured orthographic mode.
    float orthoSize = settings.cameraSettings.orthographicSize;
    switch (settings.cameraSettings.orthographicType) {
        case tk2dCameraSettings.OrthographicType.OrthographicSize:
            orthoSize = 2.0f * settings.cameraSettings.orthographicSize / settings.nativeResolutionHeight;
            break;
        case tk2dCameraSettings.OrthographicType.PixelsPerMeter:
            orthoSize = 1.0f / settings.cameraSettings.orthographicPixelsPerMeter;
            break;
    }

    // Fixup for clipping
    if (!usingLegacyViewportClipping) {
        float clipWidth = Mathf.Min(UnityCamera.rect.width, 1.0f - UnityCamera.rect.x);
        float clipHeight = Mathf.Min(UnityCamera.rect.height, 1.0f - UnityCamera.rect.y);
        if (clipWidth > 0 && clipHeight > 0) {
            scale.x /= clipWidth;
            scale.y /= clipHeight;
        }
    }

    // Convert the pixel-space frustum into world-space extents for callers.
    float s = orthoSize * zoomScale;
    screenExtents = new Rect(left * s / scale.x, bottom * s / scale.y,
        (right - left) * s / scale.x, (top - bottom) * s / scale.y);
    unscaledScreenExtents = new Rect(nativeResolutionOffset.x * s, nativeResolutionOffset.y * s,
        nativeResolutionWidth * s, nativeResolutionHeight * s);

    // Near and far clip planes are tweakable per camera, so we pull from current camera instance regardless of inherited values
    return OrthoOffCenter(scale, orthoSize * (left + halfTexel) * zoomScale, orthoSize * (right + halfTexel) * zoomScale,
        orthoSize * (bottom - halfTexel) * zoomScale, orthoSize * (top - halfTexel) * zoomScale,
        UnityCamera.nearClipPlane, UnityCamera.farClipPlane);
}
// Returns the pixel dimensions the camera should render at. At runtime this is
// simply the screen camera's pixel size; in the editor it can be overridden by
// the detected Game window size or by the "force resolution" setting.
Vector2 GetScreenPixelDimensions(tk2dCamera settings) {
    Vector2 dimensions = new Vector2(ScreenCamera.pixelWidth, ScreenCamera.pixelHeight);

#if UNITY_EDITOR
    // This bit here allocates memory, but only runs in the editor
    float gameViewPixelWidth = 0, gameViewPixelHeight = 0;
    float gameViewAspect = 0;

    settings.useGameWindowResolutionInEditor = false;
    if (Editor__GetGameViewSize( out gameViewPixelWidth, out gameViewPixelHeight, out gameViewAspect)) {
        if (gameViewPixelWidth != 0 && gameViewPixelHeight != 0) {
            // NOTE(review): useGameWindowResolutionInEditor was reset to false just
            // above, so the first clause of this condition is always true here —
            // confirm whether the unconditional reset is intended.
            if (!settings.useGameWindowResolutionInEditor ||
                settings.gameWindowResolution.x != gameViewPixelWidth ||
                settings.gameWindowResolution.y != gameViewPixelHeight) {
                settings.useGameWindowResolutionInEditor = true;
                settings.gameWindowResolution.x = gameViewPixelWidth;
                settings.gameWindowResolution.y = gameViewPixelHeight;
            }

            dimensions.x = settings.gameWindowResolution.x;
            dimensions.y = settings.gameWindowResolution.y;
        }
    }

    // Fall back to the user-forced resolution when the game window size was unavailable.
    if (!settings.useGameWindowResolutionInEditor && settings.forceResolutionInEditor)
    {
        dimensions.x = settings.forceResolution.x;
        dimensions.y = settings.forceResolution.y;
    }
#endif

    return dimensions;
}
// One-time data migration: brings serialized camera data from older versions up
// to CURRENT_VERSION. Runs the version-0 upgrade (defaults for the newer
// orthographic settings, override upgrades, and mirroring of the attached Unity
// Camera's configuration), then stamps the instance with the current version.
private void Upgrade() {
    if (version != CURRENT_VERSION) {
        if (version == 0) {
            // Backwards compatibility
            cameraSettings.orthographicPixelsPerMeter = 1;
            cameraSettings.orthographicType = tk2dCameraSettings.OrthographicType.PixelsPerMeter;
            cameraSettings.orthographicOrigin = tk2dCameraSettings.OrthographicOrigin.BottomLeft;
            cameraSettings.projection = tk2dCameraSettings.ProjectionType.Orthographic;

            foreach (tk2dCameraResolutionOverride ovr in resolutionOverride) {
                ovr.Upgrade( version );
            }

            // Mirror camera settings
            Camera unityCamera = GetComponent<Camera>();
            if (unityCamera != null) {
                cameraSettings.rect = unityCamera.rect;
                if (!unityCamera.orthographic) {
                    cameraSettings.projection = tk2dCameraSettings.ProjectionType.Perspective;
                    // Store the "unzoomed" field of view; runtime divides by ZoomFactor again.
                    cameraSettings.fieldOfView = unityCamera.fieldOfView * ZoomFactor;
                }
                // The Unity camera is managed by tk2dCamera from now on; hide it in the UI.
                unityCamera.hideFlags = HideFlags.HideInInspector | HideFlags.HideInHierarchy;
            }
        }

        Debug.Log("tk2dCamera '" + this.name + "' - Upgraded from version " + version.ToString());
        version = CURRENT_VERSION;
    }
}
/// <summary>
/// Updates the camera matrix to ensure 1:1 pixel mapping,
/// or however the resolution override is set up.
/// Perspective cameras get field-of-view scaling by ZoomFactor; orthographic
/// cameras get an explicit projection matrix from GetProjectionMatrixForOverride.
/// </summary>
public void UpdateCameraMatrix()
{
    Upgrade();

    // NOTE(review): presumably 'inst' tracks the active (non-clipped) camera
    // instance for global access — confirm against the declaration of 'inst'.
    if (!this.viewportClippingEnabled)
        inst = this;

    Camera unityCamera = UnityCamera;
    tk2dCamera settings = SettingsRoot;
    tk2dCameraSettings inheritedCameraSettings = settings.CameraSettings;

    if (unityCamera.rect != cameraSettings.rect) unityCamera.rect = cameraSettings.rect;

    // Projection type is inherited from base camera
    _targetResolution = GetScreenPixelDimensions(settings);

    if (inheritedCameraSettings.projection == tk2dCameraSettings.ProjectionType.Perspective) {
        if (unityCamera.orthographic == true) unityCamera.orthographic = false;
        // Zoom by narrowing/widening FOV; clamped to stay a valid perspective angle.
        float fov = Mathf.Min(179.9f, inheritedCameraSettings.fieldOfView / Mathf.Max(0.001f, ZoomFactor));
        if (unityCamera.fieldOfView != fov) unityCamera.fieldOfView = fov;
        // Perspective extents are reported as a normalized (-aspect..aspect, -1..1) rect.
        _screenExtents.Set( -unityCamera.aspect, -1, unityCamera.aspect * 2, 2 );
        _nativeScreenExtents = _screenExtents;
        unityCamera.ResetProjectionMatrix();
    }
    else {
        if (unityCamera.orthographic == false) unityCamera.orthographic = true;
        // Find an override if necessary
        Matrix4x4 m = GetProjectionMatrixForOverride( settings, settings.CurrentResolutionOverride, _targetResolution.x, _targetResolution.y, true, out _screenExtents, out _nativeScreenExtents );

#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_1)
        // Windows phone: pre-rotate the projection for landscape orientations.
        if (Application.platform == RuntimePlatform.WP8Player &&
            (Screen.orientation == ScreenOrientation.LandscapeLeft || Screen.orientation == ScreenOrientation.LandscapeRight)) {
            float angle = (Screen.orientation == ScreenOrientation.LandscapeRight) ? 90.0f : -90.0f;
            Matrix4x4 m2 = Matrix4x4.TRS(Vector3.zero, Quaternion.Euler(0, 0, angle), Vector3.one);
            m = m2 * m;
        }
#endif

        // Avoid resetting an identical matrix (Unity treats assignment as an override).
        if (unityCamera.projectionMatrix != m) {
            unityCamera.projectionMatrix = m;
        }
    }
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Management.Automation;
using System.Management.Automation.Internal;
using System.Management.Automation.Remoting;
using System.Management.Automation.Runspaces;
using System.Management.Automation.Security;
namespace Microsoft.PowerShell.Commands
{
/// <summary>
/// This cmdlet start invocation of jobs in background.
/// </summary>
[Cmdlet(VerbsLifecycle.Start, "Job", DefaultParameterSetName = StartJobCommand.ComputerNameParameterSet, HelpUri = "https://go.microsoft.com/fwlink/?LinkID=2096796")]
[OutputType(typeof(PSRemotingJob))]
public class StartJobCommand : PSExecutionCmdlet, IDisposable
{
#region Private members
private static readonly string s_startJobType = "BackgroundJob";
#endregion
#region Parameters
private const string DefinitionNameParameterSet = "DefinitionName";
/// <summary>
/// JobDefinition Name.
/// </summary>
[Parameter(Position = 0, Mandatory = true,
ParameterSetName = StartJobCommand.DefinitionNameParameterSet)]
[ValidateTrustedData]
[ValidateNotNullOrEmpty]
public string DefinitionName
{
get { return _definitionName; }
set { _definitionName = value; }
}
private string _definitionName;
/// <summary>
/// JobDefinition file path.
/// </summary>
[Parameter(Position = 1,
ParameterSetName = StartJobCommand.DefinitionNameParameterSet)]
[ValidateNotNullOrEmpty]
public string DefinitionPath
{
get { return _definitionPath; }
set { _definitionPath = value; }
}
private string _definitionPath;
/// <summary>
/// Job SourceAdapter type for this job definition.
/// </summary>
[Parameter(Position = 2,
ParameterSetName = StartJobCommand.DefinitionNameParameterSet)]
[ValidateNotNullOrEmpty]
[SuppressMessage("Microsoft.Naming", "CA1721:PropertyNamesShouldNotMatchGetMethods")]
public string Type
{
get { return _definitionType; }
set { _definitionType = value; }
}
private string _definitionType;
/// <summary>
/// Friendly name for this job object. Null or empty assignments are
/// silently ignored, so an unset value never clears an existing name.
/// </summary>
[Parameter(ValueFromPipelineByPropertyName = true,
           ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ValueFromPipelineByPropertyName = true,
           ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ValueFromPipelineByPropertyName = true,
           ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
public virtual string Name
{
    get => _name;

    set
    {
        if (string.IsNullOrEmpty(value))
        {
            return;
        }

        _name = value;
    }
}

private string _name;
/// <summary>
/// Command to execute specified as a string. This can be a single
/// cmdlet, an expression or anything that can be internally
/// converted into a ScriptBlock.
/// </summary>
/// <remarks>This is used in the in process case with a
/// "ValueFromPipelineProperty" enabled in order to maintain
/// compatibility with v1.0</remarks>
[Parameter(Position = 0,
Mandatory = true,
ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[ValidateTrustedData]
[Alias("Command")]
public override ScriptBlock ScriptBlock
{
get
{
return base.ScriptBlock;
}
set
{
base.ScriptBlock = value;
}
}
#region Suppress PSRemotingBaseCmdlet parameters
// suppress all the parameters from PSRemotingBaseCmdlet
// which should not be part of Start-PSJob
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override PSSession[] Session
{
get
{
return null;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override string[] ComputerName
{
get
{
return null;
}
}
/// <summary>
/// Not used for OutOfProc jobs. Suppressing this parameter.
/// </summary>
public override SwitchParameter EnableNetworkAccess
{
get { return false; }
}
/// <summary>
/// Suppress SSHTransport.
/// </summary>
public override SwitchParameter SSHTransport
{
get { return false; }
}
/// <summary>
/// Suppress SSHConnection.
/// </summary>
public override Hashtable[] SSHConnection
{
get { return null; }
}
/// <summary>
/// Suppress UserName.
/// </summary>
public override string UserName
{
get { return null; }
}
/// <summary>
/// Suppress KeyFilePath.
/// </summary>
public override string KeyFilePath
{
get { return null; }
}
/// <summary>
/// Suppress HostName.
/// </summary>
public override string[] HostName
{
get { return null; }
}
/// <summary>
/// Suppress Subsystem.
/// </summary>
public override string Subsystem
{
get { return null; }
}
#endregion
/// <summary>
/// Credential to use for this job.
/// </summary>
[Parameter(ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
[Credential()]
public override PSCredential Credential
{
get
{
return base.Credential;
}
set
{
base.Credential = value;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override int Port
{
get
{
return 0;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override SwitchParameter UseSSL
{
get
{
return false;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override string ConfigurationName
{
get
{
return base.ConfigurationName;
}
set
{
base.ConfigurationName = value;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override Int32 ThrottleLimit
{
get
{
return 0;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override string ApplicationName
{
get
{
return null;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override Uri[] ConnectionUri
{
get
{
return null;
}
}
/// <summary>
/// Filepath to execute as a script.
/// </summary>
[Parameter(
Position = 0,
Mandatory = true,
ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[ValidateTrustedData]
public override string FilePath
{
get
{
return base.FilePath;
}
set
{
base.FilePath = value;
}
}
/// <summary>
/// Literal Filepath to execute as a script. Shares the underlying FilePath
/// storage with the FilePath parameter; assigning through this property also
/// flags the path as literal so the base class treats it verbatim.
/// </summary>
[Parameter(
    Mandatory = true,
    ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
[ValidateTrustedData]
[Alias("PSPath", "LP")]
public string LiteralPath
{
    get
    {
        return base.FilePath;
    }

    set
    {
        base.FilePath = value;
        // Mark the path literal; the name implies wildcards are not resolved.
        base.IsLiteralPath = true;
    }
}
/// <summary>
/// Use basic authentication to authenticate the user.
/// </summary>
[Parameter(ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
public override AuthenticationMechanism Authentication
{
get
{
return base.Authentication;
}
set
{
base.Authentication = value;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override string CertificateThumbprint
{
get
{
return base.CertificateThumbprint;
}
set
{
base.CertificateThumbprint = value;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override SwitchParameter AllowRedirection
{
get
{
return false;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override Guid[] VMId
{
get
{
return null;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override string[] VMName
{
get
{
return null;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override string[] ContainerId
{
get
{
return null;
}
}
/// <summary>
/// Overriding to suppress this parameter.
/// </summary>
public override SwitchParameter RunAsAdministrator
{
get
{
return false;
}
}
/// <summary>
/// Extended Session Options for controlling the session creation. Use
/// "New-WSManSessionOption" cmdlet to supply value for this parameter.
/// </summary>
/// <remarks>
/// This is not declared as a Parameter for Start-PSJob as this is not
/// used for background jobs.
/// </remarks>
public override PSSessionOption SessionOption
{
get
{
return base.SessionOption;
}
set
{
base.SessionOption = value;
}
}
/// <summary>
/// Script that is used to initialize the background job.
/// </summary>
[Parameter(Position = 1,
ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(Position = 1,
ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(Position = 1,
ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
[ValidateTrustedData]
public virtual ScriptBlock InitializationScript
{
get { return _initScript; }
set { _initScript = value; }
}
private ScriptBlock _initScript;
/// <summary>
/// Gets or sets an initial working directory for the powershell background job.
/// </summary>
[Parameter]
[ValidateNotNullOrEmpty]
public string WorkingDirectory { get; set; }
/// <summary>
/// Launches the background job as a 32-bit process. This can be used on
/// 64-bit systems to launch a 32-bit wow process for the background job.
/// </summary>
[Parameter(ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
public virtual SwitchParameter RunAs32 { get; set; }
/// <summary>
/// Powershell Version to execute the background job.
/// Setting this validates the requested version before accepting it.
/// </summary>
[Parameter(ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
[ValidateNotNullOrEmpty]
public virtual Version PSVersion
{
    get
    {
        return _psVersion;
    }

    set
    {
        // Both checks run before assignment, so an invalid version never sticks.
        RemotingCommandUtil.CheckPSVersion(value);

        // Check if specified version of PowerShell is installed
        RemotingCommandUtil.CheckIfPowerShellVersionIsInstalled(value);
        _psVersion = value;
    }
}

private Version _psVersion;
/// <summary>
/// InputObject.
/// </summary>
[Parameter(ValueFromPipeline = true,
ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ValueFromPipeline = true,
ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ValueFromPipeline = true,
ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
[ValidateTrustedData]
public override PSObject InputObject
{
get { return base.InputObject; }
set { base.InputObject = value; }
}
/// <summary>
/// ArgumentList.
/// </summary>
[Parameter(ParameterSetName = StartJobCommand.FilePathComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.ComputerNameParameterSet)]
[Parameter(ParameterSetName = StartJobCommand.LiteralFilePathComputerNameParameterSet)]
[ValidateTrustedData]
[Alias("Args")]
[SuppressMessage("Microsoft.Performance", "CA1819:PropertiesShouldNotReturnArrays")]
public override object[] ArgumentList
{
get { return base.ArgumentList; }
set { base.ArgumentList = value; }
}
#endregion Parameters
#region Overrides
/// <summary>
/// 1. Set the throttling limit and reset operations complete
/// 2. Create helper objects
/// 3. For async case, write the async result object down the
/// pipeline.
/// Also validates the environment up front: pwsh must exist on disk,
/// RunAs32 is rejected on 64-bit processes, and the working directory
/// must exist (defaulting to the session's current location).
/// </summary>
protected override void BeginProcessing()
{
    if (!File.Exists(PowerShellProcessInstance.PwshExePath))
    {
        // The pwsh executable file is not found under $PSHOME.
        // This means that PowerShell is currently being hosted in another application,
        // and 'Start-Job' is not supported by design in that scenario.
        string message = StringUtil.Format(
            RemotingErrorIdStrings.IPCPwshExecutableNotFound,
            PowerShellProcessInstance.PwshExePath);

        var errorRecord = new ErrorRecord(
            new PSNotSupportedException(message),
            "IPCPwshExecutableNotFound",
            ErrorCategory.NotInstalled,
            PowerShellProcessInstance.PwshExePath);

        ThrowTerminatingError(errorRecord);
    }

    if (RunAs32.IsPresent && Environment.Is64BitProcess)
    {
        // We cannot start a 32-bit 'pwsh' process from a 64-bit 'pwsh' installation.
        string message = RemotingErrorIdStrings.RunAs32NotSupported;
        var errorRecord = new ErrorRecord(
            new PSNotSupportedException(message),
            "RunAs32NotSupported",
            ErrorCategory.InvalidOperation,
            targetObject: null);

        ThrowTerminatingError(errorRecord);
    }

    if (WorkingDirectory != null && !Directory.Exists(WorkingDirectory))
    {
        string message = StringUtil.Format(RemotingErrorIdStrings.StartJobWorkingDirectoryNotFound, WorkingDirectory);
        var errorRecord = new ErrorRecord(
            new DirectoryNotFoundException(message),
            "DirectoryNotFoundException",
            ErrorCategory.InvalidOperation,
            targetObject: null);

        ThrowTerminatingError(errorRecord);
    }

    if (WorkingDirectory == null)
    {
        // Default to the caller's current location; best-effort, since reading
        // the location can throw in some session states.
        try
        {
            WorkingDirectory = SessionState.Internal.CurrentLocation.Path;
        }
        catch (PSInvalidOperationException)
        {
        }
    }

    CommandDiscovery.AutoloadModulesWithJobSourceAdapters(this.Context, this.CommandOrigin);

    // Definition-based jobs are handled entirely in ProcessRecord.
    if (ParameterSetName == DefinitionNameParameterSet)
    {
        return;
    }

    // since jobs no more depend on WinRM
    // we will have to skip the check for the same
    SkipWinRMCheck = true;

    base.BeginProcessing();
}
/// <summary>
/// Create a throttle operation using NewProcessConnectionInfo
/// ie., Out-Of-Process runspace.
/// </summary>
protected override void CreateHelpersForSpecifiedComputerNames()
{
    // If we're in ConstrainedLanguage mode and the system is in lockdown mode,
    // ensure that they haven't specified a ScriptBlock or InitScript - as
    // we can't protect that boundary.
    // Fix: this previously tested '!= SystemEnforcementMode.Enforce', which
    // inverted the documented intent - it blocked jobs on systems NOT in
    // lockdown and skipped the check when lockdown was actually enforced.
    if ((Context.LanguageMode == PSLanguageMode.ConstrainedLanguage) &&
        (SystemPolicy.GetSystemLockdownPolicy() == SystemEnforcementMode.Enforce) &&
        ((ScriptBlock != null) || (InitializationScript != null)))
    {
        ThrowTerminatingError(
            new ErrorRecord(
                new PSNotSupportedException(RemotingErrorIdStrings.CannotStartJobInconsistentLanguageMode),
                "CannotStartJobInconsistentLanguageMode",
                ErrorCategory.PermissionDenied,
                Context.LanguageMode));
    }

    // Background jobs run in a child pwsh process: build an out-of-process
    // connection carrying the credential, init script, auth mechanism,
    // requested PS version and working directory.
    NewProcessConnectionInfo connectionInfo = new NewProcessConnectionInfo(this.Credential);
    connectionInfo.InitializationScript = _initScript;
    connectionInfo.AuthenticationMechanism = this.Authentication;
    connectionInfo.PSVersion = this.PSVersion;
    connectionInfo.WorkingDirectory = this.WorkingDirectory;

    // Create the runspace and forward its PS events to this cmdlet's handler.
    RemoteRunspace remoteRunspace = (RemoteRunspace)RunspaceFactory.CreateRunspace(connectionInfo, this.Host,
        Utils.GetTypeTableFromExecutionContextTLS());
    remoteRunspace.Events.ReceivedEvents.PSEventReceived += OnRunspacePSEventReceived;

    Pipeline pipeline = CreatePipeline(remoteRunspace);

    // One throttle operation per target; ProcessRecord later wraps these in a job.
    IThrottleOperation operation =
        new ExecutionCmdletHelperComputerName(remoteRunspace, pipeline);

    Operations.Add(operation);
}
/// <summary>
/// The expression will be executed in the remote computer if a
/// remote runspace parameter or computer name is specified. If
/// none other than command parameter is specified, then it
/// just executes the command locally without creating a new
/// remote runspace object.
/// For the DefinitionName parameter set, resolves the job definition
/// (optionally constrained to a single file-system path) and starts
/// exactly one matching Job2; ambiguity or no match is a non-terminating error.
/// </summary>
protected override void ProcessRecord()
{
    if (ParameterSetName == DefinitionNameParameterSet)
    {
        // Get the Job2 object from the Job Manager for this definition name and start the job.
        string resolvedPath = null;
        if (!string.IsNullOrEmpty(_definitionPath))
        {
            ProviderInfo provider = null;
            System.Collections.ObjectModel.Collection<string> paths =
                this.Context.SessionState.Path.GetResolvedProviderPathFromPSPath(_definitionPath, out provider);

            // Only file system paths are allowed.
            if (!provider.NameEquals(this.Context.ProviderNames.FileSystem))
            {
                string message = StringUtil.Format(RemotingErrorIdStrings.StartJobDefinitionPathInvalidNotFSProvider,
                    _definitionName,
                    _definitionPath,
                    provider.FullName);
                WriteError(new ErrorRecord(new RuntimeException(message), "StartJobFromDefinitionNamePathInvalidNotFileSystemProvider",
                    ErrorCategory.InvalidArgument, null));

                return;
            }

            // Only a single file path is allowed.
            if (paths.Count != 1)
            {
                string message = StringUtil.Format(RemotingErrorIdStrings.StartJobDefinitionPathInvalidNotSingle,
                    _definitionName,
                    _definitionPath);
                WriteError(new ErrorRecord(new RuntimeException(message), "StartJobFromDefinitionNamePathInvalidNotSingle",
                    ErrorCategory.InvalidArgument, null));

                return;
            }

            resolvedPath = paths[0];
        }

        List<Job2> jobs = JobManager.GetJobToStart(_definitionName, resolvedPath, _definitionType, this, false);

        if (jobs.Count == 0)
        {
            // Error message varies depending on whether a type filter was supplied.
            string message = (_definitionType != null) ?
                StringUtil.Format(RemotingErrorIdStrings.StartJobDefinitionNotFound2, _definitionType, _definitionName) :
                StringUtil.Format(RemotingErrorIdStrings.StartJobDefinitionNotFound1, _definitionName);

            WriteError(new ErrorRecord(new RuntimeException(message), "StartJobFromDefinitionNameNotFound",
                ErrorCategory.ObjectNotFound, null));

            return;
        }

        if (jobs.Count > 1)
        {
            string message = StringUtil.Format(RemotingErrorIdStrings.StartJobManyDefNameMatches, _definitionName);
            WriteError(new ErrorRecord(new RuntimeException(message), "StartJobFromDefinitionNameMoreThanOneMatch",
                ErrorCategory.InvalidResult, null));

            return;
        }

        // Start job.
        Job2 job = jobs[0];
        job.StartJob();

        // Write job object to host.
        WriteObject(job);

        return;
    }

    // Non-definition path: create the background job once, on the first record.
    if (_firstProcessRecord)
    {
        _firstProcessRecord = false;

        PSRemotingJob job = new PSRemotingJob(ResolvedComputerNames, Operations,
            ScriptBlock.ToString(), ThrottleLimit, _name);

        job.PSJobTypeName = s_startJobType;

        this.JobRepository.Add(job);
        WriteObject(job);
    }

    // inject input: forward each pipeline object to every operation's input stream.
    if (InputObject != AutomationNull.Value)
    {
        foreach (IThrottleOperation operation in Operations)
        {
            ExecutionCmdletHelper helper = (ExecutionCmdletHelper)operation;
            helper.Pipeline.Input.Write(InputObject);
        }
    }
}
private bool _firstProcessRecord = true;
/// <summary>
/// InvokeAsync would have been called in ProcessRecord. Wait here
/// for all the results to become available.
/// </summary>
protected override void EndProcessing()
{
    // close the input stream on all the pipelines
    // (signals the child pipelines that no further pipeline input will arrive)
    CloseAllInputStreams();
}
#endregion Overrides
#region IDisposable Overrides
/// <summary>
/// Dispose the cmdlet. Standard dispose pattern: delegates to
/// Dispose(true) and suppresses finalization.
/// </summary>
public void Dispose()
{
    Dispose(true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Internal dispose method which does the actual disposing.
/// The only managed cleanup needed is closing the pipelines' input streams.
/// </summary>
/// <param name="disposing">Whether called from dispose or finalize.</param>
private void Dispose(bool disposing)
{
    if (disposing)
    {
        CloseAllInputStreams();
    }
}
#endregion IDisposable Overrides
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Runtime.ExceptionServices;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Input;
using System.Windows.Threading;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Intellisense;
using Microsoft.PythonTools.Interpreter;
namespace Microsoft.PythonTools.EnvironmentsList {
// WPF code-behind for the pip package-management pane in the Python
// environments tool window. Hosts the install/upgrade/uninstall/install-pip
// routed commands and binds a PipEnvironmentView as the data context for the
// Subcontext element.
internal sealed partial class PipExtension : UserControl, ICanFocus {
    // Routed commands wired to the Executed/CanExecute handlers below (bindings live in XAML).
    public static readonly ICommand InstallPackage = new RoutedCommand();
    public static readonly ICommand UpgradePackage = new RoutedCommand();
    public static readonly ICommand UninstallPackage = new RoutedCommand();
    public static readonly ICommand InstallPip = new RoutedCommand();

    private readonly PipExtensionProvider _provider;

    public PipExtension(PipExtensionProvider provider) {
        _provider = provider;
        // Subscribe before InitializeComponent so the initial DataContext set is observed.
        DataContextChanged += PackageExtension_DataContextChanged;
        InitializeComponent();
    }

    // Moves keyboard focus to the search box once the control has loaded.
    // If the box is not yet visible, defers until it becomes visible.
    void ICanFocus.Focus() {
        Dispatcher.BeginInvoke((Action)(() => {
            try {
                Focus();
                if (SearchQueryText.IsVisible) {
                    Keyboard.Focus(SearchQueryText);
                } else {
                    SearchQueryText.IsVisibleChanged += SearchQueryText_IsVisibleChanged;
                }
            } catch (Exception ex) when (!ex.IsCriticalException()) {
                // Focus is best-effort; non-critical failures are ignored.
            }
        }), DispatcherPriority.Loaded);
    }

    // One-shot handler: focuses the search box the first time it becomes visible.
    private void SearchQueryText_IsVisibleChanged(object sender, DependencyPropertyChangedEventArgs e) {
        SearchQueryText.IsVisibleChanged -= SearchQueryText_IsVisibleChanged;
        Keyboard.Focus(SearchQueryText);
    }

    // Rebuilds the Subcontext's PipEnvironmentView when a new EnvironmentView
    // arrives, disposing the previous view to drop its event subscriptions.
    private void PackageExtension_DataContextChanged(object sender, DependencyPropertyChangedEventArgs e) {
        var view = e.NewValue as EnvironmentView;
        if (view != null) {
            var current = Subcontext.DataContext as PipEnvironmentView;
            if (current == null || current.EnvironmentView != view) {
                if (current != null) {
                    current.Dispose();
                }
                Subcontext.DataContext = new PipEnvironmentView(view, _provider);
            }
        }
    }

    private void UninstallPackage_CanExecute(object sender, CanExecuteRoutedEventArgs e) {
        e.CanExecute = _provider.CanExecute && e.Parameter is PipPackageView;
        e.Handled = true;
    }

    // async void is acceptable here: WPF command handlers are top-level event handlers.
    private async void UninstallPackage_Executed(object sender, ExecutedRoutedEventArgs e) {
        try {
            var view = (PipPackageView)e.Parameter;
            await _provider.UninstallPackage(view.Package);
        } catch (OperationCanceledException) {
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(this, ExceptionDispatchInfo.Capture(ex));
        }
    }

    // Upgrade is enabled only when a strictly newer version is known.
    private void UpgradePackage_CanExecute(object sender, CanExecuteRoutedEventArgs e) {
        e.Handled = true;

        if (!_provider.CanExecute) {
            e.CanExecute = false;
            return;
        }

        var view = e.Parameter as PipPackageView;
        if (view == null) {
            e.CanExecute = false;
            return;
        }

        e.CanExecute = !view.UpgradeVersion.IsEmpty && view.UpgradeVersion.CompareTo(view.Version) > 0;
    }

    private async void UpgradePackage_Executed(object sender, ExecutedRoutedEventArgs e) {
        try {
            var view = (PipPackageView)e.Parameter;
            // Construct a PackageSpec with the upgraded version.
            await _provider.InstallPackage(new PackageSpec(view.Package.Name, view.UpgradeVersion));
        } catch (OperationCanceledException) {
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(this, ExceptionDispatchInfo.Capture(ex));
        }
    }

    // Install takes the raw package spec string typed by the user as parameter.
    private void InstallPackage_CanExecute(object sender, CanExecuteRoutedEventArgs e) {
        e.CanExecute = _provider.CanExecute && !string.IsNullOrEmpty(e.Parameter as string);
        e.Handled = true;
    }

    private async void InstallPackage_Executed(object sender, ExecutedRoutedEventArgs e) {
        try {
            await _provider.InstallPackage(new PackageSpec((string)e.Parameter));
        } catch (OperationCanceledException) {
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(this, ExceptionDispatchInfo.Capture(ex));
        }
    }

    private void InstallPip_CanExecute(object sender, CanExecuteRoutedEventArgs e) {
        e.CanExecute = _provider.CanExecute;
        e.Handled = true;
    }

    private async void InstallPip_Executed(object sender, ExecutedRoutedEventArgs e) {
        try {
            await _provider.InstallPip();
        } catch (OperationCanceledException) {
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(this, ExceptionDispatchInfo.Capture(ex));
        }
    }

    // Re-raises wheel events on the packages list so scrolling works when the
    // wheel occurs over a child element that would otherwise swallow it.
    private void ForwardMouseWheel(object sender, MouseWheelEventArgs e) {
        PackagesList.RaiseEvent(new MouseWheelEventArgs(
            e.MouseDevice,
            e.Timestamp,
            e.Delta
        ) { RoutedEvent = UIElement.MouseWheelEvent });
        e.Handled = true;
    }

    // ApplicationCommands.Delete on a text box: enabled only when it has text...
    private void Delete_CanExecute(object sender, CanExecuteRoutedEventArgs e) {
        var tb = e.OriginalSource as TextBox;
        if (tb != null) {
            e.Handled = true;
            e.CanExecute = !string.IsNullOrEmpty(tb.Text);
            return;
        }
    }

    // ...and executing it clears the box.
    private void Delete_Executed(object sender, ExecutedRoutedEventArgs e) {
        var tb = e.OriginalSource as TextBox;
        if (tb != null) {
            tb.Clear();
            e.Handled = true;
            return;
        }
    }
}
// View model for the pip package-management pane of a single Python environment.
// Owns the installed/installable package collections, keeps them synchronized with
// PipExtensionProvider events, and exposes WPF dependency properties for binding.
// As a DependencyObject it has UI-thread affinity; background work marshals back
// through the Dispatcher.
sealed class PipEnvironmentView : DependencyObject, IDisposable {
    private readonly EnvironmentView _view;
    // Packages currently installed in the environment (mutated on the UI thread).
    private readonly ObservableCollection<PipPackageView> _installed;
    // Full set of installable search results; guarded by lock (_installable).
    private readonly List<PackageResultView> _installable;
    // Filtered subset of _installable that is actually displayed (UI thread).
    private readonly ObservableCollection<PackageResultView> _installableFiltered;
    private CollectionViewSource _installedView;
    private CollectionViewSource _installableView;
    // Debounces re-filtering of the installable list as the search query changes.
    private readonly Timer _installableViewRefreshTimer;
    internal readonly PipExtensionProvider _provider;
    private readonly InstallPackageView _installCommandView;
    // Matcher used to filter/rank packages against the current search query.
    private readonly FuzzyStringMatcher _matcher;

    internal PipEnvironmentView(
        EnvironmentView view,
        PipExtensionProvider provider
    ) {
        _view = view;
        _provider = provider;
        // Subscriptions are removed in Dispose().
        _provider.OperationStarted += PipExtensionProvider_UpdateStarted;
        _provider.OperationFinished += PipExtensionProvider_UpdateComplete;
        _provider.IsPipInstalledChanged += PipExtensionProvider_IsPipInstalledChanged;
        _provider.InstalledPackagesChanged += PipExtensionProvider_InstalledPackagesChanged;

        _installCommandView = new InstallPackageView(this);
        _matcher = new FuzzyStringMatcher(FuzzyMatchMode.FuzzyIgnoreCase);

        _installed = new ObservableCollection<PipPackageView>();
        _installedView = new CollectionViewSource { Source = _installed };
        _installedView.Filter += InstalledView_Filter;
        _installedView.View.CurrentChanged += InstalledView_CurrentChanged;
        _installable = new List<PackageResultView>();
        _installableFiltered = new ObservableCollection<PackageResultView>();
        _installableView = new CollectionViewSource { Source = _installableFiltered };
        _installableView.View.CurrentChanged += InstallableView_CurrentChanged;
        // Created disabled (no due time); Filter_Changed / RefreshInstallablePackages
        // arm it via Change().
        _installableViewRefreshTimer = new Timer(InstallablePackages_Refresh);

        FinishInitialization();
    }

    private async void PipExtensionProvider_IsPipInstalledChanged(object sender, EventArgs e) {
        // Defaults to true while the provider has not determined availability yet.
        await Dispatcher.InvokeAsync(() => { IsPipInstalled = _provider.IsPipInstalled ?? true; });
    }

    // The two lists share a single logical selection: selecting in one clears the other.
    private void InstalledView_CurrentChanged(object sender, EventArgs e) {
        if (_installedView.View.CurrentItem != null) {
            _installableView.View.MoveCurrentTo(null);
        }
    }

    private void InstallableView_CurrentChanged(object sender, EventArgs e) {
        if (_installableView.View.CurrentItem != null) {
            _installedView.View.MoveCurrentTo(null);
        }
    }

    // Fire-and-forget initial refresh kicked off from the constructor; errors are
    // reported through the tool window rather than faulting an unobserved task.
    private async void FinishInitialization() {
        try {
            await RefreshPackages();
        } catch (OperationCanceledException) {
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(_provider.WpfObject, ExceptionDispatchInfo.Capture(ex));
        }
    }

    public void Dispose() {
        // Mirror the constructor's subscriptions, then stop the debounce timer.
        _provider.OperationStarted -= PipExtensionProvider_UpdateStarted;
        _provider.OperationFinished -= PipExtensionProvider_UpdateComplete;
        _provider.IsPipInstalledChanged -= PipExtensionProvider_IsPipInstalledChanged;
        _provider.InstalledPackagesChanged -= PipExtensionProvider_InstalledPackagesChanged;
        _installableViewRefreshTimer.Dispose();
    }

    public EnvironmentView EnvironmentView {
        get { return _view; }
    }

    public InstallPackageView InstallCommand {
        get { return _installCommandView; }
    }

    private async void PipExtensionProvider_UpdateStarted(object sender, EventArgs e) {
        try {
            await Dispatcher.InvokeAsync(() => { IsListRefreshing = true; });
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(_provider.WpfObject, ExceptionDispatchInfo.Capture(ex));
        }
    }

    private async void PipExtensionProvider_UpdateComplete(object sender, EventArgs e) {
        try {
            await RefreshPackages();
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(_provider.WpfObject, ExceptionDispatchInfo.Capture(ex));
        }
    }

    private async void PipExtensionProvider_InstalledPackagesChanged(object sender, EventArgs e) {
        try {
            await RefreshPackages();
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(_provider.WpfObject, ExceptionDispatchInfo.Capture(ex));
        }
    }

    // True while pip is (believed to be) installed in the environment; read-only
    // to consumers, set internally from provider notifications.
    public bool IsPipInstalled {
        get { return (bool)GetValue(IsPipInstalledProperty); }
        private set { SetValue(IsPipInstalledPropertyKey, value); }
    }

    private static readonly DependencyPropertyKey IsPipInstalledPropertyKey = DependencyProperty.RegisterReadOnly(
        "IsPipInstalled",
        typeof(bool),
        typeof(PipEnvironmentView),
        new PropertyMetadata(true)
    );
    public static readonly DependencyProperty IsPipInstalledProperty = IsPipInstalledPropertyKey.DependencyProperty;

    // Current search text; changing it re-filters both lists (see Filter_Changed).
    public string SearchQuery {
        get { return (string)GetValue(SearchQueryProperty); }
        set { SetValue(SearchQueryProperty, value); }
    }

    public static readonly DependencyProperty SearchQueryProperty = DependencyProperty.Register(
        "SearchQuery",
        typeof(string),
        typeof(PipEnvironmentView),
        new PropertyMetadata(Filter_Changed)
    );

    private static void Filter_Changed(DependencyObject d, DependencyPropertyChangedEventArgs e) {
        var view = d as PipEnvironmentView;
        if (view != null) {
            try {
                // Installed list filters synchronously; installable list is debounced
                // 500ms so fast typing does not re-rank on every keystroke.
                view._installedView.View.Refresh();
                view._installableViewRefreshTimer.Change(500, Timeout.Infinite);
            } catch (ObjectDisposedException) {
                // Timer may already be disposed during shutdown; ignore.
            }
        }
    }

    // Timer callback (thread-pool thread): ranks the installable packages against
    // the current query and publishes the top matches to the UI.
    private async void InstallablePackages_Refresh(object state) {
        string query = null;
        try {
            query = await Dispatcher.InvokeAsync(() => SearchQuery);
        } catch (OperationCanceledException) {
            return;
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(_provider.WpfObject, ExceptionDispatchInfo.Capture(ex));
        }

        PackageResultView[] installable = null;
        lock (_installable) {
            if (_installable.Any() && !string.IsNullOrEmpty(query)) {
                // Score every candidate, keep matches, best-first, capped at 20.
                installable = _installable
                    .Select(p => Tuple.Create(_matcher.GetSortKey(p.Package.PackageSpec, query), p))
                    .Where(t => _matcher.IsCandidateMatch(t.Item2.Package.PackageSpec, query, t.Item1))
                    .OrderByDescending(t => t.Item1)
                    .Select(t => t.Item2)
                    .Take(20)
                    .ToArray();
            }
        }

        try {
            await Dispatcher.InvokeAsync(() => {
                if (installable != null && installable.Any()) {
                    // Merge keeps existing instances where possible to preserve selection.
                    _installableFiltered.Merge(installable, PackageViewComparer.Instance, PackageViewComparer.Instance);
                } else {
                    _installableFiltered.Clear();
                }
                _installableView.View.Refresh();
            });
        } catch (OperationCanceledException) {
        } catch (Exception ex) when (!ex.IsCriticalException()) {
            ToolWindow.SendUnhandledException(_provider.WpfObject, ExceptionDispatchInfo.Capture(ex));
        }
    }

    public ICollectionView InstalledPackages {
        get {
            if (EnvironmentView == null || EnvironmentView.Factory == null) {
                return null;
            }
            return _installedView.View;
        }
    }

    public ICollectionView InstallablePackages {
        get {
            if (EnvironmentView == null || EnvironmentView.Factory == null) {
                return null;
            }
            return _installableView.View;
        }
    }

    // Filter callback for the installed list. With no query everything installed is
    // shown and the "install" entry hidden; with a query, items must fuzzy-match.
    private void InstalledView_Filter(object sender, FilterEventArgs e) {
        PipPackageView package;
        PackageResultView result;
        var query = SearchQuery;
        var matcher = string.IsNullOrEmpty(query) ? null : _matcher;

        if ((package = e.Item as PipPackageView) != null) {
            e.Accepted = matcher == null || matcher.IsCandidateMatch(package.PackageSpec, query);
        } else if (e.Item is InstallPackageView) {
            e.Accepted = matcher != null;
        } else if ((result = e.Item as PackageResultView) != null) {
            e.Accepted = matcher != null && matcher.IsCandidateMatch(result.Package.PackageSpec, query);
        }
    }

    // Refreshes both package lists, toggling the busy indicator around the work.
    private async Task RefreshPackages() {
        bool isPipInstalled = true;
        await Dispatcher.InvokeAsync(() => {
            isPipInstalled = IsPipInstalled;
            IsListRefreshing = true;
            CommandManager.InvalidateRequerySuggested();
        });
        try {
            if (isPipInstalled) {
                await Task.WhenAll(
                    RefreshInstalledPackages(),
                    RefreshInstallablePackages()
                );
            }
        } catch (OperationCanceledException) {
            // User has probably closed the window or is quitting VS
        } finally {
            Dispatcher.Invoke(() => {
                IsListRefreshing = false;
                CommandManager.InvalidateRequerySuggested();
            });
        }
    }

    private async Task RefreshInstalledPackages() {
        var installed = await _provider.GetInstalledPackagesAsync();
        if (installed == null || !installed.Any()) {
            return;
        }

        await Dispatcher.InvokeAsync(() => {
            lock (_installed) {
                _installed.Merge(installed, PackageViewComparer.Instance, PackageViewComparer.Instance);
            }
        });
    }

    private async Task RefreshInstallablePackages() {
        var installable = await _provider.GetAvailablePackagesAsync();
        lock (_installable) {
            _installable.Clear();
            _installable.AddRange(installable.Select(pv => new PackageResultView(this, pv)));
        }
        try {
            // Short delay lets several refresh triggers coalesce into one re-filter.
            _installableViewRefreshTimer.Change(100, Timeout.Infinite);
        } catch (ObjectDisposedException) {
            // Shutting down; the pending refresh is irrelevant.
        }
    }

    // True while a package-list refresh or provider operation is in progress.
    public bool IsListRefreshing {
        get { return (bool)GetValue(IsListRefreshingProperty); }
        private set { SetValue(IsListRefreshingPropertyKey, value); }
    }

    private static readonly DependencyPropertyKey IsListRefreshingPropertyKey = DependencyProperty.RegisterReadOnly(
        "IsListRefreshing",
        typeof(bool),
        typeof(PipEnvironmentView),
        new PropertyMetadata(true, Filter_Changed)
    );
    public static readonly DependencyProperty IsListRefreshingProperty =
        IsListRefreshingPropertyKey.DependencyProperty;
}
// Case-insensitive equality/ordering over package views, keyed on the package
// spec string. Used both for sorting and for Merge() identity matching.
class PackageViewComparer :
    IEqualityComparer<PipPackageView>,
    IComparer<PipPackageView>,
    IEqualityComparer<PackageResultView>,
    IComparer<PackageResultView> {
    // Stateless, so a single shared instance suffices.
    public static readonly PackageViewComparer Instance = new PackageViewComparer();

    public bool Equals(PipPackageView x, PipPackageView y) {
        return StringComparer.OrdinalIgnoreCase.Equals(x.PackageSpec, y.PackageSpec);
    }

    public int GetHashCode(PipPackageView obj) {
        return StringComparer.OrdinalIgnoreCase.GetHashCode(obj.PackageSpec);
    }

    public int Compare(PipPackageView x, PipPackageView y) {
        return StringComparer.OrdinalIgnoreCase.Compare(x.PackageSpec, y.PackageSpec);
    }

    public bool Equals(PackageResultView x, PackageResultView y) {
        return Equals(x.Package, y.Package);
    }

    public int GetHashCode(PackageResultView obj) {
        // BUG FIX: Equals(PackageResultView) compares only the package spec, but the
        // previous implementation also mixed obj.IndexName into the hash. Two results
        // that compared Equal could therefore hash differently, violating the
        // Equals/GetHashCode contract and breaking hash-based lookups. Hash on the
        // same key that Equals uses.
        return GetHashCode(obj.Package);
    }

    public int Compare(PackageResultView x, PackageResultView y) {
        return Compare(x.Package, y.Package);
    }
}
// Lightweight view model for the "install a package" entry shown in the list.
class InstallPackageView {
    public InstallPackageView(PipEnvironmentView view) {
        View = view;
    }

    // The owning environment view.
    public PipEnvironmentView View { get; }

    // Name of the package index that installs will be resolved against.
    public string IndexName {
        get { return View._provider.IndexName; }
    }
}
// View model wrapping a single installable-package search result, forwarding the
// display-relevant change notifications from the underlying PipPackageView.
class PackageResultView : INotifyPropertyChanged {
    public PackageResultView(PipEnvironmentView view, PipPackageView package) {
        View = view;
        Package = package;
        // NOTE(review): this subscription is never removed and the class has no
        // Dispose; if PipPackageView instances outlive their result views this keeps
        // the views reachable — confirm lifetimes against PipEnvironmentView usage.
        Package.PropertyChanged += Package_PropertyChanged;
    }

    // Forward only the package properties this view re-exposes below.
    private void Package_PropertyChanged(object sender, PropertyChangedEventArgs e) {
        switch (e.PropertyName) {
            case "Description":
            case "DisplayName":
                PropertyChanged?.Invoke(this, e);
                break;
        }
    }

    public event PropertyChangedEventHandler PropertyChanged;

    public PipEnvironmentView View { get; }
    public PipPackageView Package { get; }

    public string PackageSpec => Package.PackageSpec;
    // Name of the package index this result came from.
    public string IndexName => View._provider.IndexName;
    public string DisplayName => Package.DisplayName;
    public string Description => Package.Description;
}
// Builds the "upgrade package X to version Y" confirmation message from a
// (name, version) multi-binding; falls back to the generic message when the
// binding has not produced usable values.
class UpgradeMessageConverter : IMultiValueConverter {
    public object Convert(object[] values, Type targetType, object parameter, CultureInfo culture) {
        // BUG FIX: WPF passes DependencyProperty.UnsetValue (or nulls) through
        // multi-bindings before the sources resolve; the previous unconditional
        // casts threw InvalidCastException in that window. Verify the types and
        // degrade to the generic message instead of throwing.
        if (values == null || values.Length != 2 ||
            !(values[0] is string) ||
            !(values[1] is PackageVersion)) {
            return Strings.UpgradeMessage;
        }
        return Strings.UpgradeMessage_Package.FormatUI((string)values[0], (PackageVersion)values[1]);
    }

    public object[] ConvertBack(object value, Type[] targetTypes, object parameter, CultureInfo culture) {
        // One-way converter only.
        throw new NotImplementedException();
    }
}
// Builds the "uninstall package X" confirmation message; falls back to the
// generic message when the bound value is not a package view.
[ValueConversion(typeof(PipPackageView), typeof(string))]
class UninstallMessageConverter : IValueConverter {
    public object Convert(object value, Type targetType, object parameter, CultureInfo culture) {
        var package = value as PipPackageView;
        return package == null
            ? Strings.UninstallMessage
            : Strings.UninstallMessage_Package.FormatUI(package.Name);
    }

    public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture) {
        // One-way converter only.
        throw new NotImplementedException();
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Moq;
using NuGet.Test.Mocks;
using Xunit;
namespace NuGet.Test
{
public class PackageWalkerTest
{
// Updating B when A 1.0 pins B to [1.0] must also update A (to 2.0, which allows
// any B), yielding uninstall A/B then install A 2.0 / B 1.0.1.
[Fact]
public void ResolvingDependencyForUpdateWithConflictingDependents()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();

    // A 1.0 -> B [1.0]
    IPackage A10 = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.0]")
        }, content: new[] { "a1" });

    // A 2.0 -> B (any version)
    IPackage A20 = PackageUtility.CreatePackage("A", "2.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        }, content: new[] { "a2" });

    IPackage B10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "b1" });
    IPackage B101 = PackageUtility.CreatePackage("B", "1.0.1", content: new[] { "b101" });
    IPackage B20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "a2" });

    localRepository.Add(A10);
    localRepository.Add(B10);
    sourceRepository.AddPackage(A10);
    sourceRepository.AddPackage(A20);
    sourceRepository.AddPackage(B10);
    sourceRepository.AddPackage(B101);
    sourceRepository.AddPackage(B20);

    IPackageOperationResolver resolver = new UpdateWalker(localRepository,
                                                          sourceRepository,
                                                          new DependentsWalker(localRepository),
                                                          NullConstraintProvider.Instance,
                                                          NullLogger.Instance,
                                                          updateDependencies: true,
                                                          allowPrereleaseVersions: false) { AcceptedTargets = PackageTargets.Project };

    // Act
    // BUG FIX: the test previously built the resolver but never exercised it —
    // its Act/Assert phase had been displaced into the following test method.
    var packages = resolver.ResolveOperations(B101).ToList();

    // Assert
    Assert.Equal(4, packages.Count);
    AssertOperation("A", "1.0", PackageAction.Uninstall, packages[0]);
    AssertOperation("B", "1.0", PackageAction.Uninstall, packages[1]);
    AssertOperation("A", "2.0", PackageAction.Install, packages[2]);
    AssertOperation("B", "1.0.1", PackageAction.Install, packages[3]);
}
// DependentsWalker must track visited nodes by (id, version), so each version of
// B reports exactly the one version of A that depends on it.
[Fact]
public void ReverseDependencyWalkerUsersVersionAndIdToDetermineVisited()
{
    // BUG FIX: this method previously opened with a stray fragment that used
    // 'resolver' and 'packageA2' before any declaration (Act/Assert lines
    // displaced from the preceding test) and could not compile. Only the
    // coherent Arrange/Act/Assert for the DependentsWalker remains.

    // Arrange
    // A 1.0 -> B 1.0
    IPackage packageA1 = PackageUtility.CreatePackage("A",
                                                      "1.0",
                                                      dependencies: new List<PackageDependency> {
                                                          PackageDependency.CreateDependency("B", "[1.0]")
                                                      });
    // A 2.0 -> B 2.0
    IPackage packageA2 = PackageUtility.CreatePackage("A",
                                                      "2.0",
                                                      dependencies: new List<PackageDependency> {
                                                          PackageDependency.CreateDependency("B", "[2.0]")
                                                      });
    IPackage packageB1 = PackageUtility.CreatePackage("B", "1.0");
    IPackage packageB2 = PackageUtility.CreatePackage("B", "2.0");

    var mockRepository = new MockPackageRepository();
    mockRepository.AddPackage(packageA1);
    mockRepository.AddPackage(packageA2);
    mockRepository.AddPackage(packageB1);
    mockRepository.AddPackage(packageB2);

    // Act
    IDependentsResolver lookup = new DependentsWalker(mockRepository);

    // Assert
    Assert.Equal(0, lookup.GetDependents(packageA1).Count());
    Assert.Equal(0, lookup.GetDependents(packageA2).Count());
    Assert.Equal(1, lookup.GetDependents(packageB1).Count());
    Assert.Equal(1, lookup.GetDependents(packageB2).Count());
}
// Installing a package whose dependency exists in no repository must fail with
// a clear "unable to resolve" message rather than installing partially.
[Fact]
public void ResolveDependenciesForInstallPackageWithUnknownDependencyThrows()
{
    // Arrange
    IPackage package = PackageUtility.CreatePackage("A",
                                                    "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        new PackageDependency("B")
                                                    });

    // Both repositories are empty, so dependency B cannot be found anywhere.
    IPackageOperationResolver resolver = new InstallWalker(new MockPackageRepository(),
                                                           new MockPackageRepository(),
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(package), "Unable to resolve dependency 'B'.");
}
// When the source repository also implements IDependencyResolver, the install
// walker must delegate dependency resolution to it (verified via the mock).
[Fact]
public void ResolveDependenciesForInstallPackageResolvesDependencyUsingDependencyProvider()
{
    // Arrange
    IPackage packageA = PackageUtility.CreatePackage("A",
                                                    "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        new PackageDependency("B")
                                                    });
    IPackage packageB = PackageUtility.CreatePackage("B");
    var repository = new Mock<PackageRepositoryBase>();
    repository.Setup(c => c.GetPackages()).Returns(new[] { packageA }.AsQueryable());
    // The repository's IDependencyResolver facet is expected to be asked for B.
    var dependencyProvider = repository.As<IDependencyResolver>();
    dependencyProvider.Setup(c => c.ResolveDependency(It.Is<PackageDependency>(p => p.Id == "B"), It.IsAny<IPackageConstraintProvider>(), false, true, DependencyVersion.Lowest))
                      .Returns(packageB).Verifiable();
    var localRepository = new MockPackageRepository();

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           repository.Object,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act
    var operations = resolver.ResolveOperations(packageA).ToList();

    // Assert: dependency B installs before dependent A.
    Assert.Equal(2, operations.Count);
    Assert.Equal(PackageAction.Install, operations.First().Action);
    Assert.Equal(packageB, operations.First().Package);
    Assert.Equal(PackageAction.Install, operations.Last().Action);
    Assert.Equal(packageA, operations.Last().Package);

    dependencyProvider.Verify();
}
// As above, but the dependency carries a version constraint (B >= 1.1); the
// constrained PackageDependency instance itself must be forwarded to the resolver.
[Fact]
public void ResolveDependenciesForInstallPackageResolvesDependencyWithConstraintsUsingDependencyResolver()
{
    // Arrange
    var packageDependency = new PackageDependency("B", new VersionSpec { MinVersion = new SemanticVersion("1.1") });
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> { packageDependency });

    IPackage packageB12 = PackageUtility.CreatePackage("B", "1.2");
    // MockBehavior.Strict: any unexpected repository call fails the test.
    var repository = new Mock<PackageRepositoryBase>(MockBehavior.Strict);
    repository.Setup(c => c.GetPackages()).Returns(new[] { packageA }.AsQueryable());
    var dependencyProvider = repository.As<IDependencyResolver>();
    dependencyProvider.Setup(c => c.ResolveDependency(packageDependency, It.IsAny<IPackageConstraintProvider>(), false, true, DependencyVersion.Lowest))
                      .Returns(packageB12).Verifiable();
    var localRepository = new MockPackageRepository();

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           repository.Object,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act
    var operations = resolver.ResolveOperations(packageA).ToList();

    // Assert: B 1.2 (the resolver's answer) installs before A.
    Assert.Equal(2, operations.Count);
    Assert.Equal(PackageAction.Install, operations.First().Action);
    Assert.Equal(packageB12, operations.First().Package);
    Assert.Equal(PackageAction.Install, operations.Last().Action);
    Assert.Equal(packageA, operations.Last().Package);

    dependencyProvider.Verify();
}
// A -> B -> A must be detected and reported as a circular dependency, including
// the full cycle path in the message.
[Fact]
public void ResolveDependenciesForInstallCircularReferenceThrows()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();

    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        new PackageDependency("B")
                                                    });

    IPackage packageB = PackageUtility.CreatePackage("B", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        new PackageDependency("A")
                                                    });

    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(packageA), "Circular dependency detected 'A 1.0 => B 1.0 => A 1.0'.");
}
// A diamond graph (A -> B,C; B,C -> D) must resolve each package exactly once —
// D is not duplicated despite being reachable via both B and C.
[Fact]
public void ResolveDependenciesForInstallDiamondDependencyGraph()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    // A -> [B, C]
    // B -> [D]
    // C -> [D]
    //    A
    //   / \
    //  B   C
    //   \ /
    //    D

    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("B"),
                                                            new PackageDependency("C")
                                                        });

    IPackage packageB = PackageUtility.CreatePackage("B", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("D")
                                                        });

    IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("D")
                                                        });

    IPackage packageD = PackageUtility.CreatePackage("D", "1.0");

    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);
    sourceRepository.AddPackage(packageD);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act
    var packages = resolver.ResolveOperations(packageA).ToList();

    // Assert: exactly one operation per package id.
    var dict = packages.ToDictionary(p => p.Package.Id);
    Assert.Equal(4, packages.Count);
    Assert.NotNull(dict["A"]);
    Assert.NotNull(dict["B"]);
    Assert.NotNull(dict["C"]);
    Assert.NotNull(dict["D"]);
}
// Diamond graph where B and C require different minimum versions of D and E;
// the resolver must pick the highest required version of each (D 2.0, E 2.0)
// and produce a stable install order.
// NOTE(review): "Differnt" in the test name is a typo; left as-is since renaming
// a test changes its reported identity.
[Fact]
public void ResolveDependenciesForInstallDiamondDependencyGraphWithDifferntVersionOfSamePackage()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    // A -> [B, C]
    // B -> [D >= 1, E >= 2]
    // C -> [D >= 2, E >= 1]
    //     A
    //   /   \
    //  B     C
    //  | \   | \
    // D1 E2 D2 E1

    IPackage packageA = PackageUtility.CreateProjectLevelPackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("B"),
                                                            new PackageDependency("C")
                                                        });

    IPackage packageB = PackageUtility.CreateProjectLevelPackage("B", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            PackageDependency.CreateDependency("D", "1.0"),
                                                            PackageDependency.CreateDependency("E", "2.0")
                                                        });

    IPackage packageC = PackageUtility.CreateProjectLevelPackage("C", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            PackageDependency.CreateDependency("D", "2.0"),
                                                            PackageDependency.CreateDependency("E", "1.0")
                                                        });

    IPackage packageD10 = PackageUtility.CreateProjectLevelPackage("D", "1.0");
    IPackage packageD20 = PackageUtility.CreateProjectLevelPackage("D", "2.0");
    IPackage packageE10 = PackageUtility.CreateProjectLevelPackage("E", "1.0");
    IPackage packageE20 = PackageUtility.CreateProjectLevelPackage("E", "2.0");

    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);
    sourceRepository.AddPackage(packageD20);
    sourceRepository.AddPackage(packageD10);
    sourceRepository.AddPackage(packageE20);
    sourceRepository.AddPackage(packageE10);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act
    // Resolving twice checks the walker gives a repeatable answer for the same input.
    var operations = resolver.ResolveOperations(packageA).ToList();
    var projectOperations = resolver.ResolveOperations(packageA).ToList();

    // Assert
    Assert.Equal(5, operations.Count);
    Assert.Equal("E", operations[0].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), operations[0].Package.Version);
    Assert.Equal("B", operations[1].Package.Id);
    Assert.Equal("D", operations[2].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), operations[2].Package.Version);
    Assert.Equal("C", operations[3].Package.Id);
    Assert.Equal("A", operations[4].Package.Id);

    Assert.Equal(5, projectOperations.Count);
    Assert.Equal("E", projectOperations[0].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), projectOperations[0].Package.Version);
    Assert.Equal("B", projectOperations[1].Package.Id);
    Assert.Equal("D", projectOperations[2].Package.Id);
    Assert.Equal(new SemanticVersion("2.0"), projectOperations[2].Package.Version);
    Assert.Equal("C", projectOperations[3].Package.Id);
    Assert.Equal("A", projectOperations[4].Package.Id);
}
// Uninstalling with removeDependencies must skip dependencies that are not
// present locally — B is deliberately never added to the repository, and the
// walker still removes A, C and D without failing.
[Fact]
public void UninstallWalkerIgnoresMissingDependencies()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    // A -> [B, C]
    // B -> [D]
    // C -> [D]

    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("B"),
                                                            new PackageDependency("C")
                                                        });

    IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("D")
                                                        });

    IPackage packageD = PackageUtility.CreatePackage("D", "1.0");

    localRepository.AddPackage(packageA);
    localRepository.AddPackage(packageC);
    localRepository.AddPackage(packageD);

    IPackageOperationResolver resolver = new UninstallWalker(localRepository,
                                                             new DependentsWalker(localRepository),
                                                             NullLogger.Instance,
                                                             removeDependencies: true,
                                                             forceRemove: false);

    // Act
    var packages = resolver.ResolveOperations(packageA)
                           .ToDictionary(p => p.Package.Id);

    // Assert: only the locally-present packages appear in the plan.
    Assert.Equal(3, packages.Count);
    Assert.NotNull(packages["A"]);
    Assert.NotNull(packages["C"]);
    Assert.NotNull(packages["D"]);
}
// Uninstalling the root of a diamond graph with removeDependencies must remove
// every node exactly once, including the shared leaf D.
[Fact]
public void ResolveDependenciesForUninstallDiamondDependencyGraph()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    // A -> [B, C]
    // B -> [D]
    // C -> [D]
    //    A
    //   / \
    //  B   C
    //   \ /
    //    D

    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("B"),
                                                            new PackageDependency("C")
                                                        });

    IPackage packageB = PackageUtility.CreatePackage("B", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("D")
                                                        });

    IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("D")
                                                        });

    IPackage packageD = PackageUtility.CreatePackage("D", "1.0");

    localRepository.AddPackage(packageA);
    localRepository.AddPackage(packageB);
    localRepository.AddPackage(packageC);
    localRepository.AddPackage(packageD);

    IPackageOperationResolver resolver = new UninstallWalker(localRepository,
                                                             new DependentsWalker(localRepository),
                                                             NullLogger.Instance,
                                                             removeDependencies: true,
                                                             forceRemove: false);

    // Act
    var packages = resolver.ResolveOperations(packageA)
                           .ToDictionary(p => p.Package.Id);

    // Assert
    Assert.Equal(4, packages.Count);
    Assert.NotNull(packages["A"]);
    Assert.NotNull(packages["B"]);
    Assert.NotNull(packages["C"]);
    Assert.NotNull(packages["D"]);
}
// A cycle that passes through a different version of the starting package
// (A 1.0 -> B -> A [1.5]) is still a circular dependency; the message shows
// the versions actually visited.
[Fact]
public void ResolveDependencyForInstallCircularReferenceWithDifferentVersionOfPackageReferenceThrows()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();

    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("B")
                                                        });

    IPackage packageA15 = PackageUtility.CreatePackage("A", "1.5",
                                                    dependencies: new List<PackageDependency> {
                                                            new PackageDependency("B")
                                                        });

    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                            PackageDependency.CreateDependency("A", "[1.5]")
                                                        });

    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA15);
    sourceRepository.AddPackage(packageB10);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(packageA10), "Circular dependency detected 'A 1.0 => B 1.0 => A 1.5'.");
}
// An externally-imposed constraint (B pinned to [1.4] by "foo") that conflicts
// with the target's dependency (B >= 2.0) must fail with a message naming both
// the unmet dependency and the constraint's source.
[Fact]
public void ResolvingDependencyForUpdateThatHasAnUnsatisfiedConstraint()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    var constraintProvider = new Mock<IPackageConstraintProvider>();
    constraintProvider.Setup(m => m.GetConstraint("B")).Returns(VersionUtility.ParseVersionSpec("[1.4]"));
    constraintProvider.Setup(m => m.Source).Returns("foo");

    IPackage A10 = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "1.5")
                                                    });
    IPackage A20 = PackageUtility.CreatePackage("A", "2.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "2.0")
                                                    });

    IPackage B15 = PackageUtility.CreatePackage("B", "1.5");
    IPackage B20 = PackageUtility.CreatePackage("B", "2.0");

    localRepository.Add(A10);
    localRepository.Add(B15);
    sourceRepository.AddPackage(A10);
    sourceRepository.AddPackage(A20);
    sourceRepository.AddPackage(B15);
    sourceRepository.AddPackage(B20);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           constraintProvider.Object,
                                                           null,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert
    // \u2265 is the '>=' sign used in NuGet's version-range display format.
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(A20), "Unable to resolve dependency 'B (\u2265 2.0)'.'B' has an additional constraint (= 1.4) defined in foo.");
}
// A requires B >= 1.5 but only B 1.4 exists: resolution must fail with a
// message that includes the unmet minimum-version requirement.
[Fact]
public void ResolveDependencyForInstallPackageWithDependencyThatDoesntMeetMinimumVersionThrows()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "1.5")
                                                    });

    IPackage packageB = PackageUtility.CreatePackage("B", "1.4");
    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert (\u2265 is '>=' in NuGet's range display format)
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(packageA), "Unable to resolve dependency 'B (\u2265 1.5)'.");
}
// A requires exactly B [1.5] but only B 1.4 exists: resolution must fail with
// a message that includes the unmet exact-version requirement.
[Fact]
public void ResolveDependencyForInstallPackageWithDependencyThatDoesntMeetExactVersionThrows()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "[1.5]")
                                                    });

    sourceRepository.AddPackage(packageA);

    IPackage packageB = PackageUtility.CreatePackage("B", "1.4");
    sourceRepository.AddPackage(packageB);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => resolver.ResolveOperations(packageA), "Unable to resolve dependency 'B (= 1.5)'.");
}
// B is required at three different depths (by A, C and D); it must still be
// scheduled for install exactly once and before everything that depends on it.
[Fact]
public void ResolveOperationsForInstallSameDependencyAtDifferentLevelsInGraph()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();

    // A1 -> B1, C1
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "1.0"),
                                                        PackageDependency.CreateDependency("C", "1.0")
                                                    });
    // B1
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0");
    // C1 -> B1, D1
    IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "1.0"),
                                                        PackageDependency.CreateDependency("D", "1.0")
                                                    });
    // D1 -> B1
    IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
                                                    dependencies: new List<PackageDependency> {
                                                        PackageDependency.CreateDependency("B", "1.0")
                                                    });

    sourceRepository.AddPackage(packageA);
    sourceRepository.AddPackage(packageB);
    sourceRepository.AddPackage(packageC);
    sourceRepository.AddPackage(packageD);

    IPackageOperationResolver resolver = new InstallWalker(localRepository,
                                                           sourceRepository,
                                                           NullLogger.Instance,
                                                           ignoreDependencies: false,
                                                           allowPrereleaseVersions: false,
                                                           dependencyVersion: DependencyVersion.Lowest);

    // Act & Assert: deepest shared dependency first, root last.
    var packages = resolver.ResolveOperations(packageA).ToList();
    Assert.Equal(4, packages.Count);
    Assert.Equal("B", packages[0].Package.Id);
    Assert.Equal("D", packages[1].Package.Id);
    Assert.Equal("C", packages[2].Package.Id);
    Assert.Equal("A", packages[3].Package.Id);
}
[Fact]
// Update scenario: v1 of the whole graph (A, B, C, D) is installed locally and
// v2 of every package exists in the source repository. Updating A should drop
// the old graph (dependents first) and install the new one (dependencies first).
public void ResolveDependenciesForInstallSameDependencyAtDifferentLevelsInGraphDuringUpdate()
{
// Arrange
var localRepository = new MockPackageRepository();
var sourceRepository = new MockPackageRepository();
// A1 -> B1, C1
IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
content: new[] { "A1" },
dependencies: new List<PackageDependency> {
PackageDependency.CreateDependency("B", "1.0"),
PackageDependency.CreateDependency("C", "1.0")
});
// B1
IPackage packageB = PackageUtility.CreatePackage("B", "1.0", new[] { "B1" });
// C1 -> B1, D1
IPackage packageC = PackageUtility.CreatePackage("C", "1.0",
content: new[] { "C1" },
dependencies: new List<PackageDependency> {
PackageDependency.CreateDependency("B", "1.0"),
PackageDependency.CreateDependency("D", "1.0")
});
// D1 -> B1
IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
content: new[] { "A1" }, // NOTE(review): content "A1" looks like a copy/paste slip ("D1"?); harmless to the assertions — confirm
dependencies: new List<PackageDependency> {
PackageDependency.CreateDependency("B", "1.0")
});
// A2 -> B2, C2
IPackage packageA2 = PackageUtility.CreatePackage("A", "2.0",
content: new[] { "A2" },
dependencies: new List<PackageDependency> {
PackageDependency.CreateDependency("B", "2.0"),
PackageDependency.CreateDependency("C", "2.0")
});
// B2
IPackage packageB2 = PackageUtility.CreatePackage("B", "2.0", new[] { "B2" });
// C2 -> B2, D2
IPackage packageC2 = PackageUtility.CreatePackage("C", "2.0",
content: new[] { "C2" },
dependencies: new List<PackageDependency> {
PackageDependency.CreateDependency("B", "2.0"),
PackageDependency.CreateDependency("D", "2.0")
});
// D2 -> B2
IPackage packageD2 = PackageUtility.CreatePackage("D", "2.0",
content: new[] { "D2" },
dependencies: new List<PackageDependency> {
PackageDependency.CreateDependency("B", "2.0")
});
// Source repository holds both versions of every package.
sourceRepository.AddPackage(packageA);
sourceRepository.AddPackage(packageB);
sourceRepository.AddPackage(packageC);
sourceRepository.AddPackage(packageD);
sourceRepository.AddPackage(packageA2);
sourceRepository.AddPackage(packageB2);
sourceRepository.AddPackage(packageC2);
sourceRepository.AddPackage(packageD2);
// Only v1 of the graph is installed locally.
localRepository.AddPackage(packageA);
localRepository.AddPackage(packageB);
localRepository.AddPackage(packageC);
localRepository.AddPackage(packageD);
IPackageOperationResolver resolver = new UpdateWalker(localRepository,
sourceRepository,
new DependentsWalker(localRepository),
NullConstraintProvider.Instance,
NullLogger.Instance,
updateDependencies: true,
allowPrereleaseVersions: false);
// Act
var operations = resolver.ResolveOperations(packageA2).ToList();
// Assert: four uninstalls (dependents before dependencies) followed by
// four installs (dependencies before dependents).
Assert.Equal(8, operations.Count);
AssertOperation("A", "1.0", PackageAction.Uninstall, operations[0]);
AssertOperation("C", "1.0", PackageAction.Uninstall, operations[1]);
AssertOperation("D", "1.0", PackageAction.Uninstall, operations[2]);
AssertOperation("B", "1.0", PackageAction.Uninstall, operations[3]);
AssertOperation("B", "2.0", PackageAction.Install, operations[4]);
AssertOperation("D", "2.0", PackageAction.Install, operations[5]);
AssertOperation("C", "2.0", PackageAction.Install, operations[6]);
AssertOperation("A", "2.0", PackageAction.Install, operations[7]);
}
[Fact]
// NOTE(review): despite "Install" in the test name, this exercises
// UninstallWalker with removeDependencies: true.
public void ResolveDependenciesForInstallPackageWithDependencyReturnsPackageAndDependency()
{
    // Arrange: A 1.0 depends on B (any version); both installed locally.
    var repo = new MockPackageRepository();
    var a = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> { new PackageDependency("B") });
    var b = PackageUtility.CreatePackage("B", "1.0");
    repo.AddPackage(a);
    repo.AddPackage(b);

    IPackageOperationResolver resolver = new UninstallWalker(repo,
        new DependentsWalker(repo),
        NullLogger.Instance,
        removeDependencies: true,
        forceRemove: false);

    // Act
    var operationsById = resolver.ResolveOperations(a).ToDictionary(o => o.Package.Id);

    // Assert: uninstalling A with removeDependencies also removes B.
    Assert.Equal(2, operationsById.Count);
    Assert.NotNull(operationsById["A"]);
    Assert.NotNull(operationsById["B"]);
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithDependentThrows()
{
    // Arrange: A 1.0 depends on B; both installed locally.
    var repo = new MockPackageRepository();
    var a = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> { new PackageDependency("B") });
    var b = PackageUtility.CreatePackage("B", "1.0");
    repo.AddPackage(a);
    repo.AddPackage(b);

    IPackageOperationResolver resolver = new UninstallWalker(repo,
        new DependentsWalker(repo),
        NullLogger.Instance,
        removeDependencies: false,
        forceRemove: false);

    // Act & Assert: B cannot be removed while A still depends on it.
    ExceptionAssert.Throws<InvalidOperationException>(
        () => resolver.ResolveOperations(b),
        "Unable to uninstall 'B 1.0' because 'A 1.0' depends on it.");
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithDependentAndRemoveDependenciesThrows()
{
    // Arrange: A 1.0 depends on B; both installed locally.
    var repo = new MockPackageRepository();
    var a = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> { new PackageDependency("B") });
    var b = PackageUtility.CreatePackage("B", "1.0");
    repo.AddPackage(a);
    repo.AddPackage(b);

    IPackageOperationResolver resolver = new UninstallWalker(repo,
        new DependentsWalker(repo),
        NullLogger.Instance,
        removeDependencies: true,
        forceRemove: false);

    // Act & Assert: removeDependencies does not override the dependent check.
    ExceptionAssert.Throws<InvalidOperationException>(
        () => resolver.ResolveOperations(b),
        "Unable to uninstall 'B 1.0' because 'A 1.0' depends on it.");
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithDependentAndForceReturnsPackage()
{
    // Arrange: A 1.0 depends on B; both installed locally.
    var repo = new MockPackageRepository();
    var a = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> { new PackageDependency("B") });
    var b = PackageUtility.CreatePackage("B", "1.0");
    repo.AddPackage(a);
    repo.AddPackage(b);

    IPackageOperationResolver resolver = new UninstallWalker(repo,
        new DependentsWalker(repo),
        NullLogger.Instance,
        removeDependencies: false,
        forceRemove: true);

    // Act
    var operationsById = resolver.ResolveOperations(b).ToDictionary(o => o.Package.Id);

    // Assert: force removes B alone, ignoring the dependent A.
    Assert.Equal(1, operationsById.Count);
    Assert.NotNull(operationsById["B"]);
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithRemoveDependenciesExcludesDependencyIfDependencyInUse()
{
    // Arrange: A 1.0 -> [B, C] and D -> [C]; C is shared between A and D.
    var repo = new MockPackageRepository();
    var a = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        });
    var b = PackageUtility.CreatePackage("B", "1.0");
    var c = PackageUtility.CreatePackage("C", "1.0");
    var d = PackageUtility.CreatePackage("D", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("C"),
        });

    // Same registration order as the original test (D first).
    repo.AddPackage(d);
    repo.AddPackage(a);
    repo.AddPackage(b);
    repo.AddPackage(c);

    IPackageOperationResolver resolver = new UninstallWalker(repo,
        new DependentsWalker(repo),
        NullLogger.Instance,
        removeDependencies: true,
        forceRemove: false);

    // Act
    var operationsById = resolver.ResolveOperations(a).ToDictionary(o => o.Package.Id);

    // Assert: C survives because D still depends on it; only A and B go.
    Assert.Equal(2, operationsById.Count);
    Assert.NotNull(operationsById["A"]);
    Assert.NotNull(operationsById["B"]);
}
[Fact]
public void ResolveDependenciesForUninstallPackageWithRemoveDependenciesSetAndForceReturnsAllDependencies()
{
    // Arrange: A 1.0 -> [B, C], D -> [C]; C is shared between A and D.
    var localRepository = new MockPackageRepository();
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        });
    IPackage packageB = PackageUtility.CreatePackage("B", "1.0");
    IPackage packageC = PackageUtility.CreatePackage("C", "1.0");
    IPackage packageD = PackageUtility.CreatePackage("D", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("C"),
        });
    localRepository.AddPackage(packageA);
    localRepository.AddPackage(packageB);
    localRepository.AddPackage(packageC);
    localRepository.AddPackage(packageD);

    IPackageOperationResolver resolver = new UninstallWalker(localRepository,
        new DependentsWalker(localRepository),
        NullLogger.Instance,
        removeDependencies: true,
        forceRemove: true);

    // Act
    var packages = resolver.ResolveOperations(packageA)
        .ToDictionary(p => p.Package.Id);

    // Assert
    // Fix: the test name promises "all dependencies", but the original only
    // checked that A, B and C are present without pinning the count — a
    // regression that also scheduled D would have passed unnoticed.
    Assert.Equal(3, packages.Count);
    Assert.NotNull(packages["A"]);
    Assert.NotNull(packages["B"]);
    Assert.NotNull(packages["C"]);
}
[Fact]
public void ProjectInstallWalkerIgnoresSolutionLevelPackages()
{
    // Arrange: project-level A 1.0 pins the solution-level (tools-only) B to exactly 1.5.
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();

    var projectPackage = PackageUtility.CreatePackage("A", "1.0",
        dependencies: new List<PackageDependency> {
            PackageDependency.CreateDependency("B", "[1.5]")
        },
        content: new[] { "content" });
    sourceRepository.AddPackage(projectPackage);

    var toolsPackage = PackageUtility.CreatePackage("B", "1.5",
        content: Enumerable.Empty<string>(),
        tools: new[] { "init.ps1" });
    sourceRepository.AddPackage(toolsPackage);

    IPackageOperationResolver resolver = new UpdateWalker(localRepository,
        sourceRepository,
        new DependentsWalker(localRepository),
        NullConstraintProvider.Instance,
        NullLogger.Instance,
        updateDependencies: true,
        allowPrereleaseVersions: false) { AcceptedTargets = PackageTargets.Project };

    // Act
    var operationsById = resolver.ResolveOperations(projectPackage).ToDictionary(o => o.Package.Id);

    // Assert: the solution-level tools package B is not part of the plan.
    Assert.Equal(1, operationsById.Count);
    Assert.NotNull(operationsById["A"]);
}
[Fact]
public void AfterPackageWalkMetaPackageIsClassifiedTheSameAsDependencies()
{
    // Arrange: A is a meta package (no content) depending on two project-level packages.
    var repository = new MockPackageRepository();
    var walker = new TestWalker(repository);

    var metaPackage = PackageUtility.CreatePackage(
        "A", "1.0",
        content: Enumerable.Empty<string>(),
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        },
        createRealStream: false);
    repository.AddPackage(PackageUtility.CreatePackage("B", "1.0", content: new[] { "contentB" }));
    repository.AddPackage(PackageUtility.CreatePackage("C", "1.0", content: new[] { "contentC" }));

    // Sanity check: unclassified before the walk.
    Assert.Equal(PackageTargets.None, walker.GetPackageInfo(metaPackage).Target);

    // Act
    walker.Walk(metaPackage);

    // Assert: the meta package inherits its dependencies' Project target.
    Assert.Equal(PackageTargets.Project, walker.GetPackageInfo(metaPackage).Target);
}
[Fact]
public void LocalizedIntelliSenseFileCountsAsProjectTarget()
{
    // Arrange: a runtime package plus a satellite package carrying only a localized XML doc file.
    var repository = new MockPackageRepository();
    var walker = new TestWalker(repository);

    var runtimePackage = PackageUtility.CreatePackage("A", "1.0",
        assemblyReferences: new[] { @"lib\A.dll", @"lib\A.xml" });
    var satellitePackage = PackageUtility.CreatePackage("A.fr-fr", "1.0",
        dependencies: new[] { new PackageDependency("A") },
        satelliteAssemblies: new[] { @"lib\fr-fr\A.xml" },
        language: "fr-fr");
    repository.AddPackage(runtimePackage);
    repository.AddPackage(satellitePackage);

    // Act
    walker.Walk(satellitePackage);

    // Assert: a localized IntelliSense file alone classifies the package as project-level.
    Assert.Equal(PackageTargets.Project, walker.GetPackageInfo(satellitePackage).Target);
}
[Fact]
public void AfterPackageWalkSatellitePackageIsClassifiedTheSameAsDependencies()
{
    // Arrange: a runtime package and its fr-fr satellite carrying a localized resources assembly.
    var repository = new MockPackageRepository();
    var walker = new TestWalker(repository);

    var runtimePackage = PackageUtility.CreatePackage("A", "1.0",
        assemblyReferences: new[] { @"lib\A.dll" });
    var satellitePackage = PackageUtility.CreatePackage("A.fr-fr", "1.0",
        dependencies: new[] { new PackageDependency("A") },
        satelliteAssemblies: new[] { @"lib\fr-fr\A.resources.dll" },
        language: "fr-fr");
    repository.AddPackage(runtimePackage);
    repository.AddPackage(satellitePackage);

    // Act
    walker.Walk(satellitePackage);

    // Assert: the satellite package is classified like its runtime dependency.
    Assert.Equal(PackageTargets.Project, walker.GetPackageInfo(satellitePackage).Target);
}
[Fact]
public void MetaPackageWithMixedTargetsThrows()
{
    // Arrange: meta package A depends on one project-level package and one
    // solution-level (tools-only) package — a forbidden mix.
    var repository = new MockPackageRepository();
    var walker = new TestWalker(repository);

    var metaPackage = PackageUtility.CreatePackage("A", "1.0",
        content: Enumerable.Empty<string>(),
        dependencies: new List<PackageDependency> {
            new PackageDependency("B"),
            new PackageDependency("C")
        },
        createRealStream: false);
    repository.AddPackage(PackageUtility.CreatePackage("B", "1.0", content: new[] { "contentB" }));
    repository.AddPackage(PackageUtility.CreatePackage("C", "1.0", content: Enumerable.Empty<string>(), tools: new[] { "tools" }));

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(
        () => walker.Walk(metaPackage),
        "Child dependencies of dependency only packages cannot mix external and project packages.");
}
[Fact]
// NOTE(review): "Depdend" is a typo in the test name; kept as-is to preserve
// the public test identifier.
public void ExternalPackagesThatDepdendOnProjectLevelPackagesThrows()
{
    // Arrange: solution-level (tools-only) package A depends on project-level package B.
    var repository = new MockPackageRepository();
    var walker = new TestWalker(repository);

    var solutionPackage = PackageUtility.CreatePackage(
        "A", "1.0",
        dependencies: new List<PackageDependency> {
            new PackageDependency("B")
        },
        content: Enumerable.Empty<string>(),
        tools: new[] { "install.ps1" });
    repository.AddPackage(PackageUtility.CreatePackage("B", "1.0", content: new[] { "contentB" }));
    repository.AddPackage(solutionPackage);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(
        () => walker.Walk(solutionPackage),
        "External packages cannot depend on packages that target projects.");
}
[Fact]
// With DependencyVersion.HighestPatch the walker picks, for each dependency,
// the lowest satisfying major.minor and then the highest patch within it.
public void InstallWalkerResolvesLowestMajorAndMinorVersionForDependencies()
{
// Arrange
// A 1.0 -> B 1.0
// B 1.0 -> C 1.1
// C 1.1 -> D 1.0
var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.0") });
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "2.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.0.1"),
A10,
PackageUtility.CreatePackage("D", "2.0"),
PackageUtility.CreatePackage("C", "1.1.3", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("C", "1.1.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("C", "1.5.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("B", "1.0.9", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.1", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") })
};
IPackageOperationResolver resolver = new InstallWalker(
new MockPackageRepository(),
repository,
NullLogger.Instance,
ignoreDependencies: false,
allowPrereleaseVersions: false,
dependencyVersion: DependencyVersion.HighestPatch);
// Act
var packages = resolver.ResolveOperations(A10).ToList();
// Assert: B resolves to 1.0.9 (lowest major.minor 1.0, highest patch),
// C to 1.1.3, and D to 2.0 (the only D in the repository).
Assert.Equal(4, packages.Count);
Assert.Equal("D", packages[0].Package.Id);
Assert.Equal(new SemanticVersion("2.0"), packages[0].Package.Version);
Assert.Equal("C", packages[1].Package.Id);
Assert.Equal(new SemanticVersion("1.1.3"), packages[1].Package.Version);
Assert.Equal("B", packages[2].Package.Id);
Assert.Equal(new SemanticVersion("1.0.9"), packages[2].Package.Version);
Assert.Equal("A", packages[3].Package.Id);
Assert.Equal(new SemanticVersion("1.0"), packages[3].Package.Version);
}
// Tests that when DependencyVersion is lowest, the dependency with the lowest major minor and patch version
// is picked.
[Fact]
// Unlisted candidates (B 1.0, C 1.1.1) must be skipped: B therefore resolves
// to 1.0.1, which has no dependencies, so only B and A are installed.
public void InstallWalkerResolvesLowestMajorAndMinorAndPatchVersionOfListedPackagesForDependencies()
{
// Arrange
// A 1.0 -> B 1.0
// B 1.0 -> C 1.1
// C 1.1 -> D 1.0
var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.0") });
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "2.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }, listed: false),
PackageUtility.CreatePackage("B", "1.0.1"),
A10,
PackageUtility.CreatePackage("D", "2.0"),
PackageUtility.CreatePackage("C", "1.1.3", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("C", "1.1.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }, listed: false),
PackageUtility.CreatePackage("C", "1.5.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("B", "1.0.9", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.1", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") })
};
IPackageOperationResolver resolver = new InstallWalker(new MockPackageRepository(),
repository,
constraintProvider: null,
logger: NullLogger.Instance,
targetFramework: null,
ignoreDependencies: false,
allowPrereleaseVersions: false,
dependencyVersion: DependencyVersion.Lowest);
// Act
var packages = resolver.ResolveOperations(A10).ToList();
// Assert
Assert.Equal(2, packages.Count);
Assert.Equal("B", packages[0].Package.Id);
Assert.Equal(new SemanticVersion("1.0.1"), packages[0].Package.Version);
Assert.Equal("A", packages[1].Package.Id);
Assert.Equal(new SemanticVersion("1.0"), packages[1].Package.Version);
}
// Tests that when DependencyVersion is HighestPatch, the dependency with the lowest major minor and highest patch version
// is picked.
[Fact]
// Unlisted candidates are skipped here too: B resolves to 1.0.9 (highest patch
// of the lowest listed major.minor) and C to 1.1.3.
public void InstallWalkerResolvesLowestMajorAndMinorHighestPatchVersionOfListedPackagesForDependencies()
{
// Arrange
// A 1.0 -> B 1.0
// B 1.0 -> C 1.1
// C 1.1 -> D 1.0
var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.0") });
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "2.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.0", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }, listed: false),
PackageUtility.CreatePackage("B", "1.0.1"),
A10,
PackageUtility.CreatePackage("D", "2.0"),
PackageUtility.CreatePackage("C", "1.1.3", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("C", "1.1.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }, listed: false),
PackageUtility.CreatePackage("C", "1.5.1", dependencies: new[] { PackageDependency.CreateDependency("D", "1.0") }),
PackageUtility.CreatePackage("B", "1.0.9", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") }),
PackageUtility.CreatePackage("B", "1.1", dependencies: new[] { PackageDependency.CreateDependency("C", "1.1") })
};
IPackageOperationResolver resolver = new InstallWalker(new MockPackageRepository(),
repository,
constraintProvider: null,
logger: NullLogger.Instance,
targetFramework: null,
ignoreDependencies: false,
allowPrereleaseVersions: false,
dependencyVersion: DependencyVersion.HighestPatch);
// Act
var packages = resolver.ResolveOperations(A10).ToList();
// Assert
Assert.Equal(4, packages.Count);
Assert.Equal("D", packages[0].Package.Id);
Assert.Equal(new SemanticVersion("2.0"), packages[0].Package.Version);
Assert.Equal("C", packages[1].Package.Id);
Assert.Equal(new SemanticVersion("1.1.3"), packages[1].Package.Version);
Assert.Equal("B", packages[2].Package.Id);
Assert.Equal(new SemanticVersion("1.0.9"), packages[2].Package.Version);
Assert.Equal("A", packages[3].Package.Id);
Assert.Equal(new SemanticVersion("1.0"), packages[3].Package.Version);
}
[Fact]
// Batch update where the requested order (A2, B2, C2) differs from dependency
// order: B2 must be scheduled before its dependent A2, and the out parameter
// must report the packages sorted by dependency order too.
public void ResolveOperationsForPackagesWherePackagesOrderIsDifferentFromItsDependencyOrder()
{
// Arrange
// A 1.0 -> B 1.0 to 1.5
// A 2.0 -> B 1.8
// B 1.0
// B 2.0
// C 1.0
// C 2.0
var A10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new[] { PackageDependency.CreateDependency("B", "[1.0, 1.5]") });
var A20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new[] { PackageDependency.CreateDependency("B", "1.8") });
var B10 = PackageUtility.CreatePackage("B", "1.0");
var B20 = PackageUtility.CreatePackage("B", "2.0");
var C10 = PackageUtility.CreatePackage("C", "1.0");
var C20 = PackageUtility.CreatePackage("C", "2.0");
var sourceRepository = new MockPackageRepository() {
A10,
A20,
B10,
B20,
C10,
C20,
};
// v1 of each package is already installed.
var localRepository = new MockPackageRepository() {
A10,
B10,
C10
};
var resolver = new InstallWalker(localRepository,
sourceRepository,
constraintProvider: NullConstraintProvider.Instance,
logger: NullLogger.Instance,
targetFramework: null,
ignoreDependencies: false,
allowPrereleaseVersions: false,
dependencyVersion: DependencyVersion.Lowest);
var updatePackages = new List<IPackage> { A20, B20, C20 };
IList<IPackage> allUpdatePackagesByDependencyOrder;
// Act
var operations = resolver.ResolveOperations(updatePackages, out allUpdatePackagesByDependencyOrder);
// Assert: installs are ordered B2, A2, C2 (dependency before dependent).
Assert.True(operations.Count == 3);
Assert.True(operations[0].Package == B20 && operations[0].Action == PackageAction.Install);
Assert.True(operations[1].Package == A20 && operations[1].Action == PackageAction.Install);
Assert.True(operations[2].Package == C20 && operations[2].Action == PackageAction.Install);
// The out parameter mirrors the same dependency ordering.
Assert.True(allUpdatePackagesByDependencyOrder[0] == B20);
Assert.True(allUpdatePackagesByDependencyOrder[1] == A20);
Assert.True(allUpdatePackagesByDependencyOrder[2] == C20);
}
/// <summary>
/// Asserts that <paramref name="operation"/> has the expected action, package id and version.
/// </summary>
private void AssertOperation(string expectedId, string expectedVersion, PackageAction expectedAction, PackageOperation operation)
{
Assert.Equal(expectedAction, operation.Action);
Assert.Equal(expectedId, operation.Package.Id);
Assert.Equal(new SemanticVersion(expectedVersion), operation.Package.Version);
}
/// <summary>
/// Minimal concrete PackageWalker for testing: resolves dependencies straight
/// from the supplied repository via PackageRepositoryExtensions.
/// </summary>
private class TestWalker : PackageWalker
{
// Repository dependencies are resolved against during the walk.
private readonly IPackageRepository _repository;
public TestWalker(IPackageRepository repository)
{
_repository = repository;
}
protected override IPackage ResolveDependency(PackageDependency dependency)
{
// allowUnlisted: false — only listed packages can satisfy a dependency here.
return PackageRepositoryExtensions.ResolveDependency(_repository, dependency, AllowPrereleaseVersions, false);
}
}
}
}
| |
#region license
// Sqloogle
// Copyright 2013-2017 Dale Newman
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
namespace Sqloogle.Libs.DBDiff.Schema.SqlServer2005.Model
{
/// <summary>
/// Clase de constraints de Columnas (Default Constraint y Check Constraint)
/// </summary>
/// <summary>
/// Column-level constraint (default constraint or check constraint).
/// </summary>
public class ColumnConstraint : SQLServerSchemaBase
{
    public ColumnConstraint(Column parent)
        : base(parent, Enums.ObjectType.Constraint)
    {
    }

    /// <summary>
    /// Creates a copy of this constraint attached to the given parent column.
    /// </summary>
    public ColumnConstraint Clone(Column parent)
    {
        ColumnConstraint ccons = new ColumnConstraint(parent);
        ccons.Name = this.Name;
        ccons.Type = this.Type;
        ccons.Definition = this.Definition;
        ccons.Status = this.Status;
        ccons.Disabled = this.Disabled;
        // Fix: NotForReplication and WithNoCheck were previously not copied,
        // so a clone of a NOT FOR REPLICATION check constraint compared
        // unequal to its source in Compare().
        ccons.NotForReplication = this.NotForReplication;
        ccons.WithNoCheck = this.WithNoCheck;
        ccons.Owner = this.Owner;
        return ccons;
    }

    /// <summary>
    /// Indicates whether the constraint is disabled.
    /// </summary>
    public Boolean Disabled { get; set; }

    /// <summary>
    /// Indicates whether the constraint is marked NOT FOR REPLICATION.
    /// </summary>
    public Boolean NotForReplication { get; set; }

    /// <summary>
    /// Gets or sets a value indicating whether [with no check].
    /// </summary>
    /// <value><c>true</c> if [with no check]; otherwise, <c>false</c>.</value>
    public Boolean WithNoCheck { get; set; }

    /// <summary>
    /// The constraint's defining expression (its value).
    /// </summary>
    public string Definition { get; set; }

    /// <summary>
    /// The kind of constraint (default or check).
    /// </summary>
    public Constraint.ConstraintType Type { get; set; }

    /// <summary>
    /// Serializes the constraint's schema as an XML fragment.
    /// </summary>
    public string ToXML()
    {
        string xml = "";
        if (this.Type == Constraint.ConstraintType.Default)
        {
            xml += "<COLUMNCONSTRAINT name=\"" + Name + "\" type=\"DF\" value=\"" + Definition + "\"/>\n";
        }
        if (this.Type == Constraint.ConstraintType.Check)
        {
            xml += "<COLUMNCONSTRAINT name=\"" + Name + "\" type=\"C\" value=\"" + Definition + "\" notForReplication=\"" + (NotForReplication ? "1" : "0") + "\"/>\n";
        }
        return xml;
    }

    /// <summary>
    /// Compares two constraints and returns true when they are equivalent.
    /// A source definition wrapped in one extra pair of parentheses around the
    /// destination definition still counts as equal.
    /// </summary>
    public static Boolean Compare(ColumnConstraint origen, ColumnConstraint destino)
    {
        if (destino == null) throw new ArgumentNullException("destino");
        if (origen == null) throw new ArgumentNullException("origen");
        if (origen.NotForReplication != destino.NotForReplication) return false;
        if (origen.Disabled != destino.Disabled) return false;
        if ((!origen.Definition.Equals(destino.Definition)) && (!origen.Definition.Equals("(" + destino.Definition + ")"))) return false;
        return true;
    }

    public override SQLScript Create()
    {
        // Emit the ADD CONSTRAINT script only once per diff run.
        Enums.ScripActionType action = Enums.ScripActionType.AddConstraint;
        if (!GetWasInsertInDiffList(action))
        {
            SetWasInsertInDiffList(action);
            return new SQLScript(this.ToSqlAdd(), 0, action);
        }
        return null;
    }

    public override SQLScript Drop()
    {
        // Emit the DROP CONSTRAINT script only once per diff run.
        Enums.ScripActionType action = Enums.ScripActionType.DropConstraint;
        if (!GetWasInsertInDiffList(action))
        {
            SetWasInsertInDiffList(action);
            return new SQLScript(this.ToSqlDrop(), 0, action);
        }
        return null;
    }

    /// <summary>
    /// True when the parent column survives the diff and the parent table's
    /// status allows (re)creating this constraint.
    /// </summary>
    public Boolean CanCreate
    {
        get
        {
            Enums.ObjectStatusType tableStatus = this.Parent.Parent.Status;
            Enums.ObjectStatusType columnStatus = this.Parent.Status;
            return ((columnStatus != Enums.ObjectStatusType.DropStatus) && (((tableStatus == Enums.ObjectStatusType.AlterStatus) || (tableStatus == Enums.ObjectStatusType.OriginalStatus) || (tableStatus == Enums.ObjectStatusType.RebuildDependenciesStatus)) && (this.Status == Enums.ObjectStatusType.OriginalStatus)));
        }
    }

    /// <summary>
    /// Returns the inline SQL form of the constraint (default constraints only;
    /// check constraints return an empty string here).
    /// </summary>
    public override string ToSql()
    {
        string sql = "";
        if (this.Type == Constraint.ConstraintType.Default)
            sql = " CONSTRAINT [" + Name + "] DEFAULT " + Definition;
        return sql;
    }

    /// <summary>
    /// Returns the ALTER TABLE ... ADD script for this constraint.
    /// </summary>
    public override string ToSqlAdd()
    {
        if (this.Type == Constraint.ConstraintType.Default)
            return "ALTER TABLE " + ((Table)Parent.Parent).FullName + " ADD" + ToSql() + " FOR [" + Parent.Name + "]\r\nGO\r\n";
        if (this.Type == Constraint.ConstraintType.Check)
            return "ALTER TABLE " + ((Table)Parent.Parent).FullName + " ADD" + ToSql() + "\r\nGO\r\n";
        return "";
    }

    /// <summary>
    /// Returns the ALTER TABLE ... DROP CONSTRAINT script for this constraint.
    /// </summary>
    public override string ToSqlDrop()
    {
        return "ALTER TABLE " + ((Table)Parent.Parent).FullName + " DROP CONSTRAINT [" + Name + "]\r\nGO\r\n";
    }

    public override SQLScriptList ToSqlDiff()
    {
        SQLScriptList list = new SQLScriptList();
        if (this.HasState(Enums.ObjectStatusType.DropStatus))
            list.Add(Drop());
        if (this.HasState(Enums.ObjectStatusType.CreateStatus))
            list.Add(Create());
        if (this.Status == Enums.ObjectStatusType.AlterStatus)
        {
            // An altered constraint is scripted as drop + recreate.
            list.Add(Drop());
            list.Add(Create());
        }
        return list;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Test.Common;
using System.Security.Authentication;
using System.Security.Principal;
using System.Text;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;
namespace System.Net.Security.Tests
{
public class KDCSetup : IDisposable
{
    private const string Krb5ConfigFile = "/etc/krb5.conf";
    private const string KDestroyCmd = "kdestroy";
    private const string SudoCommand = "sudo";
    private const string ScriptName = "setup-kdc.sh";
    private const string ScriptUninstallArgs = "--uninstall --yes";
    private const string ScriptInstallArgs = "--password {0} --yes";
    private const int InstalledButUnconfiguredExitCode = 2;

    private readonly bool _isKrbPreInstalled;
    public readonly string password;

    public KDCSetup()
    {
        // A pre-configured KDC is detected by our test realm appearing in krb5.conf.
        _isKrbPreInstalled = File.Exists(Krb5ConfigFile) &&
            File.ReadAllText(Krb5ConfigFile).Contains(TestConfiguration.Realm);

        if (_isKrbPreInstalled)
        {
            password = TestConfiguration.DefaultPassword;
            return;
        }

        // Install and configure a fresh KDC with a random password.
        password = Guid.NewGuid().ToString("N");
        int exitCode = RunSetupScript(string.Format(ScriptInstallArgs, password));
        if (exitCode != 0)
        {
            // Exit code 2 means "installed but unconfigured"; skip the uninstall then.
            if (exitCode != InstalledButUnconfiguredExitCode)
            {
                Dispose();
            }
            Assert.True(false, "KDC setup failure");
        }
    }

    public void Dispose()
    {
        // Only tear down a KDC this fixture installed itself.
        if (!_isKrbPreInstalled)
        {
            RunSetupScript(ScriptUninstallArgs);
        }
    }

    // checks for availability of Kerberos related infrastructure
    // on the host. Returns true available, false otherwise
    public bool CheckAndClearCredentials(ITestOutputHelper output)
    {
        // Clear the credentials
        var startInfo = new ProcessStartInfo(KDestroyCmd)
        {
            UseShellExecute = true,
            CreateNoWindow = true,
            Arguments = "-A"
        };
        using (Process clearCreds = Process.Start(startInfo))
        {
            clearCreds.WaitForExit();
            output.WriteLine("kdestroy returned {0}", clearCreds.ExitCode);
            return (clearCreds.ExitCode == 0);
        }
    }

    private static int RunSetupScript(string args = null)
    {
        // since ProcessStartInfo does not support Verb, we use sudo as
        // the program to be run
        var startInfo = new ProcessStartInfo
        {
            FileName = SudoCommand,
            Arguments = string.Format("bash {0} {1}", ScriptName, args)
        };
        using (Process kdcSetup = Process.Start(startInfo))
        {
            kdcSetup.WaitForExit();
            return kdcSetup.ExitCode;
        }
    }
}
[PlatformSpecific(PlatformID.Linux)]
public class KerberosTest : IDisposable, IClassFixture<KDCSetup>
{
// Payloads used by the message-exchange tests.
private readonly byte[] _firstMessage = Encoding.UTF8.GetBytes("Sample First Message");
private readonly byte[] _secondMessage = Encoding.UTF8.GetBytes("Sample Second Message");
private readonly bool _isKrbAvailable; // tests are no-op if kerberos is not available on the host machine
private readonly KDCSetup _fixture;
private readonly ITestOutputHelper _output;
// The class fixture installs (or detects) the KDC; clearing credentials here
// doubles as a probe for whether Kerberos tooling is usable on this host.
public KerberosTest(KDCSetup fixture, ITestOutputHelper output)
{
_fixture = fixture;
_output = output;
_isKrbAvailable = _fixture.CheckAndClearCredentials(_output);
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
// End-to-end Kerberos handshake between a NegotiateStream client and a fake
// GSSAPI server over an in-memory network, then validates the client-side
// authentication properties.
public void NegotiateStream_StreamToStream_KerberosAuthentication_Success()
{
if (!_isKrbAvailable)
{
// No usable Kerberos tooling on this host; treat as a no-op rather than failing.
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthentication_Success");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new UnixGssFakeNegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated, "client is not authenticated");
Task[] auth = new Task[2];
// Principal names: user@REALM for the client credential, host-target@REALM for the target.
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user, _fixture.password);
// Run both halves of the handshake concurrently with a bounded wait.
auth[0] = client.AuthenticateAsClientAsync(credential, target);
auth[1] = server.AuthenticateAsServerAsync();
bool finished = Task.WaitAll(auth, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Handshake completed in the allotted time");
// Expected Client property values:
Assert.True(client.IsAuthenticated, "client.IsAuthenticated");
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted, "client.IsEncrypted");
Assert.True(client.IsMutuallyAuthenticated, "client.IsMutuallyAuthenticated");
Assert.False(client.IsServer, "client.IsServer");
Assert.True(client.IsSigned, "client.IsSigned");
Assert.False(client.LeaveInnerStreamOpen, "client.LeaveInnerStreamOpen");
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("Kerberos", serverIdentity.AuthenticationType);
Assert.True(serverIdentity.IsAuthenticated, "serverIdentity.IsAuthenticated");
IdentityValidator.AssertHasName(serverIdentity, target);
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_AuthToHttpTarget_Success()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_AuthToHttpTarget_Success");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new UnixGssFakeNegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Task[] auth = new Task[2];
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}",TestConfiguration.HttpTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user, _fixture.password);
auth[0] = client.AuthenticateAsClientAsync(credential, target);
auth[1] = server.AuthenticateAsServerAsync();
bool finished = Task.WaitAll(auth, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Handshake completed in the allotted time");
// Expected Client property values:
Assert.True(client.IsAuthenticated, "client.IsAuthenticated");
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted, "client.IsEncrypted");
Assert.True(client.IsMutuallyAuthenticated, "client.IsMutuallyAuthenticated");
Assert.False(client.IsServer, "client.IsServer");
Assert.True(client.IsSigned, "client.IsSigned");
Assert.False(client.LeaveInnerStreamOpen, "client.LeaveInnerStream");
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("Kerberos", serverIdentity.AuthenticationType);
Assert.True(serverIdentity.IsAuthenticated, "serverIdentity.IsAuthenticated");
IdentityValidator.AssertHasName(serverIdentity, target);
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_KerberosAuthWithoutRealm_Success()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthWithoutRealm_Success");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new UnixGssFakeNegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated);
Task[] auth = new Task[2];
NetworkCredential credential = new NetworkCredential(TestConfiguration.KerberosUser, _fixture.password);
auth[0] = client.AuthenticateAsClientAsync(credential, TestConfiguration.HostTarget);
auth[1] = server.AuthenticateAsServerAsync();
bool finished = Task.WaitAll(auth, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Handshake completed in the allotted time");
// Expected Client property values:
Assert.True(client.IsAuthenticated, "client.IsAuthenticated");
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted, "client.IsEncrypted");
Assert.True(client.IsMutuallyAuthenticated, "client.IsMutuallyAuthenticated");
Assert.False(client.IsServer, "client.IsServer");
Assert.True(client.IsSigned, "client.IsSigned");
Assert.False(client.LeaveInnerStreamOpen, "client.LeaveInnerStreamOpen");
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("Kerberos", serverIdentity.AuthenticationType);
Assert.True(serverIdentity.IsAuthenticated, "serverIdentity.IsAuthenticated");
IdentityValidator.AssertHasName(serverIdentity, TestConfiguration.HostTarget);
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_KerberosAuthDefaultCredentials_Success()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthDefaultCredentials_Success");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new UnixGssFakeNegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated, "client is not authenticated before AuthenticateAsClient call");
Task[] auth = new Task[2];
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
// Seed the default Kerberos cache with the TGT
UnixGssFakeNegotiateStream.GetDefaultKerberosCredentials(user, _fixture.password);
auth[0] = client.AuthenticateAsClientAsync(CredentialCache.DefaultNetworkCredentials, target);
auth[1] = server.AuthenticateAsServerAsync();
bool finished = Task.WaitAll(auth, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Handshake completed in the allotted time");
// Expected Client property values:
Assert.True(client.IsAuthenticated, "client.IsAuthenticated");
Assert.Equal(TokenImpersonationLevel.Identification, client.ImpersonationLevel);
Assert.True(client.IsEncrypted, "client.IsEncrypted");
Assert.True(client.IsMutuallyAuthenticated, "client.IsMutuallyAuthenticated");
Assert.False(client.IsServer, "client.IsServer");
Assert.True(client.IsSigned, "client.IsSigned");
Assert.False(client.LeaveInnerStreamOpen, "client.LeaveInnerStreamOpen");
IIdentity serverIdentity = client.RemoteIdentity;
Assert.Equal("Kerberos", serverIdentity.AuthenticationType);
Assert.True(serverIdentity.IsAuthenticated,"serverIdentity.IsAuthenticated");
IdentityValidator.AssertHasName(serverIdentity, target);
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_EchoServer_ClientWriteRead_Successive_Sync_Success()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_EchoServer_ClientWriteRead_Successive_Sync_Success");
return;
}
VirtualNetwork network = new VirtualNetwork();
byte[] firstRecvBuffer = new byte[_firstMessage.Length];
byte[] secondRecvBuffer = new byte[_secondMessage.Length];
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new UnixGssFakeNegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated, "client is not authenticated before AuthenticateAsClient call");
Task[] auth = new Task[2];
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user, _fixture.password);
auth[0] = client.AuthenticateAsClientAsync(credential, target);
auth[1] = server.AuthenticateAsServerAsync();
bool finished = Task.WaitAll(auth, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Handshake completed in the allotted time");
Task svrMsgTask = server.PollMessageAsync(2);
client.Write(_firstMessage, 0, _firstMessage.Length);
client.Write(_secondMessage, 0, _secondMessage.Length);
client.Read(firstRecvBuffer, 0, firstRecvBuffer.Length);
client.Read(secondRecvBuffer, 0, secondRecvBuffer.Length);
Assert.True(_firstMessage.SequenceEqual(firstRecvBuffer), "first message received is as expected");
Assert.True(_secondMessage.SequenceEqual(secondRecvBuffer), "second message received is as expected");
finished = svrMsgTask.Wait(TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Message roundtrip completed in the allotted time");
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_EchoServer_ClientWriteRead_Successive_Async_Success()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_EchoServer_ClientWriteRead_Successive_Async_Success");
return;
}
VirtualNetwork network = new VirtualNetwork();
byte[] firstRecvBuffer = new byte[_firstMessage.Length];
byte[] secondRecvBuffer = new byte[_secondMessage.Length];
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var serverStream = new VirtualNetworkStream(network, isServer: true))
using (var client = new NegotiateStream(clientStream))
using (var server = new UnixGssFakeNegotiateStream(serverStream))
{
Assert.False(client.IsAuthenticated, "client is not authenticated before AuthenticateAsClient call");
Task[] auth = new Task[2];
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user, _fixture.password);
auth[0] = client.AuthenticateAsClientAsync(credential, target);
auth[1] = server.AuthenticateAsServerAsync();
bool finished = Task.WaitAll(auth, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Handshake completed in the allotted time");
Task serverTask = server.PollMessageAsync(2);
Task[] msgTasks = new Task[3];
msgTasks[0] = client.WriteAsync(_firstMessage, 0, _firstMessage.Length).ContinueWith((t) =>
client.WriteAsync(_secondMessage, 0, _secondMessage.Length)).Unwrap();
msgTasks[1] = ReadAllAsync(client, firstRecvBuffer, 0, firstRecvBuffer.Length).ContinueWith((t) =>
ReadAllAsync(client, secondRecvBuffer, 0, secondRecvBuffer.Length)).Unwrap();
msgTasks[2] = serverTask;
finished = Task.WaitAll(msgTasks, TestConfiguration.PassingTestTimeoutMilliseconds);
Assert.True(finished, "Messages sent and received in the allotted time");
Assert.True(_firstMessage.SequenceEqual(firstRecvBuffer), "The first message received is as expected");
Assert.True(_secondMessage.SequenceEqual(secondRecvBuffer), "The second message received is as expected");
}
}
private static async Task ReadAllAsync(Stream source, byte[] buffer, int offset, int count)
{
while (count > 0)
{
int bytesRead = await source.ReadAsync(buffer, offset, count).ConfigureAwait(false);
if (bytesRead == 0) break;
offset += bytesRead;
count -= bytesRead;
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_KerberosAuthDefaultCredentialsNoSeed_Failure()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthDefaultCredentialsNoSeed_Failure");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var client = new NegotiateStream(clientStream))
{
Assert.False(client.IsAuthenticated, "client is not authenticated before AuthenticateAsClient call");
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
Assert.ThrowsAsync<AuthenticationException>(() => client.AuthenticateAsClientAsync(CredentialCache.DefaultNetworkCredentials, target));
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_KerberosAuthInvalidUser_Failure()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthInvalidUser_Failure");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var client = new NegotiateStream(clientStream))
{
Assert.False(client.IsAuthenticated, "client is not authenticated by default");
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user.Substring(1), _fixture.password);
Assert.Throws<AuthenticationException>(() =>
{
client.AuthenticateAsClientAsync(credential, target);
});
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_KerberosAuthInvalidPassword_Failure()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthInvalidPassword_Failure");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var client = new NegotiateStream(clientStream))
{
Assert.False(client.IsAuthenticated, "client stream is not authenticated by default");
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user, _fixture.password.Substring(1));
Assert.Throws<AuthenticationException>(() =>
{
client.AuthenticateAsClientAsync(credential, target);
});
}
}
[Fact, OuterLoop]
[PlatformSpecific(PlatformID.Linux)]
public void NegotiateStream_StreamToStream_KerberosAuthInvalidTarget_Failure()
{
if (!_isKrbAvailable)
{
_output.WriteLine("skipping NegotiateStream_StreamToStream_KerberosAuthInvalidTarget_Failure");
return;
}
VirtualNetwork network = new VirtualNetwork();
using (var clientStream = new VirtualNetworkStream(network, isServer: false))
using (var client = new NegotiateStream(clientStream))
{
Assert.False(client.IsAuthenticated, "client stream is not authenticated by default");
string user = string.Format("{0}@{1}", TestConfiguration.KerberosUser, TestConfiguration.Realm);
string target = string.Format("{0}@{1}", TestConfiguration.HostTarget, TestConfiguration.Realm);
NetworkCredential credential = new NetworkCredential(user, _fixture.password);
Assert.ThrowsAsync<AuthenticationException>(() => client.AuthenticateAsClientAsync(credential, target.Substring(1)));
}
}
public void Dispose()
{
try
{
_fixture.CheckAndClearCredentials(_output);
}
catch
{
}
}
}
}
| |
using System;
using System.Text;
using System.Collections.Generic;
namespace angeldnd.dap {
/// <summary>
/// Structural token constants shared by the Data serializer/parser, plus helpers
/// to classify a character (or single-character word) as structural.
/// </summary>
public static class DataConvertorConsts {
    public const string DataBegin = "{";
    public const string DataEnd = "}";
    public const string KeyBegin = ":";
    public const string ValueBegin = "=";
    public const string Space = " ";

    // Characters that delimit words during tokenization; mirrors the constants above.
    public readonly static char[] WordChars = new char[]{':', '=', '{', '}'};

    /// <summary>Returns true if <paramref name="ch"/> is one of the structural word characters.</summary>
    public static bool IsWordChar(char ch) {
        // Array.IndexOf replaces the previous hand-rolled linear scan.
        return Array.IndexOf(WordChars, ch) >= 0;
    }

    /// <summary>
    /// Returns true only when <paramref name="str"/> is exactly one structural character.
    /// Null, empty, or multi-character strings return false, matching the original
    /// semantics while avoiding a per-candidate ToString() allocation.
    /// </summary>
    public static bool IsWordChar(String str) {
        return str != null && str.Length == 1 && IsWordChar(str[0]);
    }
}
/// <summary>
/// Mutable parser scratch state for the key/value entry currently being read:
/// first a DataType is expected, then a key, then a value.
/// </summary>
public class PartialData {
    public enum ExpectKind {Type = 0, Key, Value};

    public DataType ValueType = DataType.Invalid;
    public string Key = null;

    /// <summary>Resets to the initial "expecting a type" state.</summary>
    public void Clear() {
        Key = null;
        ValueType = DataType.Invalid;
    }

    /// <summary>Derives the next expected token kind from which fields are already filled.</summary>
    public ExpectKind GetExpectKind() {
        if (ValueType != DataType.Invalid && Key != null) {
            return ExpectKind.Value;
        }
        return ValueType == DataType.Invalid ? ExpectKind.Type : ExpectKind.Key;
    }
}
/// <summary>
/// Converts between <c>Data</c> objects and their textual form. Serialization walks
/// the Data tree appending "type: key = value" entries; parsing tokenizes the text
/// with WordSplitter and rebuilds nested Data via an explicit stack.
/// </summary>
public class DataConvertor : Convertor<Data> {
/// <summary>
/// Non-throwing wrapper around <see cref="Parse(string,string)"/>; on failure logs
/// the caret-annotated error (as error or debug, per <paramref name="isDebug"/>)
/// and returns false with <paramref name="val"/> set to null.
/// </summary>
public bool TryParse(string source, string content, out Data val, bool isDebug = false) {
try {
val = Parse(source, content);
return true;
} catch (Exception e) {
Log.ErrorOrDebug(isDebug, "Parse Failed: <{0}> {1}\n\n{2}\n\n{3}",
typeof(Data).FullName,
CaretException.GetMessage(source, e),
e.StackTrace, WordSplitter.AppendLineNumber(content));
}
val = null;
return false;
}
/// <summary>
/// Serializes <paramref name="val"/>; <paramref name="indent"/> selects pretty-printed
/// (non-null indent, one entry per line) vs. compact single-line output.
/// An empty-string indent is normalized to null (compact mode).
/// </summary>
public string Convert(Data val, string indent) {
if (val == null) return Convertor.Null;
if (indent == "") indent = null;
System.Text.StringBuilder builder = new System.Text.StringBuilder();
AppendData(builder, indent, 0, val);
return builder.ToString();
}
// Default conversion is pretty-printed with tab indentation.
public override string Convert(Data val) {
return Convert(val, "\t");
}
// Parse with no source label (used for error reporting only).
public override Data Parse(string str) {
return Parse(string.Empty, str);
}
/// <summary>
/// Parses <paramref name="content"/> into a Data tree. Returns null for empty input
/// or the literal null token. Throws WordException on any syntax error, including
/// an unbalanced brace nesting at end of input.
/// </summary>
public Data Parse(string source, string content) {
if (string.IsNullOrEmpty(content) || content == Convertor.Null) return null;
Stack<Data> dataStack = new Stack<Data>();
Data lastData = null;
PartialData partialData = new PartialData();
// Prime the state as if "data: <root> =" had already been read, so the first
// token is treated as the root value.
partialData.ValueType = DataType.Data;
partialData.Key = "";
// Synthetic previous word so the first real word sees a ValueBegin predecessor.
Word lastWord = new Word(source, 0, 0, DataConvertorConsts.ValueBegin);
WordSplitter.Split(source, content, DataConvertorConsts.WordChars, (Word word) => {
ProcessWord(dataStack, ref lastData, partialData, lastWord, word);
lastWord = word;
});
// A well-formed document leaves the stack empty with the root in lastData.
if (dataStack.Count == 0 && lastData != null) {
return lastData;
} else {
throw new WordException(lastWord, "Data Stack Error: {0}, {1}", dataStack.Count, lastData);
}
}
// Dispatch a token to the handler for whatever the parser currently expects.
private void ProcessWord(Stack<Data> dataStack, ref Data lastData,
PartialData partialData,
Word lastWord, Word word) {
switch (partialData.GetExpectKind()) {
case PartialData.ExpectKind.Type:
ProcessType(dataStack, ref lastData, partialData, lastWord, word);
break;
case PartialData.ExpectKind.Key:
ProcessKey(dataStack, ref lastData, partialData, lastWord, word);
break;
case PartialData.ExpectKind.Value:
ProcessValue(dataStack, ref lastData, partialData, lastWord, word);
break;
}
}
// Pop the finished Data off the stack into lastData; underflow means an extra "}".
private void PopDataFromStack(Stack<Data> dataStack, ref Data lastData, Word word) {
if (dataStack.Count == 0) {
throw new WordException(word, "Empty DataStack");
}
lastData = dataStack.Pop();
}
// Expecting a DataType name; a DataEnd token here closes the current Data instead.
private void ProcessType(Stack<Data> dataStack, ref Data lastData,
PartialData partialData,
Word lastWord, Word word) {
if (DataConvertorConsts.IsWordChar(word.Value)) {
if (word.Value == DataConvertorConsts.DataEnd) {
PopDataFromStack(dataStack, ref lastData, word);
} else {
throw new WordException(word, "Expecting DataType");
}
} else {
partialData.ValueType = Convertor.DataTypeConvertor.Parse(word.Value);
if (partialData.ValueType == DataType.Invalid) {
throw new WordException(word, "Invalid DataType");
}
}
}
// Expecting ":" then a key name; any other sequence is a syntax error.
private void ProcessKey(Stack<Data> dataStack, ref Data lastData,
PartialData partialData,
Word lastWord, Word word) {
if (DataConvertorConsts.IsWordChar(word.Value)) {
// Valid only when this is the KeyBegin separator and the previous word was not.
if (lastWord.Value == DataConvertorConsts.KeyBegin
|| word.Value != DataConvertorConsts.KeyBegin) {
throw new WordException(word, "Syntax Error");
}
} else if (lastWord.Value == DataConvertorConsts.KeyBegin) {
partialData.Key = word.Value;
} else {
throw new WordException(word, "Syntax Error");
}
}
// Expecting "=" then a value; "{" opens a nested Data, "}" closes one
// (only legal here as an empty string value, since strings may be empty).
private void ProcessValue(Stack<Data> dataStack, ref Data lastData,
PartialData partialData,
Word lastWord, Word word) {
Data data = dataStack.Count > 0 ? dataStack.Peek() : null;
if (DataConvertorConsts.IsWordChar(word.Value)) {
if (word.Value == DataConvertorConsts.DataBegin) {
Data subData = new RealData();
dataStack.Push(subData);
if (data != null) {
data.SetData(partialData.Key, subData);
}
partialData.Clear();
} else if (word.Value == DataConvertorConsts.DataEnd) {
if (partialData.ValueType == DataType.String) {
// "}" directly after "=" means the string value was empty.
SetSimpleDataValue(data, partialData, new Word(word.Caret, ""));
PopDataFromStack(dataStack, ref lastData, word);
} else {
throw new WordException(word, "Missing Value");
}
} else if (lastWord.Value == DataConvertorConsts.ValueBegin
|| word.Value != DataConvertorConsts.ValueBegin) {
throw new WordException(word, "Syntax Error");
}
} else if (lastWord.Value == DataConvertorConsts.ValueBegin) {
SetSimpleDataValue(data, partialData, word);
} else {
throw new WordException(word, "Syntax Error");
}
}
// Parse a scalar token according to the pending ValueType and store it under the
// pending Key; on success resets partialData for the next entry.
private void SetSimpleDataValue(Data data, PartialData partialData, Word word) {
if (data == null) {
throw new WordException(word, "Syntax Error");
}
bool ok = false;
switch (partialData.ValueType) {
case DataType.Bool:
ok = data.SetBool(partialData.Key, Convertor.BoolConvertor.Parse(word.Value));
break;
case DataType.Int:
ok = data.SetInt(partialData.Key, Convertor.IntConvertor.Parse(word.Value));
break;
case DataType.Long:
ok = data.SetLong(partialData.Key, Convertor.LongConvertor.Parse(word.Value));
break;
case DataType.Float:
ok = data.SetFloat(partialData.Key, Convertor.FloatConvertor.Parse(word.Value));
break;
case DataType.Double:
ok = data.SetDouble(partialData.Key, Convertor.DoubleConvertor.Parse(word.Value));
break;
case DataType.String:
ok = data.SetString(partialData.Key, Convertor.StringConvertor.Parse(word.Value));
break;
case DataType.Data:
// Only the literal null token is valid as a scalar Data value;
// non-null nested Data goes through the "{" branch above.
if (word.Value == Convertor.Null) {
ok = data.SetData(partialData.Key, null);
}
break;
}
if (ok) {
partialData.Clear();
} else {
throw new WordException(word, "Syntax Error");
}
}
// Emit indentLevel copies of indent; no-op in compact (indent == null) mode.
private void AppendIndents(StringBuilder builder, string indent, int indentLevel) {
if (indentLevel <= 0 || indent == null) return;
for (int i = 0; i < indentLevel; i++) {
builder.Append(indent);
}
}
// Serialize one Data node: "{", its entries, "}"; null becomes the null token.
private void AppendData(StringBuilder builder, string indent, int indentLevel, Data data) {
if (data == null) {
builder.Append(Convertor.Null);
return;
}
builder.Append(DataConvertorConsts.DataBegin);
if (indent != null && data.Count > 0) {
builder.AppendLine();
}
AppendDataValues(builder, indent, indentLevel, data);
if (indent != null && data.Count > 0) {
// Re-indent so the closing brace lines up with the opening entry.
AppendIndents(builder, indent, indentLevel);
}
builder.Append(DataConvertorConsts.DataEnd);
}
/// <summary>
/// Serialize a single "type: key = value" entry, followed by the optional
/// <paramref name="end"/> marker and a newline (pretty) or space (compact).
/// </summary>
protected void AppendValue(StringBuilder builder, string indent, int indentLevel, Data data, string key, string end) {
DataType valueType = data.GetValueType(key);
AppendIndents(builder, indent, indentLevel);
AppendTypeAndKey(builder, valueType, key, indent != null);
switch (valueType) {
case DataType.Bool:
builder.Append(Convertor.BoolConvertor.Convert(data.GetBool(key)));
break;
case DataType.Int:
builder.Append(Convertor.IntConvertor.Convert(data.GetInt(key)));
break;
case DataType.Long:
builder.Append(Convertor.LongConvertor.Convert(data.GetLong(key)));
break;
case DataType.Float:
builder.Append(Convertor.FloatConvertor.Convert(data.GetFloat(key)));
break;
case DataType.Double:
builder.Append(Convertor.DoubleConvertor.Convert(data.GetDouble(key)));
break;
case DataType.String:
AppendString(builder, data.GetString(key));
break;
case DataType.Data:
// Recurse for nested Data values.
AppendData(builder, indent, indentLevel, data.GetData(key));
break;
}
if (end != null) {
builder.Append(end);
}
if (indent != null) {
builder.AppendLine();
} else {
builder.Append(DataConvertorConsts.Space);
}
}
// Serialize every entry of data, one indent level deeper than the braces.
protected virtual void AppendDataValues(StringBuilder builder, string indent, int indentLevel, Data data) {
foreach (string key in data.Keys) {
AppendValue(builder, indent, indentLevel + 1, data, key, null);
}
}
// Emit "type: key = " (spaces only in pretty mode); the key is escaped like a value string.
protected virtual void AppendTypeAndKey(StringBuilder builder, DataType valueType, string key, bool withSpace) {
builder.Append(Convertor.DataTypeConvertor.Convert(valueType));
builder.Append(DataConvertorConsts.KeyBegin);
if (withSpace) {
builder.Append(DataConvertorConsts.Space);
}
AppendString(builder, key);
if (withSpace) {
builder.Append(DataConvertorConsts.Space);
}
builder.Append(DataConvertorConsts.ValueBegin);
if (withSpace) {
builder.Append(DataConvertorConsts.Space);
}
}
// Emit a string, backslash-escaping whitespace, enclosure and structural characters
// so the tokenizer can round-trip it; empty/null strings become an empty enclosure pair.
protected virtual void AppendString(StringBuilder builder, string str) {
if (string.IsNullOrEmpty(str)) {
builder.Append(WordSplitterConsts.EncloseBeginChar);
builder.Append(WordSplitterConsts.EncloseEndChar);
return;
};
for (int i = 0; i < str.Length; i++) {
char ch = str[i];
if (WordSplitterConsts.IsEmptyChar(ch)
|| WordSplitterConsts.IsEncloseChar(ch)
|| DataConvertorConsts.IsWordChar(ch)) {
builder.Append(WordSplitterConsts.EscapeChar);
}
builder.Append(ch);
}
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Linq;
using Avalonia.Collections;
using Avalonia.Controls.Generators;
using Avalonia.Controls.Presenters;
using Avalonia.Controls.Primitives;
using Avalonia.Controls.Templates;
using Avalonia.Layout;
using Avalonia.Rendering;
using Avalonia.UnitTests;
using Xunit;
namespace Avalonia.Controls.UnitTests.Presenters
{
/// <summary>
/// Unit tests for ItemsPresenter's item-virtualization behavior: container creation,
/// logical-scroll enablement, and switching between virtualization modes.
/// Each item template is a fixed 10x10 Canvas, so container counts map directly to
/// available size (e.g. 100px tall viewport => 10 containers).
/// </summary>
public class ItemsPresenterTests_Virtualization
{
// Containers must not be materialized until the presenter is rooted in a visual tree.
[Fact]
public void Should_Not_Create_Items_Before_Added_To_Visual_Tree()
{
var items = Enumerable.Range(0, 10).Select(x => $"Item {x}").ToList();
var target = new TestItemsPresenter(true)
{
Items = items,
ItemsPanel = VirtualizingPanelTemplate(Orientation.Vertical),
ItemTemplate = ItemTemplate(),
VirtualizationMode = ItemVirtualizationMode.Simple,
};
var scroller = new ScrollContentPresenter
{
Content = target,
};
scroller.UpdateChild();
target.ApplyTemplate();
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
// Not rooted yet: no containers should exist.
Assert.Empty(target.Panel.Children);
var root = new TestRoot
{
Child = scroller,
};
// Re-run layout now that the presenter is attached to a root.
target.InvalidateMeasure();
target.Panel.InvalidateMeasure();
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(10, target.Panel.Children.Count);
}
// Logical scrolling requires a virtualizing panel.
[Fact]
public void Should_Return_IsLogicalScrollEnabled_False_When_Has_No_Virtualizing_Panel()
{
var target = CreateTarget();
target.ClearValue(ItemsPresenter.ItemsPanelProperty);
target.ApplyTemplate();
Assert.False(((ILogicalScrollable)target).IsLogicalScrollEnabled);
}
// Logical scrolling requires a virtualization mode other than None.
[Fact]
public void Should_Return_IsLogicalScrollEnabled_False_When_VirtualizationMode_None()
{
var target = CreateTarget(ItemVirtualizationMode.None);
target.ApplyTemplate();
Assert.False(((ILogicalScrollable)target).IsLogicalScrollEnabled);
}
// Logical scrolling requires a ScrollContentPresenter parent (this target has none).
[Fact]
public void Should_Return_IsLogicalScrollEnabled_False_When_Doesnt_Have_ScrollPresenter_Parent()
{
var target = new ItemsPresenter
{
ItemsPanel = VirtualizingPanelTemplate(),
ItemTemplate = ItemTemplate(),
VirtualizationMode = ItemVirtualizationMode.Simple,
};
target.ApplyTemplate();
Assert.False(((ILogicalScrollable)target).IsLogicalScrollEnabled);
}
// With a virtualizing panel, Simple mode, and a scroller parent, logical scroll is on.
[Fact]
public void Should_Return_IsLogicalScrollEnabled_True()
{
var target = CreateTarget();
target.ApplyTemplate();
Assert.True(((ILogicalScrollable)target).IsLogicalScrollEnabled);
}
// In logical-scroll mode extent/viewport are expressed in items (20 total, 10 visible),
// with 0 in the non-scrolling dimension.
[Fact]
public void Parent_ScrollContentPresenter_Properties_Should_Be_Set()
{
var target = CreateTarget();
target.ApplyTemplate();
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
var scroll = (ScrollContentPresenter)target.Parent;
Assert.Equal(new Size(0, 20), scroll.Extent);
Assert.Equal(new Size(0, 10), scroll.Viewport);
}
// Both measure and arrange should leave the panel filled with exactly one
// container per visible item (100px / 10px-per-item => 10).
[Fact]
public void Should_Fill_Panel_With_Containers()
{
var target = CreateTarget();
target.ApplyTemplate();
target.Measure(new Size(100, 100));
Assert.Equal(10, target.Panel.Children.Count);
target.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(10, target.Panel.Children.Count);
}
// Fewer items than fit in the viewport => container count equals item count.
[Fact]
public void Should_Only_Create_Enough_Containers_To_Display_All_Items()
{
var target = CreateTarget(itemCount: 2);
target.ApplyTemplate();
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(2, target.Panel.Children.Count);
}
// Given infinite space, the presenter grows to hold all 20 items (20 * 10px = 200).
[Fact]
public void Should_Expand_To_Fit_Containers_When_Flexible_Size()
{
var target = CreateTarget();
target.ApplyTemplate();
target.Measure(Size.Infinity);
target.Arrange(new Rect(target.DesiredSize));
Assert.Equal(new Size(10, 200), target.DesiredSize);
Assert.Equal(new Size(10, 200), target.Bounds.Size);
Assert.Equal(20, target.Panel.Children.Count);
}
// Each realized container's DataContext should match the item at its index.
[Fact]
public void Initial_Item_DataContexts_Should_Be_Correct()
{
var target = CreateTarget();
var items = (IList<string>)target.Items;
target.ApplyTemplate();
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
for (var i = 0; i < target.Panel.Children.Count; ++i)
{
Assert.Equal(items[i], target.Panel.Children[i].DataContext);
}
}
// Growing the viewport (100 -> 120px) should realize more containers (10 -> 12)
// while keeping DataContexts aligned with item order.
[Fact]
public void Should_Add_New_Items_When_Control_Is_Enlarged()
{
var target = CreateTarget();
var items = (IList<string>)target.Items;
target.ApplyTemplate();
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(10, target.Panel.Children.Count);
target.Measure(new Size(120, 120));
target.Arrange(new Rect(0, 0, 100, 120));
Assert.Equal(12, target.Panel.Children.Count);
for (var i = 0; i < target.Panel.Children.Count; ++i)
{
Assert.Equal(items[i], target.Panel.Children[i].DataContext);
}
}
// Switching None -> Simple should trim containers to the viewport and switch
// extent/viewport from pixel units to item units.
[Fact]
public void Changing_VirtualizationMode_None_To_Simple_Should_Update_Control()
{
var target = CreateTarget(mode: ItemVirtualizationMode.None);
var scroll = (ScrollContentPresenter)target.Parent;
scroll.Measure(new Size(100, 100));
scroll.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(20, target.Panel.Children.Count);
Assert.Equal(new Size(10, 200), scroll.Extent);
Assert.Equal(new Size(100, 100), scroll.Viewport);
target.VirtualizationMode = ItemVirtualizationMode.Simple;
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(10, target.Panel.Children.Count);
Assert.Equal(new Size(0, 20), scroll.Extent);
Assert.Equal(new Size(0, 10), scroll.Viewport);
}
// Same switch driven through the real layout manager: at no point during the
// transition may more containers exist than fit the viewport.
[Fact]
public void Changing_VirtualizationMode_None_To_Simple_Should_Add_Correct_Number_Of_Controls()
{
using (UnitTestApplication.Start(TestServices.RealLayoutManager))
{
var target = CreateTarget(mode: ItemVirtualizationMode.None);
var scroll = (ScrollContentPresenter)target.Parent;
scroll.Measure(new Size(100, 100));
scroll.Arrange(new Rect(0, 0, 100, 100));
// Ensure than an intermediate measure pass doesn't add more controls than it
// should. This can happen if target gets measured with Size.Infinity which
// is what the available size should be when VirtualizationMode == None but not
// what it should after VirtualizationMode is changed to Simple.
target.Panel.Children.CollectionChanged += (s, e) =>
{
Assert.InRange(target.Panel.Children.Count, 0, 10);
};
target.VirtualizationMode = ItemVirtualizationMode.Simple;
LayoutManager.Instance.ExecuteLayoutPass();
Assert.Equal(10, target.Panel.Children.Count);
}
}
// Switching Simple -> None should realize all containers and switch
// extent/viewport back to pixel units.
[Fact]
public void Changing_VirtualizationMode_Simple_To_None_Should_Update_Control()
{
var target = CreateTarget();
var scroll = (ScrollContentPresenter)target.Parent;
scroll.Measure(new Size(100, 100));
scroll.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(10, target.Panel.Children.Count);
Assert.Equal(new Size(0, 20), scroll.Extent);
Assert.Equal(new Size(0, 10), scroll.Viewport);
target.VirtualizationMode = ItemVirtualizationMode.None;
target.Measure(new Size(100, 100));
target.Arrange(new Rect(0, 0, 100, 100));
// Here - unlike changing the other way - we need to do a layout pass on the scroll
// content presenter as non-logical scroll values are only updated on arrange.
scroll.Measure(new Size(100, 100));
scroll.Arrange(new Rect(0, 0, 100, 100));
Assert.Equal(20, target.Panel.Children.Count);
Assert.Equal(new Size(10, 200), scroll.Extent);
Assert.Equal(new Size(100, 100), scroll.Viewport);
}
/// <summary>
/// Builds a TestItemsPresenter wired under a TestScroller with N string items,
/// a virtualizing stack panel, and 10x10 Canvas item templates.
/// </summary>
private static ItemsPresenter CreateTarget(
ItemVirtualizationMode mode = ItemVirtualizationMode.Simple,
Orientation orientation = Orientation.Vertical,
bool useContainers = true,
int itemCount = 20)
{
ItemsPresenter result;
var items = Enumerable.Range(0, itemCount).Select(x => $"Item {x}").ToList();
var scroller = new TestScroller
{
Content = result = new TestItemsPresenter(useContainers)
{
Items = items,
ItemsPanel = VirtualizingPanelTemplate(orientation),
ItemTemplate = ItemTemplate(),
VirtualizationMode = mode,
}
};
scroller.UpdateChild();
return result;
}
// Fixed-size (10x10) item template so container counts are deterministic.
private static IDataTemplate ItemTemplate()
{
return new FuncDataTemplate<string>(x => new Canvas
{
Width = 10,
Height = 10,
});
}
// Panel template producing a VirtualizingStackPanel with the given orientation.
private static ITemplate<IPanel> VirtualizingPanelTemplate(
Orientation orientation = Orientation.Vertical)
{
return new FuncTemplate<IPanel>(() => new VirtualizingStackPanel
{
Orientation = orientation,
});
}
// Minimal render-root scroller; point conversion is unused by these tests.
private class TestScroller : ScrollContentPresenter, IRenderRoot
{
public IRenderQueueManager RenderQueueManager { get; }
public Point PointToClient(Point point)
{
throw new NotImplementedException();
}
public Point PointToScreen(Point point)
{
throw new NotImplementedException();
}
}
// Presenter whose generator can be toggled between container-wrapping
// (TestContainer) and bare item generation.
private class TestItemsPresenter : ItemsPresenter
{
private bool _useContainers;
public TestItemsPresenter(bool useContainers)
{
_useContainers = useContainers;
}
protected override IItemContainerGenerator CreateItemContainerGenerator()
{
return _useContainers ?
new ItemContainerGenerator<TestContainer>(this, TestContainer.ContentProperty, null) :
new ItemContainerGenerator(this);
}
}
// Fixed-size container matching the item template dimensions.
private class TestContainer : ContentControl
{
public TestContainer()
{
Width = 10;
Height = 10;
}
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.